diff --git a/.eslintignore b/.eslintignore index 9740ac429fc..fd409251590 100644 --- a/.eslintignore +++ b/.eslintignore @@ -9,3 +9,4 @@ vendor integration-tests/esbuild/out.js integration-tests/esbuild/aws-sdk-out.js packages/dd-trace/src/appsec/blocked_templates.js +packages/dd-trace/src/payload-tagging/jsonpath-plus.js diff --git a/.github/actions/node/latest/action.yml b/.github/actions/node/latest/action.yml index 74e5d531f94..9e4c62ceca5 100644 --- a/.github/actions/node/latest/action.yml +++ b/.github/actions/node/latest/action.yml @@ -4,4 +4,4 @@ runs: steps: - uses: actions/setup-node@v3 with: - node-version: 'latest' + node-version: '22' # Update this line to the latest Node.js version diff --git a/.github/workflows/appsec.yml b/.github/workflows/appsec.yml index e6d073f5aaf..9679f7e3b24 100644 --- a/.github/workflows/appsec.yml +++ b/.github/workflows/appsec.yml @@ -256,3 +256,17 @@ jobs: - run: yarn test:integration:appsec - uses: ./.github/actions/node/latest - run: yarn test:integration:appsec + + passport: + runs-on: ubuntu-latest + env: + PLUGINS: passport-local|passport-http + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/node/setup + - uses: ./.github/actions/install + - uses: ./.github/actions/node/oldest + - run: yarn test:appsec:plugins:ci + - uses: ./.github/actions/node/latest + - run: yarn test:appsec:plugins:ci + - uses: codecov/codecov-action@v3 diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 7502e256749..48e33395efe 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -127,6 +127,15 @@ jobs: - uses: actions/checkout@v4 - uses: ./.github/actions/plugins/test-and-upstream + avsc: + runs-on: ubuntu-latest + env: + PLUGINS: avsc + DD_DATA_STREAMS_ENABLED: true + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/plugins/test-and-upstream + aws-sdk: strategy: matrix: @@ -187,6 +196,14 @@ jobs: - uses: actions/checkout@v4 - uses: 
./.github/actions/plugins/upstream + azure-functions: + runs-on: ubuntu-latest + env: + PLUGINS: azure-functions + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/plugins/test + bluebird: runs-on: ubuntu-latest env: @@ -798,6 +815,15 @@ jobs: - uses: actions/checkout@v4 - uses: ./.github/actions/plugins/test + protobufjs: + runs-on: ubuntu-latest + env: + PLUGINS: protobufjs + DD_DATA_STREAMS_ENABLED: true + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/plugins/test-and-upstream + q: runs-on: ubuntu-latest env: diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index f72a46db8d3..a07d64b3a69 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -42,7 +42,7 @@ jobs: with: node-version: ${{ matrix.version }} - uses: ./.github/actions/install - - run: node node_modules/.bin/mocha --colors --timeout 30000 -r packages/dd-trace/test/setup/core.js integration-tests/init.spec.js + - run: node node_modules/.bin/mocha --colors --timeout 30000 integration-tests/init.spec.js integration-ci: strategy: diff --git a/.github/workflows/system-tests.yml b/.github/workflows/system-tests.yml index e1ce6f7d767..c53c5b3064c 100644 --- a/.github/workflows/system-tests.yml +++ b/.github/workflows/system-tests.yml @@ -47,10 +47,8 @@ jobs: TEST_LIBRARY: nodejs WEBLOG_VARIANT: ${{ matrix.weblog-variant }} DD_API_KEY: ${{ secrets.DD_API_KEY }} - AWS_ACCESS_KEY_ID: ${{ secrets.IDM_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.IDM_AWS_SECRET_ACCESS_KEY }} - AWS_REGION: us-east-1 - AWS_DEFAULT_REGION: us-east-1 # AWS services should use `AWS_REGION`, but some still use the older `AWS_DEFAULT_REGION` + SYSTEM_TESTS_AWS_ACCESS_KEY_ID: ${{ secrets.IDM_AWS_ACCESS_KEY_ID }} + SYSTEM_TESTS_AWS_SECRET_ACCESS_KEY: ${{ secrets.IDM_AWS_SECRET_ACCESS_KEY }} steps: - name: Checkout system tests diff --git a/.gitignore b/.gitignore index ff2cfaa8e23..a8dcafe063b 100644 --- a/.gitignore +++ b/.gitignore @@ 
-125,3 +125,4 @@ packages/dd-trace/test/appsec/next/*/package.json packages/dd-trace/test/appsec/next/*/node_modules packages/dd-trace/test/appsec/next/*/yarn.lock !packages/dd-trace/**/telemetry/logs +packages/datadog-plugin-azure-functions/test/integration-test/fixtures/node_modules diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 45f88f66cb4..f36fac2da6c 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -14,7 +14,6 @@ require,import-in-the-middle,Apache license 2.0,Copyright 2021 Datadog Inc. require,int64-buffer,MIT,Copyright 2015-2016 Yusuke Kawasaki require,istanbul-lib-coverage,BSD-3-Clause,Copyright 2012-2015 Yahoo! Inc. require,jest-docblock,MIT,Copyright Meta Platforms, Inc. and affiliates. -require,jsonpath-plus,MIT,Copyright (c) 2011-2019 Stefan Goessner, Subbu Allamaraju, Mike Brevoort, Robert Krahn, Brett Zamir, Richard Schneider require,koalas,MIT,Copyright 2013-2017 Brian Woodward require,limiter,MIT,Copyright 2011 John Hurliman require,lodash.sortby,MIT,Copyright JS Foundation and other contributors diff --git a/docs/API.md b/docs/API.md index 68cdc3747cb..19827e5977d 100644 --- a/docs/API.md +++ b/docs/API.md @@ -24,9 +24,11 @@ tracer.use('pg', {
+
+
@@ -87,6 +89,7 @@ tracer.use('pg', {
+
@@ -101,7 +104,9 @@ tracer.use('pg', { * [amqp10](./interfaces/export_.plugins.amqp10.html) * [amqplib](./interfaces/export_.plugins.amqplib.html) +* [avsc](./interfaces/export_.plugins.avsc.html) * [aws-sdk](./interfaces/export_.plugins.aws_sdk.html) +* [azure-functions](./interfaces/export_.plugins.azure_functions.html) * [bluebird](./interfaces/export_.plugins.bluebird.html) * [couchbase](./interfaces/export_.plugins.couchbase.html) * [cucumber](./interfaces/export_.plugins.cucumber.html) @@ -142,6 +147,7 @@ tracer.use('pg', { * [pg](./interfaces/export_.plugins.pg.html) * [promise](./interfaces/export_.plugins.promise.html) * [promise-js](./interfaces/export_.plugins.promise_js.html) +* [protobufjs](./interfaces/export_.plugins.protobufjs.html) * [q](./interfaces/export_.plugins.q.html) * [redis](./interfaces/export_.plugins.redis.html) * [restify](./interfaces/export_.plugins.restify.html) @@ -375,7 +381,7 @@ Options can be configured as a parameter to the [init()](./interfaces/tracer.htm

Custom Logging

-By default, logging from this library is disabled. In order to get debugging information and errors sent to logs, the `debug` options should be set to `true` in the [init()](./interfaces/tracer.html#init) method. +By default, logging from this library is disabled. In order to get debugging information and errors sent to logs, the `DD_TRACE_DEBUG` env var should be set to `true`. The tracer will then log debug information to `console.log()` and errors to `console.error()`. This behavior can be changed by passing a custom logger to the tracer. The logger should contain a `debug()` and `error()` methods that can handle messages and errors, respectively. @@ -388,14 +394,15 @@ const logger = bunyan.createLogger({ level: 'trace' }) +process.env.DD_TRACE_DEBUG = 'true' + const tracer = require('dd-trace').init({ logger: { error: err => logger.error(err), warn: message => logger.warn(message), info: message => logger.info(message), debug: message => logger.trace(message), - }, - debug: true + } }) ``` diff --git a/docs/add-redirects.sh b/docs/add-redirects.sh index fd0590a934a..92d58ba3263 100755 --- a/docs/add-redirects.sh +++ b/docs/add-redirects.sh @@ -14,6 +14,7 @@ echo "writing redirects..." 
declare -a plugins=( "amqp10" "amqplib" + "avsc" "aws_sdk" "bluebird" "couchbase" @@ -55,6 +56,7 @@ declare -a plugins=( "pg" "promise" "promise_js" + "protobufjs" "q" "redis" "restify" diff --git a/docs/test.ts b/docs/test.ts index e37177e0898..9c6c7df6211 100644 --- a/docs/test.ts +++ b/docs/test.ts @@ -128,6 +128,7 @@ tracer.init({ }, iast: { enabled: true, + cookieFilterPattern: '.*', requestSampling: 50, maxConcurrentRequests: 4, maxContextOperations: 30, @@ -143,6 +144,7 @@ tracer.init({ experimental: { iast: { enabled: true, + cookieFilterPattern: '.*', requestSampling: 50, maxConcurrentRequests: 4, maxContextOperations: 30, @@ -281,8 +283,10 @@ const openSearchOptions: plugins.opensearch = { tracer.use('amqp10'); tracer.use('amqplib'); +tracer.use('avsc'); tracer.use('aws-sdk'); tracer.use('aws-sdk', awsSdkOptions); +tracer.use('azure-functions'); tracer.use('bunyan'); tracer.use('couchbase'); tracer.use('cassandra-driver'); @@ -362,6 +366,7 @@ tracer.use('playwright'); tracer.use('pg'); tracer.use('pg', { service: params => `${params.host}-${params.database}` }); tracer.use('pino'); +tracer.use('protobufjs'); tracer.use('redis'); tracer.use('redis', redisOptions); tracer.use('restify'); diff --git a/ext/types.d.ts b/ext/types.d.ts index 703d88f794b..549f4d58ec1 100644 --- a/ext/types.d.ts +++ b/ext/types.d.ts @@ -1,5 +1,6 @@ declare const types: { HTTP: 'http' + SERVERLESS: 'serverless' WEB: 'web' } diff --git a/ext/types.js b/ext/types.js index d8863f04bb2..884b6a495e5 100644 --- a/ext/types.js +++ b/ext/types.js @@ -2,5 +2,6 @@ module.exports = { HTTP: 'http', + SERVERLESS: 'serverless', WEB: 'web' } diff --git a/index.d.ts b/index.d.ts index 55a67cc8f8b..f969984162a 100644 --- a/index.d.ts +++ b/index.d.ts @@ -147,7 +147,9 @@ interface Plugins { "amqp10": tracer.plugins.amqp10; "amqplib": tracer.plugins.amqplib; "apollo": tracer.plugins.apollo; + "avsc": tracer.plugins.avsc; "aws-sdk": tracer.plugins.aws_sdk; + "azure-functions": 
tracer.plugins.azure_functions; "bunyan": tracer.plugins.bunyan; "cassandra-driver": tracer.plugins.cassandra_driver; "child_process": tracer.plugins.child_process; @@ -190,6 +192,7 @@ interface Plugins { "playwright": tracer.plugins.playwright; "pg": tracer.plugins.pg; "pino": tracer.plugins.pino; + "protobufjs": tracer.plugins.protobufjs; "redis": tracer.plugins.redis; "restify": tracer.plugins.restify; "rhea": tracer.plugins.rhea; @@ -1191,6 +1194,12 @@ declare namespace tracer { signature?: boolean; } + /** + * This plugin automatically patches the [avsc](https://github.com/mtth/avsc) module + * to collect avro message schemas when Datastreams Monitoring is enabled. + */ + interface avsc extends Integration {} + /** * This plugin automatically instruments the * [aws-sdk](https://github.com/aws/aws-sdk-js) module. @@ -1229,6 +1238,12 @@ declare namespace tracer { [key: string]: boolean | Object | undefined; } + /** + * This plugin automatically instruments the + * @azure/functions module. + */ + interface azure_functions extends Instrumentation {} + /** * This plugin patches the [bunyan](https://github.com/trentm/node-bunyan) * to automatically inject trace identifiers in log records when the @@ -1731,6 +1746,11 @@ declare namespace tracer { * on the tracer. */ interface pino extends Integration {} + /** + * This plugin automatically patches the [protobufjs](https://protobufjs.github.io/protobuf.js/) + * to collect protobuf message schemas when Datastreams Monitoring is enabled. 
+ */ + interface protobufjs extends Integration {} /** * This plugin automatically instruments the @@ -2146,6 +2166,12 @@ declare namespace tracer { */ maxContextOperations?: number, + /** + * Defines the pattern to ignore cookie names in the vulnerability hash calculation + * @default ".{32,}" + */ + cookieFilterPattern?: string, + /** * Whether to enable vulnerability deduplication */ diff --git a/integration-tests/automatic-log-submission.spec.js b/integration-tests/automatic-log-submission.spec.js new file mode 100644 index 00000000000..eade717dcf1 --- /dev/null +++ b/integration-tests/automatic-log-submission.spec.js @@ -0,0 +1,207 @@ +'use strict' + +const { exec } = require('child_process') + +const { assert } = require('chai') +const getPort = require('get-port') + +const { + createSandbox, + getCiVisAgentlessConfig, + getCiVisEvpProxyConfig +} = require('./helpers') +const { FakeCiVisIntake } = require('./ci-visibility-intake') +const webAppServer = require('./ci-visibility/web-app-server') +const { NODE_MAJOR } = require('../version') + +const cucumberVersion = NODE_MAJOR <= 16 ? 
'9' : 'latest' + +describe('test visibility automatic log submission', () => { + let sandbox, cwd, receiver, childProcess, webAppPort + let testOutput = '' + + before(async () => { + sandbox = await createSandbox([ + 'mocha', + `@cucumber/cucumber@${cucumberVersion}`, + 'jest', + 'winston', + 'chai@4' + ], true) + cwd = sandbox.folder + webAppPort = await getPort() + webAppServer.listen(webAppPort) + }) + + after(async () => { + await sandbox.remove() + await new Promise(resolve => webAppServer.close(resolve)) + }) + + beforeEach(async function () { + const port = await getPort() + receiver = await new FakeCiVisIntake(port).start() + }) + + afterEach(async () => { + testOutput = '' + childProcess.kill() + await receiver.stop() + }) + + const testFrameworks = [ + { + name: 'mocha', + command: 'mocha ./ci-visibility/automatic-log-submission/automatic-log-submission-test.js' + }, + { + name: 'jest', + command: 'node ./node_modules/jest/bin/jest --config ./ci-visibility/automatic-log-submission/config-jest.js' + }, + { + name: 'cucumber', + command: './node_modules/.bin/cucumber-js ci-visibility/automatic-log-submission-cucumber/*.feature' + } + ] + + testFrameworks.forEach(({ name, command }) => { + context(`with ${name}`, () => { + it('can automatically submit logs', (done) => { + let logIds, testIds + + const logsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.includes('/api/v2/logs'), payloads => { + payloads.forEach(({ headers }) => { + assert.equal(headers['dd-api-key'], '1') + }) + const logMessages = payloads.flatMap(({ logMessage }) => logMessage) + const [url] = payloads.flatMap(({ url }) => url) + + assert.equal(url, '/api/v2/logs?ddsource=winston&service=my-service') + assert.equal(logMessages.length, 2) + + logMessages.forEach(({ dd, level }) => { + assert.equal(level, 'info') + assert.equal(dd.service, 'my-service') + assert.hasAllKeys(dd, ['trace_id', 'span_id', 'service']) + }) + + assert.includeMembers(logMessages.map(({ message }) => 
message), [ + 'Hello simple log!', + 'sum function being called' + ]) + + logIds = { + logSpanId: logMessages[0].dd.span_id, + logTraceId: logMessages[0].dd.trace_id + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testEventContent = events.find(event => event.type === 'test').content + + testIds = { + testSpanId: testEventContent.span_id.toString(), + testTraceId: testEventContent.trace_id.toString() + } + }) + + childProcess = exec(command, + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + DD_AGENTLESS_LOG_SUBMISSION_ENABLED: '1', + DD_AGENTLESS_LOG_SUBMISSION_URL: `http://localhost:${receiver.port}`, + DD_API_KEY: '1', + DD_SERVICE: 'my-service' + }, + stdio: 'pipe' + } + ) + childProcess.on('exit', () => { + Promise.all([logsPromise, eventsPromise]).then(() => { + const { logSpanId, logTraceId } = logIds + const { testSpanId, testTraceId } = testIds + assert.include(testOutput, 'Hello simple log!') + assert.include(testOutput, 'sum function being called') + // cucumber has `cucumber.step`, and that's the active span, not the test. 
+ // logs are queried by trace id, so it should be OK + if (name !== 'cucumber') { + assert.include(testOutput, `"span_id":"${testSpanId}"`) + assert.equal(logSpanId, testSpanId) + } + assert.include(testOutput, `"trace_id":"${testTraceId}"`) + assert.equal(logTraceId, testTraceId) + done() + }).catch(done) + }) + + childProcess.stdout.on('data', (chunk) => { + testOutput += chunk.toString() + }) + childProcess.stderr.on('data', (chunk) => { + testOutput += chunk.toString() + }) + }) + + it('does not submit logs when DD_AGENTLESS_LOG_SUBMISSION_ENABLED is not set', (done) => { + childProcess = exec(command, + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + DD_AGENTLESS_LOG_SUBMISSION_URL: `http://localhost:${receiver.port}`, + DD_SERVICE: 'my-service' + }, + stdio: 'pipe' + } + ) + childProcess.on('exit', () => { + assert.include(testOutput, 'Hello simple log!') + assert.notInclude(testOutput, 'span_id') + done() + }) + + childProcess.stdout.on('data', (chunk) => { + testOutput += chunk.toString() + }) + childProcess.stderr.on('data', (chunk) => { + testOutput += chunk.toString() + }) + }) + + it('does not submit logs when DD_AGENTLESS_LOG_SUBMISSION_ENABLED is set but DD_API_KEY is not', (done) => { + childProcess = exec(command, + { + cwd, + env: { + ...getCiVisEvpProxyConfig(receiver.port), + DD_AGENTLESS_LOG_SUBMISSION_ENABLED: '1', + DD_AGENTLESS_LOG_SUBMISSION_URL: `http://localhost:${receiver.port}`, + DD_SERVICE: 'my-service', + DD_TRACE_DEBUG: '1', + DD_TRACE_LOG_LEVEL: 'warn', + DD_API_KEY: '' + }, + stdio: 'pipe' + } + ) + childProcess.on('exit', () => { + assert.include(testOutput, 'Hello simple log!') + assert.include(testOutput, 'no automatic log submission will be performed') + done() + }) + + childProcess.stdout.on('data', (chunk) => { + testOutput += chunk.toString() + }) + childProcess.stderr.on('data', (chunk) => { + testOutput += chunk.toString() + }) + }) + }) + }) +}) diff --git a/integration-tests/ci-visibility-intake.js 
b/integration-tests/ci-visibility-intake.js index 5096efaba42..c133a7a31fe 100644 --- a/integration-tests/ci-visibility-intake.js +++ b/integration-tests/ci-visibility-intake.js @@ -208,6 +208,15 @@ class FakeCiVisIntake extends FakeAgent { }) }) + app.post('/api/v2/logs', express.json(), (req, res) => { + res.status(200).send('OK') + this.emit('message', { + headers: req.headers, + url: req.url, + logMessage: req.body + }) + }) + return new Promise((resolve, reject) => { const timeoutObj = setTimeout(() => { reject(new Error('Intake timed out starting up')) diff --git a/integration-tests/ci-visibility/automatic-log-submission-cucumber/automatic-log-submission.feature b/integration-tests/ci-visibility/automatic-log-submission-cucumber/automatic-log-submission.feature new file mode 100644 index 00000000000..bcce6b75bea --- /dev/null +++ b/integration-tests/ci-visibility/automatic-log-submission-cucumber/automatic-log-submission.feature @@ -0,0 +1,4 @@ +Feature: Automatic Log Submission + Scenario: Run Automatic Log Submission + When we run a test + Then I should have made a log diff --git a/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/logger.js b/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/logger.js new file mode 100644 index 00000000000..5480f1ee574 --- /dev/null +++ b/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/logger.js @@ -0,0 +1,10 @@ +const { createLogger, format, transports } = require('winston') + +module.exports = createLogger({ + level: 'info', + exitOnError: false, + format: format.json(), + transports: [ + new transports.Console() + ] +}) diff --git a/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/steps.js b/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/steps.js new file mode 100644 index 00000000000..2d1bdb4e906 --- /dev/null +++ 
b/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/steps.js @@ -0,0 +1,14 @@ +const { expect } = require('chai') +const { When, Then } = require('@cucumber/cucumber') + +const logger = require('./logger') +const sum = require('./sum') + +Then('I should have made a log', async function () { + expect(true).to.equal(true) + expect(sum(1, 2)).to.equal(3) +}) + +When('we run a test', async function () { + logger.log('info', 'Hello simple log!') +}) diff --git a/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/sum.js b/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/sum.js new file mode 100644 index 00000000000..cce61142972 --- /dev/null +++ b/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/sum.js @@ -0,0 +1,6 @@ +const logger = require('./logger') + +module.exports = function (a, b) { + logger.log('info', 'sum function being called') + return a + b +} diff --git a/integration-tests/ci-visibility/automatic-log-submission/automatic-log-submission-test.js b/integration-tests/ci-visibility/automatic-log-submission/automatic-log-submission-test.js new file mode 100644 index 00000000000..cfc60b8d3b0 --- /dev/null +++ b/integration-tests/ci-visibility/automatic-log-submission/automatic-log-submission-test.js @@ -0,0 +1,13 @@ +const { expect } = require('chai') + +const logger = require('./logger') +const sum = require('./sum') + +describe('test', () => { + it('should return true', () => { + logger.log('info', 'Hello simple log!') + + expect(true).to.be.true + expect(sum(1, 2)).to.equal(3) + }) +}) diff --git a/integration-tests/ci-visibility/automatic-log-submission/config-jest.js b/integration-tests/ci-visibility/automatic-log-submission/config-jest.js new file mode 100644 index 00000000000..56afa0d36db --- /dev/null +++ b/integration-tests/ci-visibility/automatic-log-submission/config-jest.js @@ -0,0 +1,8 @@ +module.exports = { + projects: [], + testPathIgnorePatterns: 
['/node_modules/'], + cache: false, + testMatch: [ + '**/ci-visibility/automatic-log-submission/automatic-log-submission-*' + ] +} diff --git a/integration-tests/ci-visibility/automatic-log-submission/logger.js b/integration-tests/ci-visibility/automatic-log-submission/logger.js new file mode 100644 index 00000000000..5480f1ee574 --- /dev/null +++ b/integration-tests/ci-visibility/automatic-log-submission/logger.js @@ -0,0 +1,10 @@ +const { createLogger, format, transports } = require('winston') + +module.exports = createLogger({ + level: 'info', + exitOnError: false, + format: format.json(), + transports: [ + new transports.Console() + ] +}) diff --git a/integration-tests/ci-visibility/automatic-log-submission/sum.js b/integration-tests/ci-visibility/automatic-log-submission/sum.js new file mode 100644 index 00000000000..cce61142972 --- /dev/null +++ b/integration-tests/ci-visibility/automatic-log-submission/sum.js @@ -0,0 +1,6 @@ +const logger = require('./logger') + +module.exports = function (a, b) { + logger.log('info', 'sum function being called') + return a + b +} diff --git a/integration-tests/debugger/index.spec.js b/integration-tests/debugger/index.spec.js index 241f57f722f..613c4eeb695 100644 --- a/integration-tests/debugger/index.spec.js +++ b/integration-tests/debugger/index.spec.js @@ -2,6 +2,8 @@ const path = require('path') const { randomUUID } = require('crypto') +const os = require('os') + const getPort = require('get-port') const Axios = require('axios') const { assert } = require('chai') @@ -10,7 +12,7 @@ const { ACKNOWLEDGED, ERROR } = require('../../packages/dd-trace/src/appsec/remo const { version } = require('../../package.json') const probeFile = 'debugger/target-app/index.js' -const probeLineNo = 9 +const probeLineNo = 14 const pollInterval = 1 describe('Dynamic Instrumentation', function () { @@ -273,7 +275,7 @@ describe('Dynamic Instrumentation', function () { }) describe('input messages', function () { - it('should capture and send 
expected snapshot when a log line probe is triggered', function (done) { + it('should capture and send expected payload when a log line probe is triggered', function (done) { agent.on('debugger-diagnostics', ({ payload }) => { if (payload.debugger.diagnostics.status === 'INSTALLED') { axios.get('/foo') @@ -283,6 +285,7 @@ describe('Dynamic Instrumentation', function () { agent.on('debugger-input', ({ payload }) => { const expected = { ddsource: 'dd_debugger', + hostname: os.hostname(), service: 'node', message: 'Hello World!', logger: { @@ -295,7 +298,7 @@ describe('Dynamic Instrumentation', function () { probe: { id: rcConfig.config.id, version: 0, - location: { file: probeFile, lines: [probeLineNo] } + location: { file: probeFile, lines: [String(probeLineNo)] } }, language: 'javascript' } @@ -308,6 +311,22 @@ describe('Dynamic Instrumentation', function () { assert.isTrue(payload['debugger.snapshot'].timestamp > Date.now() - 1000 * 60) assert.isTrue(payload['debugger.snapshot'].timestamp <= Date.now()) + assert.isArray(payload['debugger.snapshot'].stack) + assert.isAbove(payload['debugger.snapshot'].stack.length, 0) + for (const frame of payload['debugger.snapshot'].stack) { + assert.isObject(frame) + assert.hasAllKeys(frame, ['fileName', 'function', 'lineNumber', 'columnNumber']) + assert.isString(frame.fileName) + assert.isString(frame.function) + assert.isAbove(frame.lineNumber, 0) + assert.isAbove(frame.columnNumber, 0) + } + const topFrame = payload['debugger.snapshot'].stack[0] + assert.match(topFrame.fileName, new RegExp(`${appFile}$`)) // path seems to be prefeixed with `/private` on Mac + assert.strictEqual(topFrame.function, 'handler') + assert.strictEqual(topFrame.lineNumber, probeLineNo) + assert.strictEqual(topFrame.columnNumber, 3) + done() }) @@ -373,6 +392,171 @@ describe('Dynamic Instrumentation', function () { agent.addRemoteConfig(rcConfig) }) + + describe('with snapshot', () => { + beforeEach(() => { + // Trigger the breakpoint once probe is 
successfully installed + agent.on('debugger-diagnostics', ({ payload }) => { + if (payload.debugger.diagnostics.status === 'INSTALLED') { + axios.get('/foo') + } + }) + }) + + it('should capture a snapshot', (done) => { + agent.on('debugger-input', ({ payload: { 'debugger.snapshot': { captures } } }) => { + assert.deepEqual(Object.keys(captures), ['lines']) + assert.deepEqual(Object.keys(captures.lines), [String(probeLineNo)]) + + const { locals } = captures.lines[probeLineNo] + const { request, fastify, getSomeData } = locals + delete locals.request + delete locals.fastify + delete locals.getSomeData + + // from block scope + assert.deepEqual(locals, { + nil: { type: 'null', isNull: true }, + undef: { type: 'undefined' }, + bool: { type: 'boolean', value: 'true' }, + num: { type: 'number', value: '42' }, + bigint: { type: 'bigint', value: '42' }, + str: { type: 'string', value: 'foo' }, + lstr: { + type: 'string', + // eslint-disable-next-line max-len + value: 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. 
Duis aute irure dolor i', + truncated: true, + size: 445 + }, + sym: { type: 'symbol', value: 'Symbol(foo)' }, + regex: { type: 'RegExp', value: '/bar/i' }, + arr: { + type: 'Array', + elements: [ + { type: 'number', value: '1' }, + { type: 'number', value: '2' }, + { type: 'number', value: '3' } + ] + }, + obj: { + type: 'Object', + fields: { + foo: { + type: 'Object', + fields: { + baz: { type: 'number', value: '42' }, + nil: { type: 'null', isNull: true }, + undef: { type: 'undefined' }, + deep: { + type: 'Object', + fields: { nested: { type: 'Object', notCapturedReason: 'depth' } } + } + } + }, + bar: { type: 'boolean', value: 'true' } + } + }, + emptyObj: { type: 'Object', fields: {} }, + fn: { + type: 'Function', + fields: { + length: { type: 'number', value: '0' }, + name: { type: 'string', value: 'fn' } + } + }, + p: { + type: 'Promise', + fields: { + '[[PromiseState]]': { type: 'string', value: 'fulfilled' }, + '[[PromiseResult]]': { type: 'undefined' } + } + } + }) + + // from local scope + // There's no reason to test the `request` object 100%, instead just check its fingerprint + assert.deepEqual(Object.keys(request), ['type', 'fields']) + assert.equal(request.type, 'Request') + assert.deepEqual(request.fields.id, { type: 'string', value: 'req-1' }) + assert.deepEqual(request.fields.params, { + type: 'NullObject', fields: { name: { type: 'string', value: 'foo' } } + }) + assert.deepEqual(request.fields.query, { type: 'Object', fields: {} }) + assert.deepEqual(request.fields.body, { type: 'undefined' }) + + // from closure scope + // There's no reason to test the `fastify` object 100%, instead just check its fingerprint + assert.deepEqual(Object.keys(fastify), ['type', 'fields']) + assert.equal(fastify.type, 'Object') + + assert.deepEqual(getSomeData, { + type: 'Function', + fields: { + length: { type: 'number', value: '0' }, + name: { type: 'string', value: 'getSomeData' } + } + }) + + done() + }) + + agent.addRemoteConfig(generateRemoteConfig({ 
captureSnapshot: true })) + }) + + it('should respect maxReferenceDepth', (done) => { + agent.on('debugger-input', ({ payload: { 'debugger.snapshot': { captures } } }) => { + const { locals } = captures.lines[probeLineNo] + delete locals.request + delete locals.fastify + delete locals.getSomeData + + assert.deepEqual(locals, { + nil: { type: 'null', isNull: true }, + undef: { type: 'undefined' }, + bool: { type: 'boolean', value: 'true' }, + num: { type: 'number', value: '42' }, + bigint: { type: 'bigint', value: '42' }, + str: { type: 'string', value: 'foo' }, + lstr: { + type: 'string', + // eslint-disable-next-line max-len + value: 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor i', + truncated: true, + size: 445 + }, + sym: { type: 'symbol', value: 'Symbol(foo)' }, + regex: { type: 'RegExp', value: '/bar/i' }, + arr: { type: 'Array', notCapturedReason: 'depth' }, + obj: { type: 'Object', notCapturedReason: 'depth' }, + emptyObj: { type: 'Object', notCapturedReason: 'depth' }, + fn: { type: 'Function', notCapturedReason: 'depth' }, + p: { type: 'Promise', notCapturedReason: 'depth' } + }) + + done() + }) + + agent.addRemoteConfig(generateRemoteConfig({ captureSnapshot: true, capture: { maxReferenceDepth: 0 } })) + }) + + it('should respect maxLength', (done) => { + agent.on('debugger-input', ({ payload: { 'debugger.snapshot': { captures } } }) => { + const { locals } = captures.lines[probeLineNo] + + assert.deepEqual(locals.lstr, { + type: 'string', + value: 'Lorem ipsu', + truncated: true, + size: 445 + }) + + done() + }) + + agent.addRemoteConfig(generateRemoteConfig({ captureSnapshot: true, capture: { maxLength: 10 } })) + }) + }) }) describe('race conditions', () => { diff --git a/integration-tests/debugger/target-app/index.js 
b/integration-tests/debugger/target-app/index.js index d0e1b7fb6dd..dd7f5e6328a 100644 --- a/integration-tests/debugger/target-app/index.js +++ b/integration-tests/debugger/target-app/index.js @@ -5,10 +5,17 @@ const Fastify = require('fastify') const fastify = Fastify() +// Since line probes have hardcoded line numbers, we want to try and keep the line numbers from changing within the +// `handler` function below when making changes to this file. This is achieved by calling `getSomeData` and keeping all +// variable names on the same line as much as possible. fastify.get('/:name', function handler (request) { + // eslint-disable-next-line no-unused-vars + const { nil, undef, bool, num, bigint, str, lstr, sym, regex, arr, obj, emptyObj, fn, p } = getSomeData() return { hello: request.params.name } }) +// WARNING: Breakpoints present above this line - Any changes to the lines above might influence tests! + fastify.listen({ port: process.env.APP_PORT }, (err) => { if (err) { fastify.log.error(err) @@ -16,3 +23,31 @@ fastify.listen({ port: process.env.APP_PORT }, (err) => { } process.send({ port: process.env.APP_PORT }) }) + +function getSomeData () { + return { + nil: null, + undef: undefined, + bool: true, + num: 42, + bigint: 42n, + str: 'foo', + // eslint-disable-next-line max-len + lstr: 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.', + sym: Symbol('foo'), + regex: /bar/i, + arr: [1, 2, 3], + obj: { + foo: { + baz: 42, + nil: null, + undef: undefined, + deep: { nested: { obj: { that: { goes: { on: { forever: true } } } } } } + }, + bar: true + }, + emptyObj: {}, + fn: () => {}, + p: Promise.resolve() + } +} diff --git a/integration-tests/helpers/index.js b/integration-tests/helpers/index.js index 98074ba89b4..09cc6c5bee4 100644 --- a/integration-tests/helpers/index.js +++ b/integration-tests/helpers/index.js @@ -358,6 +358,7 @@ function assertUUID (actual, msg = 'not a valid UUID') { module.exports = { FakeAgent, + hookFile, assertObjectContains, assertUUID, spawnProc, diff --git a/integration-tests/selenium/selenium.spec.js b/integration-tests/selenium/selenium.spec.js index a95acb6aaa2..50fc9d19568 100644 --- a/integration-tests/selenium/selenium.spec.js +++ b/integration-tests/selenium/selenium.spec.js @@ -18,7 +18,7 @@ const { } = require('../../packages/dd-trace/src/plugins/util/test') const { NODE_MAJOR } = require('../../version') -const cucumberVersion = NODE_MAJOR <= 16 ? '9' : '10' +const cucumberVersion = NODE_MAJOR <= 16 ? 
'9' : 'latest' const webAppServer = require('../ci-visibility/web-app-server') diff --git a/package.json b/package.json index c27742121e5..52982e0cce6 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "dd-trace", - "version": "4.47.1", + "version": "4.48.0", "description": "Datadog APM tracing client for JavaScript", "main": "index.js", "typings": "index.d.ts", @@ -20,8 +20,8 @@ "test:appsec:ci": "nyc --no-clean --include \"packages/dd-trace/src/appsec/**/*.js\" --exclude \"packages/dd-trace/test/appsec/**/*.plugin.spec.js\" -- npm run test:appsec", "test:appsec:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/appsec/**/*.@($(echo $PLUGINS)).plugin.spec.js\"", "test:appsec:plugins:ci": "yarn services && nyc --no-clean --include \"packages/dd-trace/src/appsec/**/*.js\" -- npm run test:appsec:plugins", - "test:debugger": "tap packages/dd-trace/test/debugger/**/*.spec.js", - "test:debugger:ci": "npm run test:debugger -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/debugger/**/*.js\"", + "test:debugger": "mocha -r 'packages/dd-trace/test/setup/mocha.js' 'packages/dd-trace/test/debugger/**/*.spec.js'", + "test:debugger:ci": "nyc --no-clean --include 'packages/dd-trace/src/debugger/**/*.js' -- npm run test:debugger", "test:trace:core": "tap packages/dd-trace/test/*.spec.js \"packages/dd-trace/test/{ci-visibility,datastreams,encode,exporters,opentelemetry,opentracing,plugins,service-naming,telemetry}/**/*.spec.js\"", "test:trace:core:ci": "npm run test:trace:core -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/**/*.js\"", "test:instrumentations": "mocha -r 'packages/dd-trace/test/setup/mocha.js' 'packages/datadog-instrumentations/test/**/*.spec.js'", @@ -77,7 +77,7 @@ }, "dependencies": { "@datadog/native-appsec": "8.1.1", - "@datadog/native-iast-rewriter": "2.4.1", + "@datadog/native-iast-rewriter": "2.5.0", "@datadog/native-iast-taint-tracking": "3.1.0", "@datadog/native-metrics": "^2.0.0", 
"@datadog/pprof": "5.3.0", @@ -91,7 +91,6 @@ "int64-buffer": "^0.1.9", "istanbul-lib-coverage": "3.2.0", "jest-docblock": "^29.7.0", - "jsonpath-plus": "^9.0.0", "koalas": "^1.0.2", "limiter": "1.1.5", "lodash.sortby": "^4.7.0", @@ -114,7 +113,7 @@ "aws-sdk": "^2.1446.0", "axios": "^1.7.4", "benchmark": "^2.1.4", - "body-parser": "^1.20.2", + "body-parser": "^1.20.3", "chai": "^4.3.7", "chalk": "^5.3.0", "checksum": "^1.0.0", @@ -140,7 +139,7 @@ "nyc": "^15.1.0", "proxyquire": "^1.8.0", "rimraf": "^3.0.0", - "sinon": "^15.2.0", + "sinon": "^16.1.3", "sinon-chai": "^3.7.0", "tap": "^16.3.7", "tiktoken": "^1.0.15" diff --git a/packages/datadog-code-origin/index.js b/packages/datadog-code-origin/index.js new file mode 100644 index 00000000000..530dd3cc8ae --- /dev/null +++ b/packages/datadog-code-origin/index.js @@ -0,0 +1,38 @@ +'use strict' + +const { getUserLandFrames } = require('../dd-trace/src/plugins/util/stacktrace') + +const limit = Number(process.env._DD_CODE_ORIGIN_MAX_USER_FRAMES) || 8 + +module.exports = { + entryTag, + exitTag +} + +function entryTag (topOfStackFunc) { + return tag('entry', topOfStackFunc) +} + +function exitTag (topOfStackFunc) { + return tag('exit', topOfStackFunc) +} + +function tag (type, topOfStackFunc) { + const frames = getUserLandFrames(topOfStackFunc, limit) + const tags = { + '_dd.code_origin.type': type + } + for (let i = 0; i < frames.length; i++) { + const frame = frames[i] + tags[`_dd.code_origin.frames.${i}.file`] = frame.file + tags[`_dd.code_origin.frames.${i}.line`] = String(frame.line) + tags[`_dd.code_origin.frames.${i}.column`] = String(frame.column) + if (frame.method) { + tags[`_dd.code_origin.frames.${i}.method`] = frame.method + } + if (frame.type) { + tags[`_dd.code_origin.frames.${i}.type`] = frame.type + } + } + return tags +} diff --git a/packages/datadog-core/index.js b/packages/datadog-core/index.js index 72b0403aa75..9819b32f3ba 100644 --- a/packages/datadog-core/index.js +++ 
b/packages/datadog-core/index.js @@ -1,7 +1,7 @@ 'use strict' -const LocalStorage = require('./src/storage') +const { AsyncLocalStorage } = require('async_hooks') -const storage = new LocalStorage() +const storage = new AsyncLocalStorage() module.exports = { storage } diff --git a/packages/datadog-core/src/storage/async_resource.js b/packages/datadog-core/src/storage/async_resource.js deleted file mode 100644 index 4738845e415..00000000000 --- a/packages/datadog-core/src/storage/async_resource.js +++ /dev/null @@ -1,108 +0,0 @@ -'use strict' - -const { createHook, executionAsyncResource } = require('async_hooks') -const { channel } = require('dc-polyfill') - -const beforeCh = channel('dd-trace:storage:before') -const afterCh = channel('dd-trace:storage:after') -const enterCh = channel('dd-trace:storage:enter') - -let PrivateSymbol = Symbol -function makePrivateSymbol () { - // eslint-disable-next-line no-new-func - PrivateSymbol = new Function('name', 'return %CreatePrivateSymbol(name)') -} - -try { - makePrivateSymbol() -} catch (e) { - try { - const v8 = require('v8') - v8.setFlagsFromString('--allow-natives-syntax') - makePrivateSymbol() - v8.setFlagsFromString('--no-allow-natives-syntax') - // eslint-disable-next-line no-empty - } catch (e) {} -} - -class AsyncResourceStorage { - constructor () { - this._ddResourceStore = PrivateSymbol('ddResourceStore') - this._enabled = false - this._hook = createHook(this._createHook()) - } - - disable () { - if (!this._enabled) return - - this._hook.disable() - this._enabled = false - } - - getStore () { - if (!this._enabled) return - - const resource = this._executionAsyncResource() - - return resource[this._ddResourceStore] - } - - enterWith (store) { - this._enable() - - const resource = this._executionAsyncResource() - - resource[this._ddResourceStore] = store - enterCh.publish() - } - - run (store, callback, ...args) { - this._enable() - - const resource = this._executionAsyncResource() - const oldStore = 
resource[this._ddResourceStore] - - resource[this._ddResourceStore] = store - enterCh.publish() - - try { - return callback(...args) - } finally { - resource[this._ddResourceStore] = oldStore - enterCh.publish() - } - } - - _createHook () { - return { - init: this._init.bind(this), - before () { - beforeCh.publish() - }, - after () { - afterCh.publish() - } - } - } - - _enable () { - if (this._enabled) return - - this._enabled = true - this._hook.enable() - } - - _init (asyncId, type, triggerAsyncId, resource) { - const currentResource = this._executionAsyncResource() - - if (Object.prototype.hasOwnProperty.call(currentResource, this._ddResourceStore)) { - resource[this._ddResourceStore] = currentResource[this._ddResourceStore] - } - } - - _executionAsyncResource () { - return executionAsyncResource() || {} - } -} - -module.exports = AsyncResourceStorage diff --git a/packages/datadog-core/src/storage/index.js b/packages/datadog-core/src/storage/index.js deleted file mode 100644 index e522e61ced2..00000000000 --- a/packages/datadog-core/src/storage/index.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict' - -// TODO: default to AsyncLocalStorage when it supports triggerAsyncResource - -module.exports = require('./async_resource') diff --git a/packages/datadog-core/test/setup.js b/packages/datadog-core/test/setup.js deleted file mode 100644 index 2f8af45cdd2..00000000000 --- a/packages/datadog-core/test/setup.js +++ /dev/null @@ -1,8 +0,0 @@ -'use strict' - -require('tap').mochaGlobals() - -const chai = require('chai') -const sinonChai = require('sinon-chai') - -chai.use(sinonChai) diff --git a/packages/datadog-core/test/storage/async_resource.spec.js b/packages/datadog-core/test/storage/async_resource.spec.js deleted file mode 100644 index ce19b216260..00000000000 --- a/packages/datadog-core/test/storage/async_resource.spec.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -require('../setup') - -const StorageBackend = require('../../src/storage/async_resource') -const 
testStorage = require('./test') - -describe('storage/async_resource', () => { - let storage - - beforeEach(() => { - storage = new StorageBackend() - }) - - afterEach(() => { - storage.disable() - }) - - testStorage(() => storage) -}) diff --git a/packages/datadog-core/test/storage/test.js b/packages/datadog-core/test/storage/test.js deleted file mode 100644 index 0f69a43d9f0..00000000000 --- a/packages/datadog-core/test/storage/test.js +++ /dev/null @@ -1,160 +0,0 @@ -'use strict' - -const { expect } = require('chai') -const { inspect } = require('util') -const { - AsyncResource, - executionAsyncId, - executionAsyncResource -} = require('async_hooks') - -module.exports = factory => { - let storage - let store - - beforeEach(() => { - storage = factory() - store = {} - }) - - describe('getStore()', () => { - it('should return undefined by default', () => { - expect(storage.getStore()).to.be.undefined - }) - }) - - describe('run()', () => { - it('should return the value returned by the callback', () => { - expect(storage.run(store, () => 'test')).to.equal('test') - }) - - it('should preserve the surrounding scope', () => { - expect(storage.getStore()).to.be.undefined - - storage.run(store, () => {}) - - expect(storage.getStore()).to.be.undefined - }) - - it('should run the span on the current scope', () => { - expect(storage.getStore()).to.be.undefined - - storage.run(store, () => { - expect(storage.getStore()).to.equal(store) - }) - - expect(storage.getStore()).to.be.undefined - }) - - it('should persist through setTimeout', done => { - storage.run(store, () => { - setTimeout(() => { - expect(storage.getStore()).to.equal(store) - done() - }, 0) - }) - }) - - it('should persist through setImmediate', done => { - storage.run(store, () => { - setImmediate(() => { - expect(storage.getStore()).to.equal(store) - done() - }, 0) - }) - }) - - it('should persist through setInterval', done => { - storage.run(store, () => { - let shouldReturn = false - - const timer = 
setInterval(() => { - expect(storage.getStore()).to.equal(store) - - if (shouldReturn) { - clearInterval(timer) - return done() - } - - shouldReturn = true - }, 0) - }) - }) - - it('should persist through process.nextTick', done => { - storage.run(store, () => { - process.nextTick(() => { - expect(storage.getStore()).to.equal(store) - done() - }, 0) - }) - }) - - it('should persist through promises', () => { - const promise = Promise.resolve() - - return storage.run(store, () => { - return promise.then(() => { - expect(storage.getStore()).to.equal(store) - }) - }) - }) - - it('should handle concurrency', done => { - storage.run(store, () => { - setImmediate(() => { - expect(storage.getStore()).to.equal(store) - done() - }) - }) - - storage.run(store, () => {}) - }) - - it('should not break propagation for nested resources', done => { - storage.run(store, () => { - const asyncResource = new AsyncResource( - 'TEST', { triggerAsyncId: executionAsyncId(), requireManualDestroy: false } - ) - - asyncResource.runInAsyncScope(() => {}) - - expect(storage.getStore()).to.equal(store) - - done() - }) - }) - - it('should not log ddResourceStore contents', done => { - function getKeys (output) { - return output.split('\n').slice(1, -1).map(line => { - return line.split(':').map(v => v.trim())[0] - }) - } - - setImmediate(() => { - const withoutStore = getKeys(inspect(executionAsyncResource(), { depth: 0 })) - storage.run(store, () => { - setImmediate(() => { - const withStore = getKeys(inspect(executionAsyncResource(), { depth: 0 })) - expect(withStore).to.deep.equal(withoutStore) - done() - }) - }) - }) - }) - }) - - describe('enterWith()', () => { - it('should transition into the context for the remainder of the current execution', () => { - const newStore = {} - - storage.run(store, () => { - storage.enterWith(newStore) - expect(storage.getStore()).to.equal(newStore) - }) - - expect(storage.getStore()).to.be.undefined - }) - }) -} diff --git 
a/packages/datadog-instrumentations/src/avsc.js b/packages/datadog-instrumentations/src/avsc.js new file mode 100644 index 00000000000..6d71b1744bf --- /dev/null +++ b/packages/datadog-instrumentations/src/avsc.js @@ -0,0 +1,37 @@ +const shimmer = require('../../datadog-shimmer') +const { addHook } = require('./helpers/instrument') + +const dc = require('dc-polyfill') +const serializeChannel = dc.channel('apm:avsc:serialize-start') +const deserializeChannel = dc.channel('apm:avsc:deserialize-end') + +function wrapSerialization (Type) { + shimmer.wrap(Type.prototype, 'toBuffer', original => function () { + if (!serializeChannel.hasSubscribers) { + return original.apply(this, arguments) + } + serializeChannel.publish({ messageClass: this }) + return original.apply(this, arguments) + }) +} + +function wrapDeserialization (Type) { + shimmer.wrap(Type.prototype, 'fromBuffer', original => function () { + if (!deserializeChannel.hasSubscribers) { + return original.apply(this, arguments) + } + const result = original.apply(this, arguments) + deserializeChannel.publish({ messageClass: result }) + return result + }) +} + +addHook({ + name: 'avsc', + versions: ['>=5.0.0'] +}, avro => { + wrapDeserialization(avro.Type) + wrapSerialization(avro.Type) + + return avro +}) diff --git a/packages/datadog-instrumentations/src/azure-functions.js b/packages/datadog-instrumentations/src/azure-functions.js new file mode 100644 index 00000000000..2527d9afb3f --- /dev/null +++ b/packages/datadog-instrumentations/src/azure-functions.js @@ -0,0 +1,48 @@ +'use strict' + +const { + addHook +} = require('./helpers/instrument') +const shimmer = require('../../datadog-shimmer') +const dc = require('dc-polyfill') + +const azureFunctionsChannel = dc.tracingChannel('datadog:azure-functions:invoke') + +addHook({ name: '@azure/functions', versions: ['>=4'] }, azureFunction => { + const { app } = azureFunction + + shimmer.wrap(app, 'deleteRequest', wrapHandler) + shimmer.wrap(app, 'http', wrapHandler) 
+ shimmer.wrap(app, 'get', wrapHandler) + shimmer.wrap(app, 'patch', wrapHandler) + shimmer.wrap(app, 'post', wrapHandler) + shimmer.wrap(app, 'put', wrapHandler) + + return azureFunction +}) + +// The http methods are overloaded so we need to check which type of argument was passed in order to wrap the handler +// The arguments are either an object with a handler property or the handler function itself +function wrapHandler (method) { + return function (name, arg) { + if (typeof arg === 'object' && arg.hasOwnProperty('handler')) { + const options = arg + shimmer.wrap(options, 'handler', handler => traceHandler(handler, name, method.name)) + } else if (typeof arg === 'function') { + const handler = arg + arguments[1] = shimmer.wrapFunction(handler, handler => traceHandler(handler, name, method.name)) + } + return method.apply(this, arguments) + } +} + +function traceHandler (handler, functionName, methodName) { + return function (...args) { + const httpRequest = args[0] + const invocationContext = args[1] + return azureFunctionsChannel.tracePromise( + handler, + { functionName, httpRequest, invocationContext, methodName }, + this, ...args) + } +} diff --git a/packages/datadog-instrumentations/src/child_process.js b/packages/datadog-instrumentations/src/child_process.js index d8f56f90981..8af49788007 100644 --- a/packages/datadog-instrumentations/src/child_process.js +++ b/packages/datadog-instrumentations/src/child_process.js @@ -61,14 +61,17 @@ function wrapChildProcessSyncMethod (shell = false) { const childProcessInfo = normalizeArgs(arguments, shell) - return childProcessChannel.traceSync( - childProcessMethod, - { - command: childProcessInfo.command, - shell: childProcessInfo.shell - }, - this, - ...arguments) + const innerResource = new AsyncResource('bound-anonymous-fn') + return innerResource.runInAsyncScope(() => { + return childProcessChannel.traceSync( + childProcessMethod, + { + command: childProcessInfo.command, + shell: childProcessInfo.shell + }, + 
this, + ...arguments) + }) } } } @@ -101,6 +104,12 @@ function wrapChildProcessAsyncMethod (shell = false) { const childProcessInfo = normalizeArgs(arguments, shell) + const cb = arguments[arguments.length - 1] + if (typeof cb === 'function') { + const callbackResource = new AsyncResource('bound-anonymous-fn') + arguments[arguments.length - 1] = callbackResource.bind(cb) + } + const innerResource = new AsyncResource('bound-anonymous-fn') return innerResource.runInAsyncScope(() => { childProcessChannel.start.publish({ command: childProcessInfo.command, shell: childProcessInfo.shell }) diff --git a/packages/datadog-instrumentations/src/express.js b/packages/datadog-instrumentations/src/express.js index d3113821364..b093eab7830 100644 --- a/packages/datadog-instrumentations/src/express.js +++ b/packages/datadog-instrumentations/src/express.js @@ -3,6 +3,7 @@ const { createWrapRouterMethod } = require('./router') const shimmer = require('../../datadog-shimmer') const { addHook, channel } = require('./helpers/instrument') +const tracingChannel = require('dc-polyfill').tracingChannel const handleChannel = channel('apm:express:request:handle') @@ -35,6 +36,27 @@ function wrapResponseJson (json) { } } +const responseRenderChannel = tracingChannel('datadog:express:response:render') + +function wrapResponseRender (render) { + return function wrappedRender (view, options, callback) { + if (!responseRenderChannel.start.hasSubscribers) { + return render.apply(this, arguments) + } + + return responseRenderChannel.traceSync( + render, + { + req: this.req, + view, + options + }, + this, + ...arguments + ) + } +} + addHook({ name: 'express', versions: ['>=4'] }, express => { shimmer.wrap(express.application, 'handle', wrapHandle) shimmer.wrap(express.Router, 'use', wrapRouterMethod) @@ -42,6 +64,7 @@ addHook({ name: 'express', versions: ['>=4'] }, express => { shimmer.wrap(express.response, 'json', wrapResponseJson) shimmer.wrap(express.response, 'jsonp', wrapResponseJson) + 
shimmer.wrap(express.response, 'render', wrapResponseRender) return express }) @@ -79,11 +102,21 @@ addHook({ }) const processParamsStartCh = channel('datadog:express:process_params:start') -const wrapProcessParamsMethod = (requestPositionInArguments) => { - return (original) => { - return function () { +function wrapProcessParamsMethod (requestPositionInArguments) { + return function wrapProcessParams (original) { + return function wrappedProcessParams () { if (processParamsStartCh.hasSubscribers) { - processParamsStartCh.publish({ req: arguments[requestPositionInArguments] }) + const req = arguments[requestPositionInArguments] + const abortController = new AbortController() + + processParamsStartCh.publish({ + req, + res: req?.res, + abortController, + params: req?.params + }) + + if (abortController.signal.aborted) return } return original.apply(this, arguments) diff --git a/packages/datadog-instrumentations/src/fastify.js b/packages/datadog-instrumentations/src/fastify.js index a6d954a9460..726e8284f92 100644 --- a/packages/datadog-instrumentations/src/fastify.js +++ b/packages/datadog-instrumentations/src/fastify.js @@ -5,6 +5,7 @@ const { addHook, channel, AsyncResource } = require('./helpers/instrument') const errorChannel = channel('apm:fastify:middleware:error') const handleChannel = channel('apm:fastify:request:handle') +const routeAddedChannel = channel('apm:fastify:route:added') const parsingResources = new WeakMap() @@ -16,6 +17,7 @@ function wrapFastify (fastify, hasParsingEvents) { if (!app || typeof app.addHook !== 'function') return app + app.addHook('onRoute', onRoute) app.addHook('onRequest', onRequest) app.addHook('preHandler', preHandler) @@ -86,8 +88,9 @@ function onRequest (request, reply, done) { const req = getReq(request) const res = getRes(reply) + const routeConfig = getRouteConfig(request) - handleChannel.publish({ req, res }) + handleChannel.publish({ req, res, routeConfig }) return done() } @@ -142,6 +145,10 @@ function getRes (reply) 
{ return reply && (reply.raw || reply.res || reply) } +function getRouteConfig (request) { + return request?.routeOptions?.config +} + function publishError (error, req) { if (error) { errorChannel.publish({ error, req }) @@ -150,6 +157,10 @@ function publishError (error, req) { return error } +function onRoute (routeOptions) { + routeAddedChannel.publish({ routeOptions, onRoute }) +} + addHook({ name: 'fastify', versions: ['>=3'] }, fastify => { const wrapped = shimmer.wrapFunction(fastify, fastify => wrapFastify(fastify, true)) diff --git a/packages/datadog-instrumentations/src/fs.js b/packages/datadog-instrumentations/src/fs.js index e0e57f1ebce..9ae201b9860 100644 --- a/packages/datadog-instrumentations/src/fs.js +++ b/packages/datadog-instrumentations/src/fs.js @@ -266,24 +266,44 @@ function createWrapFunction (prefix = '', override = '') { const lastIndex = arguments.length - 1 const cb = typeof arguments[lastIndex] === 'function' && arguments[lastIndex] const innerResource = new AsyncResource('bound-anonymous-fn') - const message = getMessage(method, getMethodParamsRelationByPrefix(prefix)[operation], arguments, this) + const params = getMethodParamsRelationByPrefix(prefix)[operation] + const abortController = new AbortController() + const message = { ...getMessage(method, params, arguments, this), abortController } + + const finish = innerResource.bind(function (error) { + if (error !== null && typeof error === 'object') { // fs.exists receives a boolean + errorChannel.publish(error) + } + finishChannel.publish() + }) if (cb) { const outerResource = new AsyncResource('bound-anonymous-fn') arguments[lastIndex] = shimmer.wrapFunction(cb, cb => innerResource.bind(function (e) { - if (e !== null && typeof e === 'object') { // fs.exists receives a boolean - errorChannel.publish(e) - } - - finishChannel.publish() - + finish(e) return outerResource.runInAsyncScope(() => cb.apply(this, arguments)) })) } return innerResource.runInAsyncScope(() => { 
startChannel.publish(message) + + if (abortController.signal.aborted) { + const error = abortController.signal.reason || new Error('Aborted') + + if (prefix === 'promises.') { + finish(error) + return Promise.reject(error) + } else if (name.includes('Sync') || !cb) { + finish(error) + throw error + } else if (cb) { + arguments[lastIndex](error) + return + } + } + try { const result = original.apply(this, arguments) if (cb) return result diff --git a/packages/datadog-instrumentations/src/helpers/hooks.js b/packages/datadog-instrumentations/src/helpers/hooks.js index 284e4ed5950..62d45e37008 100644 --- a/packages/datadog-instrumentations/src/helpers/hooks.js +++ b/packages/datadog-instrumentations/src/helpers/hooks.js @@ -5,6 +5,7 @@ module.exports = { '@apollo/gateway': () => require('../apollo'), 'apollo-server-core': () => require('../apollo-server-core'), '@aws-sdk/smithy-client': () => require('../aws-sdk'), + '@azure/functions': () => require('../azure-functions'), '@cucumber/cucumber': () => require('../cucumber'), '@playwright/test': () => require('../playwright'), '@elastic/elasticsearch': () => require('../elasticsearch'), @@ -27,6 +28,7 @@ module.exports = { aerospike: () => require('../aerospike'), amqp10: () => require('../amqp10'), amqplib: () => require('../amqplib'), + avsc: () => require('../avsc'), 'aws-sdk': () => require('../aws-sdk'), bluebird: () => require('../bluebird'), 'body-parser': () => require('../body-parser'), @@ -100,6 +102,7 @@ module.exports = { playwright: () => require('../playwright'), 'promise-js': () => require('../promise-js'), promise: () => require('../promise'), + protobufjs: () => require('../protobufjs'), q: () => require('../q'), qs: () => require('../qs'), redis: () => require('../redis'), diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js index e2baf3f9d42..e006f311dc3 100644 --- a/packages/datadog-instrumentations/src/jest.js +++ 
b/packages/datadog-instrumentations/src/jest.js @@ -850,7 +850,8 @@ addHook({ }, jestConfigSyncWrapper) const LIBRARIES_BYPASSING_JEST_REQUIRE_ENGINE = [ - 'selenium-webdriver' + 'selenium-webdriver', + 'winston' ] function shouldBypassJestRequireEngine (moduleName) { diff --git a/packages/datadog-instrumentations/src/mocha/common.js b/packages/datadog-instrumentations/src/mocha/common.js index 1d31290ce6c..c25ab2fdb21 100644 --- a/packages/datadog-instrumentations/src/mocha/common.js +++ b/packages/datadog-instrumentations/src/mocha/common.js @@ -1,6 +1,6 @@ const { addHook, channel } = require('../helpers/instrument') const shimmer = require('../../../datadog-shimmer') -const { getCallSites } = require('../../../dd-trace/src/plugins/util/test') +const { getCallSites } = require('../../../dd-trace/src/plugins/util/stacktrace') const { testToStartLine } = require('./utils') const parameterizedTestCh = channel('ci:mocha:test:parameterize') diff --git a/packages/datadog-instrumentations/src/mysql2.js b/packages/datadog-instrumentations/src/mysql2.js index 0077b6b9dda..096eec0e80e 100644 --- a/packages/datadog-instrumentations/src/mysql2.js +++ b/packages/datadog-instrumentations/src/mysql2.js @@ -6,11 +6,14 @@ const { AsyncResource } = require('./helpers/instrument') const shimmer = require('../../datadog-shimmer') +const semver = require('semver') -addHook({ name: 'mysql2', file: 'lib/connection.js', versions: ['>=1'] }, Connection => { +addHook({ name: 'mysql2', file: 'lib/connection.js', versions: ['>=1'] }, (Connection, version) => { const startCh = channel('apm:mysql2:query:start') const finishCh = channel('apm:mysql2:query:finish') const errorCh = channel('apm:mysql2:query:error') + const startOuterQueryCh = channel('datadog:mysql2:outerquery:start') + const shouldEmitEndAfterQueryAbort = semver.intersects(version, '>=1.3.3') shimmer.wrap(Connection.prototype, 'addCommand', addCommand => function (cmd) { if (!startCh.hasSubscribers) return 
addCommand.apply(this, arguments) @@ -28,6 +31,76 @@ addHook({ name: 'mysql2', file: 'lib/connection.js', versions: ['>=1'] }, Connec return asyncResource.bind(addCommand, this).apply(this, arguments) }) + shimmer.wrap(Connection.prototype, 'query', query => function (sql, values, cb) { + if (!startOuterQueryCh.hasSubscribers) return query.apply(this, arguments) + + if (typeof sql === 'object') sql = sql?.sql + + if (!sql) return query.apply(this, arguments) + + const abortController = new AbortController() + startOuterQueryCh.publish({ sql, abortController }) + + if (abortController.signal.aborted) { + const addCommand = this.addCommand + this.addCommand = function (cmd) { return cmd } + + let queryCommand + try { + queryCommand = query.apply(this, arguments) + } finally { + this.addCommand = addCommand + } + + cb = queryCommand.onResult + + process.nextTick(() => { + if (cb) { + cb(abortController.signal.reason) + } else { + queryCommand.emit('error', abortController.signal.reason) + } + + if (shouldEmitEndAfterQueryAbort) { + queryCommand.emit('end') + } + }) + + return queryCommand + } + + return query.apply(this, arguments) + }) + + shimmer.wrap(Connection.prototype, 'execute', execute => function (sql, values, cb) { + if (!startOuterQueryCh.hasSubscribers) return execute.apply(this, arguments) + + if (typeof sql === 'object') sql = sql?.sql + + if (!sql) return execute.apply(this, arguments) + + const abortController = new AbortController() + startOuterQueryCh.publish({ sql, abortController }) + + if (abortController.signal.aborted) { + const addCommand = this.addCommand + this.addCommand = function (cmd) { return cmd } + + let result + try { + result = execute.apply(this, arguments) + } finally { + this.addCommand = addCommand + } + + result?.onResult(abortController.signal.reason) + + return result + } + + return execute.apply(this, arguments) + }) + return Connection function bindExecute (cmd, execute, asyncResource) { @@ -79,3 +152,149 @@ addHook({ name: 
'mysql2', file: 'lib/connection.js', versions: ['>=1'] }, Connec }, cmd)) } }) + +addHook({ name: 'mysql2', file: 'lib/pool.js', versions: ['>=1'] }, (Pool, version) => { + const startOuterQueryCh = channel('datadog:mysql2:outerquery:start') + const shouldEmitEndAfterQueryAbort = semver.intersects(version, '>=1.3.3') + + shimmer.wrap(Pool.prototype, 'query', query => function (sql, values, cb) { + if (!startOuterQueryCh.hasSubscribers) return query.apply(this, arguments) + + if (typeof sql === 'object') sql = sql?.sql + + if (!sql) return query.apply(this, arguments) + + const abortController = new AbortController() + startOuterQueryCh.publish({ sql, abortController }) + + if (abortController.signal.aborted) { + const getConnection = this.getConnection + this.getConnection = function () {} + + let queryCommand + try { + queryCommand = query.apply(this, arguments) + } finally { + this.getConnection = getConnection + } + + process.nextTick(() => { + if (queryCommand.onResult) { + queryCommand.onResult(abortController.signal.reason) + } else { + queryCommand.emit('error', abortController.signal.reason) + } + + if (shouldEmitEndAfterQueryAbort) { + queryCommand.emit('end') + } + }) + + return queryCommand + } + + return query.apply(this, arguments) + }) + + shimmer.wrap(Pool.prototype, 'execute', execute => function (sql, values, cb) { + if (!startOuterQueryCh.hasSubscribers) return execute.apply(this, arguments) + + if (typeof sql === 'object') sql = sql?.sql + + if (!sql) return execute.apply(this, arguments) + + const abortController = new AbortController() + startOuterQueryCh.publish({ sql, abortController }) + + if (abortController.signal.aborted) { + if (typeof values === 'function') { + cb = values + } + + process.nextTick(() => { + cb(abortController.signal.reason) + }) + return + } + + return execute.apply(this, arguments) + }) + + return Pool +}) + +// PoolNamespace.prototype.query does not exist in mysql2<2.3.0 +addHook({ name: 'mysql2', file: 
'lib/pool_cluster.js', versions: ['>=2.3.0'] }, PoolCluster => { + const startOuterQueryCh = channel('datadog:mysql2:outerquery:start') + const wrappedPoolNamespaces = new WeakSet() + + shimmer.wrap(PoolCluster.prototype, 'of', of => function () { + const poolNamespace = of.apply(this, arguments) + + if (startOuterQueryCh.hasSubscribers && !wrappedPoolNamespaces.has(poolNamespace)) { + shimmer.wrap(poolNamespace, 'query', query => function (sql, values, cb) { + if (typeof sql === 'object') sql = sql?.sql + + if (!sql) return query.apply(this, arguments) + + const abortController = new AbortController() + startOuterQueryCh.publish({ sql, abortController }) + + if (abortController.signal.aborted) { + const getConnection = this.getConnection + this.getConnection = function () {} + + let queryCommand + try { + queryCommand = query.apply(this, arguments) + } finally { + this.getConnection = getConnection + } + + process.nextTick(() => { + if (queryCommand.onResult) { + queryCommand.onResult(abortController.signal.reason) + } else { + queryCommand.emit('error', abortController.signal.reason) + } + + queryCommand.emit('end') + }) + + return queryCommand + } + + return query.apply(this, arguments) + }) + + shimmer.wrap(poolNamespace, 'execute', execute => function (sql, values, cb) { + if (typeof sql === 'object') sql = sql?.sql + + if (!sql) return execute.apply(this, arguments) + + const abortController = new AbortController() + startOuterQueryCh.publish({ sql, abortController }) + + if (abortController.signal.aborted) { + if (typeof values === 'function') { + cb = values + } + + process.nextTick(() => { + cb(abortController.signal.reason) + }) + + return + } + + return execute.apply(this, arguments) + }) + + wrappedPoolNamespaces.add(poolNamespace) + } + + return poolNamespace + }) + + return PoolCluster +}) diff --git a/packages/datadog-instrumentations/src/protobufjs.js b/packages/datadog-instrumentations/src/protobufjs.js new file mode 100644 index 
const shimmer = require('../../datadog-shimmer')
const { addHook } = require('./helpers/instrument')

const dc = require('dc-polyfill')
const serializeChannel = dc.channel('apm:protobufjs:serialize-start')
const deserializeChannel = dc.channel('apm:protobufjs:deserialize-end')

// Wraps a message class's `encode` so every serialization is announced on
// the serialize channel before the original encoder runs.
function wrapSerialization (messageClass) {
  if (!messageClass?.encode) return

  shimmer.wrap(messageClass, 'encode', encode => function () {
    if (serializeChannel.hasSubscribers) {
      serializeChannel.publish({ messageClass: this })
    }
    return encode.apply(this, arguments)
  })
}

// Wraps a message class's `decode` so every deserialization result is
// announced on the deserialize channel after decoding succeeds.
function wrapDeserialization (messageClass) {
  if (!messageClass?.decode) return

  shimmer.wrap(messageClass, 'decode', decode => function () {
    if (!deserializeChannel.hasSubscribers) {
      return decode.apply(this, arguments)
    }
    const decoded = decode.apply(this, arguments)
    deserializeChannel.publish({ messageClass: decoded })
    return decoded
  })
}

// protobufjs (re)generates encode/decode inside `setup`, so the codec
// wrappers must be reapplied after every `setup` call.
function wrapSetup (messageClass) {
  if (!messageClass?.setup) return

  shimmer.wrap(messageClass, 'setup', setup => function () {
    const result = setup.apply(this, arguments)

    wrapSerialization(messageClass)
    wrapDeserialization(messageClass)

    return result
  })
}

// Recursively instruments every decodable type reachable from `root`.
function wrapProtobufClasses (root) {
  if (!root) return

  if (root.decode) {
    wrapSetup(root)
  }

  if (root.nestedArray) {
    root.nestedArray.forEach(nested => wrapProtobufClasses(nested))
  }
}

// Instruments the reflection entry points (`Root.fromJSON`,
// `Type#fromObject`) so types built without a schema file are also wrapped.
function wrapReflection (protobuf) {
  const reflectionMethods = [
    { target: protobuf.Root, name: 'fromJSON' },
    { target: protobuf.Type.prototype, name: 'fromObject' }
  ]

  for (const { target, name } of reflectionMethods) {
    shimmer.wrap(target, name, original => function () {
      const result = original.apply(this, arguments)
      if (result.nested) {
        for (const typeName in result.nested) {
          wrapSetup(result.nested[typeName])
        }
      }
      if (result.$type) {
        wrapSetup(result.$type)
      }
      return result
    })
  }
}

// Duck-typed promise check: protobufjs `load` may be callback- or
// promise-based depending on the arguments.
function isPromise (value) {
  if (!value) return false
  const valueType = typeof value
  if (valueType !== 'object' && valueType !== 'function') return false
  return typeof value.then === 'function'
}

addHook({
  name: 'protobufjs',
  versions: ['>=6.8.0']
}, protobuf => {
  shimmer.wrap(protobuf.Root.prototype, 'load', load => function () {
    const result = load.apply(this, arguments)

    if (!isPromise(result)) {
      // Callback-style load: the root is returned synchronously.
      wrapProtobufClasses(result)
      return result
    }

    return result.then(root => {
      wrapProtobufClasses(root)
      return root
    })
  })

  shimmer.wrap(protobuf.Root.prototype, 'loadSync', loadSync => function () {
    const root = loadSync.apply(this, arguments)
    wrapProtobufClasses(root)
    return root
  })

  shimmer.wrap(protobuf, 'Type', Type => function () {
    const typeInstance = new Type(...arguments)
    wrapSetup(typeInstance)
    return typeInstance
  })

  wrapReflection(protobuf)

  return protobuf
})
channel('apm:winston:log') shimmer.wrap(Logger.prototype, 'write', write => { @@ -20,6 +32,16 @@ addHook({ name: 'winston', file: 'lib/winston/logger.js', versions: ['>=3'] }, L return write.apply(this, arguments) } }) + + shimmer.wrap(Logger.prototype, 'configure', configure => function () { + const configureResponse = configure.apply(this, arguments) + // After the original `configure`, because it resets transports + if (addTransport.hasSubscribers) { + addTransport.publish(this) + } + return configureResponse + }) + return Logger }) diff --git a/packages/datadog-instrumentations/test/mysql2.spec.js b/packages/datadog-instrumentations/test/mysql2.spec.js new file mode 100644 index 00000000000..89e35f2a1f7 --- /dev/null +++ b/packages/datadog-instrumentations/test/mysql2.spec.js @@ -0,0 +1,718 @@ +'use strict' + +const { channel } = require('../src/helpers/instrument') +const agent = require('../../dd-trace/test/plugins/agent') +const { assert } = require('chai') +const semver = require('semver') + +describe('mysql2 instrumentation', () => { + withVersions('mysql2', 'mysql2', version => { + function abort ({ sql, abortController }) { + assert.isString(sql) + const error = new Error('Test') + abortController.abort(error) + + if (!abortController.signal.reason) { + abortController.signal.reason = error + } + } + + function noop () {} + + const config = { + host: '127.0.0.1', + user: 'root', + database: 'db' + } + + const sql = 'SELECT 1' + let startCh, mysql2, shouldEmitEndAfterQueryAbort + let apmQueryStartChannel, apmQueryStart, mysql2Version + + before(() => { + startCh = channel('datadog:mysql2:outerquery:start') + return agent.load(['mysql2']) + }) + + before(() => { + const mysql2Require = require(`../../../versions/mysql2@${version}`) + mysql2Version = mysql2Require.version() + // in v1.3.3 CommandQuery started to emit 'end' after 'error' event + shouldEmitEndAfterQueryAbort = semver.intersects(mysql2Version, '>=1.3.3') + mysql2 = mysql2Require.get() + 
apmQueryStartChannel = channel('apm:mysql2:query:start') + }) + + beforeEach(() => { + apmQueryStart = sinon.stub() + apmQueryStartChannel.subscribe(apmQueryStart) + }) + + afterEach(() => { + if (startCh?.hasSubscribers) { + startCh.unsubscribe(abort) + startCh.unsubscribe(noop) + } + apmQueryStartChannel.unsubscribe(apmQueryStart) + }) + + describe('lib/connection.js', () => { + let connection + + beforeEach(() => { + connection = mysql2.createConnection(config) + + connection.connect() + }) + + afterEach((done) => { + connection.end(() => done()) + }) + + describe('Connection.prototype.query', () => { + describe('with string as query', () => { + describe('with callback', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + const query = connection.query(sql, (err) => { + assert.propertyVal(err, 'message', 'Test') + sinon.assert.notCalled(apmQueryStart) + + if (!shouldEmitEndAfterQueryAbort) done() + }) + + query.on('end', () => done()) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + connection.query(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + connection.query(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + + describe('without callback', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + + const query = connection.query(sql) + + query.on('error', (err) => { + assert.propertyVal(err, 'message', 'Test') + sinon.assert.notCalled(apmQueryStart) + if (!shouldEmitEndAfterQueryAbort) done() + }) + + query.on('end', () => done()) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + + const query = connection.query(sql) + + query.on('error', (err) => done(err)) + query.on('end', () => { + 
sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + const query = connection.query(sql) + + query.on('error', (err) => done(err)) + query.on('end', () => { + sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + }) + + describe('with object as query', () => { + describe('with callback', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + const query = mysql2.Connection.createQuery(sql, (err) => { + assert.propertyVal(err, 'message', 'Test') + sinon.assert.notCalled(apmQueryStart) + + if (!shouldEmitEndAfterQueryAbort) done() + }, null, {}) + connection.query(query) + + query.on('end', () => done()) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + + const query = mysql2.Connection.createQuery(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }, null, {}) + + connection.query(query) + }) + + it('should work without subscriptions', (done) => { + const query = mysql2.Connection.createQuery(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }, null, {}) + + connection.query(query) + }) + }) + + describe('without callback', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + + const query = mysql2.Connection.createQuery(sql, null, null, {}) + query.on('error', (err) => { + assert.propertyVal(err, 'message', 'Test') + sinon.assert.notCalled(apmQueryStart) + + if (!shouldEmitEndAfterQueryAbort) done() + }) + + connection.query(query) + + query.on('end', () => done()) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + + const query = mysql2.Connection.createQuery(sql, null, null, {}) + query.on('error', (err) => done(err)) + query.on('end', () => { + sinon.assert.called(apmQueryStart) + + done() + }) + + connection.query(query) 
+ }) + + it('should work without subscriptions', (done) => { + const query = mysql2.Connection.createQuery(sql, null, null, {}) + query.on('error', (err) => done(err)) + query.on('end', () => { + sinon.assert.called(apmQueryStart) + + done() + }) + + connection.query(query) + }) + }) + }) + }) + + describe('Connection.prototype.execute', () => { + describe('with the query in options', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + + const options = { sql } + const commandExecute = connection.execute(options, (err) => { + assert.propertyVal(err, 'message', 'Test') + sinon.assert.notCalled(apmQueryStart) + + done() + }) + + assert.equal(commandExecute.sql, options.sql) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + + const options = { sql } + + connection.execute(options, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + const options = { sql } + + connection.execute(options, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + + describe('with sql as string', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + + connection.execute(sql, (err) => { + assert.propertyVal(err, 'message', 'Test') + sinon.assert.notCalled(apmQueryStart) + done() + }) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + + connection.execute(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + const options = { sql } + + connection.execute(options, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + }) + }) + + describe('lib/pool.js', () => { + let pool + + before(() => { + pool = 
mysql2.createPool(config) + }) + + describe('Pool.prototype.query', () => { + describe('with object as query', () => { + describe('with callback', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + const query = pool.query({ sql }, (err) => { + assert.propertyVal(err, 'message', 'Test') + sinon.assert.notCalled(apmQueryStart) + + if (!shouldEmitEndAfterQueryAbort) done() + }) + + query.on('end', () => done()) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + + pool.query({ sql }, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + pool.query({ sql }, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + + describe('without callback', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + const query = pool.query({ sql }) + query.on('error', err => { + assert.propertyVal(err, 'message', 'Test') + sinon.assert.notCalled(apmQueryStart) + if (!shouldEmitEndAfterQueryAbort) done() + }) + + query.on('end', () => done()) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + const query = pool.query({ sql }) + + query.on('error', err => done(err)) + query.on('end', () => { + sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + pool.query({ sql }, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + }) + + describe('with string as query', () => { + describe('with callback', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + const query = pool.query(sql, (err) => { + assert.propertyVal(err, 'message', 'Test') + sinon.assert.notCalled(apmQueryStart) + + if 
(!shouldEmitEndAfterQueryAbort) done() + }) + + query.on('end', () => done()) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + + pool.query(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + pool.query(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + + describe('without callback', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + const query = pool.query(sql) + query.on('error', err => { + assert.propertyVal(err, 'message', 'Test') + sinon.assert.notCalled(apmQueryStart) + if (!shouldEmitEndAfterQueryAbort) done() + }) + + query.on('end', () => done()) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + const query = pool.query(sql) + + query.on('error', err => done(err)) + query.on('end', () => { + sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + pool.query(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + }) + }) + + describe('Pool.prototype.execute', () => { + describe('with object as query', () => { + describe('with callback', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + pool.execute({ sql }, (err) => { + assert.propertyVal(err, 'message', 'Test') + + setTimeout(() => { + sinon.assert.notCalled(apmQueryStart) + done() + }, 100) + }) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + + pool.execute({ sql }, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + pool.execute({ sql }, (err) => { + assert.isNull(err) + 
sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + }) + + describe('with string as query', () => { + describe('with callback', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + pool.execute(sql, (err) => { + assert.propertyVal(err, 'message', 'Test') + + setTimeout(() => { + sinon.assert.notCalled(apmQueryStart) + done() + }, 100) + }) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + + pool.execute(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + pool.execute(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + }) + }) + }) + + describe('lib/pool_cluster.js', () => { + let poolCluster, connection + + before(function () { + if (!semver.satisfies(mysql2Version, '>=2.3.0')) this.skip() + poolCluster = mysql2.createPoolCluster() + poolCluster.add('clusterA', config) + }) + + beforeEach((done) => { + poolCluster.getConnection('clusterA', function (err, _connection) { + if (err) { + done(err) + return + } + + connection = _connection + + done() + }) + }) + + afterEach(() => { + connection?.release() + }) + + describe('PoolNamespace.prototype.query', () => { + describe('with string as query', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + const namespace = poolCluster.of() + namespace.query(sql, (err) => { + assert.propertyVal(err, 'message', 'Test') + + setTimeout(() => { + sinon.assert.notCalled(apmQueryStart) + done() + }, 100) + }) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + + const namespace = poolCluster.of() + namespace.query(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + const 
namespace = poolCluster.of() + namespace.query(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + + describe('with object as query', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + const namespace = poolCluster.of() + namespace.query({ sql }, (err) => { + assert.propertyVal(err, 'message', 'Test') + + setTimeout(() => { + sinon.assert.notCalled(apmQueryStart) + done() + }, 100) + }) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + + const namespace = poolCluster.of() + namespace.query({ sql }, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + const namespace = poolCluster.of() + namespace.query({ sql }, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + }) + + describe('PoolNamespace.prototype.execute', () => { + describe('with string as query', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + + const namespace = poolCluster.of() + namespace.execute(sql, (err) => { + assert.propertyVal(err, 'message', 'Test') + + setTimeout(() => { + sinon.assert.notCalled(apmQueryStart) + done() + }, 100) + }) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + + const namespace = poolCluster.of() + namespace.execute(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + const namespace = poolCluster.of() + namespace.execute(sql, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + + describe('with object as query', () => { + it('should abort the query on abortController.abort()', (done) => { + startCh.subscribe(abort) + + const namespace = 
poolCluster.of() + namespace.execute({ sql }, (err) => { + assert.propertyVal(err, 'message', 'Test') + + setTimeout(() => { + sinon.assert.notCalled(apmQueryStart) + done() + }, 100) + }) + }) + + it('should work without abortController.abort()', (done) => { + startCh.subscribe(noop) + + const namespace = poolCluster.of() + namespace.execute({ sql }, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + + it('should work without subscriptions', (done) => { + const namespace = poolCluster.of() + namespace.execute({ sql }, (err) => { + assert.isNull(err) + sinon.assert.called(apmQueryStart) + + done() + }) + }) + }) + }) + }) + }) +}) diff --git a/packages/datadog-plugin-avsc/src/index.js b/packages/datadog-plugin-avsc/src/index.js new file mode 100644 index 00000000000..be0ef970e50 --- /dev/null +++ b/packages/datadog-plugin-avsc/src/index.js @@ -0,0 +1,9 @@ +const SchemaPlugin = require('../../dd-trace/src/plugins/schema') +const SchemaExtractor = require('./schema_iterator') + +class AvscPlugin extends SchemaPlugin { + static get id () { return 'avsc' } + static get schemaExtractor () { return SchemaExtractor } +} + +module.exports = AvscPlugin diff --git a/packages/datadog-plugin-avsc/src/schema_iterator.js b/packages/datadog-plugin-avsc/src/schema_iterator.js new file mode 100644 index 00000000000..c748bbf9e75 --- /dev/null +++ b/packages/datadog-plugin-avsc/src/schema_iterator.js @@ -0,0 +1,169 @@ +const AVRO = 'avro' +const { + SCHEMA_DEFINITION, + SCHEMA_ID, + SCHEMA_NAME, + SCHEMA_OPERATION, + SCHEMA_WEIGHT, + SCHEMA_TYPE +} = require('../../dd-trace/src/constants') +const log = require('../../dd-trace/src/log') +const { + SchemaBuilder +} = require('../../dd-trace/src/datastreams/schemas/schema_builder') + +class SchemaExtractor { + constructor (schema) { + this.schema = schema + } + + static getType (type) { + const typeMapping = { + string: 'string', + int: 'integer', + long: 'integer', + float: 'number', + double: 
'number', + boolean: 'boolean', + bytes: 'string', + record: 'object', + enum: 'string', + array: 'array', + map: 'object', + fixed: 'string' + } + const typeName = type.typeName ?? type.name ?? type + return typeName === 'null' ? typeName : typeMapping[typeName] || 'string' + } + + static extractProperty (field, schemaName, fieldName, builder, depth) { + let array = false + let type + let format + let enumValues + let description + let ref + + const fieldType = field.type?.types ?? field.type?.typeName ?? field.type + + if (Array.isArray(fieldType)) { + // Union Type + type = 'union[' + fieldType.map(t => SchemaExtractor.getType(t.type || t)).join(',') + ']' + } else if (fieldType === 'array') { + // Array Type + array = true + const nestedType = field.type.itemsType.typeName + type = SchemaExtractor.getType(nestedType) + } else if (fieldType === 'record') { + // Nested Record Type + type = 'object' + ref = `#/components/schemas/${field.type.name}` + if (!SchemaExtractor.extractSchema(field.type, builder, depth + 1, this)) { + return false + } + } else if (fieldType === 'enum') { + enumValues = [] + let i = 0 + type = 'string' + while (field.type.symbols[i]) { + enumValues.push(field.type.symbols[i]) + i += 1 + } + } else { + // Primitive type + type = SchemaExtractor.getType(fieldType.type || fieldType) + if (fieldType === 'bytes') { + format = 'byte' + } else if (fieldType === 'int') { + format = 'int32' + } else if (fieldType === 'long') { + format = 'int64' + } else if (fieldType === 'float') { + format = 'float' + } else if (fieldType === 'double') { + format = 'double' + } + } + + return builder.addProperty(schemaName, fieldName, array, type, description, ref, format, enumValues) + } + + static extractSchema (schema, builder, depth, extractor) { + depth += 1 + const schemaName = schema.name + if (extractor) { + // if we already have a defined extractor, this is a nested schema. 
create a new extractor for the nested + // schema, ensure it is added to our schema builder's cache, and replace the builders iterator with our + // nested schema iterator / extractor. Once complete, add the new schema to our builder's schemas. + const nestedSchemaExtractor = new SchemaExtractor(schema) + builder.iterator = nestedSchemaExtractor + const nestedSchema = SchemaBuilder.getSchema(schemaName, nestedSchemaExtractor, builder) + for (const nestedSubSchemaName in nestedSchema.components.schemas) { + if (nestedSchema.components.schemas.hasOwnProperty(nestedSubSchemaName)) { + builder.schema.components.schemas[nestedSubSchemaName] = nestedSchema.components.schemas[nestedSubSchemaName] + } + } + return true + } else { + if (!builder.shouldExtractSchema(schemaName, depth)) { + return false + } + for (const field of schema.fields) { + if (!this.extractProperty(field, schemaName, field.name, builder, depth)) { + log.warn(`DSM: Unable to extract field with name: ${field.name} from Avro schema with name: ${schemaName}`) + } + } + } + return true + } + + static extractSchemas (descriptor, dataStreamsProcessor) { + return dataStreamsProcessor.getSchema(descriptor.name, new SchemaExtractor(descriptor)) + } + + iterateOverSchema (builder) { + this.constructor.extractSchema(this.schema, builder, 0) + } + + static attachSchemaOnSpan (args, span, operation, tracer) { + const { messageClass } = args + const descriptor = messageClass?.constructor?.type ?? 
messageClass + + if (!descriptor || !span) { + return + } + + if (span.context()._tags[SCHEMA_TYPE] && operation === 'serialization') { + // we have already added a schema to this span, this call is an encode of nested schema types + return + } + + span.setTag(SCHEMA_TYPE, AVRO) + span.setTag(SCHEMA_NAME, descriptor.name) + span.setTag(SCHEMA_OPERATION, operation) + + if (!tracer._dataStreamsProcessor.canSampleSchema(operation)) { + return + } + + // if the span is unsampled, do not sample the schema + if (!tracer._prioritySampler.isSampled(span)) { + return + } + + const weight = tracer._dataStreamsProcessor.trySampleSchema(operation) + if (weight === 0) { + return + } + + const schemaData = SchemaBuilder.getSchemaDefinition( + this.extractSchemas(descriptor, tracer._dataStreamsProcessor) + ) + + span.setTag(SCHEMA_DEFINITION, schemaData.definition) + span.setTag(SCHEMA_WEIGHT, weight) + span.setTag(SCHEMA_ID, schemaData.id) + } +} + +module.exports = SchemaExtractor diff --git a/packages/datadog-plugin-avsc/test/helpers.js b/packages/datadog-plugin-avsc/test/helpers.js new file mode 100644 index 00000000000..8e5be7ac433 --- /dev/null +++ b/packages/datadog-plugin-avsc/test/helpers.js @@ -0,0 +1,31 @@ +const fs = require('fs') + +async function loadMessage (avro, messageTypeName) { + if (messageTypeName === 'User') { + // Read and parse the Avro schema + const schema = JSON.parse(fs.readFileSync('packages/datadog-plugin-avsc/test/schemas/user.avsc', 'utf8')) + + // Create a file and write Avro data + const filePath = 'packages/datadog-plugin-avsc/test/schemas/users.avro' + + return { + schema, + path: filePath + } + } else if (messageTypeName === 'AdvancedUser') { + // Read and parse the Avro schema + const schema = JSON.parse(fs.readFileSync('packages/datadog-plugin-avsc/test/schemas/advanced_user.avsc', 'utf8')) + + // Create a file and write Avro data + const filePath = 'packages/datadog-plugin-avsc/test/schemas/advanced_users.avro' + + return { + schema, + 
path: filePath + } + } +} + +module.exports = { + loadMessage +} diff --git a/packages/datadog-plugin-avsc/test/index.spec.js b/packages/datadog-plugin-avsc/test/index.spec.js new file mode 100644 index 00000000000..b3a6db0c1f1 --- /dev/null +++ b/packages/datadog-plugin-avsc/test/index.spec.js @@ -0,0 +1,176 @@ +'use strict' + +const fs = require('fs') +const { expect } = require('chai') +const agent = require('../../dd-trace/test/plugins/agent') +const path = require('path') +const { + SCHEMA_DEFINITION, + SCHEMA_ID, + SCHEMA_NAME, + SCHEMA_OPERATION, + SCHEMA_WEIGHT, + SCHEMA_TYPE +} = require('../../dd-trace/src/constants') +const sinon = require('sinon') +const { loadMessage } = require('./helpers') +const { SchemaBuilder } = require('../../dd-trace/src/datastreams/schemas/schema_builder') + +const BASIC_USER_SCHEMA_DEF = JSON.parse( + fs.readFileSync(path.join(__dirname, 'schemas/expected_user_schema.json'), 'utf8') +) +const ADVANCED_USER_SCHEMA_DEF = JSON.parse( + fs.readFileSync(path.join(__dirname, 'schemas/expected_advanced_user_schema.json'), 'utf8') +) + +const BASIC_USER_SCHEMA_ID = '1605040621379664412' +const ADVANCED_USER_SCHEMA_ID = '919692610494986520' + +function compareJson (expected, span) { + const actual = JSON.parse(span.context()._tags[SCHEMA_DEFINITION]) + return JSON.stringify(actual) === JSON.stringify(expected) +} + +describe('Plugin', () => { + describe('avsc', function () { + this.timeout(0) + let tracer + let avro + let dateNowStub + let mockTime = 0 + + withVersions('avsc', ['avsc'], (version) => { + before(() => { + tracer = require('../../dd-trace').init() + // reset sampled schemas + if (tracer._dataStreamsProcessor?._schemaSamplers) { + tracer._dataStreamsProcessor._schemaSamplers = [] + } + }) + + describe('without configuration', () => { + before(() => { + dateNowStub = sinon.stub(Date, 'now').callsFake(() => { + const returnValue = mockTime + mockTime += 50000 // Increment by 50000 ms to ensure each DSM schema is sampled + 
return returnValue + }) + const cache = SchemaBuilder.getCache() + cache.clear() + return agent.load('avsc').then(() => { + avro = require(`../../../versions/avsc@${version}`).get() + }) + }) + + after(() => { + dateNowStub.restore() + return agent.close({ ritmReset: false }) + }) + + it('should serialize basic schema correctly', async () => { + const loaded = await loadMessage(avro, 'User') + const type = avro.parse(loaded.schema) + const filePath = loaded.path + + tracer.trace('user.serialize', span => { + const buf = type.toBuffer({ name: 'Alyssa', favorite_number: 256, favorite_color: null }) + fs.writeFileSync(filePath, buf) + + expect(span._name).to.equal('user.serialize') + + expect(compareJson(BASIC_USER_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'avro') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'example.avro.User') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'serialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, BASIC_USER_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + + it('should serialize the advanced schema correctly', async () => { + const loaded = await loadMessage(avro, 'AdvancedUser') + const type = avro.parse(loaded.schema) + const filePath = loaded.path + + tracer.trace('advanced_user.serialize', span => { + const buf = type.toBuffer({ + name: 'Alyssa', + age: 30, + email: 'alyssa@example.com', + height: 5.6, + preferences: { theme: 'dark', notifications: 'enabled' }, + tags: ['vip', 'premium'], + status: 'ACTIVE', + profile_picture: Buffer.from('binarydata'), + metadata: Buffer.from('metadata12345678'), + address: { street: '123 Main St', city: 'Metropolis', zipcode: '12345' } + }) + fs.writeFileSync(filePath, buf) + + expect(span._name).to.equal('advanced_user.serialize') + + expect(compareJson(ADVANCED_USER_SCHEMA_DEF, span)).to.equal(true) + 
expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'avro') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'example.avro.AdvancedUser') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'serialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, ADVANCED_USER_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + + it('should deserialize basic schema correctly', async () => { + const loaded = await loadMessage(avro, 'User') + const type = avro.parse(loaded.schema) + const filePath = loaded.path + const buf = type.toBuffer({ name: 'Alyssa', favorite_number: 256, favorite_color: null }) + fs.writeFileSync(filePath, buf) + + tracer.trace('user.deserialize', span => { + type.fromBuffer(buf) + + expect(span._name).to.equal('user.deserialize') + + expect(compareJson(BASIC_USER_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'avro') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'example.avro.User') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'deserialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, BASIC_USER_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + + it('should deserialize advanced schema correctly', async () => { + const loaded = await loadMessage(avro, 'AdvancedUser') + const type = avro.parse(loaded.schema) + const filePath = loaded.path + const buf = type.toBuffer({ + name: 'Alyssa', + age: 30, + email: 'alyssa@example.com', + height: 5.6, + preferences: { theme: 'dark', notifications: 'enabled' }, + tags: ['vip', 'premium'], + status: 'ACTIVE', + profile_picture: Buffer.from('binarydata'), + metadata: Buffer.from('metadata12345678'), + address: { street: '123 Main St', city: 'Metropolis', zipcode: '12345' } + }) + fs.writeFileSync(filePath, buf) + + tracer.trace('advanced_user.deserialize', span => { + type.fromBuffer(buf) + + 
expect(span._name).to.equal('advanced_user.deserialize') + + expect(compareJson(ADVANCED_USER_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'avro') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'example.avro.AdvancedUser') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'deserialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, ADVANCED_USER_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + }) + }) + }) +}) diff --git a/packages/datadog-plugin-avsc/test/schemas/advanced_user.avsc b/packages/datadog-plugin-avsc/test/schemas/advanced_user.avsc new file mode 100644 index 00000000000..c25081c495e --- /dev/null +++ b/packages/datadog-plugin-avsc/test/schemas/advanced_user.avsc @@ -0,0 +1,74 @@ +{ + "namespace": "example.avro", + "type": "record", + "name": "AdvancedUser", + "fields": [ + { + "name": "email", + "type": [ + "null", + "string" + ], + "default": null + }, + { + "name": "preferences", + "type": { + "type": "map", + "values": "string" + } + }, + { + "name": "tags", + "type": { + "type": "array", + "items": "string" + } + }, + { + "name": "status", + "type": { + "type": "enum", + "name": "Status", + "symbols": [ + "ACTIVE", + "INACTIVE", + "BANNED" + ] + } + }, + { + "name": "profile_picture", + "type": "bytes" + }, + { + "name": "metadata", + "type": { + "type": "fixed", + "name": "Metadata", + "size": 16 + } + }, + { + "name": "address", + "type": { + "type": "record", + "name": "Address", + "fields": [ + { + "name": "street", + "type": "string" + }, + { + "name": "city", + "type": "string" + }, + { + "name": "zipcode", + "type": "string" + } + ] + } + } + ] +} \ No newline at end of file diff --git a/packages/datadog-plugin-avsc/test/schemas/advanced_users.avro b/packages/datadog-plugin-avsc/test/schemas/advanced_users.avro new file mode 100644 index 00000000000..1e31871c28e Binary files /dev/null and 
b/packages/datadog-plugin-avsc/test/schemas/advanced_users.avro differ diff --git a/packages/datadog-plugin-avsc/test/schemas/expected_advanced_user_schema.json b/packages/datadog-plugin-avsc/test/schemas/expected_advanced_user_schema.json new file mode 100644 index 00000000000..932230d2959 --- /dev/null +++ b/packages/datadog-plugin-avsc/test/schemas/expected_advanced_user_schema.json @@ -0,0 +1,57 @@ +{ + "openapi": "3.0.0", + "components": { + "schemas": { + "example.avro.AdvancedUser": { + "type": "object", + "properties": { + "email": { + "type": "union[null,string]" + }, + "preferences": { + "type": "object" + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "status": { + "type": "string", + "enum": [ + "ACTIVE", + "INACTIVE", + "BANNED" + ] + }, + "profile_picture": { + "type": "string", + "format": "byte" + }, + "metadata": { + "type": "string" + }, + "address": { + "type": "object", + "$ref": "#/components/schemas/example.avro.Address" + } + } + }, + "example.avro.Address": { + "type": "object", + "properties": { + "street": { + "type": "string" + }, + "city": { + "type": "string" + }, + "zipcode": { + "type": "string" + } + } + } + } + } +} \ No newline at end of file diff --git a/packages/datadog-plugin-avsc/test/schemas/expected_user_schema.json b/packages/datadog-plugin-avsc/test/schemas/expected_user_schema.json new file mode 100644 index 00000000000..43eec7221f0 --- /dev/null +++ b/packages/datadog-plugin-avsc/test/schemas/expected_user_schema.json @@ -0,0 +1,21 @@ +{ + "openapi": "3.0.0", + "components": { + "schemas": { + "example.avro.User": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "favorite_number": { + "type": "union[integer,null]" + }, + "favorite_color": { + "type": "union[string,null]" + } + } + } + } + } +} \ No newline at end of file diff --git a/packages/datadog-plugin-avsc/test/schemas/user.avsc b/packages/datadog-plugin-avsc/test/schemas/user.avsc new file mode 100644 index 
00000000000..1e810fea0c3 --- /dev/null +++ b/packages/datadog-plugin-avsc/test/schemas/user.avsc @@ -0,0 +1,25 @@ +{ + "namespace": "example.avro", + "type": "record", + "name": "User", + "fields": [ + { + "name": "name", + "type": "string" + }, + { + "name": "favorite_number", + "type": [ + "int", + "null" + ] + }, + { + "name": "favorite_color", + "type": [ + "string", + "null" + ] + } + ] +} \ No newline at end of file diff --git a/packages/datadog-plugin-avsc/test/schemas/users.avro b/packages/datadog-plugin-avsc/test/schemas/users.avro new file mode 100644 index 00000000000..5f8bfbe9325 Binary files /dev/null and b/packages/datadog-plugin-avsc/test/schemas/users.avro differ diff --git a/packages/datadog-plugin-azure-functions/src/index.js b/packages/datadog-plugin-azure-functions/src/index.js new file mode 100644 index 00000000000..2c85403906c --- /dev/null +++ b/packages/datadog-plugin-azure-functions/src/index.js @@ -0,0 +1,77 @@ +'use strict' + +const TracingPlugin = require('../../dd-trace/src/plugins/tracing') +const { storage } = require('../../datadog-core') +const serverless = require('../../dd-trace/src/plugins/util/serverless') +const web = require('../../dd-trace/src/plugins/util/web') + +const triggerMap = { + deleteRequest: 'Http', + http: 'Http', + get: 'Http', + patch: 'Http', + post: 'Http', + put: 'Http' +} + +class AzureFunctionsPlugin extends TracingPlugin { + static get id () { return 'azure-functions' } + static get operation () { return 'invoke' } + static get kind () { return 'server' } + static get type () { return 'serverless' } + + static get prefix () { return 'tracing:datadog:azure-functions:invoke' } + + bindStart (ctx) { + const { functionName, methodName } = ctx + const store = storage.getStore() + + const span = this.startSpan(this.operationName(), { + service: this.serviceName(), + type: 'serverless', + meta: { + 'aas.function.name': functionName, + 'aas.function.trigger': mapTriggerTag(methodName) + } + }, false) + + ctx.span 
= span + ctx.parentStore = store + ctx.currentStore = { ...store, span } + + return ctx.currentStore + } + + error (ctx) { + this.addError(ctx.error) + ctx.currentStore.span.setTag('error.message', ctx.error) + } + + asyncEnd (ctx) { + const { httpRequest, result = {} } = ctx + const path = (new URL(httpRequest.url)).pathname + const req = { + method: httpRequest.method, + headers: Object.fromEntries(httpRequest.headers.entries()), + url: path + } + + const context = web.patch(req) + context.config = this.config + context.paths = [path] + context.res = { statusCode: result.status } + context.span = ctx.currentStore.span + + serverless.finishSpan(context) + } + + configure (config) { + return super.configure(web.normalizeConfig(config)) + } +} + +function mapTriggerTag (methodName) { + return triggerMap[methodName] || 'Unknown' +} + +module.exports = AzureFunctionsPlugin diff --git a/packages/datadog-plugin-azure-functions/test/integration-test/client.spec.js b/packages/datadog-plugin-azure-functions/test/integration-test/client.spec.js new file mode 100644 index 00000000000..8d5a0d43fdb --- /dev/null +++ b/packages/datadog-plugin-azure-functions/test/integration-test/client.spec.js @@ -0,0 +1,100 @@ +'use strict' + +const { + FakeAgent, + hookFile, + createSandbox, + curlAndAssertMessage +} = require('../../../../integration-tests/helpers') +const { spawn } = require('child_process') +const { assert } = require('chai') + +describe('esm', () => { + let agent + let proc + let sandbox + + withVersions('azure-functions', '@azure/functions', version => { + before(async function () { + this.timeout(50000) + sandbox = await createSandbox([`@azure/functions@${version}`, 'azure-functions-core-tools@4'], false, + ['./packages/datadog-plugin-azure-functions/test/integration-test/fixtures/*']) + }) + + after(async function () { + this.timeout(50000) + await sandbox.remove() + }) + + beforeEach(async () => { + agent = await new FakeAgent().start() + }) + + afterEach(async () => 
{ + proc && proc.kill('SIGINT') + await agent.stop() + }) + + it('is instrumented', async () => { + const envArgs = { + PATH: `${sandbox.folder}/node_modules/azure-functions-core-tools/bin:${process.env.PATH}` + } + proc = await spawnPluginIntegrationTestProc(sandbox.folder, 'func', ['start'], agent.port, undefined, envArgs) + + return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/httptest', ({ headers, payload }) => { + assert.propertyVal(headers, 'host', `127.0.0.1:${agent.port}`) + assert.isArray(payload) + assert.strictEqual(payload.length, 1) + assert.isArray(payload[0]) + assert.strictEqual(payload[0].length, 1) + assert.propertyVal(payload[0][0], 'name', 'azure-functions.invoke') + }) + }).timeout(50000) + }) +}) + +async function spawnPluginIntegrationTestProc (cwd, command, args, agentPort, stdioHandler, additionalEnvArgs = {}) { + let env = { + NODE_OPTIONS: `--loader=${hookFile}`, + DD_TRACE_AGENT_PORT: agentPort + } + env = { ...env, ...additionalEnvArgs } + return spawnProc(command, args, { + cwd, + env + }, stdioHandler) +} + +function spawnProc (command, args, options = {}, stdioHandler, stderrHandler) { + const proc = spawn(command, args, { ...options, stdio: 'pipe' }) + return new Promise((resolve, reject) => { + proc + .on('error', reject) + .on('exit', code => { + if (code !== 0) { + reject(new Error(`Process exited with status code ${code}.`)) + } + resolve() + }) + + proc.stdout.on('data', data => { + if (stdioHandler) { + stdioHandler(data) + } + // eslint-disable-next-line no-console + if (!options.silent) console.log(data.toString()) + + if (data.toString().includes('http://localhost:7071/api/httptest')) { + resolve(proc) + } + }) + + proc.stderr.on('data', data => { + if (stderrHandler) { + stderrHandler(data) + } + // eslint-disable-next-line no-console + if (!options.silent) console.error(data.toString()) + }) + }) +} diff --git a/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/host.json 
b/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/host.json new file mode 100644 index 00000000000..06d01bdaa95 --- /dev/null +++ b/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/host.json @@ -0,0 +1,15 @@ +{ + "version": "2.0", + "logging": { + "applicationInsights": { + "samplingSettings": { + "isEnabled": true, + "excludedTypes": "Request" + } + } + }, + "extensionBundle": { + "id": "Microsoft.Azure.Functions.ExtensionBundle", + "version": "[4.*, 5.0.0)" + } +} diff --git a/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/local.settings.json b/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/local.settings.json new file mode 100644 index 00000000000..6beb0236ad6 --- /dev/null +++ b/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/local.settings.json @@ -0,0 +1,8 @@ +{ + "IsEncrypted": false, + "Values": { + "FUNCTIONS_WORKER_RUNTIME": "node", + "AzureWebJobsFeatureFlags": "EnableWorkerIndexing", + "AzureWebJobsStorage": "" + } +} diff --git a/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/package.json b/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/package.json new file mode 100644 index 00000000000..07b0ac311ee --- /dev/null +++ b/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/package.json @@ -0,0 +1,15 @@ +{ + "name": "azure-function-node-integration-test", + "version": "1.0.0", + "description": "", + "main": "src/functions/server.mjs", + "scripts": { + "start": "func start" + }, + "dependencies": { + "@azure/functions": "^4.0.0" + }, + "devDependencies": { + "azure-functions-core-tools": "^4.x" + } +} diff --git a/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/src/functions/server.mjs b/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/src/functions/server.mjs new file mode 100644 index 00000000000..2efdd200732 --- /dev/null +++ 
b/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/src/functions/server.mjs @@ -0,0 +1,15 @@ +import 'dd-trace/init.js' +import { app } from '@azure/functions' + +async function handlerFunction (request, context) { + return { + status: 200, + body: 'Hello Datadog!' + } +} + +app.http('httptest', { + methods: ['GET'], + authLevel: 'anonymous', + handler: handlerFunction +}) diff --git a/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/yarn.lock b/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/yarn.lock new file mode 100644 index 00000000000..98c420c8953 --- /dev/null +++ b/packages/datadog-plugin-azure-functions/test/integration-test/fixtures/yarn.lock @@ -0,0 +1,269 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"@azure/functions@^4.0.0": + version "4.5.1" + resolved "https://registry.yarnpkg.com/@azure/functions/-/functions-4.5.1.tgz#70d1a99d335af87579a55d3c149ef1ae77da0a66" + integrity sha512-ikiw1IrM2W9NlQM3XazcX+4Sq3XAjZi4eeG22B5InKC2x5i7MatGF2S/Gn1ACZ+fEInwu+Ru9J8DlnBv1/hIvg== + dependencies: + cookie "^0.6.0" + long "^4.0.0" + undici "^5.13.0" + +"@fastify/busboy@^2.0.0": + version "2.1.1" + resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.1.1.tgz#b9da6a878a371829a0502c9b6c1c143ef6663f4d" + integrity sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA== + +"@types/node@*": + version "22.7.4" + resolved "https://registry.yarnpkg.com/@types/node/-/node-22.7.4.tgz#e35d6f48dca3255ce44256ddc05dee1c23353fcc" + integrity sha512-y+NPi1rFzDs1NdQHHToqeiX2TIS79SWEAw9GYhkkx8bD0ChpfqC+n2j5OXOCpzfojBEBt6DnEnnG9MY0zk1XLg== + dependencies: + undici-types "~6.19.2" + +"@types/yauzl@^2.9.1": + version "2.10.3" + resolved "https://registry.yarnpkg.com/@types/yauzl/-/yauzl-2.10.3.tgz#e9b2808b4f109504a03cda958259876f61017999" + integrity 
sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q== + dependencies: + "@types/node" "*" + +agent-base@6: + version "6.0.2" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== + dependencies: + debug "4" + +ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +azure-functions-core-tools@^4.x: + version "4.0.6280" + resolved "https://registry.yarnpkg.com/azure-functions-core-tools/-/azure-functions-core-tools-4.0.6280.tgz#59b4d9402846760aef3ad292355c3eeb4e5f21ad" + integrity sha512-DVSgYNnT4POLbj/YV3FKtNdo9KT/M5Dl//slWEmVwZo1y4aJEsUApn6DtkZswut76I3S9eKGC5IaC84j5OGNaA== + dependencies: + chalk "3.0.0" + extract-zip "^2.0.1" + https-proxy-agent "5.0.0" + progress "2.0.3" + rimraf "4.4.1" + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +buffer-crc32@~0.2.3: + version "0.2.13" + resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242" + integrity 
sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ== + +chalk@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" + integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +cookie@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051" + integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw== + +debug@4, debug@^4.1.1: + version "4.3.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.7.tgz#87945b4151a011d76d95a198d7111c865c360a52" + integrity sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ== + dependencies: + ms "^2.1.3" + +end-of-stream@^1.1.0: + version "1.4.4" + resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" + integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== + dependencies: + once "^1.4.0" + +extract-zip@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-2.0.1.tgz#663dca56fe46df890d5f131ef4a06d22bb8ba13a" + 
integrity sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg== + dependencies: + debug "^4.1.1" + get-stream "^5.1.0" + yauzl "^2.10.0" + optionalDependencies: + "@types/yauzl" "^2.9.1" + +fd-slicer@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/fd-slicer/-/fd-slicer-1.1.0.tgz#25c7c89cb1f9077f8891bbe61d8f390eae256f1e" + integrity sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g== + dependencies: + pend "~1.2.0" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +get-stream@^5.1.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" + integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== + dependencies: + pump "^3.0.0" + +glob@^9.2.0: + version "9.3.5" + resolved "https://registry.yarnpkg.com/glob/-/glob-9.3.5.tgz#ca2ed8ca452781a3009685607fdf025a899dfe21" + integrity sha512-e1LleDykUz2Iu+MTYdkSsuWX8lvAjAcs0Xef0lNIu0S2wOAzuTxCJtcd9S3cijlwYF18EsU3rzb8jPVobxDh9Q== + dependencies: + fs.realpath "^1.0.0" + minimatch "^8.0.2" + minipass "^4.2.4" + path-scurry "^1.6.1" + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +https-proxy-agent@5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" + integrity sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA== + dependencies: + 
agent-base "6" + debug "4" + +long@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28" + integrity sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA== + +lru-cache@^10.2.0: + version "10.4.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119" + integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== + +minimatch@^8.0.2: + version "8.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-8.0.4.tgz#847c1b25c014d4e9a7f68aaf63dedd668a626229" + integrity sha512-W0Wvr9HyFXZRGIDgCicunpQ299OKXs9RgZfaukz4qAW/pJhcpUfupc9c+OObPOFueNy8VSrZgEmDtk6Kh4WzDA== + dependencies: + brace-expansion "^2.0.1" + +minipass@^4.2.4: + version "4.2.8" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-4.2.8.tgz#f0010f64393ecfc1d1ccb5f582bcaf45f48e1a3a" + integrity sha512-fNzuVyifolSLFL4NzpF+wEF4qrgqaaKX0haXPQEdQ7NKAN+WecoKMHV09YcuL/DHxrUsYQOK3MiuDf7Ip2OXfQ== + +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0": + version "7.1.2" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" + integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== + +ms@^2.1.3: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +once@^1.3.1, once@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +path-scurry@^1.6.1: + version "1.11.1" + resolved 
"https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.11.1.tgz#7960a668888594a0720b12a911d1a742ab9f11d2" + integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== + dependencies: + lru-cache "^10.2.0" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + +pend@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" + integrity sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg== + +progress@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" + integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== + +pump@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.2.tgz#836f3edd6bc2ee599256c924ffe0d88573ddcbf8" + integrity sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw== + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +rimraf@4.4.1: + version "4.4.1" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-4.4.1.tgz#bd33364f67021c5b79e93d7f4fa0568c7c21b755" + integrity sha512-Gk8NlF062+T9CqNGn6h4tls3k6T1+/nXdOcSZVikNVtlRdYpA7wRJJMoXmuvOnLW844rPjdQ7JgXCYM6PPC/og== + dependencies: + glob "^9.2.0" + +supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +undici-types@~6.19.2: + version "6.19.8" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.19.8.tgz#35111c9d1437ab83a7cdc0abae2f26d88eda0a02" + integrity sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw== + +undici@^5.13.0: + 
version "5.28.4" + resolved "https://registry.yarnpkg.com/undici/-/undici-5.28.4.tgz#6b280408edb6a1a604a9b20340f45b422e373068" + integrity sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g== + dependencies: + "@fastify/busboy" "^2.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +yauzl@^2.10.0: + version "2.10.0" + resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.10.0.tgz#c7eb17c93e112cb1086fa6d8e51fb0667b79a5f9" + integrity sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g== + dependencies: + buffer-crc32 "~0.2.3" + fd-slicer "~1.1.0" diff --git a/packages/datadog-plugin-child_process/test/index.spec.js b/packages/datadog-plugin-child_process/test/index.spec.js index 4598457274e..33624eab4d8 100644 --- a/packages/datadog-plugin-child_process/test/index.spec.js +++ b/packages/datadog-plugin-child_process/test/index.spec.js @@ -283,6 +283,82 @@ describe('Child process plugin', () => { }) }) + describe('context maintenance', () => { + let parent + let childProcess + let tracer + + before(() => { + return agent.load(['child_process']) + .then(() => { + childProcess = require('child_process') + tracer = require('../../dd-trace') + tracer.init() + parent = tracer.startSpan('parent') + parent.finish() + }).then(_port => { + return new Promise(resolve => setImmediate(resolve)) + }) + }) + + after(() => { + return agent.close() + }) + + it('should preserve context around execSync calls', () => { + tracer.scope().activate(parent, () => { + expect(tracer.scope().active()).to.equal(parent) + childProcess.execSync('ls') + expect(tracer.scope().active()).to.equal(parent) + }) + }) + + it('should preserve context around exec calls', (done) => { + tracer.scope().activate(parent, () => 
{ + expect(tracer.scope().active()).to.equal(parent) + childProcess.exec('ls', () => { + expect(tracer.scope().active()).to.equal(parent) + done() + }) + }) + }) + + it('should preserve context around execFileSync calls', () => { + tracer.scope().activate(parent, () => { + expect(tracer.scope().active()).to.equal(parent) + childProcess.execFileSync('ls') + expect(tracer.scope().active()).to.equal(parent) + }) + }) + + it('should preserve context around execFile calls', (done) => { + tracer.scope().activate(parent, () => { + expect(tracer.scope().active()).to.equal(parent) + childProcess.execFile('ls', () => { + expect(tracer.scope().active()).to.equal(parent) + done() + }) + }) + }) + + it('should preserve context around spawnSync calls', () => { + tracer.scope().activate(parent, () => { + expect(tracer.scope().active()).to.equal(parent) + childProcess.spawnSync('ls') + expect(tracer.scope().active()).to.equal(parent) + }) + }) + + it('should preserve context around spawn calls', (done) => { + tracer.scope().activate(parent, () => { + expect(tracer.scope().active()).to.equal(parent) + childProcess.spawn('ls') + expect(tracer.scope().active()).to.equal(parent) + done() + }) + }) + }) + describe('Integration', () => { describe('Methods which spawn a shell by default', () => { const execAsyncMethods = ['exec'] @@ -299,19 +375,25 @@ describe('Child process plugin', () => { afterEach(() => agent.close({ ritmReset: false })) const parentSpanList = [true, false] - parentSpanList.forEach(parentSpan => { - describe(`${parentSpan ? 'with' : 'without'} parent span`, () => { + parentSpanList.forEach(hasParentSpan => { + let parentSpan + + describe(`${hasParentSpan ? 
'with' : 'without'} parent span`, () => { const methods = [ ...execAsyncMethods.map(methodName => ({ methodName, async: true })), ...execSyncMethods.map(methodName => ({ methodName, async: false })) ] - if (parentSpan) { - beforeEach((done) => { - const parentSpan = tracer.startSpan('parent') + + beforeEach((done) => { + if (hasParentSpan) { + parentSpan = tracer.startSpan('parent') parentSpan.finish() tracer.scope().activate(parentSpan, done) - }) - } + } else { + storage.enterWith({}) + done() + } + }) methods.forEach(({ methodName, async }) => { describe(methodName, () => { @@ -335,6 +417,30 @@ describe('Child process plugin', () => { } }) + it('should maintain previous span after the execution', (done) => { + const res = childProcess[methodName]('ls') + const span = storage.getStore()?.span + expect(span).to.be.equals(parentSpan) + if (async) { + res.on('close', () => { + expect(span).to.be.equals(parentSpan) + done() + }) + } else { + done() + } + }) + + if (async) { + it('should maintain previous span in the callback', (done) => { + childProcess[methodName]('ls', () => { + const span = storage.getStore()?.span + expect(span).to.be.equals(parentSpan) + done() + }) + }) + } + it('command should be scrubbed', (done) => { const expected = { type: 'system', diff --git a/packages/datadog-plugin-fastify/src/code_origin.js b/packages/datadog-plugin-fastify/src/code_origin.js new file mode 100644 index 00000000000..3e6f58d5624 --- /dev/null +++ b/packages/datadog-plugin-fastify/src/code_origin.js @@ -0,0 +1,31 @@ +'use strict' + +const { entryTag } = require('../../datadog-code-origin') +const Plugin = require('../../dd-trace/src/plugins/plugin') +const web = require('../../dd-trace/src/plugins/util/web') + +const kCodeOriginForSpansTagsSym = Symbol('datadog.codeOriginForSpansTags') + +class FastifyCodeOriginForSpansPlugin extends Plugin { + static get id () { + return 'fastify' + } + + constructor (...args) { + super(...args) + + 
this.addSub('apm:fastify:request:handle', ({ req, routeConfig }) => { + const tags = routeConfig?.[kCodeOriginForSpansTagsSym] + if (!tags) return + const context = web.getContext(req) + context.span?.addTags(tags) + }) + + this.addSub('apm:fastify:route:added', ({ routeOptions, onRoute }) => { + if (!routeOptions.config) routeOptions.config = {} + routeOptions.config[kCodeOriginForSpansTagsSym] = entryTag(onRoute) + }) + } +} + +module.exports = FastifyCodeOriginForSpansPlugin diff --git a/packages/datadog-plugin-fastify/src/index.js b/packages/datadog-plugin-fastify/src/index.js index 6b4768279f8..18371458346 100644 --- a/packages/datadog-plugin-fastify/src/index.js +++ b/packages/datadog-plugin-fastify/src/index.js @@ -1,18 +1,16 @@ 'use strict' -const RouterPlugin = require('../../datadog-plugin-router/src') +const FastifyTracingPlugin = require('./tracing') +const FastifyCodeOriginForSpansPlugin = require('./code_origin') +const CompositePlugin = require('../../dd-trace/src/plugins/composite') -class FastifyPlugin extends RouterPlugin { - static get id () { - return 'fastify' - } - - constructor (...args) { - super(...args) - - this.addSub('apm:fastify:request:handle', ({ req }) => { - this.setFramework(req, 'fastify', this.config) - }) +class FastifyPlugin extends CompositePlugin { + static get id () { return 'fastify' } + static get plugins () { + return { + tracing: FastifyTracingPlugin, + codeOriginForSpans: FastifyCodeOriginForSpansPlugin + } } } diff --git a/packages/datadog-plugin-fastify/src/tracing.js b/packages/datadog-plugin-fastify/src/tracing.js new file mode 100644 index 00000000000..90b2e5e8451 --- /dev/null +++ b/packages/datadog-plugin-fastify/src/tracing.js @@ -0,0 +1,19 @@ +'use strict' + +const RouterPlugin = require('../../datadog-plugin-router/src') + +class FastifyTracingPlugin extends RouterPlugin { + static get id () { + return 'fastify' + } + + constructor (...args) { + super(...args) + + this.addSub('apm:fastify:request:handle', ({ 
req }) => { + this.setFramework(req, 'fastify', this.config) + }) + } +} + +module.exports = FastifyTracingPlugin diff --git a/packages/datadog-plugin-fastify/test/code_origin.spec.js b/packages/datadog-plugin-fastify/test/code_origin.spec.js new file mode 100644 index 00000000000..711c2ffff6c --- /dev/null +++ b/packages/datadog-plugin-fastify/test/code_origin.spec.js @@ -0,0 +1,216 @@ +'use strict' + +const axios = require('axios') +const semver = require('semver') +const agent = require('../../dd-trace/test/plugins/agent') +const { NODE_MAJOR } = require('../../../version') + +const host = 'localhost' + +describe('Plugin', () => { + let fastify + let app + + describe('fastify', () => { + withVersions('fastify', 'fastify', (version, _, specificVersion) => { + if (NODE_MAJOR <= 18 && semver.satisfies(specificVersion, '>=5')) return + + afterEach(() => { + app.close() + }) + + withExports('fastify', version, ['default', 'fastify'], '>=3', getExport => { + describe('with tracer config codeOriginForSpans.enabled: true', () => { + if (semver.satisfies(specificVersion, '<4')) return // TODO: Why doesn't it work on older versions? 
+ + before(() => { + return agent.load( + ['fastify', 'find-my-way', 'http'], + [{}, {}, { client: false }], + { codeOriginForSpans: { enabled: true } } + ) + }) + + after(() => { + return agent.close({ ritmReset: false }) + }) + + beforeEach(() => { + fastify = getExport() + app = fastify() + + if (semver.intersects(version, '>=3')) { + return app.register(require('../../../versions/middie').get()) + } + }) + + it('should add code_origin tag on entry spans when feature is enabled', done => { + let routeRegisterLine + + // Wrap in a named function to have at least one frame with a function name + function wrapperFunction () { + routeRegisterLine = getNextLineNumber() + app.get('/user', function userHandler (request, reply) { + reply.send() + }) + } + + const callWrapperLine = getNextLineNumber() + wrapperFunction() + + app.listen(() => { + const port = app.server.address().port + + agent + .use(traces => { + const spans = traces[0] + const tags = spans[0].meta + + expect(tags).to.have.property('_dd.code_origin.type', 'entry') + + expect(tags).to.have.property('_dd.code_origin.frames.0.file', __filename) + expect(tags).to.have.property('_dd.code_origin.frames.0.line', routeRegisterLine) + expect(tags).to.have.property('_dd.code_origin.frames.0.column').to.match(/^\d+$/) + expect(tags).to.have.property('_dd.code_origin.frames.0.method', 'wrapperFunction') + expect(tags).to.not.have.property('_dd.code_origin.frames.0.type') + + expect(tags).to.have.property('_dd.code_origin.frames.1.file', __filename) + expect(tags).to.have.property('_dd.code_origin.frames.1.line', callWrapperLine) + expect(tags).to.have.property('_dd.code_origin.frames.1.column').to.match(/^\d+$/) + expect(tags).to.not.have.property('_dd.code_origin.frames.1.method') + expect(tags).to.have.property('_dd.code_origin.frames.1.type', 'Context') + + expect(tags).to.not.have.property('_dd.code_origin.frames.2.file') + }) + .then(done) + .catch(done) + + axios + .get(`http://localhost:${port}/user`) + 
.catch(done) + }) + }) + + it('should point to where actual route handler is configured, not the prefix', done => { + let routeRegisterLine + + app.register(function v1Handler (app, opts, done) { + routeRegisterLine = getNextLineNumber() + app.get('/user', function userHandler (request, reply) { + reply.send() + }) + done() + }, { prefix: '/v1' }) + + app.listen(() => { + const port = app.server.address().port + + agent + .use(traces => { + const spans = traces[0] + const tags = spans[0].meta + + expect(tags).to.have.property('_dd.code_origin.type', 'entry') + + expect(tags).to.have.property('_dd.code_origin.frames.0.file', __filename) + expect(tags).to.have.property('_dd.code_origin.frames.0.line', routeRegisterLine) + expect(tags).to.have.property('_dd.code_origin.frames.0.column').to.match(/^\d+$/) + expect(tags).to.have.property('_dd.code_origin.frames.0.method', 'v1Handler') + expect(tags).to.not.have.property('_dd.code_origin.frames.0.type') + + expect(tags).to.not.have.property('_dd.code_origin.frames.1.file') + }) + .then(done) + .catch(done) + + axios + .get(`http://localhost:${port}/v1/user`) + .catch(done) + }) + }) + + it('should point to route handler even if passed through a middleware', function testCase (done) { + app.use(function middleware (req, res, next) { + next() + }) + + const routeRegisterLine = getNextLineNumber() + app.get('/user', function userHandler (request, reply) { + reply.send() + }) + + app.listen({ host, port: 0 }, () => { + const port = app.server.address().port + + agent + .use(traces => { + const spans = traces[0] + const tags = spans[0].meta + + expect(tags).to.have.property('_dd.code_origin.type', 'entry') + + expect(tags).to.have.property('_dd.code_origin.frames.0.file', __filename) + expect(tags).to.have.property('_dd.code_origin.frames.0.line', routeRegisterLine) + expect(tags).to.have.property('_dd.code_origin.frames.0.column').to.match(/^\d+$/) + expect(tags).to.have.property('_dd.code_origin.frames.0.method', 
'testCase') + expect(tags).to.have.property('_dd.code_origin.frames.0.type', 'Context') + + expect(tags).to.not.have.property('_dd.code_origin.frames.1.file') + }) + .then(done) + .catch(done) + + axios + .get(`http://localhost:${port}/user`) + .catch(done) + }) + }) + + // TODO: In Fastify, the route is resolved before the middleware is called, so we actually can get the line + // number of where the route handler is defined. However, this might not be the right choice and it might be + // better to point to the middleware. + it.skip('should point to middleware if middleware responds early', function testCase (done) { + const middlewareRegisterLine = getNextLineNumber() + app.use(function middleware (req, res, next) { + res.end() + }) + + app.get('/user', function userHandler (request, reply) { + reply.send() + }) + + app.listen({ host, port: 0 }, () => { + const port = app.server.address().port + + agent + .use(traces => { + const spans = traces[0] + const tags = spans[0].meta + + expect(tags).to.have.property('_dd.code_origin.type', 'entry') + + expect(tags).to.have.property('_dd.code_origin.frames.0.file', __filename) + expect(tags).to.have.property('_dd.code_origin.frames.0.line', middlewareRegisterLine) + expect(tags).to.have.property('_dd.code_origin.frames.0.column').to.match(/^\d+$/) + expect(tags).to.have.property('_dd.code_origin.frames.0.method', 'testCase') + expect(tags).to.have.property('_dd.code_origin.frames.0.type', 'Context') + + expect(tags).to.not.have.property('_dd.code_origin.frames.1.file') + }) + .then(done) + .catch(done) + + axios + .get(`http://localhost:${port}/user`) + .catch(done) + }) + }) + }) + }) + }) + }) +}) + +function getNextLineNumber () { + return String(Number(new Error().stack.split('\n')[2].match(/:(\d+):/)[1]) + 1) +} diff --git a/packages/datadog-plugin-fastify/test/index.spec.js b/packages/datadog-plugin-fastify/test/tracing.spec.js similarity index 99% rename from packages/datadog-plugin-fastify/test/index.spec.js 
rename to packages/datadog-plugin-fastify/test/tracing.spec.js index 6b20e58a728..c8924c98dfd 100644 --- a/packages/datadog-plugin-fastify/test/index.spec.js +++ b/packages/datadog-plugin-fastify/test/tracing.spec.js @@ -16,6 +16,8 @@ describe('Plugin', () => { describe('fastify', () => { withVersions('fastify', 'fastify', (version, _, specificVersion) => { + if (NODE_MAJOR <= 18 && semver.satisfies(specificVersion, '>=5')) return + beforeEach(() => { tracer = require('../../dd-trace') }) @@ -26,8 +28,6 @@ describe('Plugin', () => { withExports('fastify', version, ['default', 'fastify'], '>=3', getExport => { describe('without configuration', () => { - if (NODE_MAJOR <= 18 && semver.satisfies(specificVersion, '>=5')) return - before(() => { return agent.load(['fastify', 'find-my-way', 'http'], [{}, {}, { client: false }]) }) diff --git a/packages/datadog-plugin-protobufjs/src/index.js b/packages/datadog-plugin-protobufjs/src/index.js new file mode 100644 index 00000000000..800c3d9e3cb --- /dev/null +++ b/packages/datadog-plugin-protobufjs/src/index.js @@ -0,0 +1,14 @@ +const SchemaPlugin = require('../../dd-trace/src/plugins/schema') +const SchemaExtractor = require('./schema_iterator') + +class ProtobufjsPlugin extends SchemaPlugin { + static get id () { + return 'protobufjs' + } + + static get schemaExtractor () { + return SchemaExtractor + } +} + +module.exports = ProtobufjsPlugin diff --git a/packages/datadog-plugin-protobufjs/src/schema_iterator.js b/packages/datadog-plugin-protobufjs/src/schema_iterator.js new file mode 100644 index 00000000000..ea3c8ba2bf0 --- /dev/null +++ b/packages/datadog-plugin-protobufjs/src/schema_iterator.js @@ -0,0 +1,180 @@ +const PROTOBUF = 'protobuf' +const { + SCHEMA_DEFINITION, + SCHEMA_ID, + SCHEMA_NAME, + SCHEMA_OPERATION, + SCHEMA_WEIGHT, + SCHEMA_TYPE +} = require('../../dd-trace/src/constants') +const log = require('../../dd-trace/src/log') +const { + SchemaBuilder +} = 
require('../../dd-trace/src/datastreams/schemas/schema_builder') + +class SchemaExtractor { + constructor (schema) { + this.schema = schema + } + + static getTypeAndFormat (type) { + const typeFormatMapping = { + int32: ['integer', 'int32'], + int64: ['integer', 'int64'], + uint32: ['integer', 'uint32'], + uint64: ['integer', 'uint64'], + sint32: ['integer', 'sint32'], + sint64: ['integer', 'sint64'], + fixed32: ['integer', 'fixed32'], + fixed64: ['integer', 'fixed64'], + sfixed32: ['integer', 'sfixed32'], + sfixed64: ['integer', 'sfixed64'], + float: ['number', 'float'], + double: ['number', 'double'], + bool: ['boolean', null], + string: ['string', null], + bytes: ['string', 'byte'], + Enum: ['enum', null], + Type: ['type', null], + map: ['map', null], + repeated: ['array', null] + } + + return typeFormatMapping[type] || ['string', null] + } + + static extractProperty (field, schemaName, fieldName, builder, depth) { + let array = false + let description + let ref + let enumValues + + const resolvedType = field.resolvedType ? field.resolvedType.constructor.name : field.type + + const isRepeatedField = field.rule === 'repeated' + + let typeFormat = this.getTypeAndFormat(isRepeatedField ? 
'repeated' : resolvedType) + let type = typeFormat[0] + let format = typeFormat[1] + + if (type === 'array') { + array = true + typeFormat = this.getTypeAndFormat(resolvedType) + type = typeFormat[0] + format = typeFormat[1] + } + + if (type === 'type') { + format = null + ref = `#/components/schemas/${removeLeadingPeriod(field.resolvedType.fullName)}` + // keep a reference to the original builder iterator since when we recurse this reference will get reset to + // deeper schemas + const originalSchemaExtractor = builder.iterator + if (!this.extractSchema(field.resolvedType, builder, depth, this)) { + return false + } + type = 'object' + builder.iterator = originalSchemaExtractor + } else if (type === 'enum') { + enumValues = [] + let i = 0 + while (field.resolvedType.valuesById[i]) { + enumValues.push(field.resolvedType.valuesById[i]) + i += 1 + } + } + return builder.addProperty(schemaName, fieldName, array, type, description, ref, format, enumValues) + } + + static extractSchema (schema, builder, depth, extractor) { + depth += 1 + const schemaName = removeLeadingPeriod(schema.resolvedType ? schema.resolvedType.fullName : schema.fullName) + if (extractor) { + // if we already have a defined extractor, this is a nested schema. create a new extractor for the nested + // schema, ensure it is added to our schema builder's cache, and replace the builders iterator with our + // nested schema iterator / extractor. Once complete, add the new schema to our builder's schemas. 
+ const nestedSchemaExtractor = new SchemaExtractor(schema) + builder.iterator = nestedSchemaExtractor + const nestedSchema = SchemaBuilder.getSchema(schemaName, nestedSchemaExtractor, builder) + for (const nestedSubSchemaName in nestedSchema.components.schemas) { + if (nestedSchema.components.schemas.hasOwnProperty(nestedSubSchemaName)) { + builder.schema.components.schemas[nestedSubSchemaName] = nestedSchema.components.schemas[nestedSubSchemaName] + } + } + return true + } else { + if (!builder.shouldExtractSchema(schemaName, depth)) { + return false + } + for (const field of schema.fieldsArray) { + if (!this.extractProperty(field, schemaName, field.name, builder, depth)) { + log.warn(`DSM: Unable to extract field with name: ${field.name} from protobuf schema with name: ${schemaName}`) + } + } + return true + } + } + + static extractSchemas (descriptor, dataStreamsProcessor) { + const schemaName = removeLeadingPeriod( + descriptor.resolvedType ? descriptor.resolvedType.fullName : descriptor.fullName + ) + return dataStreamsProcessor.getSchema(schemaName, new SchemaExtractor(descriptor)) + } + + iterateOverSchema (builder) { + this.constructor.extractSchema(this.schema, builder, 0) + } + + static attachSchemaOnSpan (args, span, operation, tracer) { + const { messageClass } = args + const descriptor = messageClass.$type ??
messageClass + + if (!descriptor || !span) { + return + } + + if (span.context()._tags[SCHEMA_TYPE] && operation === 'serialization') { + // we have already added a schema to this span, this call is an encode of nested schema types + return + } + + span.setTag(SCHEMA_TYPE, PROTOBUF) + span.setTag(SCHEMA_NAME, removeLeadingPeriod(descriptor.fullName)) + span.setTag(SCHEMA_OPERATION, operation) + + if (!tracer._dataStreamsProcessor.canSampleSchema(operation)) { + return + } + + // if the span is unsampled, do not sample the schema + if (!tracer._prioritySampler.isSampled(span)) { + return + } + + const weight = tracer._dataStreamsProcessor.trySampleSchema(operation) + if (weight === 0) { + return + } + + const schemaData = SchemaBuilder.getSchemaDefinition( + this.extractSchemas(descriptor, tracer._dataStreamsProcessor) + ) + + span.setTag(SCHEMA_DEFINITION, schemaData.definition) + span.setTag(SCHEMA_WEIGHT, weight) + span.setTag(SCHEMA_ID, schemaData.id) + } +} + +function removeLeadingPeriod (str) { + // Check if the first character is a period + if (str.charAt(0) === '.') { + // Remove the first character + return str.slice(1) + } + // Return the original string if the first character is not a period + return str +} + +module.exports = SchemaExtractor diff --git a/packages/datadog-plugin-protobufjs/test/helpers.js b/packages/datadog-plugin-protobufjs/test/helpers.js new file mode 100644 index 00000000000..d91be2e496b --- /dev/null +++ b/packages/datadog-plugin-protobufjs/test/helpers.js @@ -0,0 +1,104 @@ +async function loadMessage (protobuf, messageTypeName) { + if (messageTypeName === 'OtherMessage') { + const root = await protobuf.load('packages/datadog-plugin-protobufjs/test/schemas/other_message.proto') + const OtherMessage = root.lookupType('OtherMessage') + const message = OtherMessage.create({ + name: ['Alice'], + age: 30 + }) + return { + OtherMessage: { + type: OtherMessage, + instance: message + } + } + } else if (messageTypeName === 'MyMessage') { + 
const messageProto = await protobuf.load('packages/datadog-plugin-protobufjs/test/schemas/message.proto') + const otherMessageProto = await protobuf.load( + 'packages/datadog-plugin-protobufjs/test/schemas/other_message.proto' + ) + const Status = messageProto.lookupEnum('Status') + const MyMessage = messageProto.lookupType('MyMessage') + const OtherMessage = otherMessageProto.lookupType('OtherMessage') + const message = MyMessage.create({ + id: '123', + value: 'example_value', + status: Status.values.ACTIVE, + otherMessage: [ + OtherMessage.create({ name: ['Alice'], age: 30 }), + OtherMessage.create({ name: ['Bob'], age: 25 }) + ] + }) + return { + OtherMessage: { + type: OtherMessage, + instance: null + }, + MyMessage: { + type: MyMessage, + instance: message + } + } + } else if (messageTypeName === 'MainMessage') { + const root = await protobuf.load('packages/datadog-plugin-protobufjs/test/schemas/all_types.proto') + + const Status = root.lookupEnum('example.Status') + const Scalars = root.lookupType('example.Scalars') + const NestedMessage = root.lookupType('example.NestedMessage') + const ComplexMessage = root.lookupType('example.ComplexMessage') + const MainMessage = root.lookupType('example.MainMessage') + + // Create instances of the messages + const scalarsInstance = Scalars.create({ + int32Field: 42, + int64Field: 123456789012345, + uint32Field: 123, + uint64Field: 123456789012345, + sint32Field: -42, + sint64Field: -123456789012345, + fixed32Field: 42, + fixed64Field: 123456789012345, + sfixed32Field: -42, + sfixed64Field: -123456789012345, + floatField: 3.14, + doubleField: 2.718281828459, + boolField: true, + stringField: 'Hello, world!', + bytesField: Buffer.from('bytes data') + }) + + const nestedMessageInstance = NestedMessage.create({ + id: 'nested_id_123', + scalars: scalarsInstance + }) + + const complexMessageInstance = ComplexMessage.create({ + repeatedField: ['item1', 'item2', 'item3'], + mapField: { + key1: scalarsInstance, + key2: 
Scalars.create({ + int32Field: 24, + stringField: 'Another string' + }) + } + }) + + const mainMessageInstance = MainMessage.create({ + status: Status.values.ACTIVE, + scalars: scalarsInstance, + nested: nestedMessageInstance, + complex: complexMessageInstance + }) + + return { + MainMessage: { + type: MainMessage, + instance: mainMessageInstance + } + } + } +} + +module.exports = { + loadMessage +} diff --git a/packages/datadog-plugin-protobufjs/test/index.spec.js b/packages/datadog-plugin-protobufjs/test/index.spec.js new file mode 100644 index 00000000000..30e95687bac --- /dev/null +++ b/packages/datadog-plugin-protobufjs/test/index.spec.js @@ -0,0 +1,352 @@ +'use strict' + +const fs = require('fs') +const { expect } = require('chai') +const agent = require('../../dd-trace/test/plugins/agent') +const path = require('path') +const { + SCHEMA_DEFINITION, + SCHEMA_ID, + SCHEMA_NAME, + SCHEMA_OPERATION, + SCHEMA_WEIGHT, + SCHEMA_TYPE +} = require('../../dd-trace/src/constants') +const sinon = require('sinon') +const { loadMessage } = require('./helpers') +const { SchemaBuilder } = require('../../dd-trace/src/datastreams/schemas/schema_builder') + +const schemas = JSON.parse(fs.readFileSync(path.join(__dirname, 'schemas/expected_schemas.json'), 'utf8')) +const MESSAGE_SCHEMA_DEF = schemas.MESSAGE_SCHEMA_DEF +const OTHER_MESSAGE_SCHEMA_DEF = schemas.OTHER_MESSAGE_SCHEMA_DEF +const ALL_TYPES_MESSAGE_SCHEMA_DEF = schemas.ALL_TYPES_MESSAGE_SCHEMA_DEF + +const MESSAGE_SCHEMA_ID = '666607144722735562' +const OTHER_MESSAGE_SCHEMA_ID = '2691489402935632768' +const ALL_TYPES_MESSAGE_SCHEMA_ID = '15890948796193489151' + +function compareJson (expected, span) { + const actual = JSON.parse(span.context()._tags[SCHEMA_DEFINITION]) + return JSON.stringify(actual) === JSON.stringify(expected) +} + +describe('Plugin', () => { + describe('protobufjs', function () { + let tracer + let protobuf + let dateNowStub + let mockTime = 0 + + withVersions('protobufjs', ['protobufjs'], 
(version) => { + before(() => { + tracer = require('../../dd-trace').init() + // reset sampled schemas + if (tracer._dataStreamsProcessor?._schemaSamplers) { + tracer._dataStreamsProcessor._schemaSamplers = [] + } + }) + + describe('without configuration', () => { + before(() => { + dateNowStub = sinon.stub(Date, 'now').callsFake(() => { + const returnValue = mockTime + mockTime += 50000 // Increment by 50000 ms to ensure each DSM schema is sampled + return returnValue + }) + const cache = SchemaBuilder.getCache() + cache.clear() + return agent.load('protobufjs').then(() => { + protobuf = require(`../../../versions/protobufjs@${version}`).get() + }) + }) + + after(() => { + dateNowStub.restore() + return agent.close({ ritmReset: false }) + }) + + it('should serialize basic schema correctly', async () => { + const loadedMessages = await loadMessage(protobuf, 'OtherMessage') + + tracer.trace('other_message.serialize', span => { + loadedMessages.OtherMessage.type.encode(loadedMessages.OtherMessage.instance).finish() + + expect(span._name).to.equal('other_message.serialize') + + expect(compareJson(OTHER_MESSAGE_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'protobuf') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'OtherMessage') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'serialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, OTHER_MESSAGE_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + + it('should load using a callback instead of promise', async () => { + const loadedMessages = loadMessage(protobuf, 'OtherMessage', () => { + tracer.trace('other_message.serialize', span => { + loadedMessages.OtherMessage.type.encode(loadedMessages.OtherMessage.instance).finish() + + expect(span._name).to.equal('other_message.serialize') + + expect(compareJson(OTHER_MESSAGE_SCHEMA_DEF, span)).to.equal(true) + 
expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'protobuf') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'OtherMessage') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'serialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, OTHER_MESSAGE_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + }) + + it('should serialize complex schema correctly', async () => { + const loadedMessages = await loadMessage(protobuf, 'MyMessage') + + tracer.trace('message_pb2.serialize', span => { + loadedMessages.MyMessage.type.encode(loadedMessages.MyMessage.instance).finish() + + expect(span._name).to.equal('message_pb2.serialize') + + expect(compareJson(MESSAGE_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'protobuf') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'MyMessage') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'serialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, MESSAGE_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + + it('should serialize schema with all types correctly', async () => { + const loadedMessages = await loadMessage(protobuf, 'MainMessage') + + tracer.trace('all_types.serialize', span => { + loadedMessages.MainMessage.type.encode(loadedMessages.MainMessage.instance).finish() + + expect(span._name).to.equal('all_types.serialize') + + expect(compareJson(ALL_TYPES_MESSAGE_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'protobuf') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'example.MainMessage') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'serialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, ALL_TYPES_MESSAGE_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + + it('should 
deserialize basic schema correctly', async () => { + const loadedMessages = await loadMessage(protobuf, 'OtherMessage') + + const bytes = loadedMessages.OtherMessage.type.encode(loadedMessages.OtherMessage.instance).finish() + + tracer.trace('other_message.deserialize', span => { + loadedMessages.OtherMessage.type.decode(bytes) + + expect(span._name).to.equal('other_message.deserialize') + + expect(compareJson(OTHER_MESSAGE_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'protobuf') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'OtherMessage') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'deserialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, OTHER_MESSAGE_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + + it('should deserialize complex schema correctly', async () => { + const loadedMessages = await loadMessage(protobuf, 'MyMessage') + + const bytes = loadedMessages.MyMessage.type.encode(loadedMessages.MyMessage.instance).finish() + + tracer.trace('my_message.deserialize', span => { + loadedMessages.MyMessage.type.decode(bytes) + + expect(span._name).to.equal('my_message.deserialize') + + expect(compareJson(MESSAGE_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'protobuf') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'MyMessage') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'deserialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, MESSAGE_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + + it('should deserialize all types schema correctly', async () => { + const loadedMessages = await loadMessage(protobuf, 'MainMessage') + + const bytes = loadedMessages.MainMessage.type.encode(loadedMessages.MainMessage.instance).finish() + + tracer.trace('all_types.deserialize', span => { + 
loadedMessages.MainMessage.type.decode(bytes) + + expect(span._name).to.equal('all_types.deserialize') + + expect(compareJson(ALL_TYPES_MESSAGE_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'protobuf') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'example.MainMessage') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'deserialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, ALL_TYPES_MESSAGE_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + + it('should wrap encode and decode for fromObject', async () => { + const root = await protobuf.load('packages/datadog-plugin-protobufjs/test/schemas/other_message.proto') + const OtherMessage = root.lookupType('OtherMessage') + const messageObject = { + name: ['Alice'], + age: 30 + } + const message = OtherMessage.fromObject(messageObject) + + const bytes = OtherMessage.encode(message).finish() + + tracer.trace('other_message.deserialize', span => { + OtherMessage.decode(bytes) + + expect(span._name).to.equal('other_message.deserialize') + + expect(compareJson(OTHER_MESSAGE_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'protobuf') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'OtherMessage') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'deserialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, OTHER_MESSAGE_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + + it('should wrap decodeDelimited', async () => { + const root = await protobuf.load('packages/datadog-plugin-protobufjs/test/schemas/other_message.proto') + const OtherMessage = root.lookupType('OtherMessage') + const message = OtherMessage.create({ + name: ['Alice'], + age: 30 + }) + + const bytes = OtherMessage.encodeDelimited(message).finish() + + tracer.trace('other_message.deserialize', 
span => { + OtherMessage.decodeDelimited(bytes) + + expect(span._name).to.equal('other_message.deserialize') + + expect(compareJson(OTHER_MESSAGE_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'protobuf') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'OtherMessage') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'deserialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, OTHER_MESSAGE_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + + it('should load using direct type creation', () => { + const OtherMessage = new protobuf.Type('OtherMessage') + .add(new protobuf.Field('name', 1, 'string', 'repeated')) + .add(new protobuf.Field('age', 2, 'int32')) + + const message = OtherMessage.create({ + name: ['Alice'], + age: 30 + }) + + const bytes = OtherMessage.encodeDelimited(message).finish() + + tracer.trace('other_message.deserialize', span => { + OtherMessage.decodeDelimited(bytes) + + expect(span._name).to.equal('other_message.deserialize') + + expect(compareJson(OTHER_MESSAGE_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'protobuf') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'OtherMessage') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'deserialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, OTHER_MESSAGE_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + + it('should load using JSON descriptors', () => { + const jsonDescriptor = require('./schemas/other_message_proto.json') + const root = protobuf.Root.fromJSON(jsonDescriptor) + const OtherMessage = root.lookupType('OtherMessage') + + const message = OtherMessage.create({ + name: ['Alice'], + age: 30 + }) + + const bytes = OtherMessage.encodeDelimited(message).finish() + + tracer.trace('other_message.deserialize', span => { + 
OtherMessage.decodeDelimited(bytes) + + expect(span._name).to.equal('other_message.deserialize') + + expect(compareJson(OTHER_MESSAGE_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'protobuf') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'OtherMessage') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'deserialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, OTHER_MESSAGE_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + }) + }) + + describe('during schema sampling', function () { + let cacheSetSpy + let cacheGetSpy + + beforeEach(() => { + const cache = SchemaBuilder.getCache() + cache.clear() + cacheSetSpy = sinon.spy(cache, 'set') + cacheGetSpy = sinon.spy(cache, 'get') + }) + + afterEach(() => { + cacheSetSpy.restore() + cacheGetSpy.restore() + }) + + it('should use the schema cache and not re-extract an already sampled schema', async () => { + const loadedMessages = await loadMessage(protobuf, 'MyMessage') + + tracer.trace('message_pb2.serialize', span => { + loadedMessages.MyMessage.type.encode(loadedMessages.MyMessage.instance).finish() + + expect(span._name).to.equal('message_pb2.serialize') + + expect(compareJson(MESSAGE_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'protobuf') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'MyMessage') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'serialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, MESSAGE_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + + // we sampled 1 schema with 1 subschema, so the constructor should've only been called twice + expect(cacheSetSpy.callCount).to.equal(2) + expect(cacheGetSpy.callCount).to.equal(2) + }) + + tracer.trace('message_pb2.serialize', span => { + 
loadedMessages.MyMessage.type.encode(loadedMessages.MyMessage.instance).finish() + + expect(span._name).to.equal('message_pb2.serialize') + + expect(compareJson(MESSAGE_SCHEMA_DEF, span)).to.equal(true) + expect(span.context()._tags).to.have.property(SCHEMA_TYPE, 'protobuf') + expect(span.context()._tags).to.have.property(SCHEMA_NAME, 'MyMessage') + expect(span.context()._tags).to.have.property(SCHEMA_OPERATION, 'serialization') + expect(span.context()._tags).to.have.property(SCHEMA_ID, MESSAGE_SCHEMA_ID) + expect(span.context()._tags).to.have.property(SCHEMA_WEIGHT, 1) + + // ensure schema was sampled and returned via the cache, so no extra cache set + // calls were needed, only gets + expect(cacheSetSpy.callCount).to.equal(2) + expect(cacheGetSpy.callCount).to.equal(3) + }) + }) + }) + }) + }) + }) +}) diff --git a/packages/datadog-plugin-protobufjs/test/schemas/all_types.proto b/packages/datadog-plugin-protobufjs/test/schemas/all_types.proto new file mode 100644 index 00000000000..6cfc3b3ee3d --- /dev/null +++ b/packages/datadog-plugin-protobufjs/test/schemas/all_types.proto @@ -0,0 +1,49 @@ +syntax = "proto3"; + +package example; + +// Enum definition +enum Status { + UNKNOWN = 0; + ACTIVE = 1; + INACTIVE = 2; +} + +// Message with various number types and other scalar types +message Scalars { + int32 int32Field = 1; + int64 int64Field = 2; + uint32 uint32Field = 3; + uint64 uint64Field = 4; + sint32 sint32Field = 5; + sint64 sint64Field = 6; + fixed32 fixed32Field = 7; + fixed64 fixed64Field = 8; + sfixed32 sfixed32Field = 9; + sfixed64 sfixed64Field = 10; + float floatField = 11; + double doubleField = 12; + bool boolField = 13; + string stringField = 14; + bytes bytesField = 15; +} + +// Nested message definition +message NestedMessage { + string id = 1; + Scalars scalars = 2; +} + +// Message demonstrating the use of repeated fields and maps +message ComplexMessage { + repeated string repeatedField = 1; + map mapField = 2; +} + +// Main message that uses 
all the above elements +message MainMessage { + Status status = 1; + Scalars scalars = 2; + NestedMessage nested = 3; + ComplexMessage complex = 4; +} \ No newline at end of file diff --git a/packages/datadog-plugin-protobufjs/test/schemas/expected_schemas.json b/packages/datadog-plugin-protobufjs/test/schemas/expected_schemas.json new file mode 100644 index 00000000000..1825013519d --- /dev/null +++ b/packages/datadog-plugin-protobufjs/test/schemas/expected_schemas.json @@ -0,0 +1,195 @@ +{ + "MESSAGE_SCHEMA_DEF":{ + "openapi":"3.0.0", + "components":{ + "schemas":{ + "MyMessage":{ + "type":"object", + "properties":{ + "id":{ + "type":"string" + }, + "value":{ + "type":"string" + }, + "otherMessage":{ + "type":"array", + "items":{ + "type":"object", + "$ref":"#/components/schemas/OtherMessage" + } + }, + "status":{ + "type":"enum", + "enum":[ + "UNKNOWN", + "ACTIVE", + "INACTIVE", + "DELETED" + ] + } + } + }, + "OtherMessage":{ + "type":"object", + "properties":{ + "name":{ + "type":"array", + "items":{ + "type":"string" + } + }, + "age":{ + "type":"integer", + "format":"int32" + } + } + } + } + } + }, + "OTHER_MESSAGE_SCHEMA_DEF":{ + "openapi":"3.0.0", + "components":{ + "schemas":{ + "OtherMessage":{ + "type":"object", + "properties":{ + "name":{ + "type":"array", + "items":{ + "type":"string" + } + }, + "age":{ + "type":"integer", + "format":"int32" + } + } + } + } + } + }, + "ALL_TYPES_MESSAGE_SCHEMA_DEF":{ + "openapi":"3.0.0", + "components":{ + "schemas":{ + "example.MainMessage":{ + "type":"object", + "properties":{ + "status":{ + "type":"enum", + "enum":[ + "UNKNOWN", + "ACTIVE", + "INACTIVE" + ] + }, + "scalars":{ + "type":"object", + "$ref":"#/components/schemas/example.Scalars" + }, + "nested":{ + "type":"object", + "$ref":"#/components/schemas/example.NestedMessage" + }, + "complex":{ + "type":"object", + "$ref":"#/components/schemas/example.ComplexMessage" + } + } + }, + "example.Scalars":{ + "type":"object", + "properties":{ + "int32Field":{ + 
"type":"integer", + "format":"int32" + }, + "int64Field":{ + "type":"integer", + "format":"int64" + }, + "uint32Field":{ + "type":"integer", + "format":"uint32" + }, + "uint64Field":{ + "type":"integer", + "format":"uint64" + }, + "sint32Field":{ + "type":"integer", + "format":"sint32" + }, + "sint64Field":{ + "type":"integer", + "format":"sint64" + }, + "fixed32Field":{ + "type":"integer", + "format":"fixed32" + }, + "fixed64Field":{ + "type":"integer", + "format":"fixed64" + }, + "sfixed32Field":{ + "type":"integer", + "format":"sfixed32" + }, + "sfixed64Field":{ + "type":"integer", + "format":"sfixed64" + }, + "floatField":{ + "type":"number", + "format":"float" + }, + "doubleField":{ + "type":"number", + "format":"double" + }, + "boolField":{ + "type":"boolean" + }, + "stringField":{ + "type":"string" + }, + "bytesField":{ + "type":"string", + "format":"byte" + } + } + }, + "example.NestedMessage":{ + "type":"object", + "properties":{ + "id":{ + "type":"string" + }, + "scalars":{ + "type":"object", + "$ref":"#/components/schemas/example.Scalars" + } + } + }, + "example.ComplexMessage":{ + "type":"object", + "properties":{ + "repeatedField":{ + "type":"array", + "items":{ + "type":"string" + } + }, + "mapField":{ + "type":"object", + "$ref":"#/components/schemas/example.Scalars" + } + } + } + } + } + } +} \ No newline at end of file diff --git a/packages/datadog-plugin-protobufjs/test/schemas/message.proto b/packages/datadog-plugin-protobufjs/test/schemas/message.proto new file mode 100644 index 00000000000..6fd1c65fe06 --- /dev/null +++ b/packages/datadog-plugin-protobufjs/test/schemas/message.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; + +import "other_message.proto"; + +enum Status { + UNKNOWN = 0; + ACTIVE = 1; + INACTIVE = 2; + DELETED = 3; +} + +message MyMessage { + string id = 1; + string value = 2; + repeated OtherMessage otherMessage = 3; + Status status = 4; +} \ No newline at end of file diff --git 
a/packages/datadog-plugin-protobufjs/test/schemas/other_message.proto b/packages/datadog-plugin-protobufjs/test/schemas/other_message.proto new file mode 100644 index 00000000000..dbd6f368d7d --- /dev/null +++ b/packages/datadog-plugin-protobufjs/test/schemas/other_message.proto @@ -0,0 +1,6 @@ +syntax = "proto3"; + +message OtherMessage { + repeated string name = 1; + int32 age = 2; +} \ No newline at end of file diff --git a/packages/datadog-plugin-protobufjs/test/schemas/other_message_proto.json b/packages/datadog-plugin-protobufjs/test/schemas/other_message_proto.json new file mode 100644 index 00000000000..5a682ec89ca --- /dev/null +++ b/packages/datadog-plugin-protobufjs/test/schemas/other_message_proto.json @@ -0,0 +1,17 @@ +{ + "nested": { + "OtherMessage": { + "fields": { + "name": { + "rule": "repeated", + "type": "string", + "id": 1 + }, + "age": { + "type": "int32", + "id": 2 + } + } + } + } + } \ No newline at end of file diff --git a/packages/dd-trace/src/appsec/addresses.js b/packages/dd-trace/src/appsec/addresses.js index e2cf6c6940a..40c643012ef 100644 --- a/packages/dd-trace/src/appsec/addresses.js +++ b/packages/dd-trace/src/appsec/addresses.js @@ -23,6 +23,11 @@ module.exports = { WAF_CONTEXT_PROCESSOR: 'waf.context.processor', HTTP_OUTGOING_URL: 'server.io.net.url', + FS_OPERATION_PATH: 'server.io.fs.file', + DB_STATEMENT: 'server.db.statement', - DB_SYSTEM: 'server.db.system' + DB_SYSTEM: 'server.db.system', + + LOGIN_SUCCESS: 'server.business_logic.users.login.success', + LOGIN_FAILURE: 'server.business_logic.users.login.failure' } diff --git a/packages/dd-trace/src/appsec/channels.js b/packages/dd-trace/src/appsec/channels.js index c098efd5538..3081ed9974a 100644 --- a/packages/dd-trace/src/appsec/channels.js +++ b/packages/dd-trace/src/appsec/channels.js @@ -17,6 +17,7 @@ module.exports = { setCookieChannel: dc.channel('datadog:iast:set-cookie'), nextBodyParsed: dc.channel('apm:next:body-parsed'), nextQueryParsed: 
dc.channel('apm:next:query-parsed'), + expressProcessParams: dc.channel('datadog:express:process_params:start'), responseBody: dc.channel('datadog:express:response:json:start'), responseWriteHead: dc.channel('apm:http:server:response:writeHead:start'), httpClientRequestStart: dc.channel('apm:http:client:request:start'), @@ -24,5 +25,8 @@ module.exports = { setUncaughtExceptionCaptureCallbackStart: dc.channel('datadog:process:setUncaughtExceptionCaptureCallback:start'), pgQueryStart: dc.channel('apm:pg:query:start'), pgPoolQueryStart: dc.channel('datadog:pg:pool:query:start'), - wafRunFinished: dc.channel('datadog:waf:run:finish') + mysql2OuterQueryStart: dc.channel('datadog:mysql2:outerquery:start'), + wafRunFinished: dc.channel('datadog:waf:run:finish'), + fsOperationStart: dc.channel('apm:fs:operation:start'), + expressMiddlewareError: dc.channel('apm:express:middleware:error') } diff --git a/packages/dd-trace/src/appsec/iast/analyzers/cookie-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/cookie-analyzer.js index e82fe65ef74..2b125b88403 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/cookie-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/cookie-analyzer.js @@ -2,6 +2,7 @@ const Analyzer = require('./vulnerability-analyzer') const { getNodeModulesPaths } = require('../path-line') +const iastLog = require('../iast-log') const EXCLUDED_PATHS = getNodeModulesPaths('express/lib/response.js') @@ -11,7 +12,14 @@ class CookieAnalyzer extends Analyzer { this.propertyToBeSafe = propertyToBeSafe.toLowerCase() } - onConfigure () { + onConfigure (config) { + try { + this.cookieFilterRegExp = new RegExp(config.iast.cookieFilterPattern) + } catch { + iastLog.error('Invalid regex in cookieFilterPattern') + this.cookieFilterRegExp = /.{32,}/ + } + this.addSub( { channelName: 'datadog:iast:set-cookie', moduleName: 'http' }, (cookieInfo) => this.analyze(cookieInfo) @@ -28,6 +36,10 @@ class CookieAnalyzer extends Analyzer { } _createHashSource 
(type, evidence, location) { + if (typeof evidence.value === 'string' && evidence.value.match(this.cookieFilterRegExp)) { + return 'FILTERED_' + this._type + } + return `${type}:${evidence.value}` } diff --git a/packages/dd-trace/src/appsec/iast/analyzers/path-traversal-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/path-traversal-analyzer.js index 83bf2a87085..625dbde9150 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/path-traversal-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/path-traversal-analyzer.js @@ -29,7 +29,14 @@ class PathTraversalAnalyzer extends InjectionAnalyzer { onConfigure () { this.addSub('apm:fs:operation:start', (obj) => { - if (ignoredOperations.includes(obj.operation)) return + const store = storage.getStore() + const outOfReqOrChild = !store?.fs?.root + + // we could filter out all the nested fs.operations based on store.fs.root + // but if we spect a store in the context to be present we are going to exclude + // all out_of_the_request fs.operations + // AppsecFsPlugin must be enabled + if (ignoredOperations.includes(obj.operation) || outOfReqOrChild) return const pathArguments = [] if (obj.dest) { diff --git a/packages/dd-trace/src/appsec/iast/iast-plugin.js b/packages/dd-trace/src/appsec/iast/iast-plugin.js index 96759a530e2..5eb6e00410d 100644 --- a/packages/dd-trace/src/appsec/iast/iast-plugin.js +++ b/packages/dd-trace/src/appsec/iast/iast-plugin.js @@ -127,7 +127,7 @@ class IastPlugin extends Plugin { config = { enabled: config } } if (config.enabled && !this.configured) { - this.onConfigure() + this.onConfigure(config.tracerConfig) this.configured = true } diff --git a/packages/dd-trace/src/appsec/iast/index.js b/packages/dd-trace/src/appsec/iast/index.js index 0facaa39a2a..9330bfdbbb1 100644 --- a/packages/dd-trace/src/appsec/iast/index.js +++ b/packages/dd-trace/src/appsec/iast/index.js @@ -14,6 +14,7 @@ const { } = require('./taint-tracking') const { IAST_ENABLED_TAG_KEY } = require('./tags') 
const iastTelemetry = require('./telemetry') +const { enable: enableFsPlugin, disable: disableFsPlugin, IAST_MODULE } = require('../rasp/fs-plugin') // TODO Change to `apm:http:server:request:[start|close]` when the subscription // order of the callbacks can be enforce @@ -27,6 +28,7 @@ function enable (config, _tracer) { if (isEnabled) return iastTelemetry.configure(config, config.iast?.telemetryVerbosity) + enableFsPlugin(IAST_MODULE) enableAllAnalyzers(config) enableTaintTracking(config.iast, iastTelemetry.verbosity) requestStart.subscribe(onIncomingHttpRequestStart) @@ -44,6 +46,7 @@ function disable () { isEnabled = false iastTelemetry.stop() + disableFsPlugin(IAST_MODULE) disableAllAnalyzers() disableTaintTracking() overheadController.finishGlobalContext() diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/csi-methods.js b/packages/dd-trace/src/appsec/iast/taint-tracking/csi-methods.js index 62f49f2e830..2133971afb9 100644 --- a/packages/dd-trace/src/appsec/iast/taint-tracking/csi-methods.js +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/csi-methods.js @@ -12,6 +12,7 @@ const csiMethods = [ { src: 'substring' }, { src: 'toLowerCase', dst: 'stringCase' }, { src: 'toUpperCase', dst: 'stringCase' }, + { src: 'tplOperator', operator: true }, { src: 'trim' }, { src: 'trimEnd' }, { src: 'trimStart', dst: 'trim' }, diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/taint-tracking-impl.js b/packages/dd-trace/src/appsec/iast/taint-tracking/taint-tracking-impl.js index 9f48a3add3f..5fa16d00d77 100644 --- a/packages/dd-trace/src/appsec/iast/taint-tracking/taint-tracking-impl.js +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/taint-tracking-impl.js @@ -29,6 +29,7 @@ const TaintTrackingNoop = { substr: noop, substring: noop, stringCase: noop, + tplOperator: noop, trim: noop, trimEnd: noop } @@ -117,6 +118,20 @@ function csiMethodsOverrides (getContext) { return res }, + tplOperator: function (res, ...rest) { + try { + const iastContext 
= getContext() + const transactionId = getTransactionId(iastContext) + if (transactionId) { + return TaintedUtils.concat(transactionId, res, ...rest) + } + } catch (e) { + iastLog.error('Error invoking CSI tplOperator') + .errorAndPublish(e) + } + return res + }, + stringCase: getCsiFn( (transactionId, res, target) => TaintedUtils.stringCase(transactionId, res, target), getContext, diff --git a/packages/dd-trace/src/appsec/index.js b/packages/dd-trace/src/appsec/index.js index 10e63ebd2de..f3656e459e8 100644 --- a/packages/dd-trace/src/appsec/index.js +++ b/packages/dd-trace/src/appsec/index.js @@ -12,6 +12,7 @@ const { queryParser, nextBodyParsed, nextQueryParsed, + expressProcessParams, responseBody, responseWriteHead, responseSetHeader @@ -30,6 +31,8 @@ const { storage } = require('../../../datadog-core') const graphql = require('./graphql') const rasp = require('./rasp') +const responseAnalyzedSet = new WeakSet() + let isEnabled = false let config @@ -54,13 +57,14 @@ function enable (_config) { apiSecuritySampler.configure(_config.appsec) + bodyParser.subscribe(onRequestBodyParsed) + cookieParser.subscribe(onRequestCookieParser) incomingHttpRequestStart.subscribe(incomingHttpStartTranslator) incomingHttpRequestEnd.subscribe(incomingHttpEndTranslator) - bodyParser.subscribe(onRequestBodyParsed) + queryParser.subscribe(onRequestQueryParsed) nextBodyParsed.subscribe(onRequestBodyParsed) nextQueryParsed.subscribe(onRequestQueryParsed) - queryParser.subscribe(onRequestQueryParsed) - cookieParser.subscribe(onRequestCookieParser) + expressProcessParams.subscribe(onRequestProcessParams) responseBody.subscribe(onResponseBody) responseWriteHead.subscribe(onResponseWriteHead) responseSetHeader.subscribe(onResponseSetHeader) @@ -79,6 +83,41 @@ function enable (_config) { } } +function onRequestBodyParsed ({ req, res, body, abortController }) { + if (body === undefined || body === null) return + + if (!req) { + const store = storage.getStore() + req = store?.req + } + + 
const rootSpan = web.root(req) + if (!rootSpan) return + + const results = waf.run({ + persistent: { + [addresses.HTTP_INCOMING_BODY]: body + } + }, req) + + handleResults(results, req, res, rootSpan, abortController) +} + +function onRequestCookieParser ({ req, res, abortController, cookies }) { + if (!cookies || typeof cookies !== 'object') return + + const rootSpan = web.root(req) + if (!rootSpan) return + + const results = waf.run({ + persistent: { + [addresses.HTTP_INCOMING_COOKIES]: cookies + } + }, req) + + handleResults(results, req, res, rootSpan, abortController) +} + function incomingHttpStartTranslator ({ req, res, abortController }) { const rootSpan = web.root(req) if (!rootSpan) return @@ -122,11 +161,6 @@ function incomingHttpEndTranslator ({ req, res }) { persistent[addresses.HTTP_INCOMING_BODY] = req.body } - // TODO: temporary express instrumentation, will use express plugin later - if (req.params !== null && typeof req.params === 'object') { - persistent[addresses.HTTP_INCOMING_PARAMS] = req.params - } - // we need to keep this to support other cookie parsers if (req.cookies !== null && typeof req.cookies === 'object') { persistent[addresses.HTTP_INCOMING_COOKIES] = req.cookies @@ -145,24 +179,16 @@ function incomingHttpEndTranslator ({ req, res }) { Reporter.finishRequest(req, res) } -function onRequestBodyParsed ({ req, res, body, abortController }) { - if (body === undefined || body === null) return +function onPassportVerify ({ credentials, user }) { + const store = storage.getStore() + const rootSpan = store?.req && web.root(store.req) - if (!req) { - const store = storage.getStore() - req = store?.req + if (!rootSpan) { + log.warn('No rootSpan found in onPassportVerify') + return } - const rootSpan = web.root(req) - if (!rootSpan) return - - const results = waf.run({ - persistent: { - [addresses.HTTP_INCOMING_BODY]: body - } - }, req) - - handleResults(results, req, res, rootSpan, abortController) + passportTrackEvent(credentials, user, 
rootSpan, config.appsec.eventTracking.mode) } function onRequestQueryParsed ({ req, res, query, abortController }) { @@ -185,15 +211,15 @@ function onRequestQueryParsed ({ req, res, query, abortController }) { handleResults(results, req, res, rootSpan, abortController) } -function onRequestCookieParser ({ req, res, abortController, cookies }) { - if (!cookies || typeof cookies !== 'object') return - +function onRequestProcessParams ({ req, res, abortController, params }) { const rootSpan = web.root(req) if (!rootSpan) return + if (!params || typeof params !== 'object' || !Object.keys(params).length) return + const results = waf.run({ persistent: { - [addresses.HTTP_INCOMING_COOKIES]: cookies + [addresses.HTTP_INCOMING_PARAMS]: params } }, req) @@ -212,20 +238,6 @@ function onResponseBody ({ req, body }) { }, req) } -function onPassportVerify ({ credentials, user }) { - const store = storage.getStore() - const rootSpan = store?.req && web.root(store.req) - - if (!rootSpan) { - log.warn('No rootSpan found in onPassportVerify') - return - } - - passportTrackEvent(credentials, user, rootSpan, config.appsec.eventTracking.mode) -} - -const responseAnalyzedSet = new WeakSet() - function onResponseWriteHead ({ req, res, abortController, statusCode, responseHeaders }) { // avoid "write after end" error if (isBlocked(res)) { @@ -287,12 +299,15 @@ function disable () { // Channel#unsubscribe() is undefined for non active channels if (bodyParser.hasSubscribers) bodyParser.unsubscribe(onRequestBodyParsed) + if (cookieParser.hasSubscribers) cookieParser.unsubscribe(onRequestCookieParser) if (incomingHttpRequestStart.hasSubscribers) incomingHttpRequestStart.unsubscribe(incomingHttpStartTranslator) if (incomingHttpRequestEnd.hasSubscribers) incomingHttpRequestEnd.unsubscribe(incomingHttpEndTranslator) + if (passportVerify.hasSubscribers) passportVerify.unsubscribe(onPassportVerify) if (queryParser.hasSubscribers) queryParser.unsubscribe(onRequestQueryParsed) - if 
(cookieParser.hasSubscribers) cookieParser.unsubscribe(onRequestCookieParser) + if (nextBodyParsed.hasSubscribers) nextBodyParsed.unsubscribe(onRequestBodyParsed) + if (nextQueryParsed.hasSubscribers) nextQueryParsed.unsubscribe(onRequestQueryParsed) + if (expressProcessParams.hasSubscribers) expressProcessParams.unsubscribe(onRequestProcessParams) if (responseBody.hasSubscribers) responseBody.unsubscribe(onResponseBody) - if (passportVerify.hasSubscribers) passportVerify.unsubscribe(onPassportVerify) if (responseWriteHead.hasSubscribers) responseWriteHead.unsubscribe(onResponseWriteHead) if (responseSetHeader.hasSubscribers) responseSetHeader.unsubscribe(onResponseSetHeader) } diff --git a/packages/dd-trace/src/appsec/rasp/fs-plugin.js b/packages/dd-trace/src/appsec/rasp/fs-plugin.js new file mode 100644 index 00000000000..a283b4f1a61 --- /dev/null +++ b/packages/dd-trace/src/appsec/rasp/fs-plugin.js @@ -0,0 +1,99 @@ +'use strict' + +const Plugin = require('../../plugins/plugin') +const { storage } = require('../../../../datadog-core') +const log = require('../../log') + +const RASP_MODULE = 'rasp' +const IAST_MODULE = 'iast' + +const enabledFor = { + [RASP_MODULE]: false, + [IAST_MODULE]: false +} + +let fsPlugin + +function enterWith (fsProps, store = storage.getStore()) { + if (store && !store.fs?.opExcluded) { + storage.enterWith({ + ...store, + fs: { + ...store.fs, + ...fsProps, + parentStore: store + } + }) + } +} + +class AppsecFsPlugin extends Plugin { + enable () { + this.addSub('apm:fs:operation:start', this._onFsOperationStart) + this.addSub('apm:fs:operation:finish', this._onFsOperationFinishOrRenderEnd) + this.addSub('tracing:datadog:express:response:render:start', this._onResponseRenderStart) + this.addSub('tracing:datadog:express:response:render:end', this._onFsOperationFinishOrRenderEnd) + + super.configure(true) + } + + disable () { + super.configure(false) + } + + _onFsOperationStart () { + const store = storage.getStore() + if (store) { + 
enterWith({ root: store.fs?.root === undefined }, store) + } + } + + _onResponseRenderStart () { + enterWith({ opExcluded: true }) + } + + _onFsOperationFinishOrRenderEnd () { + const store = storage.getStore() + if (store?.fs?.parentStore) { + storage.enterWith(store.fs.parentStore) + } + } +} + +function enable (mod) { + if (enabledFor[mod] !== false) return + + enabledFor[mod] = true + + if (!fsPlugin) { + fsPlugin = new AppsecFsPlugin() + fsPlugin.enable() + } + + log.info(`Enabled AppsecFsPlugin for ${mod}`) +} + +function disable (mod) { + if (!mod || !enabledFor[mod]) return + + enabledFor[mod] = false + + const allDisabled = Object.values(enabledFor).every(val => val === false) + if (allDisabled) { + fsPlugin?.disable() + + fsPlugin = undefined + } + + log.info(`Disabled AppsecFsPlugin for ${mod}`) +} + +module.exports = { + enable, + disable, + + AppsecFsPlugin, + + RASP_MODULE, + IAST_MODULE +} diff --git a/packages/dd-trace/src/appsec/rasp/index.js b/packages/dd-trace/src/appsec/rasp/index.js index 801608e54d8..d5a1312872a 100644 --- a/packages/dd-trace/src/appsec/rasp/index.js +++ b/packages/dd-trace/src/appsec/rasp/index.js @@ -1,10 +1,11 @@ 'use strict' const web = require('../../plugins/util/web') -const { setUncaughtExceptionCaptureCallbackStart } = require('../channels') -const { block } = require('../blocking') +const { setUncaughtExceptionCaptureCallbackStart, expressMiddlewareError } = require('../channels') +const { block, isBlocked } = require('../blocking') const ssrf = require('./ssrf') const sqli = require('./sql_injection') +const lfi = require('./lfi') const { DatadogRaspAbortError } = require('./utils') @@ -30,17 +31,13 @@ function findDatadogRaspAbortError (err, deep = 10) { return err } - if (err.cause && deep > 0) { + if (err?.cause && deep > 0) { return findDatadogRaspAbortError(err.cause, deep - 1) } } -function handleUncaughtExceptionMonitor (err) { - const abortError = findDatadogRaspAbortError(err) - if (!abortError) return - - 
const { req, res, blockingAction } = abortError - block(req, res, web.root(req), null, blockingAction) +function handleUncaughtExceptionMonitor (error) { + if (!blockOnDatadogRaspAbortError({ error })) return if (!process.hasUncaughtExceptionCaptureCallback()) { const cleanUp = removeAllListeners(process, 'uncaughtException') @@ -82,22 +79,39 @@ function handleUncaughtExceptionMonitor (err) { } } +function blockOnDatadogRaspAbortError ({ error }) { + const abortError = findDatadogRaspAbortError(error) + if (!abortError) return false + + const { req, res, blockingAction } = abortError + if (!isBlocked(res)) { + block(req, res, web.root(req), null, blockingAction) + } + + return true +} + function enable (config) { ssrf.enable(config) sqli.enable(config) + lfi.enable(config) process.on('uncaughtExceptionMonitor', handleUncaughtExceptionMonitor) + expressMiddlewareError.subscribe(blockOnDatadogRaspAbortError) } function disable () { ssrf.disable() sqli.disable() + lfi.disable() process.off('uncaughtExceptionMonitor', handleUncaughtExceptionMonitor) + if (expressMiddlewareError.hasSubscribers) expressMiddlewareError.unsubscribe(blockOnDatadogRaspAbortError) } module.exports = { enable, disable, - handleUncaughtExceptionMonitor // exported only for testing purpose + handleUncaughtExceptionMonitor, // exported only for testing purpose + blockOnDatadogRaspAbortError // exported only for testing purpose } diff --git a/packages/dd-trace/src/appsec/rasp/lfi.js b/packages/dd-trace/src/appsec/rasp/lfi.js new file mode 100644 index 00000000000..1190734064d --- /dev/null +++ b/packages/dd-trace/src/appsec/rasp/lfi.js @@ -0,0 +1,112 @@ +'use strict' + +const { fsOperationStart, incomingHttpRequestStart } = require('../channels') +const { storage } = require('../../../../datadog-core') +const { enable: enableFsPlugin, disable: disableFsPlugin, RASP_MODULE } = require('./fs-plugin') +const { FS_OPERATION_PATH } = require('../addresses') +const waf = require('../waf') +const { 
RULE_TYPES, handleResult } = require('./utils') +const { isAbsolute } = require('path') + +let config +let enabled +let analyzeSubscribed + +function enable (_config) { + config = _config + + if (enabled) return + + enabled = true + + incomingHttpRequestStart.subscribe(onFirstReceivedRequest) +} + +function disable () { + if (fsOperationStart.hasSubscribers) fsOperationStart.unsubscribe(analyzeLfi) + if (incomingHttpRequestStart.hasSubscribers) incomingHttpRequestStart.unsubscribe(onFirstReceivedRequest) + + disableFsPlugin(RASP_MODULE) + + enabled = false + analyzeSubscribed = false +} + +function onFirstReceivedRequest () { + // nodejs unsubscribe during publish bug: https://github.com/nodejs/node/pull/55116 + process.nextTick(() => { + incomingHttpRequestStart.unsubscribe(onFirstReceivedRequest) + }) + + enableFsPlugin(RASP_MODULE) + + if (!analyzeSubscribed) { + fsOperationStart.subscribe(analyzeLfi) + analyzeSubscribed = true + } +} + +function analyzeLfi (ctx) { + const store = storage.getStore() + if (!store) return + + const { req, fs, res } = store + if (!req || !fs) return + + getPaths(ctx, fs).forEach(path => { + const persistent = { + [FS_OPERATION_PATH]: path + } + + const result = waf.run({ persistent }, req, RULE_TYPES.LFI) + handleResult(result, req, res, ctx.abortController, config) + }) +} + +function getPaths (ctx, fs) { + // these properties could have String, Buffer, URL, Integer or FileHandle types + const pathArguments = [ + ctx.dest, + ctx.existingPath, + ctx.file, + ctx.newPath, + ctx.oldPath, + ctx.path, + ctx.prefix, + ctx.src, + ctx.target + ] + + return pathArguments + .map(path => pathToStr(path)) + .filter(path => shouldAnalyze(path, fs)) +} + +function pathToStr (path) { + if (!path) return + + if (typeof path === 'string' || + path instanceof String || + path instanceof Buffer || + path instanceof URL) { + return path.toString() + } +} + +function shouldAnalyze (path, fs) { + if (!path) return + + const notExcludedRootOp = 
!fs.opExcluded && fs.root + return notExcludedRootOp && (isAbsolute(path) || path.includes('../') || shouldAnalyzeURLFile(path, fs)) +} + +function shouldAnalyzeURLFile (path, fs) { + if (path.startsWith('file://')) { + return shouldAnalyze(path.substring(7), fs) + } +} + +module.exports = { + enable, + disable +} diff --git a/packages/dd-trace/src/appsec/rasp/sql_injection.js b/packages/dd-trace/src/appsec/rasp/sql_injection.js index b942dd82be5..d4a165d8615 100644 --- a/packages/dd-trace/src/appsec/rasp/sql_injection.js +++ b/packages/dd-trace/src/appsec/rasp/sql_injection.js @@ -1,12 +1,18 @@ 'use strict' -const { pgQueryStart, pgPoolQueryStart, wafRunFinished } = require('../channels') +const { + pgQueryStart, + pgPoolQueryStart, + wafRunFinished, + mysql2OuterQueryStart +} = require('../channels') const { storage } = require('../../../../datadog-core') const addresses = require('../addresses') const waf = require('../waf') const { RULE_TYPES, handleResult } = require('./utils') const DB_SYSTEM_POSTGRES = 'postgresql' +const DB_SYSTEM_MYSQL = 'mysql' const reqQueryMap = new WeakMap() // WeakMap> let config @@ -17,18 +23,32 @@ function enable (_config) { pgQueryStart.subscribe(analyzePgSqlInjection) pgPoolQueryStart.subscribe(analyzePgSqlInjection) wafRunFinished.subscribe(clearQuerySet) + + mysql2OuterQueryStart.subscribe(analyzeMysql2SqlInjection) } function disable () { if (pgQueryStart.hasSubscribers) pgQueryStart.unsubscribe(analyzePgSqlInjection) if (pgPoolQueryStart.hasSubscribers) pgPoolQueryStart.unsubscribe(analyzePgSqlInjection) if (wafRunFinished.hasSubscribers) wafRunFinished.unsubscribe(clearQuerySet) + if (mysql2OuterQueryStart.hasSubscribers) mysql2OuterQueryStart.unsubscribe(analyzeMysql2SqlInjection) +} + +function analyzeMysql2SqlInjection (ctx) { + const query = ctx.sql + if (!query) return + + analyzeSqlInjection(query, DB_SYSTEM_MYSQL, ctx.abortController) } function analyzePgSqlInjection (ctx) { const query = ctx.query?.text if (!query) 
return + analyzeSqlInjection(query, DB_SYSTEM_POSTGRES, ctx.abortController) +} + +function analyzeSqlInjection (query, dbSystem, abortController) { const store = storage.getStore() if (!store) return @@ -39,7 +59,7 @@ function analyzePgSqlInjection (ctx) { let executedQueries = reqQueryMap.get(req) if (executedQueries?.has(query)) return - // Do not waste time executing same query twice + // Do not waste time checking same query twice // This also will prevent double calls in pg.Pool internal queries if (!executedQueries) { executedQueries = new Set() @@ -49,12 +69,12 @@ function analyzePgSqlInjection (ctx) { const persistent = { [addresses.DB_STATEMENT]: query, - [addresses.DB_SYSTEM]: DB_SYSTEM_POSTGRES + [addresses.DB_SYSTEM]: dbSystem } const result = waf.run({ persistent }, req, RULE_TYPES.SQL_INJECTION) - handleResult(result, req, res, ctx.abortController, config) + handleResult(result, req, res, abortController, config) } function hasInputAddress (payload) { diff --git a/packages/dd-trace/src/appsec/rasp/utils.js b/packages/dd-trace/src/appsec/rasp/utils.js index 2a46b76d6e4..c4ee4f55c3f 100644 --- a/packages/dd-trace/src/appsec/rasp/utils.js +++ b/packages/dd-trace/src/appsec/rasp/utils.js @@ -13,7 +13,8 @@ if (abortOnUncaughtException) { const RULE_TYPES = { SSRF: 'ssrf', - SQL_INJECTION: 'sql_injection' + SQL_INJECTION: 'sql_injection', + LFI: 'lfi' } class DatadogRaspAbortError extends Error { diff --git a/packages/dd-trace/src/appsec/recommended.json b/packages/dd-trace/src/appsec/recommended.json index 2bef3a7cae9..158c33a8ccd 100644 --- a/packages/dd-trace/src/appsec/recommended.json +++ b/packages/dd-trace/src/appsec/recommended.json @@ -1,7 +1,7 @@ { "version": "2.2", "metadata": { - "rules_version": "1.13.0" + "rules_version": "1.13.1" }, "rules": [ { @@ -6239,7 +6239,6 @@ { "id": "rasp-930-100", "name": "Local file inclusion exploit", - "enabled": false, "tags": { "type": "lfi", "category": "vulnerability_trigger", @@ -6287,8 +6286,7 @@ }, { 
"id": "rasp-932-100", - "name": "Shell injection exploit", - "enabled": false, + "name": "Command injection exploit", "tags": { "type": "command_injection", "category": "vulnerability_trigger", diff --git a/packages/dd-trace/src/appsec/remote_config/capabilities.js b/packages/dd-trace/src/appsec/remote_config/capabilities.js index f42d7358203..3eda140a986 100644 --- a/packages/dd-trace/src/appsec/remote_config/capabilities.js +++ b/packages/dd-trace/src/appsec/remote_config/capabilities.js @@ -18,6 +18,10 @@ module.exports = { APM_TRACING_CUSTOM_TAGS: 1n << 15n, APM_TRACING_ENABLED: 1n << 19n, ASM_RASP_SQLI: 1n << 21n, + ASM_RASP_LFI: 1n << 22n, ASM_RASP_SSRF: 1n << 23n, - APM_TRACING_SAMPLE_RULES: 1n << 29n + APM_TRACING_SAMPLE_RULES: 1n << 29n, + ASM_ENDPOINT_FINGERPRINT: 1n << 32n, + ASM_NETWORK_FINGERPRINT: 1n << 34n, + ASM_HEADER_FINGERPRINT: 1n << 35n } diff --git a/packages/dd-trace/src/appsec/remote_config/index.js b/packages/dd-trace/src/appsec/remote_config/index.js index b63b3690102..2b7eea57c82 100644 --- a/packages/dd-trace/src/appsec/remote_config/index.js +++ b/packages/dd-trace/src/appsec/remote_config/index.js @@ -75,10 +75,14 @@ function enableWafUpdate (appsecConfig) { rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_RULES, true) rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) rc.updateCapabilities(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, true) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, true) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, true) if (appsecConfig.rasp?.enabled) { rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SQLI, true) rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SSRF, true) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_LFI, true) } // TODO: delete noop handlers and kPreUpdate and replace with batched handlers @@ 
-103,9 +107,13 @@ function disableWafUpdate () { rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_RULES, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_TRUSTED_IPS, false) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, false) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, false) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SQLI, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SSRF, false) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_LFI, false) rc.removeProductHandler('ASM_DATA') rc.removeProductHandler('ASM_DD') diff --git a/packages/dd-trace/src/appsec/reporter.js b/packages/dd-trace/src/appsec/reporter.js index a58335d9ba7..dd2bde9fb06 100644 --- a/packages/dd-trace/src/appsec/reporter.js +++ b/packages/dd-trace/src/appsec/reporter.js @@ -153,7 +153,11 @@ function reportAttack (attackData) { rootSpan.addTags(newTags) } -function reportSchemas (derivatives) { +function isFingerprintDerivative (derivative) { + return derivative.startsWith('_dd.appsec.fp') +} + +function reportDerivatives (derivatives) { if (!derivatives) return const req = storage.getStore()?.req @@ -162,9 +166,12 @@ function reportSchemas (derivatives) { if (!rootSpan) return const tags = {} - for (const [address, value] of Object.entries(derivatives)) { - const gzippedValue = zlib.gzipSync(JSON.stringify(value)) - tags[address] = gzippedValue.toString('base64') + for (let [tag, value] of Object.entries(derivatives)) { + if (!isFingerprintDerivative(tag)) { + const gzippedValue = zlib.gzipSync(JSON.stringify(value)) + value = gzippedValue.toString('base64') + } + tags[tag] = value } rootSpan.addTags(tags) @@ -248,7 +255,7 @@ module.exports = { reportMetrics, reportAttack, reportWafUpdate: incrementWafUpdatesMetric, - 
reportSchemas, + reportDerivatives, finishRequest, setRateLimit, mapHeaderAndTags diff --git a/packages/dd-trace/src/appsec/sdk/track_event.js b/packages/dd-trace/src/appsec/sdk/track_event.js index 61500e2cfbe..36c40093b19 100644 --- a/packages/dd-trace/src/appsec/sdk/track_event.js +++ b/packages/dd-trace/src/appsec/sdk/track_event.js @@ -5,6 +5,7 @@ const { getRootSpan } = require('./utils') const { MANUAL_KEEP } = require('../../../../../ext/tags') const { setUserTags } = require('./set_user') const standalone = require('../standalone') +const waf = require('../waf') function trackUserLoginSuccessEvent (tracer, user, metadata) { // TODO: better user check here and in _setUser() ? @@ -76,6 +77,10 @@ function trackEvent (eventName, fields, sdkMethodName, rootSpan, mode) { rootSpan.addTags(tags) standalone.sample(rootSpan) + + if (['users.login.success', 'users.login.failure'].includes(eventName)) { + waf.run({ persistent: { [`server.business_logic.${eventName}`]: null } }) + } } module.exports = { diff --git a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js index ed946633174..a2dae737a86 100644 --- a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js +++ b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js @@ -93,7 +93,7 @@ class WAFContextWrapper { Reporter.reportAttack(JSON.stringify(result.events)) } - Reporter.reportSchemas(result.derivatives) + Reporter.reportDerivatives(result.derivatives) if (wafRunFinished.hasSubscribers) { wafRunFinished.publish({ payload }) diff --git a/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js b/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js index e7dac1607c8..3027baff50a 100644 --- a/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js +++ b/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js @@ -12,19 +12,7 @@ const { 
TELEMETRY_KNOWN_TESTS_RESPONSE_BYTES } = require('../../ci-visibility/telemetry') -function getNumTests (knownTests) { - let totalNumTests = 0 - - for (const testModule of Object.values(knownTests)) { - for (const testSuite of Object.values(testModule)) { - for (const testList of Object.values(testSuite)) { - totalNumTests += testList.length - } - } - } - - return totalNumTests -} +const { getNumFromKnownTests } = require('../../plugins/util/test') function getKnownTests ({ url, @@ -102,7 +90,7 @@ function getKnownTests ({ try { const { data: { attributes: { tests: knownTests } } } = JSON.parse(res) - const numTests = getNumTests(knownTests) + const numTests = getNumFromKnownTests(knownTests) incrementCountMetric(TELEMETRY_KNOWN_TESTS_RESPONSE_TESTS, {}, numTests) distributionMetric(TELEMETRY_KNOWN_TESTS_RESPONSE_BYTES, {}, res.length) diff --git a/packages/dd-trace/src/ci-visibility/log-submission/log-submission-plugin.js b/packages/dd-trace/src/ci-visibility/log-submission/log-submission-plugin.js new file mode 100644 index 00000000000..aa437f4cd87 --- /dev/null +++ b/packages/dd-trace/src/ci-visibility/log-submission/log-submission-plugin.js @@ -0,0 +1,53 @@ +const Plugin = require('../../plugins/plugin') +const log = require('../../log') + +function getWinstonLogSubmissionParameters (config) { + const { site, service } = config + + const defaultParameters = { + host: `http-intake.logs.${site}`, + path: `/api/v2/logs?ddsource=winston&service=${service}`, + ssl: true, + headers: { + 'DD-API-KEY': process.env.DD_API_KEY + } + } + + if (!process.env.DD_AGENTLESS_LOG_SUBMISSION_URL) { + return defaultParameters + } + + try { + const url = new URL(process.env.DD_AGENTLESS_LOG_SUBMISSION_URL) + return { + host: url.hostname, + port: url.port, + ssl: url.protocol === 'https:', + path: defaultParameters.path, + headers: defaultParameters.headers + } + } catch (e) { + log.error('Could not parse DD_AGENTLESS_LOG_SUBMISSION_URL') + return defaultParameters + } +} + +class 
LogSubmissionPlugin extends Plugin { + static get id () { + return 'log-submission' + } + + constructor (...args) { + super(...args) + + this.addSub('ci:log-submission:winston:configure', (httpClass) => { + this.HttpClass = httpClass + }) + + this.addSub('ci:log-submission:winston:add-transport', (logger) => { + logger.add(new this.HttpClass(getWinstonLogSubmissionParameters(this.config))) + }) + } +} + +module.exports = LogSubmissionPlugin diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index dc5bb524d1a..e827d1b6d0f 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -464,6 +464,7 @@ class Config { this._setValue(defaults, 'appsec.wafTimeout', 5e3) // µs this._setValue(defaults, 'clientIpEnabled', false) this._setValue(defaults, 'clientIpHeader', null) + this._setValue(defaults, 'codeOriginForSpans.enabled', false) this._setValue(defaults, 'dbmPropagationMode', 'disabled') this._setValue(defaults, 'dogstatsd.hostname', '127.0.0.1') this._setValue(defaults, 'dogstatsd.port', '8125') @@ -478,6 +479,7 @@ class Config { this._setValue(defaults, 'gitMetadataEnabled', true) this._setValue(defaults, 'headerTags', []) this._setValue(defaults, 'hostname', '127.0.0.1') + this._setValue(defaults, 'iast.cookieFilterPattern', '.{32,}') this._setValue(defaults, 'iast.deduplicationEnabled', true) this._setValue(defaults, 'iast.enabled', false) this._setValue(defaults, 'iast.maxConcurrentRequests', 2) @@ -498,6 +500,7 @@ class Config { this._setValue(defaults, 'isIntelligentTestRunnerEnabled', false) this._setValue(defaults, 'isManualApiEnabled', false) this._setValue(defaults, 'ciVisibilityTestSessionName', '') + this._setValue(defaults, 'ciVisAgentlessLogSubmissionEnabled', false) this._setValue(defaults, 'logInjection', false) this._setValue(defaults, 'lookup', undefined) this._setValue(defaults, 'memcachedCommandEnabled', false) @@ -571,6 +574,7 @@ class Config { DD_APPSEC_RASP_ENABLED, 
DD_APPSEC_TRACE_RATE_LIMIT, DD_APPSEC_WAF_TIMEOUT, + DD_CODE_ORIGIN_FOR_SPANS_ENABLED, DD_DATA_STREAMS_ENABLED, DD_DBM_PROPAGATION_MODE, DD_DOGSTATSD_HOSTNAME, @@ -581,6 +585,7 @@ class Config { DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED, DD_EXPERIMENTAL_PROFILING_ENABLED, JEST_WORKER_ID, + DD_IAST_COOKIE_FILTER_PATTERN, DD_IAST_DEDUPLICATION_ENABLED, DD_IAST_ENABLED, DD_IAST_MAX_CONCURRENT_REQUESTS, @@ -701,6 +706,7 @@ class Config { this._envUnprocessed['appsec.wafTimeout'] = DD_APPSEC_WAF_TIMEOUT this._setBoolean(env, 'clientIpEnabled', DD_TRACE_CLIENT_IP_ENABLED) this._setString(env, 'clientIpHeader', DD_TRACE_CLIENT_IP_HEADER) + this._setBoolean(env, 'codeOriginForSpans.enabled', DD_CODE_ORIGIN_FOR_SPANS_ENABLED) this._setString(env, 'dbmPropagationMode', DD_DBM_PROPAGATION_MODE) this._setString(env, 'dogstatsd.hostname', DD_DOGSTATSD_HOSTNAME) this._setString(env, 'dogstatsd.port', DD_DOGSTATSD_PORT) @@ -716,6 +722,7 @@ class Config { this._setBoolean(env, 'gitMetadataEnabled', DD_TRACE_GIT_METADATA_ENABLED) this._setArray(env, 'headerTags', DD_TRACE_HEADER_TAGS) this._setString(env, 'hostname', coalesce(DD_AGENT_HOST, DD_TRACE_AGENT_HOSTNAME)) + this._setString(env, 'iast.cookieFilterPattern', DD_IAST_COOKIE_FILTER_PATTERN) this._setBoolean(env, 'iast.deduplicationEnabled', DD_IAST_DEDUPLICATION_ENABLED) this._setBoolean(env, 'iast.enabled', DD_IAST_ENABLED) this._setValue(env, 'iast.maxConcurrentRequests', maybeInt(DD_IAST_MAX_CONCURRENT_REQUESTS)) @@ -867,6 +874,7 @@ class Config { this._optsUnprocessed['appsec.wafTimeout'] = options.appsec.wafTimeout this._setBoolean(opts, 'clientIpEnabled', options.clientIpEnabled) this._setString(opts, 'clientIpHeader', options.clientIpHeader) + this._setBoolean(opts, 'codeOriginForSpans.enabled', options.codeOriginForSpans?.enabled) this._setString(opts, 'dbmPropagationMode', options.dbmPropagationMode) if (options.dogstatsd) { this._setString(opts, 'dogstatsd.hostname', options.dogstatsd.hostname) @@ -884,6 +892,7 @@ 
class Config { this._optsUnprocessed.flushMinSpans = options.flushMinSpans this._setArray(opts, 'headerTags', options.headerTags) this._setString(opts, 'hostname', options.hostname) + this._setString(opts, 'iast.cookieFilterPattern', options.iast?.cookieFilterPattern) this._setBoolean(opts, 'iast.deduplicationEnabled', options.iast && options.iast.deduplicationEnabled) this._setBoolean(opts, 'iast.enabled', options.iast && (options.iast === true || options.iast.enabled === true)) @@ -1035,7 +1044,8 @@ class Config { DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED, DD_CIVISIBILITY_FLAKY_RETRY_ENABLED, DD_CIVISIBILITY_FLAKY_RETRY_COUNT, - DD_TEST_SESSION_NAME + DD_TEST_SESSION_NAME, + DD_AGENTLESS_LOG_SUBMISSION_ENABLED } = process.env if (DD_CIVISIBILITY_AGENTLESS_URL) { @@ -1052,6 +1062,7 @@ class Config { this._setBoolean(calc, 'isIntelligentTestRunnerEnabled', isTrue(this._isCiVisibilityItrEnabled())) this._setBoolean(calc, 'isManualApiEnabled', !isFalse(this._isCiVisibilityManualApiEnabled())) this._setString(calc, 'ciVisibilityTestSessionName', DD_TEST_SESSION_NAME) + this._setBoolean(calc, 'ciVisAgentlessLogSubmissionEnabled', isTrue(DD_AGENTLESS_LOG_SUBMISSION_ENABLED)) } this._setString(calc, 'dogstatsd.hostname', this._getHostname()) this._setBoolean(calc, 'isGitUploadEnabled', diff --git a/packages/dd-trace/src/datastreams/schemas/schema_builder.js b/packages/dd-trace/src/datastreams/schemas/schema_builder.js index a65863d4d87..092f5b45101 100644 --- a/packages/dd-trace/src/datastreams/schemas/schema_builder.js +++ b/packages/dd-trace/src/datastreams/schemas/schema_builder.js @@ -4,13 +4,36 @@ const { Schema } = require('./schema') const maxDepth = 10 const maxProperties = 1000 -const CACHE = new LRUCache({ max: 32 }) +const CACHE = new LRUCache({ max: 256 }) class SchemaBuilder { constructor (iterator) { this.schema = new OpenApiSchema() this.iterator = iterator - this.proerties = 0 + this.properties = 0 + } + + static getCache () { + return CACHE + } + + 
static getSchemaDefinition (schema) { + const noNones = convertToJsonCompatible(schema) + const definition = jsonStringify(noNones) + const id = fnv64(Buffer.from(definition, 'utf-8')).toString() + return new Schema(definition, id) + } + + static getSchema (schemaName, iterator, builder) { + if (!CACHE.has(schemaName)) { + CACHE.set(schemaName, (builder ?? new SchemaBuilder(iterator)).build()) + } + return CACHE.get(schemaName) + } + + build () { + this.iterator.iterateOverSchema(this) + return this.schema } addProperty (schemaName, fieldName, isArray, type, description, ref, format, enumValues) { @@ -26,14 +49,6 @@ class SchemaBuilder { return true } - build () { - this.iterator.iterateOverSchema(this) - const noNones = convertToJsonCompatible(this.schema) - const definition = jsonStringify(noNones) - const id = fnv64(Buffer.from(definition, 'utf-8')).toString() - return new Schema(definition, id) - } - shouldExtractSchema (schemaName, depth) { if (depth > maxDepth) { return false @@ -44,13 +59,6 @@ class SchemaBuilder { this.schema.components.schemas[schemaName] = new OpenApiSchema.SCHEMA() return true } - - static getSchema (schemaName, iterator) { - if (!CACHE.has(schemaName)) { - CACHE.set(schemaName, new SchemaBuilder(iterator).build()) - } - return CACHE.get(schemaName) - } } class OpenApiSchema { diff --git a/packages/dd-trace/src/debugger/devtools_client/config.js b/packages/dd-trace/src/debugger/devtools_client/config.js index 3e7c19715e1..838a1a76cca 100644 --- a/packages/dd-trace/src/debugger/devtools_client/config.js +++ b/packages/dd-trace/src/debugger/devtools_client/config.js @@ -7,6 +7,8 @@ const log = require('../../log') const config = module.exports = { runtimeId: parentConfig.tags['runtime-id'], service: parentConfig.service, + commitSHA: parentConfig.commitSHA, + repositoryUrl: parentConfig.repositoryUrl, parentThreadId } diff --git a/packages/dd-trace/src/debugger/devtools_client/index.js 
b/packages/dd-trace/src/debugger/devtools_client/index.js index f4789ea65a8..aa19c14ef64 100644 --- a/packages/dd-trace/src/debugger/devtools_client/index.js +++ b/packages/dd-trace/src/debugger/devtools_client/index.js @@ -3,8 +3,10 @@ const { randomUUID } = require('crypto') const { breakpoints } = require('./state') const session = require('./session') +const { getLocalStateForCallFrame } = require('./snapshot') const send = require('./send') -const { ackEmitting } = require('./status') +const { getScriptUrlFromId } = require('./state') +const { ackEmitting, ackError } = require('./status') const { parentThreadId } = require('./config') const log = require('../../log') const { version } = require('../../../../../package.json') @@ -19,9 +21,33 @@ const threadName = parentThreadId === 0 ? 'MainThread' : `WorkerThread:${parentT session.on('Debugger.paused', async ({ params }) => { const start = process.hrtime.bigint() const timestamp = Date.now() - const probes = params.hitBreakpoints.map((id) => breakpoints.get(id)) + + let captureSnapshotForProbe = null + let maxReferenceDepth, maxLength + const probes = params.hitBreakpoints.map((id) => { + const probe = breakpoints.get(id) + if (probe.captureSnapshot) { + captureSnapshotForProbe = probe + maxReferenceDepth = highestOrUndefined(probe.capture.maxReferenceDepth, maxReferenceDepth) + maxLength = highestOrUndefined(probe.capture.maxLength, maxLength) + } + return probe + }) + + let processLocalState + if (captureSnapshotForProbe !== null) { + try { + // TODO: Create unique states for each affected probe based on that probes unique `capture` settings (DEBUG-2863) + processLocalState = await getLocalStateForCallFrame(params.callFrames[0], { maxReferenceDepth, maxLength }) + } catch (err) { + // TODO: This error is not tied to a specific probe, but to all probes with `captureSnapshot: true`. + // However, in 99,99% of cases, there will be just a single probe, so I guess this simplification is ok? 
+ ackError(err, captureSnapshotForProbe) // TODO: Ok to continue after sending ackError? + } + } + await session.post('Debugger.resume') - const diff = process.hrtime.bigint() - start // TODO: Should this be recored as telemetry? + const diff = process.hrtime.bigint() - start // TODO: Record as telemetry (DEBUG-2858) log.debug(`Finished processing breakpoints - main thread paused for: ${Number(diff) / 1000000} ms`) @@ -35,7 +61,18 @@ session.on('Debugger.paused', async ({ params }) => { thread_name: threadName } - // TODO: Send multiple probes in one HTTP request as an array + const stack = params.callFrames.map((frame) => { + let fileName = getScriptUrlFromId(frame.location.scriptId) + if (fileName.startsWith('file://')) fileName = fileName.substr(7) // TODO: This might not be required + return { + fileName, + function: frame.functionName, + lineNumber: frame.location.lineNumber + 1, // Beware! lineNumber is zero-indexed + columnNumber: frame.location.columnNumber + 1 // Beware! columnNumber is zero-indexed + } + }) + + // TODO: Send multiple probes in one HTTP request as an array (DEBUG-2848) for (const probe of probes) { const snapshot = { id: randomUUID(), @@ -45,13 +82,27 @@ session.on('Debugger.paused', async ({ params }) => { version: probe.version, location: probe.location }, + stack, language: 'javascript' } - // TODO: Process template + if (probe.captureSnapshot) { + const state = processLocalState() + if (state) { + snapshot.captures = { + lines: { [probe.location.lines[0]]: { locals: state } } + } + } + } + + // TODO: Process template (DEBUG-2628) send(probe.template, logger, snapshot, (err) => { if (err) log.error(err) else ackEmitting(probe) }) } }) + +function highestOrUndefined (num, max) { + return num === undefined ? max : Math.max(num, max ??
0) +} diff --git a/packages/dd-trace/src/debugger/devtools_client/remote_config.js b/packages/dd-trace/src/debugger/devtools_client/remote_config.js index 25ac070cc9f..8a7d7386e33 100644 --- a/packages/dd-trace/src/debugger/devtools_client/remote_config.js +++ b/packages/dd-trace/src/debugger/devtools_client/remote_config.js @@ -1,7 +1,7 @@ 'use strict' const { workerData: { rcPort } } = require('node:worker_threads') -const { getScript, probes, breakpoints } = require('./state') +const { findScriptFromPartialPath, probes, breakpoints } = require('./state') const session = require('./session') const { ackReceived, ackInstalled, ackError } = require('./status') const log = require('../../log') @@ -92,7 +92,7 @@ async function processMsg (action, probe) { await addBreakpoint(probe) break case 'modify': - // TODO: Can we modify in place? + // TODO: Modify existing probe instead of removing it (DEBUG-2817) await removeBreakpoint(probe) await addBreakpoint(probe) break @@ -114,13 +114,13 @@ async function addBreakpoint (probe) { const line = Number(probe.where.lines[0]) // Tracer doesn't support multiple-line breakpoints // Optimize for sending data to /debugger/v1/input endpoint - probe.location = { file, lines: [line] } + probe.location = { file, lines: [String(line)] } delete probe.where // TODO: Inbetween `await session.post('Debugger.enable')` and here, the scripts are parsed and cached. // Maybe there's a race condition here or maybe we're guraenteed that `await session.post('Debugger.enable')` will // not continue untill all scripts have been parsed? 
- const script = getScript(file) + const script = findScriptFromPartialPath(file) if (!script) throw new Error(`No loaded script found for ${file} (probe: ${probe.id}, version: ${probe.version})`) const [path, scriptId] = script diff --git a/packages/dd-trace/src/debugger/devtools_client/send.js b/packages/dd-trace/src/debugger/devtools_client/send.js index 709e14d52b7..593c3ea235d 100644 --- a/packages/dd-trace/src/debugger/devtools_client/send.js +++ b/packages/dd-trace/src/debugger/devtools_client/send.js @@ -1,23 +1,36 @@ 'use strict' +const { hostname: getHostname } = require('os') +const { stringify } = require('querystring') + const config = require('./config') const request = require('../../exporters/common/request') +const { GIT_COMMIT_SHA, GIT_REPOSITORY_URL } = require('../../plugins/util/tags') module.exports = send const ddsource = 'dd_debugger' +const hostname = getHostname() const service = config.service +const ddtags = [ + [GIT_COMMIT_SHA, config.commitSHA], + [GIT_REPOSITORY_URL, config.repositoryUrl] +].map((pair) => pair.join(':')).join(',') + +const path = `/debugger/v1/input?${stringify({ ddtags })}` + function send (message, logger, snapshot, cb) { const opts = { method: 'POST', url: config.url, - path: '/debugger/v1/input', + path, headers: { 'Content-Type': 'application/json; charset=utf-8' } } const payload = { ddsource, + hostname, service, message, logger, diff --git a/packages/dd-trace/src/debugger/devtools_client/snapshot/collector.js b/packages/dd-trace/src/debugger/devtools_client/snapshot/collector.js new file mode 100644 index 00000000000..0a8848ce5e5 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/snapshot/collector.js @@ -0,0 +1,153 @@ +'use strict' + +const session = require('../session') + +const LEAF_SUBTYPES = new Set(['date', 'regexp']) +const ITERABLE_SUBTYPES = new Set(['map', 'set', 'weakmap', 'weakset']) + +module.exports = { + getRuntimeObject: getObject +} + +// TODO: Can we speed up thread pause 
time by calling multiple Runtime.getProperties in parallel when possible? +// The most simple solution would be to switch from an async/await approach to a callback based approach, in which case +// each lookup will just finish in its own time and traverse the child nodes when the event loop allows it. +// Alternatively, use `Promise.all` or something like that, but the code would probably be more complex. + +async function getObject (objectId, maxDepth, depth = 0) { + const { result, privateProperties } = await session.post('Runtime.getProperties', { + objectId, + ownProperties: true // exclude inherited properties + }) + + if (privateProperties) result.push(...privateProperties) + + return traverseGetPropertiesResult(result, maxDepth, depth) +} + +async function traverseGetPropertiesResult (props, maxDepth, depth) { + // TODO: Decide if we should filter out non-enumerable properties or not: + // props = props.filter((e) => e.enumerable) + + if (depth >= maxDepth) return props + + for (const prop of props) { + if (prop.value === undefined) continue + const { value: { type, objectId, subtype } } = prop + if (type === 'object') { + if (objectId === undefined) continue // if `subtype` is "null" + if (LEAF_SUBTYPES.has(subtype)) continue // don't waste time with these subtypes + prop.value.properties = await getObjectProperties(subtype, objectId, maxDepth, depth) + } else if (type === 'function') { + prop.value.properties = await getFunctionProperties(objectId, maxDepth, depth + 1) + } + } + + return props +} + +async function getObjectProperties (subtype, objectId, maxDepth, depth) { + if (ITERABLE_SUBTYPES.has(subtype)) { + return getIterable(objectId, maxDepth, depth) + } else if (subtype === 'promise') { + return getInternalProperties(objectId, maxDepth, depth) + } else if (subtype === 'proxy') { + return getProxy(objectId, maxDepth, depth) + } else if (subtype === 'arraybuffer') { + return getArrayBuffer(objectId, maxDepth, depth) + } else { + return
getObject(objectId, maxDepth, depth + 1) + } +} + +// TODO: The following extra information from `internalProperties` might be relevant to include for functions: +// - Bound function: `[[TargetFunction]]`, `[[BoundThis]]` and `[[BoundArgs]]` +// - Non-bound function: `[[FunctionLocation]]`, and `[[Scopes]]` +async function getFunctionProperties (objectId, maxDepth, depth) { + let { result } = await session.post('Runtime.getProperties', { + objectId, + ownProperties: true // exclude inherited properties + }) + + // For legacy reasons (I assume) functions has a `prototype` property besides the internal `[[Prototype]]` + result = result.filter(({ name }) => name !== 'prototype') + + return traverseGetPropertiesResult(result, maxDepth, depth) +} + +async function getIterable (objectId, maxDepth, depth) { + const { internalProperties } = await session.post('Runtime.getProperties', { + objectId, + ownProperties: true // exclude inherited properties + }) + + let entry = internalProperties[1] + if (entry.name !== '[[Entries]]') { + // Currently `[[Entries]]` is the last of 2 elements, but in case this ever changes, fall back to searching + entry = internalProperties.findLast(({ name }) => name === '[[Entries]]') + } + + // Skip the `[[Entries]]` level and go directly to the content of the iterable + const { result } = await session.post('Runtime.getProperties', { + objectId: entry.value.objectId, + ownProperties: true // exclude inherited properties + }) + + return traverseGetPropertiesResult(result, maxDepth, depth) +} + +async function getInternalProperties (objectId, maxDepth, depth) { + const { internalProperties } = await session.post('Runtime.getProperties', { + objectId, + ownProperties: true // exclude inherited properties + }) + + // We want all internal properties except the prototype + const props = internalProperties.filter(({ name }) => name !== '[[Prototype]]') + + return traverseGetPropertiesResult(props, maxDepth, depth) +} + +async function getProxy 
(objectId, maxDepth, depth) { + const { internalProperties } = await session.post('Runtime.getProperties', { + objectId, + ownProperties: true // exclude inherited properties + }) + + // TODO: If we do not skip the proxy wrapper, we can add a `revoked` boolean + let entry = internalProperties[1] + if (entry.name !== '[[Target]]') { + // Currently `[[Target]]` is the last of 2 elements, but in case this ever changes, fall back to searching + entry = internalProperties.findLast(({ name }) => name === '[[Target]]') + } + + // Skip the `[[Target]]` level and go directly to the target of the Proxy + const { result } = await session.post('Runtime.getProperties', { + objectId: entry.value.objectId, + ownProperties: true // exclude inherited properties + }) + + return traverseGetPropertiesResult(result, maxDepth, depth) +} + +// Support for ArrayBuffer is a bit tricky because the internal structure stored in `internalProperties` is not +// documented and is not straightforward. E.g. ArrayBuffer(3) will internally contain both Int8Array(3) and +// UInt8Array(3), whereas ArrayBuffer(8) internally contains both Int8Array(8), Uint8Array(8), Int16Array(4), and +// Int32Array(2) - all representing the same data in different ways. +async function getArrayBuffer (objectId, maxDepth, depth) { + const { internalProperties } = await session.post('Runtime.getProperties', { + objectId, + ownProperties: true // exclude inherited properties + }) + + // Use Uint8 to make it easy to convert to a string later.
+ const entry = internalProperties.find(({ name }) => name === '[[Uint8Array]]') + + // Skip the `[[Uint8Array]]` level and go directly to the content of the ArrayBuffer + const { result } = await session.post('Runtime.getProperties', { + objectId: entry.value.objectId, + ownProperties: true // exclude inherited properties + }) + + return traverseGetPropertiesResult(result, maxDepth, depth) +} diff --git a/packages/dd-trace/src/debugger/devtools_client/snapshot/index.js b/packages/dd-trace/src/debugger/devtools_client/snapshot/index.js new file mode 100644 index 00000000000..add097ac755 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/snapshot/index.js @@ -0,0 +1,30 @@ +'use strict' + +const { getRuntimeObject } = require('./collector') +const { processRawState } = require('./processor') + +const DEFAULT_MAX_REFERENCE_DEPTH = 3 +const DEFAULT_MAX_LENGTH = 255 + +module.exports = { + getLocalStateForCallFrame +} + +async function getLocalStateForCallFrame ( + callFrame, + { maxReferenceDepth = DEFAULT_MAX_REFERENCE_DEPTH, maxLength = DEFAULT_MAX_LENGTH } = {} +) { + const rawState = [] + let processedState = null + + for (const scope of callFrame.scopeChain) { + if (scope.type === 'global') continue // The global scope is too noisy + rawState.push(...await getRuntimeObject(scope.object.objectId, maxReferenceDepth)) + } + + // Deplay calling `processRawState` so the caller gets a chance to resume the main thread before processing `rawState` + return () => { + processedState = processedState ?? 
processRawState(rawState, maxLength) + return processedState + } +} diff --git a/packages/dd-trace/src/debugger/devtools_client/snapshot/processor.js b/packages/dd-trace/src/debugger/devtools_client/snapshot/processor.js new file mode 100644 index 00000000000..2cac9ef0b1c --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/snapshot/processor.js @@ -0,0 +1,241 @@ +'use strict' + +module.exports = { + processRawState: processProperties +} + +// Matches classes in source code, no matter how it's written: +// - Named: class MyClass {} +// - Anonymous: class {} +// - Named, with odd whitespace: class\n\t MyClass\n{} +// - Anonymous, with odd whitespace: class\n{} +const CLASS_REGEX = /^class\s([^{]*)/ + +function processProperties (props, maxLength) { + const result = {} + + for (const prop of props) { + // TODO: Hack to avoid periods in keys, as EVP doesn't support that. A better solution can be implemented later + result[prop.name.replaceAll('.', '_')] = getPropertyValue(prop, maxLength) + } + + return result +} + +function getPropertyValue (prop, maxLength) { + // Special case for getters and setters which does not have a value property + if ('get' in prop) { + const hasGet = prop.get.type !== 'undefined' + const hasSet = prop.set.type !== 'undefined' + if (hasGet && hasSet) return { type: 'getter/setter' } + if (hasGet) return { type: 'getter' } + if (hasSet) return { type: 'setter' } + } + + switch (prop.value?.type) { + case 'object': + return getObjectValue(prop.value, maxLength) + case 'function': + return toFunctionOrClass(prop.value, maxLength) + case undefined: // TODO: Add test for when a prop has no value. I think it's if it's defined after the breakpoint? 
+ case 'undefined': + return { type: 'undefined' } + case 'string': + return toString(prop.value.value, maxLength) + case 'number': + return { type: 'number', value: prop.value.description } // use `description` to get it as string + case 'boolean': + return { type: 'boolean', value: prop.value.value === true ? 'true' : 'false' } + case 'symbol': + return { type: 'symbol', value: prop.value.description } + case 'bigint': + return { type: 'bigint', value: prop.value.description.slice(0, -1) } // remove trailing `n` + default: + // As of this writing, the Chrome DevTools Protocol doesn't allow any other types than the ones listed above, but + // in the future new ones might be added. + return { type: prop.value.type, notCapturedReason: 'Unsupported property type' } + } +} + +function getObjectValue (obj, maxLength) { + switch (obj.subtype) { + case undefined: + return toObject(obj.className, obj.properties, maxLength) + case 'array': + return toArray(obj.className, obj.properties, maxLength) + case 'null': + return { type: 'null', isNull: true } + // case 'node': // TODO: What does this subtype represent? + case 'regexp': + return { type: obj.className, value: obj.description } + case 'date': + // TODO: This loses millisecond resolution, as that's not retained in the `.toString()` representation contained + // in the `description` field. Unfortunately that's all we get from the Chrome DevTools Protocol.
+ return { type: obj.className, value: `${new Date(obj.description).toISOString().slice(0, -5)}Z` } + case 'map': + return toMap(obj.className, obj.properties, maxLength) + case 'set': + return toSet(obj.className, obj.properties, maxLength) + case 'weakmap': + return toMap(obj.className, obj.properties, maxLength) + case 'weakset': + return toSet(obj.className, obj.properties, maxLength) + // case 'iterator': // TODO: I've not been able to trigger this subtype + case 'generator': + // Use `subtype` instead of `className` to make it obvious it's a generator + return toObject(obj.subtype, obj.properties, maxLength) + case 'error': + // TODO: Convert stack trace to array to avoid string truncation or disable truncation in this case? + return toObject(obj.className, obj.properties, maxLength) + case 'proxy': + // Use `description` instead of `className` as the `type` to get type of target object (`Proxy(Error)` vs `proxy`) + return toObject(obj.description, obj.properties, maxLength) + case 'promise': + return toObject(obj.className, obj.properties, maxLength) + case 'typedarray': + return toArray(obj.className, obj.properties, maxLength) + case 'arraybuffer': + return toArrayBuffer(obj.className, obj.properties, maxLength) + // case 'dataview': // TODO: Looks like the internal ArrayBuffer is only accessible via the `buffer` getter + // case 'webassemblymemory': // TODO: Looks like the internal ArrayBuffer is only accessible via the `buffer` getter + // case 'wasmvalue': // TODO: I've not been able to trigger this subtype + default: + // As of this writing, the Chrome DevTools Protocol doesn't allow any other subtypes than the ones listed above, + // but in the future new ones might be added.
+ return { type: obj.subtype, notCapturedReason: 'Unsupported object type' } + } +} + +function toFunctionOrClass (value, maxLength) { + const classMatch = value.description.match(CLASS_REGEX) + + if (classMatch === null) { + // This is a function + // TODO: Would it make sense to detect if it's an arrow function or not? + return toObject(value.className, value.properties, maxLength) + } else { + // This is a class + const className = classMatch[1].trim() + return { type: className ? `class ${className}` : 'class' } + } +} + +function toString (str, maxLength) { + const size = str.length + + if (size <= maxLength) { + return { type: 'string', value: str } + } + + return { + type: 'string', + value: str.substr(0, maxLength), + truncated: true, + size + } +} + +function toObject (type, props, maxLength) { + if (props === undefined) return notCapturedDepth(type) + return { type, fields: processProperties(props, maxLength) } +} + +function toArray (type, elements, maxLength) { + if (elements === undefined) return notCapturedDepth(type) + + // Perf: Create array of expected size in advance (expect that it contains only one non-enumrable element) + const expectedLength = elements.length - 1 + const result = { type, elements: new Array(expectedLength) } + + let i = 0 + for (const elm of elements) { + if (elm.enumerable === false) continue // the value of the `length` property should not be part of the array + result.elements[i++] = getPropertyValue(elm, maxLength) + } + + // Safe-guard in case there were more than one non-enumerable element + if (i < expectedLength) result.elements.length = i + + return result +} + +function toMap (type, pairs, maxLength) { + if (pairs === undefined) return notCapturedDepth(type) + + // Perf: Create array of expected size in advance (expect that it contains only one non-enumrable element) + const expectedLength = pairs.length - 1 + const result = { type, entries: new Array(expectedLength) } + + let i = 0 + for (const pair of pairs) { + if 
(pair.enumerable === false) continue // the value of the `length` property should not be part of the map + // The following code is based on assumptions made when researching the output of the Chrome DevTools Protocol. + // There doesn't seem to be any documentation to back it up: + // + // `pair.value` is a special wrapper-object with subtype `internal#entry`. This can be skipped and we can go + // directly to its children, of which there will always be exactly two, the first containing the key, and the + // second containing the value of this entry of the Map. + const key = getPropertyValue(pair.value.properties[0], maxLength) + const val = getPropertyValue(pair.value.properties[1], maxLength) + result.entries[i++] = [key, val] + } + + // Safe-guard in case there were more than one non-enumerable element + if (i < expectedLength) result.entries.length = i + + return result +} + +function toSet (type, values, maxLength) { + if (values === undefined) return notCapturedDepth(type) + + // Perf: Create array of expected size in advance (expect that it contains only one non-enumrable element) + const expectedLength = values.length - 1 + const result = { type, elements: new Array(expectedLength) } + + let i = 0 + for (const value of values) { + if (value.enumerable === false) continue // the value of the `length` property should not be part of the set + // The following code is based on assumptions made when researching the output of the Chrome DevTools Protocol. + // There doesn't seem to be any documentation to back it up: + // + // `value.value` is a special wrapper-object with subtype `internal#entry`. This can be skipped and we can go + // directly to its children, of which there will always be exactly one, which contain the actual value in this entry + // of the Set. 
+ result.elements[i++] = getPropertyValue(value.value.properties[0], maxLength) + } + + // Safe-guard in case there were more than one non-enumerable element + if (i < expectedLength) result.elements.length = i + + return result +} + +function toArrayBuffer (type, bytes, maxLength) { + if (bytes === undefined) return notCapturedDepth(type) + + const size = bytes.length + + if (size > maxLength) { + return { + type, + value: arrayBufferToString(bytes, maxLength), + truncated: true, + size: bytes.length + } + } else { + return { type, value: arrayBufferToString(bytes, size) } + } +} + +function arrayBufferToString (bytes, size) { + const buf = Buffer.allocUnsafe(size) + for (let i = 0; i < size; i++) { + buf[i] = bytes[i].value.value + } + return buf.toString() +} + +function notCapturedDepth (type) { + return { type, notCapturedReason: 'depth' } +} diff --git a/packages/dd-trace/src/debugger/devtools_client/state.js b/packages/dd-trace/src/debugger/devtools_client/state.js index 316841667fb..8be9c808369 100644 --- a/packages/dd-trace/src/debugger/devtools_client/state.js +++ b/packages/dd-trace/src/debugger/devtools_client/state.js @@ -2,7 +2,8 @@ const session = require('./session') -const scripts = [] +const scriptIds = [] +const scriptUrls = new Map() module.exports = { probes: new Map(), @@ -25,10 +26,14 @@ module.exports = { * @param {string} path * @returns {[string, string] | undefined} */ - getScript (path) { - return scripts + findScriptFromPartialPath (path) { + return scriptIds .filter(([url]) => url.endsWith(path)) .sort(([a], [b]) => a.length - b.length)[0] + }, + + getScriptUrlFromId (id) { + return scriptUrls.get(id) } } @@ -41,7 +46,8 @@ module.exports = { // - `` - Not sure what this is, but should just be ignored // TODO: Event fired for all files, every time debugger is enabled. 
So when we disable it, we need to reset the state session.on('Debugger.scriptParsed', ({ params }) => { + scriptUrls.set(params.scriptId, params.url) if (params.url.startsWith('file:')) { - scripts.push([params.url, params.scriptId]) + scriptIds.push([params.url, params.scriptId]) } }) diff --git a/packages/dd-trace/src/exporters/common/request.js b/packages/dd-trace/src/exporters/common/request.js index 6823119c0d8..ab8b697eef6 100644 --- a/packages/dd-trace/src/exporters/common/request.js +++ b/packages/dd-trace/src/exporters/common/request.js @@ -6,10 +6,9 @@ const { Readable } = require('stream') const http = require('http') const https = require('https') -// eslint-disable-next-line n/no-deprecated-api -const { parse: urlParse } = require('url') const zlib = require('zlib') +const { urlToHttpOptions } = require('./url-to-http-options-polyfill') const docker = require('./docker') const { httpAgent, httpsAgent } = require('./agents') const { storage } = require('../../../../datadog-core') @@ -20,39 +19,14 @@ const containerId = docker.id() let activeRequests = 0 -// TODO: Replace with `url.urlToHttpOptions` when supported by all versions -function urlToOptions (url) { - const agent = url.agent || http.globalAgent - const options = { - protocol: url.protocol || agent.protocol, - hostname: typeof url.hostname === 'string' && url.hostname.startsWith('[') - ? url.hostname.slice(1, -1) - : url.hostname || - url.host || - 'localhost', - hash: url.hash, - search: url.search, - pathname: url.pathname, - path: `${url.pathname || ''}${url.search || ''}`, - href: url.href - } - if (url.port !== '') { - options.port = Number(url.port) - } - if (url.username || url.password) { - options.auth = `${url.username}:${url.password}` - } - return options -} +function parseUrl (urlObjOrString) { + if (typeof urlObjOrString === 'object') return urlToHttpOptions(urlObjOrString) -function fromUrlString (urlString) { - const url = typeof urlToHttpOptions === 'function' - ? 
urlToOptions(new URL(urlString)) - : urlParse(urlString) + const url = urlToHttpOptions(new URL(urlObjOrString)) - // Add the 'hostname' back if we're using named pipes - if (url.protocol === 'unix:' && url.host === '.') { - const udsPath = urlString.replace(/^unix:/, '') + // Special handling if we're using named pipes on Windows + if (url.protocol === 'unix:' && url.hostname === '.') { + const udsPath = urlObjOrString.slice(5) url.path = udsPath url.pathname = udsPath } @@ -66,7 +40,7 @@ function request (data, options, callback) { } if (options.url) { - const url = typeof options.url === 'object' ? urlToOptions(options.url) : fromUrlString(options.url) + const url = parseUrl(options.url) if (url.protocol === 'unix:') { options.socketPath = url.pathname } else { diff --git a/packages/dd-trace/src/exporters/common/url-to-http-options-polyfill.js b/packages/dd-trace/src/exporters/common/url-to-http-options-polyfill.js new file mode 100644 index 00000000000..4ba6b337b08 --- /dev/null +++ b/packages/dd-trace/src/exporters/common/url-to-http-options-polyfill.js @@ -0,0 +1,31 @@ +'use strict' + +const { urlToHttpOptions } = require('url') + +// TODO: Remove `urlToHttpOptions` polyfill once we drop support for the older Cypress versions that uses a built-in +// version of Node.js doesn't include that function. +module.exports = { + urlToHttpOptions: urlToHttpOptions ?? function (url) { + const { hostname, pathname, port, username, password, search } = url + const options = { + __proto__: null, + ...url, // In case the url object was extended by the user. + protocol: url.protocol, + hostname: typeof hostname === 'string' && hostname.startsWith('[') + ? 
hostname.slice(1, -1) + : hostname, + hash: url.hash, + search, + pathname, + path: `${pathname || ''}${search || ''}`, + href: url.href + } + if (port !== '') { + options.port = Number(port) + } + if (username || password) { + options.auth = `${decodeURIComponent(username)}:${decodeURIComponent(password)}` + } + return options + } +} diff --git a/packages/dd-trace/src/payload-tagging/index.js b/packages/dd-trace/src/payload-tagging/index.js index c7f5dd19d30..71183443443 100644 --- a/packages/dd-trace/src/payload-tagging/index.js +++ b/packages/dd-trace/src/payload-tagging/index.js @@ -5,7 +5,7 @@ const { PAYLOAD_TAG_RESPONSE_PREFIX } = require('../constants') -const jsonpath = require('jsonpath-plus').JSONPath +const jsonpath = require('./jsonpath-plus.js').JSONPath const { tagsFromObject } = require('./tagging') diff --git a/packages/dd-trace/src/payload-tagging/jsonpath-plus.js b/packages/dd-trace/src/payload-tagging/jsonpath-plus.js new file mode 100644 index 00000000000..85249b8210d --- /dev/null +++ b/packages/dd-trace/src/payload-tagging/jsonpath-plus.js @@ -0,0 +1,2094 @@ +'use strict'; + +// NOTE(bengl): This file is taken directly from jsonpath-plus@10.0.0 +// +// https://github.com/JSONPath-Plus/JSONPath/blob/a04dcbac760fed48760b09f387874a36f289c3f3/dist/index-node-cjs.cjs +// +// The only changes are: +// - Replace Object.hasOwn with polyfill +// +// This vendoring-and-editing was done to support usage on Node.js 16.0.0, so +// once support for that release line has ended, this can be replaced with a +// direct dependency on jsonpath-plus@^10. See the PR that introduced this file +// for details. More explicitly as a searchable to-do: +// +// TODO(bengl): Replace this with a direct dependency on jsonpath-plus@^10 when +// we drop support for Node 16. 
+ +// NOTE(bengl): Here is the license as distributed with jsonpath-plus@10: +/* +MIT License + +Copyright (c) 2011-2019 Stefan Goessner, Subbu Allamaraju, Mike Brevoort, +Robert Krahn, Brett Zamir, Richard Schneider + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ + +const hasOwn = Object.hasOwn || ((obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)); + +var vm = require('vm'); + +/** + * @implements {IHooks} + */ +class Hooks { + /** + * @callback HookCallback + * @this {*|Jsep} this + * @param {Jsep} env + * @returns: void + */ + /** + * Adds the given callback to the list of callbacks for the given hook. + * + * The callback will be invoked when the hook it is registered for is run. + * + * One callback function can be registered to multiple hooks and the same hook multiple times. 
+ * + * @param {string|object} name The name of the hook, or an object of callbacks keyed by name + * @param {HookCallback|boolean} callback The callback function which is given environment variables. + * @param {?boolean} [first=false] Will add the hook to the top of the list (defaults to the bottom) + * @public + */ + add(name, callback, first) { + if (typeof arguments[0] != 'string') { + // Multiple hook callbacks, keyed by name + for (let name in arguments[0]) { + this.add(name, arguments[0][name], arguments[1]); + } + } else { + (Array.isArray(name) ? name : [name]).forEach(function (name) { + this[name] = this[name] || []; + if (callback) { + this[name][first ? 'unshift' : 'push'](callback); + } + }, this); + } + } + + /** + * Runs a hook invoking all registered callbacks with the given environment variables. + * + * Callbacks will be invoked synchronously and in the order in which they were registered. + * + * @param {string} name The name of the hook. + * @param {Object} env The environment variables of the hook passed to all callbacks registered. + * @public + */ + run(name, env) { + this[name] = this[name] || []; + this[name].forEach(function (callback) { + callback.call(env && env.context ? env.context : env, env); + }); + } +} + +/** + * @implements {IPlugins} + */ +class Plugins { + constructor(jsep) { + this.jsep = jsep; + this.registered = {}; + } + + /** + * @callback PluginSetup + * @this {Jsep} jsep + * @returns: void + */ + /** + * Adds the given plugin(s) to the registry + * + * @param {object} plugins + * @param {string} plugins.name The name of the plugin + * @param {PluginSetup} plugins.init The init function + * @public + */ + register(...plugins) { + plugins.forEach(plugin => { + if (typeof plugin !== 'object' || !plugin.name || !plugin.init) { + throw new Error('Invalid JSEP plugin format'); + } + if (this.registered[plugin.name]) { + // already registered. Ignore. 
+ return; + } + plugin.init(this.jsep); + this.registered[plugin.name] = plugin; + }); + } +} + +// JavaScript Expression Parser (JSEP) 1.3.9 + +class Jsep { + /** + * @returns {string} + */ + static get version() { + // To be filled in by the template + return '1.3.9'; + } + + /** + * @returns {string} + */ + static toString() { + return 'JavaScript Expression Parser (JSEP) v' + Jsep.version; + } + // ==================== CONFIG ================================ + /** + * @method addUnaryOp + * @param {string} op_name The name of the unary op to add + * @returns {Jsep} + */ + static addUnaryOp(op_name) { + Jsep.max_unop_len = Math.max(op_name.length, Jsep.max_unop_len); + Jsep.unary_ops[op_name] = 1; + return Jsep; + } + + /** + * @method jsep.addBinaryOp + * @param {string} op_name The name of the binary op to add + * @param {number} precedence The precedence of the binary op (can be a float). Higher number = higher precedence + * @param {boolean} [isRightAssociative=false] whether operator is right-associative + * @returns {Jsep} + */ + static addBinaryOp(op_name, precedence, isRightAssociative) { + Jsep.max_binop_len = Math.max(op_name.length, Jsep.max_binop_len); + Jsep.binary_ops[op_name] = precedence; + if (isRightAssociative) { + Jsep.right_associative.add(op_name); + } else { + Jsep.right_associative.delete(op_name); + } + return Jsep; + } + + /** + * @method addIdentifierChar + * @param {string} char The additional character to treat as a valid part of an identifier + * @returns {Jsep} + */ + static addIdentifierChar(char) { + Jsep.additional_identifier_chars.add(char); + return Jsep; + } + + /** + * @method addLiteral + * @param {string} literal_name The name of the literal to add + * @param {*} literal_value The value of the literal + * @returns {Jsep} + */ + static addLiteral(literal_name, literal_value) { + Jsep.literals[literal_name] = literal_value; + return Jsep; + } + + /** + * @method removeUnaryOp + * @param {string} op_name The name of the unary 
op to remove + * @returns {Jsep} + */ + static removeUnaryOp(op_name) { + delete Jsep.unary_ops[op_name]; + if (op_name.length === Jsep.max_unop_len) { + Jsep.max_unop_len = Jsep.getMaxKeyLen(Jsep.unary_ops); + } + return Jsep; + } + + /** + * @method removeAllUnaryOps + * @returns {Jsep} + */ + static removeAllUnaryOps() { + Jsep.unary_ops = {}; + Jsep.max_unop_len = 0; + return Jsep; + } + + /** + * @method removeIdentifierChar + * @param {string} char The additional character to stop treating as a valid part of an identifier + * @returns {Jsep} + */ + static removeIdentifierChar(char) { + Jsep.additional_identifier_chars.delete(char); + return Jsep; + } + + /** + * @method removeBinaryOp + * @param {string} op_name The name of the binary op to remove + * @returns {Jsep} + */ + static removeBinaryOp(op_name) { + delete Jsep.binary_ops[op_name]; + if (op_name.length === Jsep.max_binop_len) { + Jsep.max_binop_len = Jsep.getMaxKeyLen(Jsep.binary_ops); + } + Jsep.right_associative.delete(op_name); + return Jsep; + } + + /** + * @method removeAllBinaryOps + * @returns {Jsep} + */ + static removeAllBinaryOps() { + Jsep.binary_ops = {}; + Jsep.max_binop_len = 0; + return Jsep; + } + + /** + * @method removeLiteral + * @param {string} literal_name The name of the literal to remove + * @returns {Jsep} + */ + static removeLiteral(literal_name) { + delete Jsep.literals[literal_name]; + return Jsep; + } + + /** + * @method removeAllLiterals + * @returns {Jsep} + */ + static removeAllLiterals() { + Jsep.literals = {}; + return Jsep; + } + // ==================== END CONFIG ============================ + + /** + * @returns {string} + */ + get char() { + return this.expr.charAt(this.index); + } + + /** + * @returns {number} + */ + get code() { + return this.expr.charCodeAt(this.index); + } + /** + * @param {string} expr a string with the passed in express + * @returns Jsep + */ + constructor(expr) { + // `index` stores the character number we are currently at + // All of the 
gobbles below will modify `index` as we move along + this.expr = expr; + this.index = 0; + } + + /** + * static top-level parser + * @returns {jsep.Expression} + */ + static parse(expr) { + return new Jsep(expr).parse(); + } + + /** + * Get the longest key length of any object + * @param {object} obj + * @returns {number} + */ + static getMaxKeyLen(obj) { + return Math.max(0, ...Object.keys(obj).map(k => k.length)); + } + + /** + * `ch` is a character code in the next three functions + * @param {number} ch + * @returns {boolean} + */ + static isDecimalDigit(ch) { + return ch >= 48 && ch <= 57; // 0...9 + } + + /** + * Returns the precedence of a binary operator or `0` if it isn't a binary operator. Can be float. + * @param {string} op_val + * @returns {number} + */ + static binaryPrecedence(op_val) { + return Jsep.binary_ops[op_val] || 0; + } + + /** + * Looks for start of identifier + * @param {number} ch + * @returns {boolean} + */ + static isIdentifierStart(ch) { + return ch >= 65 && ch <= 90 || + // A...Z + ch >= 97 && ch <= 122 || + // a...z + ch >= 128 && !Jsep.binary_ops[String.fromCharCode(ch)] || + // any non-ASCII that is not an operator + Jsep.additional_identifier_chars.has(String.fromCharCode(ch)); // additional characters + } + + /** + * @param {number} ch + * @returns {boolean} + */ + static isIdentifierPart(ch) { + return Jsep.isIdentifierStart(ch) || Jsep.isDecimalDigit(ch); + } + + /** + * throw error at index of the expression + * @param {string} message + * @throws + */ + throwError(message) { + const error = new Error(message + ' at character ' + this.index); + error.index = this.index; + error.description = message; + throw error; + } + + /** + * Run a given hook + * @param {string} name + * @param {jsep.Expression|false} [node] + * @returns {?jsep.Expression} + */ + runHook(name, node) { + if (Jsep.hooks[name]) { + const env = { + context: this, + node + }; + Jsep.hooks.run(name, env); + return env.node; + } + return node; + } + + /** + * 
Runs a given hook until one returns a node + * @param {string} name + * @returns {?jsep.Expression} + */ + searchHook(name) { + if (Jsep.hooks[name]) { + const env = { + context: this + }; + Jsep.hooks[name].find(function (callback) { + callback.call(env.context, env); + return env.node; + }); + return env.node; + } + } + + /** + * Push `index` up to the next non-space character + */ + gobbleSpaces() { + let ch = this.code; + // Whitespace + while (ch === Jsep.SPACE_CODE || ch === Jsep.TAB_CODE || ch === Jsep.LF_CODE || ch === Jsep.CR_CODE) { + ch = this.expr.charCodeAt(++this.index); + } + this.runHook('gobble-spaces'); + } + + /** + * Top-level method to parse all expressions and returns compound or single node + * @returns {jsep.Expression} + */ + parse() { + this.runHook('before-all'); + const nodes = this.gobbleExpressions(); + + // If there's only one expression just try returning the expression + const node = nodes.length === 1 ? nodes[0] : { + type: Jsep.COMPOUND, + body: nodes + }; + return this.runHook('after-all', node); + } + + /** + * top-level parser (but can be reused within as well) + * @param {number} [untilICode] + * @returns {jsep.Expression[]} + */ + gobbleExpressions(untilICode) { + let nodes = [], + ch_i, + node; + while (this.index < this.expr.length) { + ch_i = this.code; + + // Expressions can be separated by semicolons, commas, or just inferred without any + // separators + if (ch_i === Jsep.SEMCOL_CODE || ch_i === Jsep.COMMA_CODE) { + this.index++; // ignore separators + } else { + // Try to gobble each expression individually + if (node = this.gobbleExpression()) { + nodes.push(node); + // If we weren't able to find a binary expression and are out of room, then + // the expression passed in probably has too much + } else if (this.index < this.expr.length) { + if (ch_i === untilICode) { + break; + } + this.throwError('Unexpected "' + this.char + '"'); + } + } + } + return nodes; + } + + /** + * The main parsing function. 
+ * @returns {?jsep.Expression} + */ + gobbleExpression() { + const node = this.searchHook('gobble-expression') || this.gobbleBinaryExpression(); + this.gobbleSpaces(); + return this.runHook('after-expression', node); + } + + /** + * Search for the operation portion of the string (e.g. `+`, `===`) + * Start by taking the longest possible binary operations (3 characters: `===`, `!==`, `>>>`) + * and move down from 3 to 2 to 1 character until a matching binary operation is found + * then, return that binary operation + * @returns {string|boolean} + */ + gobbleBinaryOp() { + this.gobbleSpaces(); + let to_check = this.expr.substr(this.index, Jsep.max_binop_len); + let tc_len = to_check.length; + while (tc_len > 0) { + // Don't accept a binary op when it is an identifier. + // Binary ops that start with a identifier-valid character must be followed + // by a non identifier-part valid character + if (Jsep.binary_ops.hasOwnProperty(to_check) && (!Jsep.isIdentifierStart(this.code) || this.index + to_check.length < this.expr.length && !Jsep.isIdentifierPart(this.expr.charCodeAt(this.index + to_check.length)))) { + this.index += tc_len; + return to_check; + } + to_check = to_check.substr(0, --tc_len); + } + return false; + } + + /** + * This function is responsible for gobbling an individual expression, + * e.g. 
`1`, `1+2`, `a+(b*2)-Math.sqrt(2)` + * @returns {?jsep.BinaryExpression} + */ + gobbleBinaryExpression() { + let node, biop, prec, stack, biop_info, left, right, i, cur_biop; + + // First, try to get the leftmost thing + // Then, check to see if there's a binary operator operating on that leftmost thing + // Don't gobbleBinaryOp without a left-hand-side + left = this.gobbleToken(); + if (!left) { + return left; + } + biop = this.gobbleBinaryOp(); + + // If there wasn't a binary operator, just return the leftmost node + if (!biop) { + return left; + } + + // Otherwise, we need to start a stack to properly place the binary operations in their + // precedence structure + biop_info = { + value: biop, + prec: Jsep.binaryPrecedence(biop), + right_a: Jsep.right_associative.has(biop) + }; + right = this.gobbleToken(); + if (!right) { + this.throwError("Expected expression after " + biop); + } + stack = [left, biop_info, right]; + + // Properly deal with precedence using [recursive descent](http://www.engr.mun.ca/~theo/Misc/exp_parsing.htm) + while (biop = this.gobbleBinaryOp()) { + prec = Jsep.binaryPrecedence(biop); + if (prec === 0) { + this.index -= biop.length; + break; + } + biop_info = { + value: biop, + prec, + right_a: Jsep.right_associative.has(biop) + }; + cur_biop = biop; + + // Reduce: make a binary expression from the three topmost entries. + const comparePrev = prev => biop_info.right_a && prev.right_a ? 
prec > prev.prec : prec <= prev.prec; + while (stack.length > 2 && comparePrev(stack[stack.length - 2])) { + right = stack.pop(); + biop = stack.pop().value; + left = stack.pop(); + node = { + type: Jsep.BINARY_EXP, + operator: biop, + left, + right + }; + stack.push(node); + } + node = this.gobbleToken(); + if (!node) { + this.throwError("Expected expression after " + cur_biop); + } + stack.push(biop_info, node); + } + i = stack.length - 1; + node = stack[i]; + while (i > 1) { + node = { + type: Jsep.BINARY_EXP, + operator: stack[i - 1].value, + left: stack[i - 2], + right: node + }; + i -= 2; + } + return node; + } + + /** + * An individual part of a binary expression: + * e.g. `foo.bar(baz)`, `1`, `"abc"`, `(a % 2)` (because it's in parenthesis) + * @returns {boolean|jsep.Expression} + */ + gobbleToken() { + let ch, to_check, tc_len, node; + this.gobbleSpaces(); + node = this.searchHook('gobble-token'); + if (node) { + return this.runHook('after-token', node); + } + ch = this.code; + if (Jsep.isDecimalDigit(ch) || ch === Jsep.PERIOD_CODE) { + // Char code 46 is a dot `.` which can start off a numeric literal + return this.gobbleNumericLiteral(); + } + if (ch === Jsep.SQUOTE_CODE || ch === Jsep.DQUOTE_CODE) { + // Single or double quotes + node = this.gobbleStringLiteral(); + } else if (ch === Jsep.OBRACK_CODE) { + node = this.gobbleArray(); + } else { + to_check = this.expr.substr(this.index, Jsep.max_unop_len); + tc_len = to_check.length; + while (tc_len > 0) { + // Don't accept an unary op when it is an identifier. 
+ // Unary ops that start with a identifier-valid character must be followed + // by a non identifier-part valid character + if (Jsep.unary_ops.hasOwnProperty(to_check) && (!Jsep.isIdentifierStart(this.code) || this.index + to_check.length < this.expr.length && !Jsep.isIdentifierPart(this.expr.charCodeAt(this.index + to_check.length)))) { + this.index += tc_len; + const argument = this.gobbleToken(); + if (!argument) { + this.throwError('missing unaryOp argument'); + } + return this.runHook('after-token', { + type: Jsep.UNARY_EXP, + operator: to_check, + argument, + prefix: true + }); + } + to_check = to_check.substr(0, --tc_len); + } + if (Jsep.isIdentifierStart(ch)) { + node = this.gobbleIdentifier(); + if (Jsep.literals.hasOwnProperty(node.name)) { + node = { + type: Jsep.LITERAL, + value: Jsep.literals[node.name], + raw: node.name + }; + } else if (node.name === Jsep.this_str) { + node = { + type: Jsep.THIS_EXP + }; + } + } else if (ch === Jsep.OPAREN_CODE) { + // open parenthesis + node = this.gobbleGroup(); + } + } + if (!node) { + return this.runHook('after-token', false); + } + node = this.gobbleTokenProperty(node); + return this.runHook('after-token', node); + } + + /** + * Gobble properties of of identifiers/strings/arrays/groups. + * e.g. `foo`, `bar.baz`, `foo['bar'].baz` + * It also gobbles function calls: + * e.g. 
`Math.acos(obj.angle)` + * @param {jsep.Expression} node + * @returns {jsep.Expression} + */ + gobbleTokenProperty(node) { + this.gobbleSpaces(); + let ch = this.code; + while (ch === Jsep.PERIOD_CODE || ch === Jsep.OBRACK_CODE || ch === Jsep.OPAREN_CODE || ch === Jsep.QUMARK_CODE) { + let optional; + if (ch === Jsep.QUMARK_CODE) { + if (this.expr.charCodeAt(this.index + 1) !== Jsep.PERIOD_CODE) { + break; + } + optional = true; + this.index += 2; + this.gobbleSpaces(); + ch = this.code; + } + this.index++; + if (ch === Jsep.OBRACK_CODE) { + node = { + type: Jsep.MEMBER_EXP, + computed: true, + object: node, + property: this.gobbleExpression() + }; + if (!node.property) { + this.throwError('Unexpected "' + this.char + '"'); + } + this.gobbleSpaces(); + ch = this.code; + if (ch !== Jsep.CBRACK_CODE) { + this.throwError('Unclosed ['); + } + this.index++; + } else if (ch === Jsep.OPAREN_CODE) { + // A function call is being made; gobble all the arguments + node = { + type: Jsep.CALL_EXP, + 'arguments': this.gobbleArguments(Jsep.CPAREN_CODE), + callee: node + }; + } else if (ch === Jsep.PERIOD_CODE || optional) { + if (optional) { + this.index--; + } + this.gobbleSpaces(); + node = { + type: Jsep.MEMBER_EXP, + computed: false, + object: node, + property: this.gobbleIdentifier() + }; + } + if (optional) { + node.optional = true; + } // else leave undefined for compatibility with esprima + + this.gobbleSpaces(); + ch = this.code; + } + return node; + } + + /** + * Parse simple numeric literals: `12`, `3.4`, `.5`. 
Do this by using a string to + * keep track of everything in the numeric literal and then calling `parseFloat` on that string + * @returns {jsep.Literal} + */ + gobbleNumericLiteral() { + let number = '', + ch, + chCode; + while (Jsep.isDecimalDigit(this.code)) { + number += this.expr.charAt(this.index++); + } + if (this.code === Jsep.PERIOD_CODE) { + // can start with a decimal marker + number += this.expr.charAt(this.index++); + while (Jsep.isDecimalDigit(this.code)) { + number += this.expr.charAt(this.index++); + } + } + ch = this.char; + if (ch === 'e' || ch === 'E') { + // exponent marker + number += this.expr.charAt(this.index++); + ch = this.char; + if (ch === '+' || ch === '-') { + // exponent sign + number += this.expr.charAt(this.index++); + } + while (Jsep.isDecimalDigit(this.code)) { + // exponent itself + number += this.expr.charAt(this.index++); + } + if (!Jsep.isDecimalDigit(this.expr.charCodeAt(this.index - 1))) { + this.throwError('Expected exponent (' + number + this.char + ')'); + } + } + chCode = this.code; + + // Check to make sure this isn't a variable name that start with a number (123abc) + if (Jsep.isIdentifierStart(chCode)) { + this.throwError('Variable names cannot start with a number (' + number + this.char + ')'); + } else if (chCode === Jsep.PERIOD_CODE || number.length === 1 && number.charCodeAt(0) === Jsep.PERIOD_CODE) { + this.throwError('Unexpected period'); + } + return { + type: Jsep.LITERAL, + value: parseFloat(number), + raw: number + }; + } + + /** + * Parses a string literal, staring with single or double quotes with basic support for escape codes + * e.g. 
`"hello world"`, `'this is\nJSEP'` + * @returns {jsep.Literal} + */ + gobbleStringLiteral() { + let str = ''; + const startIndex = this.index; + const quote = this.expr.charAt(this.index++); + let closed = false; + while (this.index < this.expr.length) { + let ch = this.expr.charAt(this.index++); + if (ch === quote) { + closed = true; + break; + } else if (ch === '\\') { + // Check for all of the common escape codes + ch = this.expr.charAt(this.index++); + switch (ch) { + case 'n': + str += '\n'; + break; + case 'r': + str += '\r'; + break; + case 't': + str += '\t'; + break; + case 'b': + str += '\b'; + break; + case 'f': + str += '\f'; + break; + case 'v': + str += '\x0B'; + break; + default: + str += ch; + } + } else { + str += ch; + } + } + if (!closed) { + this.throwError('Unclosed quote after "' + str + '"'); + } + return { + type: Jsep.LITERAL, + value: str, + raw: this.expr.substring(startIndex, this.index) + }; + } + + /** + * Gobbles only identifiers + * e.g.: `foo`, `_value`, `$x1` + * Also, this function checks if that identifier is a literal: + * (e.g. `true`, `false`, `null`) or `this` + * @returns {jsep.Identifier} + */ + gobbleIdentifier() { + let ch = this.code, + start = this.index; + if (Jsep.isIdentifierStart(ch)) { + this.index++; + } else { + this.throwError('Unexpected ' + this.char); + } + while (this.index < this.expr.length) { + ch = this.code; + if (Jsep.isIdentifierPart(ch)) { + this.index++; + } else { + break; + } + } + return { + type: Jsep.IDENTIFIER, + name: this.expr.slice(start, this.index) + }; + } + + /** + * Gobbles a list of arguments within the context of a function call + * or array literal. This function also assumes that the opening character + * `(` or `[` has already been gobbled, and gobbles expressions and commas + * until the terminator character `)` or `]` is encountered. + * e.g. 
`foo(bar, baz)`, `my_func()`, or `[bar, baz]` + * @param {number} termination + * @returns {jsep.Expression[]} + */ + gobbleArguments(termination) { + const args = []; + let closed = false; + let separator_count = 0; + while (this.index < this.expr.length) { + this.gobbleSpaces(); + let ch_i = this.code; + if (ch_i === termination) { + // done parsing + closed = true; + this.index++; + if (termination === Jsep.CPAREN_CODE && separator_count && separator_count >= args.length) { + this.throwError('Unexpected token ' + String.fromCharCode(termination)); + } + break; + } else if (ch_i === Jsep.COMMA_CODE) { + // between expressions + this.index++; + separator_count++; + if (separator_count !== args.length) { + // missing argument + if (termination === Jsep.CPAREN_CODE) { + this.throwError('Unexpected token ,'); + } else if (termination === Jsep.CBRACK_CODE) { + for (let arg = args.length; arg < separator_count; arg++) { + args.push(null); + } + } + } + } else if (args.length !== separator_count && separator_count !== 0) { + // NOTE: `&& separator_count !== 0` allows for either all commas, or all spaces as arguments + this.throwError('Expected comma'); + } else { + const node = this.gobbleExpression(); + if (!node || node.type === Jsep.COMPOUND) { + this.throwError('Expected comma'); + } + args.push(node); + } + } + if (!closed) { + this.throwError('Expected ' + String.fromCharCode(termination)); + } + return args; + } + + /** + * Responsible for parsing a group of things within parentheses `()` + * that have no identifier in front (so not a function call) + * This function assumes that it needs to gobble the opening parenthesis + * and then tries to gobble everything within that parenthesis, assuming + * that the next thing it should see is the close parenthesis. 
If not, + * then the expression probably doesn't have a `)` + * @returns {boolean|jsep.Expression} + */ + gobbleGroup() { + this.index++; + let nodes = this.gobbleExpressions(Jsep.CPAREN_CODE); + if (this.code === Jsep.CPAREN_CODE) { + this.index++; + if (nodes.length === 1) { + return nodes[0]; + } else if (!nodes.length) { + return false; + } else { + return { + type: Jsep.SEQUENCE_EXP, + expressions: nodes + }; + } + } else { + this.throwError('Unclosed ('); + } + } + + /** + * Responsible for parsing Array literals `[1, 2, 3]` + * This function assumes that it needs to gobble the opening bracket + * and then tries to gobble the expressions as arguments. + * @returns {jsep.ArrayExpression} + */ + gobbleArray() { + this.index++; + return { + type: Jsep.ARRAY_EXP, + elements: this.gobbleArguments(Jsep.CBRACK_CODE) + }; + } +} + +// Static fields: +const hooks = new Hooks(); +Object.assign(Jsep, { + hooks, + plugins: new Plugins(Jsep), + // Node Types + // ---------- + // This is the full set of types that any JSEP node can be. + // Store them here to save space when minified + COMPOUND: 'Compound', + SEQUENCE_EXP: 'SequenceExpression', + IDENTIFIER: 'Identifier', + MEMBER_EXP: 'MemberExpression', + LITERAL: 'Literal', + THIS_EXP: 'ThisExpression', + CALL_EXP: 'CallExpression', + UNARY_EXP: 'UnaryExpression', + BINARY_EXP: 'BinaryExpression', + ARRAY_EXP: 'ArrayExpression', + TAB_CODE: 9, + LF_CODE: 10, + CR_CODE: 13, + SPACE_CODE: 32, + PERIOD_CODE: 46, + // '.' + COMMA_CODE: 44, + // ',' + SQUOTE_CODE: 39, + // single quote + DQUOTE_CODE: 34, + // double quotes + OPAREN_CODE: 40, + // ( + CPAREN_CODE: 41, + // ) + OBRACK_CODE: 91, + // [ + CBRACK_CODE: 93, + // ] + QUMARK_CODE: 63, + // ? 
+ SEMCOL_CODE: 59, + // ; + COLON_CODE: 58, + // : + + // Operations + // ---------- + // Use a quickly-accessible map to store all of the unary operators + // Values are set to `1` (it really doesn't matter) + unary_ops: { + '-': 1, + '!': 1, + '~': 1, + '+': 1 + }, + // Also use a map for the binary operations but set their values to their + // binary precedence for quick reference (higher number = higher precedence) + // see [Order of operations](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Operator_Precedence) + binary_ops: { + '||': 1, + '&&': 2, + '|': 3, + '^': 4, + '&': 5, + '==': 6, + '!=': 6, + '===': 6, + '!==': 6, + '<': 7, + '>': 7, + '<=': 7, + '>=': 7, + '<<': 8, + '>>': 8, + '>>>': 8, + '+': 9, + '-': 9, + '*': 10, + '/': 10, + '%': 10 + }, + // sets specific binary_ops as right-associative + right_associative: new Set(), + // Additional valid identifier chars, apart from a-z, A-Z and 0-9 (except on the starting char) + additional_identifier_chars: new Set(['$', '_']), + // Literals + // ---------- + // Store the values to return for the various literals we may encounter + literals: { + 'true': true, + 'false': false, + 'null': null + }, + // Except for `this`, which is special. This could be changed to something like `'self'` as well + this_str: 'this' +}); +Jsep.max_unop_len = Jsep.getMaxKeyLen(Jsep.unary_ops); +Jsep.max_binop_len = Jsep.getMaxKeyLen(Jsep.binary_ops); + +// Backward Compatibility: +const jsep = expr => new Jsep(expr).parse(); +const stdClassProps = Object.getOwnPropertyNames(class Test {}); +Object.getOwnPropertyNames(Jsep).filter(prop => !stdClassProps.includes(prop) && jsep[prop] === undefined).forEach(m => { + jsep[m] = Jsep[m]; +}); +jsep.Jsep = Jsep; // allows for const { Jsep } = require('jsep'); + +const CONDITIONAL_EXP = 'ConditionalExpression'; +var ternary = { + name: 'ternary', + init(jsep) { + // Ternary expression: test ? 
consequent : alternate + jsep.hooks.add('after-expression', function gobbleTernary(env) { + if (env.node && this.code === jsep.QUMARK_CODE) { + this.index++; + const test = env.node; + const consequent = this.gobbleExpression(); + if (!consequent) { + this.throwError('Expected expression'); + } + this.gobbleSpaces(); + if (this.code === jsep.COLON_CODE) { + this.index++; + const alternate = this.gobbleExpression(); + if (!alternate) { + this.throwError('Expected expression'); + } + env.node = { + type: CONDITIONAL_EXP, + test, + consequent, + alternate + }; + + // check for operators of higher priority than ternary (i.e. assignment) + // jsep sets || at 1, and assignment at 0.9, and conditional should be between them + if (test.operator && jsep.binary_ops[test.operator] <= 0.9) { + let newTest = test; + while (newTest.right.operator && jsep.binary_ops[newTest.right.operator] <= 0.9) { + newTest = newTest.right; + } + env.node.test = newTest.right; + newTest.right = env.node; + env.node = test; + } + } else { + this.throwError('Expected :'); + } + } + }); + } +}; + +// Add default plugins: + +jsep.plugins.register(ternary); + +const FSLASH_CODE = 47; // '/' +const BSLASH_CODE = 92; // '\\' + +var index = { + name: 'regex', + init(jsep) { + // Regex literal: /abc123/ig + jsep.hooks.add('gobble-token', function gobbleRegexLiteral(env) { + if (this.code === FSLASH_CODE) { + const patternIndex = ++this.index; + let inCharSet = false; + while (this.index < this.expr.length) { + if (this.code === FSLASH_CODE && !inCharSet) { + const pattern = this.expr.slice(patternIndex, this.index); + let flags = ''; + while (++this.index < this.expr.length) { + const code = this.code; + if (code >= 97 && code <= 122 // a...z + || code >= 65 && code <= 90 // A...Z + || code >= 48 && code <= 57) { + // 0-9 + flags += this.char; + } else { + break; + } + } + let value; + try { + value = new RegExp(pattern, flags); + } catch (e) { + this.throwError(e.message); + } + env.node = { + type: 
jsep.LITERAL, + value, + raw: this.expr.slice(patternIndex - 1, this.index) + }; + + // allow . [] and () after regex: /regex/.test(a) + env.node = this.gobbleTokenProperty(env.node); + return env.node; + } + if (this.code === jsep.OBRACK_CODE) { + inCharSet = true; + } else if (inCharSet && this.code === jsep.CBRACK_CODE) { + inCharSet = false; + } + this.index += this.code === BSLASH_CODE ? 2 : 1; + } + this.throwError('Unclosed Regex'); + } + }); + } +}; + +const PLUS_CODE = 43; // + +const MINUS_CODE = 45; // - + +const plugin = { + name: 'assignment', + assignmentOperators: new Set(['=', '*=', '**=', '/=', '%=', '+=', '-=', '<<=', '>>=', '>>>=', '&=', '^=', '|=']), + updateOperators: [PLUS_CODE, MINUS_CODE], + assignmentPrecedence: 0.9, + init(jsep) { + const updateNodeTypes = [jsep.IDENTIFIER, jsep.MEMBER_EXP]; + plugin.assignmentOperators.forEach(op => jsep.addBinaryOp(op, plugin.assignmentPrecedence, true)); + jsep.hooks.add('gobble-token', function gobbleUpdatePrefix(env) { + const code = this.code; + if (plugin.updateOperators.some(c => c === code && c === this.expr.charCodeAt(this.index + 1))) { + this.index += 2; + env.node = { + type: 'UpdateExpression', + operator: code === PLUS_CODE ? '++' : '--', + argument: this.gobbleTokenProperty(this.gobbleIdentifier()), + prefix: true + }; + if (!env.node.argument || !updateNodeTypes.includes(env.node.argument.type)) { + this.throwError(`Unexpected ${env.node.operator}`); + } + } + }); + jsep.hooks.add('after-token', function gobbleUpdatePostfix(env) { + if (env.node) { + const code = this.code; + if (plugin.updateOperators.some(c => c === code && c === this.expr.charCodeAt(this.index + 1))) { + if (!updateNodeTypes.includes(env.node.type)) { + this.throwError(`Unexpected ${env.node.operator}`); + } + this.index += 2; + env.node = { + type: 'UpdateExpression', + operator: code === PLUS_CODE ? 
'++' : '--', + argument: env.node, + prefix: false + }; + } + } + }); + jsep.hooks.add('after-expression', function gobbleAssignment(env) { + if (env.node) { + // Note: Binaries can be chained in a single expression to respect + // operator precedence (i.e. a = b = 1 + 2 + 3) + // Update all binary assignment nodes in the tree + updateBinariesToAssignments(env.node); + } + }); + function updateBinariesToAssignments(node) { + if (plugin.assignmentOperators.has(node.operator)) { + node.type = 'AssignmentExpression'; + updateBinariesToAssignments(node.left); + updateBinariesToAssignments(node.right); + } else if (!node.operator) { + Object.values(node).forEach(val => { + if (val && typeof val === 'object') { + updateBinariesToAssignments(val); + } + }); + } + } + } +}; + +/* eslint-disable no-bitwise */ + +// register plugins +jsep.plugins.register(index, plugin); +const SafeEval = { + /** + * @param {jsep.Expression} ast + * @param {Record} subs + */ + evalAst(ast, subs) { + switch (ast.type) { + case 'BinaryExpression': + case 'LogicalExpression': + return SafeEval.evalBinaryExpression(ast, subs); + case 'Compound': + return SafeEval.evalCompound(ast, subs); + case 'ConditionalExpression': + return SafeEval.evalConditionalExpression(ast, subs); + case 'Identifier': + return SafeEval.evalIdentifier(ast, subs); + case 'Literal': + return SafeEval.evalLiteral(ast, subs); + case 'MemberExpression': + return SafeEval.evalMemberExpression(ast, subs); + case 'UnaryExpression': + return SafeEval.evalUnaryExpression(ast, subs); + case 'ArrayExpression': + return SafeEval.evalArrayExpression(ast, subs); + case 'CallExpression': + return SafeEval.evalCallExpression(ast, subs); + case 'AssignmentExpression': + return SafeEval.evalAssignmentExpression(ast, subs); + default: + throw SyntaxError('Unexpected expression', ast); + } + }, + evalBinaryExpression(ast, subs) { + const result = { + '||': (a, b) => a || b(), + '&&': (a, b) => a && b(), + '|': (a, b) => a | b(), + '^': (a, 
b) => a ^ b(), + '&': (a, b) => a & b(), + // eslint-disable-next-line eqeqeq + '==': (a, b) => a == b(), + // eslint-disable-next-line eqeqeq + '!=': (a, b) => a != b(), + '===': (a, b) => a === b(), + '!==': (a, b) => a !== b(), + '<': (a, b) => a < b(), + '>': (a, b) => a > b(), + '<=': (a, b) => a <= b(), + '>=': (a, b) => a >= b(), + '<<': (a, b) => a << b(), + '>>': (a, b) => a >> b(), + '>>>': (a, b) => a >>> b(), + '+': (a, b) => a + b(), + '-': (a, b) => a - b(), + '*': (a, b) => a * b(), + '/': (a, b) => a / b(), + '%': (a, b) => a % b() + }[ast.operator](SafeEval.evalAst(ast.left, subs), () => SafeEval.evalAst(ast.right, subs)); + return result; + }, + evalCompound(ast, subs) { + let last; + for (let i = 0; i < ast.body.length; i++) { + if (ast.body[i].type === 'Identifier' && ['var', 'let', 'const'].includes(ast.body[i].name) && ast.body[i + 1] && ast.body[i + 1].type === 'AssignmentExpression') { + // var x=2; is detected as + // [{Identifier var}, {AssignmentExpression x=2}] + // eslint-disable-next-line @stylistic/max-len -- Long + // eslint-disable-next-line sonarjs/updated-loop-counter -- Convenient + i += 1; + } + const expr = ast.body[i]; + last = SafeEval.evalAst(expr, subs); + } + return last; + }, + evalConditionalExpression(ast, subs) { + if (SafeEval.evalAst(ast.test, subs)) { + return SafeEval.evalAst(ast.consequent, subs); + } + return SafeEval.evalAst(ast.alternate, subs); + }, + evalIdentifier(ast, subs) { + if (ast.name in subs) { + return subs[ast.name]; + } + throw ReferenceError(`${ast.name} is not defined`); + }, + evalLiteral(ast) { + return ast.value; + }, + evalMemberExpression(ast, subs) { + const prop = ast.computed ? SafeEval.evalAst(ast.property) // `object[property]` + : ast.property.name; // `object.property` property is Identifier + const obj = SafeEval.evalAst(ast.object, subs); + const result = obj[prop]; + if (typeof result === 'function') { + return result.bind(obj); // arrow functions aren't affected by bind. 
+ } + return result; + }, + evalUnaryExpression(ast, subs) { + const result = { + '-': a => -SafeEval.evalAst(a, subs), + '!': a => !SafeEval.evalAst(a, subs), + '~': a => ~SafeEval.evalAst(a, subs), + // eslint-disable-next-line no-implicit-coercion + '+': a => +SafeEval.evalAst(a, subs) + }[ast.operator](ast.argument); + return result; + }, + evalArrayExpression(ast, subs) { + return ast.elements.map(el => SafeEval.evalAst(el, subs)); + }, + evalCallExpression(ast, subs) { + const args = ast.arguments.map(arg => SafeEval.evalAst(arg, subs)); + const func = SafeEval.evalAst(ast.callee, subs); + return func(...args); + }, + evalAssignmentExpression(ast, subs) { + if (ast.left.type !== 'Identifier') { + throw SyntaxError('Invalid left-hand side in assignment'); + } + const id = ast.left.name; + const value = SafeEval.evalAst(ast.right, subs); + subs[id] = value; + return subs[id]; + } +}; + +/** + * A replacement for NodeJS' VM.Script which is also {@link https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP | Content Security Policy} friendly. + */ +class SafeScript { + /** + * @param {string} expr Expression to evaluate + */ + constructor(expr) { + this.code = expr; + this.ast = jsep(this.code); + } + + /** + * @param {object} context Object whose items will be added + * to evaluation + * @returns {EvaluatedResult} Result of evaluated code + */ + runInNewContext(context) { + const keyMap = { + ...context + }; + return SafeEval.evalAst(this.ast, keyMap); + } +} + +/* eslint-disable camelcase, unicorn/prefer-string-replace-all, + unicorn/prefer-at */ + + +/** + * @typedef {null|boolean|number|string|object|GenericArray} JSONObject + */ + +/** + * @typedef {any} AnyItem + */ + +/** + * @typedef {any} AnyResult + */ + +/** + * Copies array and then pushes item into it. 
+ * @param {GenericArray} arr Array to copy and into which to push + * @param {AnyItem} item Array item to add (to end) + * @returns {GenericArray} Copy of the original array + */ +function push(arr, item) { + arr = arr.slice(); + arr.push(item); + return arr; +} +/** + * Copies array and then unshifts item into it. + * @param {AnyItem} item Array item to add (to beginning) + * @param {GenericArray} arr Array to copy and into which to unshift + * @returns {GenericArray} Copy of the original array + */ +function unshift(item, arr) { + arr = arr.slice(); + arr.unshift(item); + return arr; +} + +/** + * Caught when JSONPath is used without `new` but rethrown if with `new` + * @extends Error + */ +class NewError extends Error { + /** + * @param {AnyResult} value The evaluated scalar value + */ + constructor(value) { + super('JSONPath should not be called with "new" (it prevents return ' + 'of (unwrapped) scalar values)'); + this.avoidNew = true; + this.value = value; + this.name = 'NewError'; + } +} + +/** +* @typedef {object} ReturnObject +* @property {string} path +* @property {JSONObject} value +* @property {object|GenericArray} parent +* @property {string} parentProperty +*/ + +/** +* @callback JSONPathCallback +* @param {string|object} preferredOutput +* @param {"value"|"property"} type +* @param {ReturnObject} fullRetObj +* @returns {void} +*/ + +/** +* @callback OtherTypeCallback +* @param {JSONObject} val +* @param {string} path +* @param {object|GenericArray} parent +* @param {string} parentPropName +* @returns {boolean} +*/ + +/** + * @typedef {any} ContextItem + */ + +/** + * @typedef {any} EvaluatedResult + */ + +/** +* @callback EvalCallback +* @param {string} code +* @param {ContextItem} context +* @returns {EvaluatedResult} +*/ + +/** + * @typedef {typeof SafeScript} EvalClass + */ + +/** + * @typedef {object} JSONPathOptions + * @property {JSON} json + * @property {string|string[]} path + * @property {"value"|"path"|"pointer"|"parent"|"parentProperty"| 
+ * "all"} [resultType="value"] + * @property {boolean} [flatten=false] + * @property {boolean} [wrap=true] + * @property {object} [sandbox={}] + * @property {EvalCallback|EvalClass|'safe'|'native'| + * boolean} [eval = 'safe'] + * @property {object|GenericArray|null} [parent=null] + * @property {string|null} [parentProperty=null] + * @property {JSONPathCallback} [callback] + * @property {OtherTypeCallback} [otherTypeCallback] Defaults to + * function which throws on encountering `@other` + * @property {boolean} [autostart=true] + */ + +/** + * @param {string|JSONPathOptions} opts If a string, will be treated as `expr` + * @param {string} [expr] JSON path to evaluate + * @param {JSON} [obj] JSON object to evaluate against + * @param {JSONPathCallback} [callback] Passed 3 arguments: 1) desired payload + * per `resultType`, 2) `"value"|"property"`, 3) Full returned object with + * all payloads + * @param {OtherTypeCallback} [otherTypeCallback] If `@other()` is at the end + * of one's query, this will be invoked with the value of the item, its + * path, its parent, and its parent's property name, and it should return + * a boolean indicating whether the supplied value belongs to the "other" + * type or not (or it may handle transformations and return `false`). 
+ * @returns {JSONPath} + * @class + */ +function JSONPath(opts, expr, obj, callback, otherTypeCallback) { + // eslint-disable-next-line no-restricted-syntax + if (!(this instanceof JSONPath)) { + try { + return new JSONPath(opts, expr, obj, callback, otherTypeCallback); + } catch (e) { + if (!e.avoidNew) { + throw e; + } + return e.value; + } + } + if (typeof opts === 'string') { + otherTypeCallback = callback; + callback = obj; + obj = expr; + expr = opts; + opts = null; + } + const optObj = opts && typeof opts === 'object'; + opts = opts || {}; + this.json = opts.json || obj; + this.path = opts.path || expr; + this.resultType = opts.resultType || 'value'; + this.flatten = opts.flatten || false; + this.wrap = hasOwn(opts, 'wrap') ? opts.wrap : true; + this.sandbox = opts.sandbox || {}; + this.eval = opts.eval === undefined ? 'safe' : opts.eval; + this.ignoreEvalErrors = typeof opts.ignoreEvalErrors === 'undefined' ? false : opts.ignoreEvalErrors; + this.parent = opts.parent || null; + this.parentProperty = opts.parentProperty || null; + this.callback = opts.callback || callback || null; + this.otherTypeCallback = opts.otherTypeCallback || otherTypeCallback || function () { + throw new TypeError('You must supply an otherTypeCallback callback option ' + 'with the @other() operator.'); + }; + if (opts.autostart !== false) { + const args = { + path: optObj ? 
opts.path : expr + }; + if (!optObj) { + args.json = obj; + } else if ('json' in opts) { + args.json = opts.json; + } + const ret = this.evaluate(args); + if (!ret || typeof ret !== 'object') { + throw new NewError(ret); + } + return ret; + } +} + +// PUBLIC METHODS +JSONPath.prototype.evaluate = function (expr, json, callback, otherTypeCallback) { + let currParent = this.parent, + currParentProperty = this.parentProperty; + let { + flatten, + wrap + } = this; + this.currResultType = this.resultType; + this.currEval = this.eval; + this.currSandbox = this.sandbox; + callback = callback || this.callback; + this.currOtherTypeCallback = otherTypeCallback || this.otherTypeCallback; + json = json || this.json; + expr = expr || this.path; + if (expr && typeof expr === 'object' && !Array.isArray(expr)) { + if (!expr.path && expr.path !== '') { + throw new TypeError('You must supply a "path" property when providing an object ' + 'argument to JSONPath.evaluate().'); + } + if (!hasOwn(expr, 'json')) { + throw new TypeError('You must supply a "json" property when providing an object ' + 'argument to JSONPath.evaluate().'); + } + ({ + json + } = expr); + flatten = hasOwn(expr, 'flatten') ? expr.flatten : flatten; + this.currResultType = hasOwn(expr, 'resultType') ? expr.resultType : this.currResultType; + this.currSandbox = hasOwn(expr, 'sandbox') ? expr.sandbox : this.currSandbox; + wrap = hasOwn(expr, 'wrap') ? expr.wrap : wrap; + this.currEval = hasOwn(expr, 'eval') ? expr.eval : this.currEval; + callback = hasOwn(expr, 'callback') ? expr.callback : callback; + this.currOtherTypeCallback = hasOwn(expr, 'otherTypeCallback') ? expr.otherTypeCallback : this.currOtherTypeCallback; + currParent = hasOwn(expr, 'parent') ? expr.parent : currParent; + currParentProperty = hasOwn(expr, 'parentProperty') ? 
expr.parentProperty : currParentProperty; + expr = expr.path; + } + currParent = currParent || null; + currParentProperty = currParentProperty || null; + if (Array.isArray(expr)) { + expr = JSONPath.toPathString(expr); + } + if (!expr && expr !== '' || !json) { + return undefined; + } + const exprList = JSONPath.toPathArray(expr); + if (exprList[0] === '$' && exprList.length > 1) { + exprList.shift(); + } + this._hasParentSelector = null; + const result = this._trace(exprList, json, ['$'], currParent, currParentProperty, callback).filter(function (ea) { + return ea && !ea.isParentSelector; + }); + if (!result.length) { + return wrap ? [] : undefined; + } + if (!wrap && result.length === 1 && !result[0].hasArrExpr) { + return this._getPreferredOutput(result[0]); + } + return result.reduce((rslt, ea) => { + const valOrPath = this._getPreferredOutput(ea); + if (flatten && Array.isArray(valOrPath)) { + rslt = rslt.concat(valOrPath); + } else { + rslt.push(valOrPath); + } + return rslt; + }, []); +}; + +// PRIVATE METHODS + +JSONPath.prototype._getPreferredOutput = function (ea) { + const resultType = this.currResultType; + switch (resultType) { + case 'all': + { + const path = Array.isArray(ea.path) ? ea.path : JSONPath.toPathArray(ea.path); + ea.pointer = JSONPath.toPointer(path); + ea.path = typeof ea.path === 'string' ? ea.path : JSONPath.toPathString(ea.path); + return ea; + } + case 'value': + case 'parent': + case 'parentProperty': + return ea[resultType]; + case 'path': + return JSONPath.toPathString(ea[resultType]); + case 'pointer': + return JSONPath.toPointer(ea.path); + default: + throw new TypeError('Unknown result type'); + } +}; +JSONPath.prototype._handleCallback = function (fullRetObj, callback, type) { + if (callback) { + const preferredOutput = this._getPreferredOutput(fullRetObj); + fullRetObj.path = typeof fullRetObj.path === 'string' ? 
fullRetObj.path : JSONPath.toPathString(fullRetObj.path); + // eslint-disable-next-line n/callback-return + callback(preferredOutput, type, fullRetObj); + } +}; + +/** + * + * @param {string} expr + * @param {JSONObject} val + * @param {string} path + * @param {object|GenericArray} parent + * @param {string} parentPropName + * @param {JSONPathCallback} callback + * @param {boolean} hasArrExpr + * @param {boolean} literalPriority + * @returns {ReturnObject|ReturnObject[]} + */ +JSONPath.prototype._trace = function (expr, val, path, parent, parentPropName, callback, hasArrExpr, literalPriority) { + // No expr to follow? return path and value as the result of + // this trace branch + let retObj; + if (!expr.length) { + retObj = { + path, + value: val, + parent, + parentProperty: parentPropName, + hasArrExpr + }; + this._handleCallback(retObj, callback, 'value'); + return retObj; + } + const loc = expr[0], + x = expr.slice(1); + + // We need to gather the return value of recursive trace calls in order to + // do the parent sel computation. 
+ const ret = []; + /** + * + * @param {ReturnObject|ReturnObject[]} elems + * @returns {void} + */ + function addRet(elems) { + if (Array.isArray(elems)) { + // This was causing excessive stack size in Node (with or + // without Babel) against our performance test: + // `ret.push(...elems);` + elems.forEach(t => { + ret.push(t); + }); + } else { + ret.push(elems); + } + } + if ((typeof loc !== 'string' || literalPriority) && val && hasOwn(val, loc)) { + // simple case--directly follow property + addRet(this._trace(x, val[loc], push(path, loc), val, loc, callback, hasArrExpr)); + // eslint-disable-next-line unicorn/prefer-switch -- Part of larger `if` + } else if (loc === '*') { + // all child properties + this._walk(val, m => { + addRet(this._trace(x, val[m], push(path, m), val, m, callback, true, true)); + }); + } else if (loc === '..') { + // all descendent parent properties + // Check remaining expression with val's immediate children + addRet(this._trace(x, val, path, parent, parentPropName, callback, hasArrExpr)); + this._walk(val, m => { + // We don't join m and x here because we only want parents, + // not scalar values + if (typeof val[m] === 'object') { + // Keep going with recursive descent on val's + // object children + addRet(this._trace(expr.slice(), val[m], push(path, m), val, m, callback, true)); + } + }); + // The parent sel computation is handled in the frame above using the + // ancestor object of val + } else if (loc === '^') { + // This is not a final endpoint, so we do not invoke the callback here + this._hasParentSelector = true; + return { + path: path.slice(0, -1), + expr: x, + isParentSelector: true + }; + } else if (loc === '~') { + // property name + retObj = { + path: push(path, loc), + value: parentPropName, + parent, + parentProperty: null + }; + this._handleCallback(retObj, callback, 'property'); + return retObj; + } else if (loc === '$') { + // root only + addRet(this._trace(x, val, path, null, null, callback, hasArrExpr)); + } 
else if (/^(-?\d*):(-?\d*):?(\d*)$/u.test(loc)) { + // [start:end:step] Python slice syntax + addRet(this._slice(loc, x, val, path, parent, parentPropName, callback)); + } else if (loc.indexOf('?(') === 0) { + // [?(expr)] (filtering) + if (this.currEval === false) { + throw new Error('Eval [?(expr)] prevented in JSONPath expression.'); + } + const safeLoc = loc.replace(/^\?\((.*?)\)$/u, '$1'); + // check for a nested filter expression + const nested = /@.?([^?]*)[['](\??\(.*?\))(?!.\)\])[\]']/gu.exec(safeLoc); + if (nested) { + // find if there are matches in the nested expression + // add them to the result set if there is at least one match + this._walk(val, m => { + const npath = [nested[2]]; + const nvalue = nested[1] ? val[m][nested[1]] : val[m]; + const filterResults = this._trace(npath, nvalue, path, parent, parentPropName, callback, true); + if (filterResults.length > 0) { + addRet(this._trace(x, val[m], push(path, m), val, m, callback, true)); + } + }); + } else { + this._walk(val, m => { + if (this._eval(safeLoc, val[m], m, path, parent, parentPropName)) { + addRet(this._trace(x, val[m], push(path, m), val, m, callback, true)); + } + }); + } + } else if (loc[0] === '(') { + // [(expr)] (dynamic property/index) + if (this.currEval === false) { + throw new Error('Eval [(expr)] prevented in JSONPath expression.'); + } + // As this will resolve to a property name (but we don't know it + // yet), property and parent information is relative to the + // parent of the property to which this expression will resolve + addRet(this._trace(unshift(this._eval(loc, val, path[path.length - 1], path.slice(0, -1), parent, parentPropName), x), val, path, parent, parentPropName, callback, hasArrExpr)); + } else if (loc[0] === '@') { + // value type: @boolean(), etc. 
+ let addType = false; + const valueType = loc.slice(1, -2); + switch (valueType) { + case 'scalar': + if (!val || !['object', 'function'].includes(typeof val)) { + addType = true; + } + break; + case 'boolean': + case 'string': + case 'undefined': + case 'function': + if (typeof val === valueType) { + addType = true; + } + break; + case 'integer': + if (Number.isFinite(val) && !(val % 1)) { + addType = true; + } + break; + case 'number': + if (Number.isFinite(val)) { + addType = true; + } + break; + case 'nonFinite': + if (typeof val === 'number' && !Number.isFinite(val)) { + addType = true; + } + break; + case 'object': + if (val && typeof val === valueType) { + addType = true; + } + break; + case 'array': + if (Array.isArray(val)) { + addType = true; + } + break; + case 'other': + addType = this.currOtherTypeCallback(val, path, parent, parentPropName); + break; + case 'null': + if (val === null) { + addType = true; + } + break; + /* c8 ignore next 2 */ + default: + throw new TypeError('Unknown value type ' + valueType); + } + if (addType) { + retObj = { + path, + value: val, + parent, + parentProperty: parentPropName + }; + this._handleCallback(retObj, callback, 'value'); + return retObj; + } + // `-escaped property + } else if (loc[0] === '`' && val && hasOwn(val, loc.slice(1))) { + const locProp = loc.slice(1); + addRet(this._trace(x, val[locProp], push(path, locProp), val, locProp, callback, hasArrExpr, true)); + } else if (loc.includes(',')) { + // [name1,name2,...] + const parts = loc.split(','); + for (const part of parts) { + addRet(this._trace(unshift(part, x), val, path, parent, parentPropName, callback, true)); + } + // simple case--directly follow property + } else if (!literalPriority && val && hasOwn(val, loc)) { + addRet(this._trace(x, val[loc], push(path, loc), val, loc, callback, hasArrExpr, true)); + } + + // We check the resulting values for parent selections. 
For parent + // selections we discard the value object and continue the trace with the + // current val object + if (this._hasParentSelector) { + for (let t = 0; t < ret.length; t++) { + const rett = ret[t]; + if (rett && rett.isParentSelector) { + const tmp = this._trace(rett.expr, val, rett.path, parent, parentPropName, callback, hasArrExpr); + if (Array.isArray(tmp)) { + ret[t] = tmp[0]; + const tl = tmp.length; + for (let tt = 1; tt < tl; tt++) { + // eslint-disable-next-line @stylistic/max-len -- Long + // eslint-disable-next-line sonarjs/updated-loop-counter -- Convenient + t++; + ret.splice(t, 0, tmp[tt]); + } + } else { + ret[t] = tmp; + } + } + } + } + return ret; +}; +JSONPath.prototype._walk = function (val, f) { + if (Array.isArray(val)) { + const n = val.length; + for (let i = 0; i < n; i++) { + f(i); + } + } else if (val && typeof val === 'object') { + Object.keys(val).forEach(m => { + f(m); + }); + } +}; +JSONPath.prototype._slice = function (loc, expr, val, path, parent, parentPropName, callback) { + if (!Array.isArray(val)) { + return undefined; + } + const len = val.length, + parts = loc.split(':'), + step = parts[2] && Number.parseInt(parts[2]) || 1; + let start = parts[0] && Number.parseInt(parts[0]) || 0, + end = parts[1] && Number.parseInt(parts[1]) || len; + start = start < 0 ? Math.max(0, start + len) : Math.min(len, start); + end = end < 0 ? 
Math.max(0, end + len) : Math.min(len, end); + const ret = []; + for (let i = start; i < end; i += step) { + const tmp = this._trace(unshift(i, expr), val, path, parent, parentPropName, callback, true); + // Should only be possible to be an array here since first part of + // ``unshift(i, expr)` passed in above would not be empty, nor `~`, + // nor begin with `@` (as could return objects) + // This was causing excessive stack size in Node (with or + // without Babel) against our performance test: `ret.push(...tmp);` + tmp.forEach(t => { + ret.push(t); + }); + } + return ret; +}; +JSONPath.prototype._eval = function (code, _v, _vname, path, parent, parentPropName) { + this.currSandbox._$_parentProperty = parentPropName; + this.currSandbox._$_parent = parent; + this.currSandbox._$_property = _vname; + this.currSandbox._$_root = this.json; + this.currSandbox._$_v = _v; + const containsPath = code.includes('@path'); + if (containsPath) { + this.currSandbox._$_path = JSONPath.toPathString(path.concat([_vname])); + } + const scriptCacheKey = this.currEval + 'Script:' + code; + if (!JSONPath.cache[scriptCacheKey]) { + let script = code.replace(/@parentProperty/gu, '_$_parentProperty').replace(/@parent/gu, '_$_parent').replace(/@property/gu, '_$_property').replace(/@root/gu, '_$_root').replace(/@([.\s)[])/gu, '_$_v$1'); + if (containsPath) { + script = script.replace(/@path/gu, '_$_path'); + } + if (this.currEval === 'safe' || this.currEval === true || this.currEval === undefined) { + JSONPath.cache[scriptCacheKey] = new this.safeVm.Script(script); + } else if (this.currEval === 'native') { + JSONPath.cache[scriptCacheKey] = new this.vm.Script(script); + } else if (typeof this.currEval === 'function' && this.currEval.prototype && hasOwn(this.currEval.prototype, 'runInNewContext')) { + const CurrEval = this.currEval; + JSONPath.cache[scriptCacheKey] = new CurrEval(script); + } else if (typeof this.currEval === 'function') { + JSONPath.cache[scriptCacheKey] = { + 
runInNewContext: context => this.currEval(script, context) + }; + } else { + throw new TypeError(`Unknown "eval" property "${this.currEval}"`); + } + } + try { + return JSONPath.cache[scriptCacheKey].runInNewContext(this.currSandbox); + } catch (e) { + if (this.ignoreEvalErrors) { + return false; + } + throw new Error('jsonPath: ' + e.message + ': ' + code); + } +}; + +// PUBLIC CLASS PROPERTIES AND METHODS + +// Could store the cache object itself +JSONPath.cache = {}; + +/** + * @param {string[]} pathArr Array to convert + * @returns {string} The path string + */ +JSONPath.toPathString = function (pathArr) { + const x = pathArr, + n = x.length; + let p = '$'; + for (let i = 1; i < n; i++) { + if (!/^(~|\^|@.*?\(\))$/u.test(x[i])) { + p += /^[0-9*]+$/u.test(x[i]) ? '[' + x[i] + ']' : "['" + x[i] + "']"; + } + } + return p; +}; + +/** + * @param {string} pointer JSON Path + * @returns {string} JSON Pointer + */ +JSONPath.toPointer = function (pointer) { + const x = pointer, + n = x.length; + let p = ''; + for (let i = 1; i < n; i++) { + if (!/^(~|\^|@.*?\(\))$/u.test(x[i])) { + p += '/' + x[i].toString().replace(/~/gu, '~0').replace(/\//gu, '~1'); + } + } + return p; +}; + +/** + * @param {string} expr Expression to convert + * @returns {string[]} + */ +JSONPath.toPathArray = function (expr) { + const { + cache + } = JSONPath; + if (cache[expr]) { + return cache[expr].concat(); + } + const subx = []; + const normalized = expr + // Properties + .replace(/@(?:null|boolean|number|string|integer|undefined|nonFinite|scalar|array|object|function|other)\(\)/gu, ';$&;') + // Parenthetical evaluations (filtering and otherwise), directly + // within brackets or single quotes + .replace(/[['](\??\(.*?\))[\]'](?!.\])/gu, function ($0, $1) { + return '[#' + (subx.push($1) - 1) + ']'; + }) + // Escape periods and tildes within properties + .replace(/\[['"]([^'\]]*)['"]\]/gu, function ($0, prop) { + return "['" + prop.replace(/\./gu, '%@%').replace(/~/gu, '%%@@%%') + "']"; + }) + 
// Properties operator + .replace(/~/gu, ';~;') + // Split by property boundaries + .replace(/['"]?\.['"]?(?![^[]*\])|\[['"]?/gu, ';') + // Reinsert periods within properties + .replace(/%@%/gu, '.') + // Reinsert tildes within properties + .replace(/%%@@%%/gu, '~') + // Parent + .replace(/(?:;)?(\^+)(?:;)?/gu, function ($0, ups) { + return ';' + ups.split('').join(';') + ';'; + }) + // Descendents + .replace(/;;;|;;/gu, ';..;') + // Remove trailing + .replace(/;$|'?\]|'$/gu, ''); + const exprList = normalized.split(';').map(function (exp) { + const match = exp.match(/#(\d+)/u); + return !match || !match[1] ? exp : subx[match[1]]; + }); + cache[expr] = exprList; + return cache[expr].concat(); +}; +JSONPath.prototype.safeVm = { + Script: SafeScript +}; + +JSONPath.prototype.vm = vm; + +exports.JSONPath = JSONPath; diff --git a/packages/dd-trace/src/plugin_manager.js b/packages/dd-trace/src/plugin_manager.js index 80e87ce545e..e9daea9b60b 100644 --- a/packages/dd-trace/src/plugin_manager.js +++ b/packages/dd-trace/src/plugin_manager.js @@ -137,7 +137,8 @@ module.exports = class PluginManager { dsmEnabled, clientIpEnabled, memcachedCommandEnabled, - ciVisibilityTestSessionName + ciVisibilityTestSessionName, + ciVisAgentlessLogSubmissionEnabled } = this._tracerConfig const sharedConfig = { @@ -147,7 +148,8 @@ module.exports = class PluginManager { site, url, headers: headerTags || [], - ciVisibilityTestSessionName + ciVisibilityTestSessionName, + ciVisAgentlessLogSubmissionEnabled } if (logInjection !== undefined) { diff --git a/packages/dd-trace/src/plugins/ci_plugin.js b/packages/dd-trace/src/plugins/ci_plugin.js index b86d20d5760..d4c9f32bc68 100644 --- a/packages/dd-trace/src/plugins/ci_plugin.js +++ b/packages/dd-trace/src/plugins/ci_plugin.js @@ -100,7 +100,9 @@ module.exports = class CiPlugin extends Plugin { ...testSessionSpanMetadata } }) + // TODO: add telemetry tag when we can add `is_agentless_log_submission_enabled` for agentless log submission 
this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'session') + this.testModuleSpan = this.tracer.startSpan(`${this.constructor.id}.test_module`, { childOf: this.testSessionSpan, tags: { diff --git a/packages/dd-trace/src/plugins/index.js b/packages/dd-trace/src/plugins/index.js index 06325724b71..80c32401536 100644 --- a/packages/dd-trace/src/plugins/index.js +++ b/packages/dd-trace/src/plugins/index.js @@ -3,6 +3,7 @@ module.exports = { get '@apollo/gateway' () { return require('../../../datadog-plugin-apollo/src') }, get '@aws-sdk/smithy-client' () { return require('../../../datadog-plugin-aws-sdk/src') }, + get '@azure/functions' () { return require('../../../datadog-plugin-azure-functions/src') }, get '@cucumber/cucumber' () { return require('../../../datadog-plugin-cucumber/src') }, get '@playwright/test' () { return require('../../../datadog-plugin-playwright/src') }, get '@elastic/elasticsearch' () { return require('../../../datadog-plugin-elasticsearch/src') }, @@ -22,6 +23,7 @@ module.exports = { get aerospike () { return require('../../../datadog-plugin-aerospike/src') }, get amqp10 () { return require('../../../datadog-plugin-amqp10/src') }, get amqplib () { return require('../../../datadog-plugin-amqplib/src') }, + get avsc () { return require('../../../datadog-plugin-avsc/src') }, get 'aws-sdk' () { return require('../../../datadog-plugin-aws-sdk/src') }, get bunyan () { return require('../../../datadog-plugin-bunyan/src') }, get 'cassandra-driver' () { return require('../../../datadog-plugin-cassandra-driver/src') }, @@ -77,6 +79,7 @@ module.exports = { get pino () { return require('../../../datadog-plugin-pino/src') }, get 'pino-pretty' () { return require('../../../datadog-plugin-pino/src') }, get playwright () { return require('../../../datadog-plugin-playwright/src') }, + get protobufjs () { return require('../../../datadog-plugin-protobufjs/src') }, get redis () { return require('../../../datadog-plugin-redis/src') }, get restify () { return 
require('../../../datadog-plugin-restify/src') }, get rhea () { return require('../../../datadog-plugin-rhea/src') }, diff --git a/packages/dd-trace/src/plugins/log_plugin.js b/packages/dd-trace/src/plugins/log_plugin.js index 353008a9e02..b0812ea46d3 100644 --- a/packages/dd-trace/src/plugins/log_plugin.js +++ b/packages/dd-trace/src/plugins/log_plugin.js @@ -54,7 +54,7 @@ module.exports = class LogPlugin extends Plugin { configure (config) { return super.configure({ ...config, - enabled: config.enabled && config.logInjection + enabled: config.enabled && (config.logInjection || config.ciVisAgentlessLogSubmissionEnabled) }) } } diff --git a/packages/dd-trace/src/plugins/schema.js b/packages/dd-trace/src/plugins/schema.js new file mode 100644 index 00000000000..675ba6a715f --- /dev/null +++ b/packages/dd-trace/src/plugins/schema.js @@ -0,0 +1,35 @@ +'use strict' + +const Plugin = require('./plugin') + +const SERIALIZATION = 'serialization' +const DESERIALIZATION = 'deserialization' + +class SchemaPlugin extends Plugin { + constructor (...args) { + super(...args) + + this.addSub(`apm:${this.constructor.id}:serialize-start`, this.handleSerializeStart.bind(this)) + this.addSub(`apm:${this.constructor.id}:deserialize-end`, this.handleDeserializeFinish.bind(this)) + } + + handleSerializeStart (args) { + const activeSpan = this.tracer.scope().active() + if (activeSpan && this.config.dsmEnabled) { + this.constructor.schemaExtractor.attachSchemaOnSpan( + args, activeSpan, SERIALIZATION, this.tracer + ) + } + } + + handleDeserializeFinish (args) { + const activeSpan = this.tracer.scope().active() + if (activeSpan && this.config.dsmEnabled) { + this.constructor.schemaExtractor.attachSchemaOnSpan( + args, activeSpan, DESERIALIZATION, this.tracer + ) + } + } +} + +module.exports = SchemaPlugin diff --git a/packages/dd-trace/src/plugins/util/ci.js b/packages/dd-trace/src/plugins/util/ci.js index 35e58c5a94e..86bda260212 100644 --- a/packages/dd-trace/src/plugins/util/ci.js +++ 
b/packages/dd-trace/src/plugins/util/ci.js @@ -1,3 +1,4 @@ +const { readFileSync } = require('fs') const { GIT_BRANCH, GIT_COMMIT_SHA, @@ -6,6 +7,9 @@ const { GIT_COMMIT_AUTHOR_NAME, GIT_COMMIT_MESSAGE, GIT_COMMIT_AUTHOR_DATE, + GIT_COMMIT_HEAD_SHA, + GIT_PULL_REQUEST_BASE_BRANCH_SHA, + GIT_PULL_REQUEST_BASE_BRANCH, GIT_REPOSITORY_URL, CI_PIPELINE_ID, CI_PIPELINE_NAME, @@ -77,6 +81,13 @@ function resolveTilde (filePath) { return filePath } +function getGitHubEventPayload () { + if (!process.env.GITHUB_EVENT_PATH) { + return + } + return JSON.parse(readFileSync(process.env.GITHUB_EVENT_PATH, 'utf8')) +} + module.exports = { normalizeRef, getCIMetadata () { @@ -241,7 +252,8 @@ module.exports = { GITHUB_REPOSITORY, GITHUB_SERVER_URL, GITHUB_RUN_ATTEMPT, - GITHUB_JOB + GITHUB_JOB, + GITHUB_BASE_REF } = env const repositoryURL = `${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git` @@ -277,6 +289,16 @@ module.exports = { GITHUB_RUN_ATTEMPT }) } + if (GITHUB_BASE_REF) { // `pull_request` or `pull_request_target` event + tags[GIT_PULL_REQUEST_BASE_BRANCH] = GITHUB_BASE_REF + try { + const eventContent = getGitHubEventPayload() + tags[GIT_PULL_REQUEST_BASE_BRANCH_SHA] = eventContent.pull_request.base.sha + tags[GIT_COMMIT_HEAD_SHA] = eventContent.pull_request.head.sha + } catch (e) { + // ignore malformed event content + } + } } if (env.APPVEYOR) { diff --git a/packages/dd-trace/src/plugins/util/serverless.js b/packages/dd-trace/src/plugins/util/serverless.js new file mode 100644 index 00000000000..3e969ffdfad --- /dev/null +++ b/packages/dd-trace/src/plugins/util/serverless.js @@ -0,0 +1,7 @@ +const types = require('../../../../../ext/types') +const web = require('./web') + +const serverless = { ...web } +serverless.TYPE = types.SERVERLESS + +module.exports = serverless diff --git a/packages/dd-trace/src/plugins/util/stacktrace.js b/packages/dd-trace/src/plugins/util/stacktrace.js new file mode 100644 index 00000000000..f67ba52c7c2 --- /dev/null +++ 
b/packages/dd-trace/src/plugins/util/stacktrace.js @@ -0,0 +1,94 @@ +'use strict' + +const { relative, sep, isAbsolute } = require('path') + +const cwd = process.cwd() + +module.exports = { + getCallSites, + getUserLandFrames +} + +// From https://github.com/felixge/node-stack-trace/blob/ba06dcdb50d465cd440d84a563836e293b360427/index.js#L1 +function getCallSites (constructorOpt) { + const oldLimit = Error.stackTraceLimit + Error.stackTraceLimit = Infinity + + const dummy = {} + + const v8Handler = Error.prepareStackTrace + Error.prepareStackTrace = function (_, v8StackTrace) { + return v8StackTrace + } + Error.captureStackTrace(dummy, constructorOpt) + + const v8StackTrace = dummy.stack + Error.prepareStackTrace = v8Handler + Error.stackTraceLimit = oldLimit + + return v8StackTrace +} + +/** + * Get stack trace of user-land frames. + * + * @param {Function} constructorOpt - Function to pass along to Error.captureStackTrace + * @param {number} [limit=Infinity] - The maximum number of frames to return + * @returns {{ file: string, line: number, method: (string|undefined), type: (string|undefined) }[]} - A + */ +function getUserLandFrames (constructorOpt, limit = Infinity) { + const callsites = getCallSites(constructorOpt) + const frames = [] + + for (const callsite of callsites) { + if (callsite.isNative()) { + continue + } + + const filename = callsite.getFileName() + + // If the callsite is native, there will be no associated filename. However, there might be other instances where + // this can happen, so to be sure, we add this additional check + if (filename === null) { + continue + } + + // ESM module paths start with the "file://" protocol (because ESM supports https imports) + // TODO: Node.js also supports `data:` and `node:` imports, should we do something specific for `data:`? + const containsFileProtocol = filename.startsWith('file:') + + // TODO: I'm not sure how stable this check is. 
Alternatively, we could consider reversing it if we can get + // a comprehensive list of all non-file-based values, eg: + // + // filename === '' || filename.startsWith('node:') + if (containsFileProtocol === false && isAbsolute(filename) === false) { + continue + } + + // TODO: Technically, the algorithm below could be simplified to not use the relative path, but be simply: + // + // if (filename.includes(sep + 'node_modules' + sep)) continue + // + // However, the tests in `packages/dd-trace/test/plugins/util/stacktrace.spec.js` will fail on my machine + // because I have the source code in a parent folder called `node_modules`. So the code below thinks that + // it's not in user-land + const relativePath = relative(cwd, containsFileProtocol ? filename.substring(7) : filename) + if (relativePath.startsWith('node_modules' + sep) || relativePath.includes(sep + 'node_modules' + sep)) { + continue + } + + const method = callsite.getFunctionName() + const type = callsite.getTypeName() + frames.push({ + file: filename, + line: callsite.getLineNumber(), + column: callsite.getColumnNumber(), + method: method ?? undefined, // force to undefined if null so JSON.stringify will omit it + type: type ?? 
undefined // force to undefined if null so JSON.stringify will omit it + }) + + if (frames.length === limit) break + } + + return frames +} diff --git a/packages/dd-trace/src/plugins/util/tags.js b/packages/dd-trace/src/plugins/util/tags.js index 15a795f4c1d..58709f0ceb7 100644 --- a/packages/dd-trace/src/plugins/util/tags.js +++ b/packages/dd-trace/src/plugins/util/tags.js @@ -9,6 +9,10 @@ const GIT_COMMIT_COMMITTER_NAME = 'git.commit.committer.name' const GIT_COMMIT_AUTHOR_DATE = 'git.commit.author.date' const GIT_COMMIT_AUTHOR_EMAIL = 'git.commit.author.email' const GIT_COMMIT_AUTHOR_NAME = 'git.commit.author.name' +const GIT_COMMIT_HEAD_SHA = 'git.commit.head_sha' + +const GIT_PULL_REQUEST_BASE_BRANCH_SHA = 'git.pull_request.base_branch_sha' +const GIT_PULL_REQUEST_BASE_BRANCH = 'git.pull_request.base_branch' const CI_PIPELINE_ID = 'ci.pipeline.id' const CI_PIPELINE_NAME = 'ci.pipeline.name' @@ -36,6 +40,9 @@ module.exports = { GIT_COMMIT_AUTHOR_DATE, GIT_COMMIT_AUTHOR_EMAIL, GIT_COMMIT_AUTHOR_NAME, + GIT_COMMIT_HEAD_SHA, + GIT_PULL_REQUEST_BASE_BRANCH_SHA, + GIT_PULL_REQUEST_BASE_BRANCH, CI_PIPELINE_ID, CI_PIPELINE_NAME, CI_PIPELINE_NUMBER, diff --git a/packages/dd-trace/src/plugins/util/test.js b/packages/dd-trace/src/plugins/util/test.js index 3cf1421ad15..6c0dde70cfb 100644 --- a/packages/dd-trace/src/plugins/util/test.js +++ b/packages/dd-trace/src/plugins/util/test.js @@ -168,7 +168,6 @@ module.exports = { mergeCoverage, fromCoverageMapToCoverage, getTestLineStart, - getCallSites, removeInvalidMetadata, parseAnnotations, EFD_STRING, @@ -181,7 +180,8 @@ module.exports = { TEST_BROWSER_NAME, TEST_BROWSER_VERSION, getTestSessionName, - TEST_LEVEL_EVENT_TYPES + TEST_LEVEL_EVENT_TYPES, + getNumFromKnownTests } // Returns pkg manager and its version, separated by '-', e.g. 
npm-8.15.0 or yarn-1.22.19 @@ -557,26 +557,6 @@ function getTestLineStart (err, testSuitePath) { } } -// From https://github.com/felixge/node-stack-trace/blob/ba06dcdb50d465cd440d84a563836e293b360427/index.js#L1 -function getCallSites () { - const oldLimit = Error.stackTraceLimit - Error.stackTraceLimit = Infinity - - const dummy = {} - - const v8Handler = Error.prepareStackTrace - Error.prepareStackTrace = function (_, v8StackTrace) { - return v8StackTrace - } - Error.captureStackTrace(dummy) - - const v8StackTrace = dummy.stack - Error.prepareStackTrace = v8Handler - Error.stackTraceLimit = oldLimit - - return v8StackTrace -} - /** * Gets an object of test tags from an Playwright annotations array. * @param {Object[]} annotations - Annotations from a Playwright test. @@ -639,3 +619,21 @@ function getTestSessionName (config, testCommand, envTags) { } return testCommand } + +// Calculate the number of a tests from the known tests response, which has a shape like: +// { testModule1: { testSuite1: [test1, test2, test3] }, testModule2: { testSuite2: [test4, test5] } } +function getNumFromKnownTests (knownTests) { + if (!knownTests) { + return 0 + } + + let totalNumTests = 0 + + for (const testModule of Object.values(knownTests)) { + for (const testSuite of Object.values(testModule)) { + totalNumTests += testSuite.length + } + } + + return totalNumTests +} diff --git a/packages/dd-trace/src/plugins/util/web.js b/packages/dd-trace/src/plugins/util/web.js index c9cdf1990aa..832044b29f8 100644 --- a/packages/dd-trace/src/plugins/util/web.js +++ b/packages/dd-trace/src/plugins/util/web.js @@ -36,6 +36,8 @@ const contexts = new WeakMap() const ends = new WeakMap() const web = { + TYPE: WEB, + // Ensure the configuration has the correct structure and defaults. 
normalizeConfig (config) { const headers = getHeadersToRecord(config) @@ -103,7 +105,7 @@ const web = { context.res = res this.setConfig(req, config) - addRequestTags(context) + addRequestTags(context, this.TYPE) return span }, @@ -296,7 +298,7 @@ const web = { if (context.finished && !req.stream) return - addRequestTags(context) + addRequestTags(context, this.TYPE) addResponseTags(context) context.config.hooks.request(context.span, req, res) @@ -423,7 +425,7 @@ function reactivate (req, fn) { : fn() } -function addRequestTags (context) { +function addRequestTags (context, spanType) { const { req, span, config } = context const url = extractURL(req) @@ -431,7 +433,7 @@ function addRequestTags (context) { [HTTP_URL]: web.obfuscateQs(config, url), [HTTP_METHOD]: req.method, [SPAN_KIND]: SERVER, - [SPAN_TYPE]: WEB, + [SPAN_TYPE]: spanType, [HTTP_USERAGENT]: req.headers['user-agent'] }) diff --git a/packages/dd-trace/src/profiling/profiler.js b/packages/dd-trace/src/profiling/profiler.js index 50b6fa13c53..3e6c5d7f618 100644 --- a/packages/dd-trace/src/profiling/profiler.js +++ b/packages/dd-trace/src/profiling/profiler.js @@ -146,6 +146,10 @@ class Profiler extends EventEmitter { const encodedProfiles = {} try { + if (Object.keys(this._config.profilers).length === 0) { + throw new Error('No profile types configured.') + } + // collect profiles synchronously so that profilers can be safely stopped asynchronously for (const profiler of this._config.profilers) { const profile = profiler.profile(restart, startDate, endDate) @@ -156,23 +160,32 @@ class Profiler extends EventEmitter { profiles.push({ profiler, profile }) } + if (restart) { + this._capture(this._timeoutInterval, endDate) + } + // encode and export asynchronously for (const { profiler, profile } of profiles) { - encodedProfiles[profiler.type] = await profiler.encode(profile) - this._logger.debug(() => { - const profileJson = JSON.stringify(profile, (key, value) => { - return typeof value === 'bigint' ? 
value.toString() : value + try { + encodedProfiles[profiler.type] = await profiler.encode(profile) + this._logger.debug(() => { + const profileJson = JSON.stringify(profile, (key, value) => { + return typeof value === 'bigint' ? value.toString() : value + }) + return `Collected ${profiler.type} profile: ` + profileJson }) - return `Collected ${profiler.type} profile: ` + profileJson - }) + } catch (err) { + // If encoding one of the profile types fails, we should still try to + // encode and submit the other profile types. + this._logError(err) + } } - if (restart) { - this._capture(this._timeoutInterval, endDate) + if (Object.keys(encodedProfiles).length > 0) { + await this._submit(encodedProfiles, startDate, endDate, snapshotKind) + profileSubmittedChannel.publish() + this._logger.debug('Submitted profiles') } - await this._submit(encodedProfiles, startDate, endDate, snapshotKind) - profileSubmittedChannel.publish() - this._logger.debug('Submitted profiles') } catch (err) { this._logError(err) this._stop() @@ -180,9 +193,6 @@ class Profiler extends EventEmitter { } _submit (profiles, start, end, snapshotKind) { - if (!Object.keys(profiles).length) { - return Promise.reject(new Error('No profiles to submit')) - } const { tags } = this._config const tasks = [] diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index e1d42484f13..f8f43b06a9a 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -330,13 +330,13 @@ class EventsProfiler { if (!restart) { this.stop() } - const profile = this.eventSerializer.createProfile(startDate, endDate) + const thatEventSerializer = this.eventSerializer this.eventSerializer = new EventSerializer() - return profile + return () => thatEventSerializer.createProfile(startDate, endDate) } encode (profile) { - return pprof.encode(profile) + return pprof.encode(profile()) } } diff --git 
a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index ee23b1145b0..3d7041cfecf 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -20,7 +20,7 @@ const enterCh = dc.channel('dd-trace:storage:enter') const spanFinishCh = dc.channel('dd-trace:span:finish') const profilerTelemetryMetrics = telemetryMetrics.manager.namespace('profilers') -const MemoizedWebTags = Symbol('NativeWallProfiler.MemoizedWebTags') +const ProfilingContext = Symbol('NativeWallProfiler.ProfilingContext') let kSampleCount @@ -44,36 +44,42 @@ function endpointNameFromTags (tags) { ].filter(v => v).join(' ') } -function getWebTags (startedSpans, i, span) { - // Are web tags for this span already memoized? - const memoizedWebTags = span[MemoizedWebTags] - if (memoizedWebTags !== undefined) { - return memoizedWebTags - } - // No, we'll have to memoize a new value - function memoize (tags) { - span[MemoizedWebTags] = tags - return tags - } - // Is this span itself a web span? - const context = span.context() - const tags = context._tags - if (isWebServerSpan(tags)) { - return memoize(tags) - } - // It isn't. Get parent's web tags (memoize them too recursively.) - // There might be several webspans, for example with next.js, http plugin creates the first span - // and then next.js plugin creates a child span, and this child span has the correct endpoint - // information. That's why we always use the tags of the closest ancestor web span. 
- const parentId = context._parentId - while (--i >= 0) { - const ispan = startedSpans[i] - if (ispan.context()._spanId === parentId) { - return memoize(getWebTags(startedSpans, i, ispan)) +let channelsActivated = false +function ensureChannelsActivated () { + if (channelsActivated) return + + const { AsyncLocalStorage, createHook } = require('async_hooks') + const shimmer = require('../../../../datadog-shimmer') + + createHook({ before: () => beforeCh.publish() }).enable() + + let inRun = false + shimmer.wrap(AsyncLocalStorage.prototype, 'enterWith', function (original) { + return function (...args) { + const retVal = original.apply(this, args) + if (!inRun) enterCh.publish() + return retVal } - } - // Local root span with no web span - return memoize(null) + }) + + shimmer.wrap(AsyncLocalStorage.prototype, 'run', function (original) { + return function (store, callback, ...args) { + const wrappedCb = shimmer.wrapFunction(callback, cb => function (...args) { + inRun = false + enterCh.publish() + const retVal = cb.apply(this, args) + inRun = true + return retVal + }) + inRun = true + const retVal = original.call(this, store, wrappedCb, ...args) + enterCh.publish() + inRun = false + return retVal + } + }) + + channelsActivated = true } class NativeWallProfiler { @@ -121,6 +127,8 @@ class NativeWallProfiler { start ({ mapper } = {}) { if (this._started) return + ensureChannelsActivated() + this._mapper = mapper this._pprof = require('@datadog/pprof') kSampleCount = this._pprof.time.constants.kSampleCount @@ -144,14 +152,10 @@ class NativeWallProfiler { }) if (this._withContexts) { - this._currentContext = {} - this._pprof.time.setContext(this._currentContext) + this._setNewContext() if (this._captureSpanData) { this._profilerState = this._pprof.time.getState() - this._lastSpan = undefined - this._lastStartedSpans = undefined - this._lastWebTags = undefined this._lastSampleCount = 0 beforeCh.subscribe(this._enter) @@ -169,51 +173,78 @@ class NativeWallProfiler { const 
sampleCount = this._profilerState[kSampleCount] if (sampleCount !== this._lastSampleCount) { this._lastSampleCount = sampleCount - const context = this._currentContext - this._currentContext = {} - this._pprof.time.setContext(this._currentContext) + const context = this._currentContext.ref + this._setNewContext() this._updateContext(context) } const span = getActiveSpan() - if (span) { + this._currentContext.ref = span ? this._getProfilingContext(span) : {} + } + + _getProfilingContext (span) { + let profilingContext = span[ProfilingContext] + if (profilingContext === undefined) { const context = span.context() - this._lastSpan = span const startedSpans = getStartedSpans(context) - this._lastStartedSpans = startedSpans + + let spanId + let rootSpanId + if (this._codeHotspotsEnabled) { + spanId = context._spanId + rootSpanId = startedSpans.length ? startedSpans[0].context()._spanId : context._spanId + } + + let webTags if (this._endpointCollectionEnabled) { - this._lastWebTags = getWebTags(startedSpans, startedSpans.length, span) + const tags = context._tags + if (isWebServerSpan(tags)) { + webTags = tags + } else { + // Get parent's context's web tags + const parentId = context._parentId + for (let i = startedSpans.length; --i >= 0;) { + const ispan = startedSpans[i] + if (ispan.context()._spanId === parentId) { + webTags = this._getProfilingContext(ispan).webTags + break + } + } + } } - } else { - this._lastStartedSpans = undefined - this._lastSpan = undefined - this._lastWebTags = undefined + + profilingContext = { spanId, rootSpanId, webTags } + span[ProfilingContext] = profilingContext } + return profilingContext + } + + _setNewContext () { + this._pprof.time.setContext( + this._currentContext = { + ref: {} + } + ) } _updateContext (context) { - if (!this._lastSpan) { - return + if (typeof context.spanId === 'object') { + context.spanId = context.spanId.toString(10) } - if (this._codeHotspotsEnabled) { - context.spanId = this._lastSpan.context().toSpanId() - 
const rootSpan = this._lastStartedSpans[0] - if (rootSpan) { - context.rootSpanId = rootSpan.context().toSpanId() - } + if (typeof context.rootSpanId === 'object') { + context.rootSpanId = context.rootSpanId.toString(10) } - if (this._lastWebTags) { - context.webTags = this._lastWebTags + if (context.webTags !== undefined && context.endpoint === undefined) { // endpoint may not be determined yet, but keep it as fallback // if tags are not available anymore during serialization - context.endpoint = endpointNameFromTags(this._lastWebTags) + context.endpoint = endpointNameFromTags(context.webTags) } } _spanFinished (span) { - if (span[MemoizedWebTags]) { - span[MemoizedWebTags] = undefined + if (span[ProfilingContext] !== undefined) { + span[ProfilingContext] = undefined } } @@ -248,9 +279,6 @@ class NativeWallProfiler { enterCh.unsubscribe(this._enter) spanFinishCh.unsubscribe(this._spanFinished) this._profilerState = undefined - this._lastSpan = undefined - this._lastStartedSpans = undefined - this._lastWebTags = undefined } this._started = false } @@ -273,20 +301,20 @@ class NativeWallProfiler { const labels = { ...getThreadLabels() } - const { context: { spanId, rootSpanId, webTags, endpoint }, timestamp } = context + const { context: { ref: { spanId, rootSpanId, webTags, endpoint } }, timestamp } = context if (this._timelineEnabled) { // Incoming timestamps are in microseconds, we emit nanos. 
labels[END_TIMESTAMP_LABEL] = timestamp * 1000n } - if (spanId) { + if (spanId !== undefined) { labels[SPAN_ID_LABEL] = spanId } - if (rootSpanId) { + if (rootSpanId !== undefined) { labels[LOCAL_ROOT_SPAN_ID_LABEL] = rootSpanId } - if (webTags && Object.keys(webTags).length !== 0) { + if (webTags !== undefined && Object.keys(webTags).length !== 0) { labels['trace endpoint'] = endpointNameFromTags(webTags) } else if (endpoint) { // fallback to endpoint computed when sample was taken diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js index d7ce0538f39..b8916b205d4 100644 --- a/packages/dd-trace/src/proxy.js +++ b/packages/dd-trace/src/proxy.js @@ -162,6 +162,18 @@ class Tracer extends NoopProxy { this._testApiManualPlugin.configure({ ...config, enabled: true }) } } + if (config.ciVisAgentlessLogSubmissionEnabled) { + if (process.env.DD_API_KEY) { + const LogSubmissionPlugin = require('./ci-visibility/log-submission/log-submission-plugin') + const automaticLogPlugin = new LogSubmissionPlugin(this) + automaticLogPlugin.configure({ ...config, enabled: true }) + } else { + log.warn( + 'DD_AGENTLESS_LOG_SUBMISSION_ENABLED is set, ' + + 'but DD_API_KEY is undefined, so no automatic log submission will be performed.' 
+ ) + } + } } catch (e) { log.error(e) } diff --git a/packages/dd-trace/src/service-naming/schemas/v0/index.js b/packages/dd-trace/src/service-naming/schemas/v0/index.js index c2751a64bf0..1b0b746035d 100644 --- a/packages/dd-trace/src/service-naming/schemas/v0/index.js +++ b/packages/dd-trace/src/service-naming/schemas/v0/index.js @@ -3,5 +3,6 @@ const messaging = require('./messaging') const storage = require('./storage') const graphql = require('./graphql') const web = require('./web') +const serverless = require('./serverless') -module.exports = new SchemaDefinition({ messaging, storage, web, graphql }) +module.exports = new SchemaDefinition({ messaging, storage, web, graphql, serverless }) diff --git a/packages/dd-trace/src/service-naming/schemas/v0/serverless.js b/packages/dd-trace/src/service-naming/schemas/v0/serverless.js new file mode 100644 index 00000000000..fcccdcb465a --- /dev/null +++ b/packages/dd-trace/src/service-naming/schemas/v0/serverless.js @@ -0,0 +1,12 @@ +const { identityService } = require('../util') + +const serverless = { + server: { + 'azure-functions': { + opName: () => 'azure-functions.invoke', + serviceName: identityService + } + } +} + +module.exports = serverless diff --git a/packages/dd-trace/src/service-naming/schemas/v1/index.js b/packages/dd-trace/src/service-naming/schemas/v1/index.js index c2751a64bf0..1b0b746035d 100644 --- a/packages/dd-trace/src/service-naming/schemas/v1/index.js +++ b/packages/dd-trace/src/service-naming/schemas/v1/index.js @@ -3,5 +3,6 @@ const messaging = require('./messaging') const storage = require('./storage') const graphql = require('./graphql') const web = require('./web') +const serverless = require('./serverless') -module.exports = new SchemaDefinition({ messaging, storage, web, graphql }) +module.exports = new SchemaDefinition({ messaging, storage, web, graphql, serverless }) diff --git a/packages/dd-trace/src/service-naming/schemas/v1/serverless.js 
b/packages/dd-trace/src/service-naming/schemas/v1/serverless.js new file mode 100644 index 00000000000..fcccdcb465a --- /dev/null +++ b/packages/dd-trace/src/service-naming/schemas/v1/serverless.js @@ -0,0 +1,12 @@ +const { identityService } = require('../util') + +const serverless = { + server: { + 'azure-functions': { + opName: () => 'azure-functions.invoke', + serviceName: identityService + } + } +} + +module.exports = serverless diff --git a/packages/dd-trace/test/.eslintrc.json b/packages/dd-trace/test/.eslintrc.json index ed8a9ff7a87..3a9e197c393 100644 --- a/packages/dd-trace/test/.eslintrc.json +++ b/packages/dd-trace/test/.eslintrc.json @@ -2,8 +2,12 @@ "extends": [ "../../../.eslintrc.json" ], + "parserOptions": { + "ecmaVersion": 2022 + }, "env": { - "mocha": true + "mocha": true, + "es2022": true }, "globals": { "expect": true, diff --git a/packages/dd-trace/test/appsec/attacker-fingerprinting-rules.json b/packages/dd-trace/test/appsec/attacker-fingerprinting-rules.json new file mode 100644 index 00000000000..722f9153ce4 --- /dev/null +++ b/packages/dd-trace/test/appsec/attacker-fingerprinting-rules.json @@ -0,0 +1,204 @@ +{ + "version": "2.2", + "metadata": { + "rules_version": "1.5.0" + }, + "rules": [ + { + "id": "tst-000-001-", + "name": "rule to test fingerprint", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + } + ], + "list": [ + "testattack" + ] + }, + "operator": "phrase_match" + } + ], + "transformers": [] + } + ], + "processors": [ + { + "id": "http-endpoint-fingerprint", + "generator": "http_endpoint_fingerprint", + "conditions": [ + { + "operator": "exists", + "parameters": { + "inputs": [ + { + "address": "waf.context.event" + }, + { + "address": "server.business_logic.users.login.failure" + }, + { + "address": "server.business_logic.users.login.success" + } + ] + } + } + ], + "parameters": { + 
"mappings": [ + { + "method": [ + { + "address": "server.request.method" + } + ], + "uri_raw": [ + { + "address": "server.request.uri.raw" + } + ], + "body": [ + { + "address": "server.request.body" + } + ], + "query": [ + { + "address": "server.request.query" + } + ], + "output": "_dd.appsec.fp.http.endpoint" + } + ] + }, + "evaluate": false, + "output": true + }, + { + "id": "http-header-fingerprint", + "generator": "http_header_fingerprint", + "conditions": [ + { + "operator": "exists", + "parameters": { + "inputs": [ + { + "address": "waf.context.event" + }, + { + "address": "server.business_logic.users.login.failure" + }, + { + "address": "server.business_logic.users.login.success" + } + ] + } + } + ], + "parameters": { + "mappings": [ + { + "headers": [ + { + "address": "server.request.headers.no_cookies" + } + ], + "output": "_dd.appsec.fp.http.header" + } + ] + }, + "evaluate": false, + "output": true + }, + { + "id": "http-network-fingerprint", + "generator": "http_network_fingerprint", + "conditions": [ + { + "operator": "exists", + "parameters": { + "inputs": [ + { + "address": "waf.context.event" + }, + { + "address": "server.business_logic.users.login.failure" + }, + { + "address": "server.business_logic.users.login.success" + } + ] + } + } + ], + "parameters": { + "mappings": [ + { + "headers": [ + { + "address": "server.request.headers.no_cookies" + } + ], + "output": "_dd.appsec.fp.http.network" + } + ] + }, + "evaluate": false, + "output": true + }, + { + "id": "session-fingerprint", + "generator": "session_fingerprint", + "conditions": [ + { + "operator": "exists", + "parameters": { + "inputs": [ + { + "address": "waf.context.event" + }, + { + "address": "server.business_logic.users.login.failure" + }, + { + "address": "server.business_logic.users.login.success" + } + ] + } + } + ], + "parameters": { + "mappings": [ + { + "cookies": [ + { + "address": "server.request.cookies" + } + ], + "session_id": [ + { + "address": "usr.session_id" + } + ], + 
"user_id": [ + { + "address": "usr.id" + } + ], + "output": "_dd.appsec.fp.session" + } + ] + }, + "evaluate": false, + "output": true + } + ] +} diff --git a/packages/dd-trace/test/appsec/attacker-fingerprinting.express.plugin.spec.js b/packages/dd-trace/test/appsec/attacker-fingerprinting.express.plugin.spec.js new file mode 100644 index 00000000000..bc7c918965c --- /dev/null +++ b/packages/dd-trace/test/appsec/attacker-fingerprinting.express.plugin.spec.js @@ -0,0 +1,79 @@ +'use strict' + +const axios = require('axios') +const { assert } = require('chai') +const path = require('path') + +const agent = require('../plugins/agent') +const appsec = require('../../src/appsec') +const Config = require('../../src/config') + +describe('Attacker fingerprinting', () => { + let port, server + + before(() => { + return agent.load(['express', 'http'], { client: false }) + }) + + before((done) => { + const express = require('../../../../versions/express').get() + const bodyParser = require('../../../../versions/body-parser').get() + + const app = express() + app.use(bodyParser.json()) + + app.post('/', (req, res) => { + res.end('DONE') + }) + + server = app.listen(port, () => { + port = server.address().port + done() + }) + }) + + after(() => { + server.close() + return agent.close({ ritmReset: false }) + }) + + beforeEach(() => { + appsec.enable(new Config( + { + appsec: { + enabled: true, + rules: path.join(__dirname, 'attacker-fingerprinting-rules.json') + } + } + )) + }) + + afterEach(() => { + appsec.disable() + }) + + it('should report http fingerprints', async () => { + await axios.post( + `http://localhost:${port}/?key=testattack`, + { + bodyParam: 'bodyValue' + }, + { + headers: { + headerName: 'headerValue', + 'x-real-ip': '255.255.255.255' + } + } + ) + + await agent.use((traces) => { + const span = traces[0][0] + assert.property(span.meta, '_dd.appsec.fp.http.header') + assert.equal(span.meta['_dd.appsec.fp.http.header'], 'hdr-0110000110-6431a3e6-5-55682ec1') + 
assert.property(span.meta, '_dd.appsec.fp.http.network') + assert.equal(span.meta['_dd.appsec.fp.http.network'], 'net-1-0100000000') + assert.property(span.meta, '_dd.appsec.fp.http.endpoint') + assert.equal(span.meta['_dd.appsec.fp.http.endpoint'], 'http-post-8a5edab2-2c70e12b-be31090f') + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/attacker-fingerprinting.passport-http.plugin.spec.js b/packages/dd-trace/test/appsec/attacker-fingerprinting.passport-http.plugin.spec.js new file mode 100644 index 00000000000..58b54e2c704 --- /dev/null +++ b/packages/dd-trace/test/appsec/attacker-fingerprinting.passport-http.plugin.spec.js @@ -0,0 +1,107 @@ +'use strict' + +const Axios = require('axios') +const { assert } = require('chai') + +const agent = require('../plugins/agent') +const appsec = require('../../src/appsec') +const Config = require('../../src/config') + +function assertFingerprintInTraces (traces) { + const span = traces[0][0] + assert.property(span.meta, '_dd.appsec.fp.http.header') + assert.equal(span.meta['_dd.appsec.fp.http.header'], 'hdr-0110000110-6431a3e6-5-e58aa9dd') + assert.property(span.meta, '_dd.appsec.fp.http.network') + assert.equal(span.meta['_dd.appsec.fp.http.network'], 'net-0-0000000000') + assert.property(span.meta, '_dd.appsec.fp.http.endpoint') + assert.equal(span.meta['_dd.appsec.fp.http.endpoint'], 'http-post-7e93fba0--') +} + +withVersions('passport-http', 'passport-http', version => { + describe('Attacker fingerprinting', () => { + let port, server, axios + + before(() => { + return agent.load(['express', 'http'], { client: false }) + }) + + before(() => { + appsec.enable(new Config({ + appsec: true + })) + }) + + before((done) => { + const express = require('../../../../versions/express').get() + const bodyParser = require('../../../../versions/body-parser').get() + const passport = require('../../../../versions/passport').get() + const { BasicStrategy } = require(`../../../../versions/passport-http@${version}`).get() + + const 
app = express() + app.use(bodyParser.json()) + app.use(passport.initialize()) + + passport.use(new BasicStrategy( + function verify (username, password, done) { + if (username === 'success') { + done(null, { + id: 1234, + username + }) + } else { + done(null, false) + } + } + )) + + app.post('/login', passport.authenticate('basic', { session: false }), function (req, res) { + res.end() + }) + + server = app.listen(port, () => { + port = server.address().port + axios = Axios.create({ + baseURL: `http://localhost:${port}` + }) + done() + }) + }) + + after(() => { + server.close() + return agent.close({ ritmReset: false }) + }) + + after(() => { + appsec.disable() + }) + + it('should report http fingerprints on login fail', async () => { + try { + await axios.post( + `http://localhost:${port}/login`, {}, { + auth: { + username: 'fail', + password: '1234' + } + } + ) + } catch (e) {} + + await agent.use(assertFingerprintInTraces) + }) + + it('should report http fingerprints on login successful', async () => { + await axios.post( + `http://localhost:${port}/login`, {}, { + auth: { + username: 'success', + password: '1234' + } + } + ) + + await agent.use(assertFingerprintInTraces) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/attacker-fingerprinting.passport-local.plugin.spec.js b/packages/dd-trace/test/appsec/attacker-fingerprinting.passport-local.plugin.spec.js new file mode 100644 index 00000000000..b51aa57de9c --- /dev/null +++ b/packages/dd-trace/test/appsec/attacker-fingerprinting.passport-local.plugin.spec.js @@ -0,0 +1,105 @@ +'use strict' + +const Axios = require('axios') +const { assert } = require('chai') + +const agent = require('../plugins/agent') +const appsec = require('../../src/appsec') +const Config = require('../../src/config') + +function assertFingerprintInTraces (traces) { + const span = traces[0][0] + assert.property(span.meta, '_dd.appsec.fp.http.header') + assert.equal(span.meta['_dd.appsec.fp.http.header'], 
'hdr-0110000110-6431a3e6-4-c348f529') + assert.property(span.meta, '_dd.appsec.fp.http.network') + assert.equal(span.meta['_dd.appsec.fp.http.network'], 'net-0-0000000000') + assert.property(span.meta, '_dd.appsec.fp.http.endpoint') + assert.equal(span.meta['_dd.appsec.fp.http.endpoint'], 'http-post-7e93fba0--f29f6224') +} + +withVersions('passport-local', 'passport-local', version => { + describe('Attacker fingerprinting', () => { + let port, server, axios + + before(() => { + return agent.load(['express', 'http'], { client: false }) + }) + + before(() => { + appsec.enable(new Config({ + appsec: true + })) + }) + + before((done) => { + const express = require('../../../../versions/express').get() + const bodyParser = require('../../../../versions/body-parser').get() + const passport = require('../../../../versions/passport').get() + const LocalStrategy = require(`../../../../versions/passport-local@${version}`).get() + + const app = express() + app.use(bodyParser.json()) + app.use(passport.initialize()) + + passport.use(new LocalStrategy( + function verify (username, password, done) { + if (username === 'success') { + done(null, { + id: 1234, + username + }) + } else { + done(null, false) + } + } + )) + + app.post('/login', passport.authenticate('local', { session: false }), function (req, res) { + res.end() + }) + + server = app.listen(port, () => { + port = server.address().port + axios = Axios.create({ + baseURL: `http://localhost:${port}` + }) + done() + }) + }) + + after(() => { + server.close() + return agent.close({ ritmReset: false }) + }) + + after(() => { + appsec.disable() + }) + + it('should report http fingerprints on login fail', async () => { + try { + await axios.post( + `http://localhost:${port}/login`, + { + username: 'fail', + password: '1234' + } + ) + } catch (e) {} + + await agent.use(assertFingerprintInTraces) + }) + + it('should report http fingerprints on login successful', async () => { + await axios.post( + 
`http://localhost:${port}/login`, + { + username: 'success', + password: '1234' + } + ) + + await agent.use(assertFingerprintInTraces) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/attacker-fingerprinting.spec.js b/packages/dd-trace/test/appsec/attacker-fingerprinting.spec.js new file mode 100644 index 00000000000..013c9cbd3ed --- /dev/null +++ b/packages/dd-trace/test/appsec/attacker-fingerprinting.spec.js @@ -0,0 +1,83 @@ +'use strict' + +const axios = require('axios') +const { assert } = require('chai') +const agent = require('../plugins/agent') +const tracer = require('../../../../index') +const appsec = require('../../src/appsec') +const Config = require('../../src/config') + +describe('Attacker fingerprinting', () => { + describe('SDK', () => { + let http + let controller + let appListener + let port + + function listener (req, res) { + if (controller) { + controller(req, res) + } + } + + before(() => { + appsec.enable(new Config({ + enabled: true + })) + }) + + before(async () => { + await agent.load('http') + http = require('http') + }) + + before(done => { + const server = new http.Server(listener) + appListener = server + .listen(port, 'localhost', () => { + port = appListener.address().port + done() + }) + }) + + after(() => { + appListener.close() + appsec.disable() + return agent.close({ ritmReset: false }) + }) + + it('should provide fingerprinting on successful user login track', (done) => { + controller = (req, res) => { + tracer.appsec.trackUserLoginSuccessEvent({ + id: 'test_user_id' + }, { metakey: 'metaValue' }) + res.end() + } + + agent.use(traces => { + assert.property(traces[0][0].meta, '_dd.appsec.fp.http.header') + assert.equal(traces[0][0].meta['_dd.appsec.fp.http.header'], 'hdr-0110000010-6431a3e6-3-98425651') + assert.property(traces[0][0].meta, '_dd.appsec.fp.http.network') + assert.equal(traces[0][0].meta['_dd.appsec.fp.http.network'], 'net-0-0000000000') + }).then(done).catch(done) + + axios.get(`http://localhost:${port}/`) 
+ }) + + it('should provide fingerprinting on failed user login track', (done) => { + controller = (req, res) => { + tracer.appsec.trackUserLoginFailureEvent('test_user_id', true, { metakey: 'metaValue' }) + res.end() + } + + agent.use(traces => { + assert.property(traces[0][0].meta, '_dd.appsec.fp.http.header') + assert.equal(traces[0][0].meta['_dd.appsec.fp.http.header'], 'hdr-0110000010-6431a3e6-3-98425651') + assert.property(traces[0][0].meta, '_dd.appsec.fp.http.network') + assert.equal(traces[0][0].meta['_dd.appsec.fp.http.network'], 'net-0-0000000000') + }).then(done).catch(done) + + axios.get(`http://localhost:${port}/`) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/express-rules.json b/packages/dd-trace/test/appsec/express-rules.json index 8c5dfeaba31..e8dd910bd02 100644 --- a/packages/dd-trace/test/appsec/express-rules.json +++ b/packages/dd-trace/test/appsec/express-rules.json @@ -28,6 +28,31 @@ ], "transformers": ["lowercase"], "on_match": ["block"] + }, + { + "id": "test-rule-id-2", + "name": "test-rule-name-2", + "tags": { + "type": "security_scanner", + "category": "attack_attempt" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.path_params" + } + ], + "list": [ + "testattack" + ] + }, + "operator": "phrase_match" + } + ], + "transformers": ["lowercase"], + "on_match": ["block"] } ] } diff --git a/packages/dd-trace/test/appsec/iast/analyzers/cookie-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/cookie-analyzer.spec.js new file mode 100644 index 00000000000..ba9c114a5c1 --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/analyzers/cookie-analyzer.spec.js @@ -0,0 +1,110 @@ +'use strict' + +const { assert } = require('chai') +const CookieAnalyzer = require('../../../../src/appsec/iast/analyzers/cookie-analyzer') +const Analyzer = require('../../../../src/appsec/iast/analyzers/vulnerability-analyzer') +const Config = require('../../../../src/config') + +describe('CookieAnalyzer', 
() => { + const VULNERABILITY_TYPE = 'VULN_TYPE' + + it('should extends Analyzer', () => { + assert.isTrue(Analyzer.isPrototypeOf(CookieAnalyzer)) + }) + + describe('_createHashSource', () => { + let cookieAnalyzer + + beforeEach(() => { + cookieAnalyzer = new CookieAnalyzer(VULNERABILITY_TYPE, 'prop') + }) + + describe('default config', () => { + beforeEach(() => { + cookieAnalyzer.onConfigure(new Config({ iast: true })) + }) + + it('should create hash from vulnerability type and not long enough evidence value', () => { + const evidence = { + value: '0'.repeat(31) + } + + const vulnerability = cookieAnalyzer._createVulnerability(VULNERABILITY_TYPE, evidence, null, {}) + + assert.equal(vulnerability.hash, cookieAnalyzer._createHash(`${VULNERABILITY_TYPE}:${evidence.value}`)) + }) + + it('should create different hash from vulnerability type and long evidence value', () => { + const evidence = { + value: '0'.repeat(32) + } + + const vulnerability = cookieAnalyzer._createVulnerability(VULNERABILITY_TYPE, evidence, null, {}) + + assert.equal(vulnerability.hash, cookieAnalyzer._createHash(`FILTERED_${VULNERABILITY_TYPE}`)) + }) + }) + + describe('custom cookieFilterPattern', () => { + beforeEach(() => { + cookieAnalyzer.onConfigure(new Config({ + iast: { + enabled: true, + cookieFilterPattern: '^filtered$' + } + })) + }) + + it('should create hash from vulnerability with the default pattern', () => { + const evidence = { + value: 'notfiltered' + } + + const vulnerability = cookieAnalyzer._createVulnerability(VULNERABILITY_TYPE, evidence, null, {}) + + assert.equal(vulnerability.hash, cookieAnalyzer._createHash(`${VULNERABILITY_TYPE}:${evidence.value}`)) + }) + + it('should create different hash from vulnerability type and long evidence value', () => { + const evidence = { + value: 'filtered' + } + + const vulnerability = cookieAnalyzer._createVulnerability(VULNERABILITY_TYPE, evidence, null, {}) + + assert.equal(vulnerability.hash, 
cookieAnalyzer._createHash(`FILTERED_${VULNERABILITY_TYPE}`)) + }) + }) + + describe('invalid cookieFilterPattern maintains default behaviour', () => { + beforeEach(() => { + cookieAnalyzer.onConfigure(new Config({ + iast: { + enabled: true, + cookieFilterPattern: '(' + } + })) + }) + + it('should create hash from vulnerability type and not long enough evidence value', () => { + const evidence = { + value: '0'.repeat(31) + } + + const vulnerability = cookieAnalyzer._createVulnerability(VULNERABILITY_TYPE, evidence, null, {}) + + assert.equal(vulnerability.hash, cookieAnalyzer._createHash(`${VULNERABILITY_TYPE}:${evidence.value}`)) + }) + + it('should create different hash from vulnerability type and long evidence value', () => { + const evidence = { + value: '0'.repeat(32) + } + + const vulnerability = cookieAnalyzer._createVulnerability(VULNERABILITY_TYPE, evidence, null, {}) + + assert.equal(vulnerability.hash, cookieAnalyzer._createHash(`FILTERED_${VULNERABILITY_TYPE}`)) + }) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/insecure-cookie-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/insecure-cookie-analyzer.spec.js index fbb3454c27e..af4bd911325 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/insecure-cookie-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/insecure-cookie-analyzer.spec.js @@ -3,12 +3,20 @@ const { prepareTestServerForIast } = require('../utils') const Analyzer = require('../../../../src/appsec/iast/analyzers/vulnerability-analyzer') const { INSECURE_COOKIE } = require('../../../../src/appsec/iast/vulnerabilities') +const insecureCookieAnalyzer = require('../../../../src/appsec/iast/analyzers/insecure-cookie-analyzer') +const CookieAnalyzer = require('../../../../src/appsec/iast/analyzers/cookie-analyzer') + const analyzer = new Analyzer() describe('insecure cookie analyzer', () => { it('Expected vulnerability identifier', () => { 
expect(INSECURE_COOKIE).to.be.equals('INSECURE_COOKIE') }) + + it('InsecureCookieAnalyzer extends CookieAnalyzer', () => { + expect(CookieAnalyzer.isPrototypeOf(insecureCookieAnalyzer.constructor)).to.be.true + }) + // In these test, even when we are having multiple vulnerabilities, all the vulnerabilities // are in the same cookies method, and it is expected to detect both even when the max operations is 1 const iastConfig = { @@ -43,6 +51,12 @@ describe('insecure cookie analyzer', () => { res.setHeader('set-cookie', ['key=value; HttpOnly', 'key2=value2; Secure']) }, INSECURE_COOKIE, 1) + testThatRequestHasVulnerability((req, res) => { + const cookieNamePrefix = '0'.repeat(32) + res.setHeader('set-cookie', [cookieNamePrefix + 'key1=value', cookieNamePrefix + 'key2=value2']) + }, INSECURE_COOKIE, 1, undefined, undefined, + 'Should be detected as the same INSECURE_COOKIE vulnerability when the cookie name is long') + testThatRequestHasNoVulnerability((req, res) => { res.setHeader('set-cookie', 'key=value; Secure') }, INSECURE_COOKIE) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/no-httponly-cookie-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/no-httponly-cookie-analyzer.spec.js index 3c9ed1bae19..743db43097c 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/no-httponly-cookie-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/no-httponly-cookie-analyzer.spec.js @@ -3,6 +3,9 @@ const { prepareTestServerForIast } = require('../utils') const Analyzer = require('../../../../src/appsec/iast/analyzers/vulnerability-analyzer') const { NO_HTTPONLY_COOKIE } = require('../../../../src/appsec/iast/vulnerabilities') +const CookieAnalyzer = require('../../../../src/appsec/iast/analyzers/cookie-analyzer') +const noHttponlyCookieAnalyzer = require('../../../../src/appsec/iast/analyzers/no-httponly-cookie-analyzer') + const analyzer = new Analyzer() describe('no HttpOnly cookie analyzer', () => { @@ -10,6 +13,10 @@ describe('no 
HttpOnly cookie analyzer', () => { expect(NO_HTTPONLY_COOKIE).to.be.equals('NO_HTTPONLY_COOKIE') }) + it('NoHttponlyCookieAnalyzer extends CookieAnalyzer', () => { + expect(CookieAnalyzer.isPrototypeOf(noHttponlyCookieAnalyzer.constructor)).to.be.true + }) + // In these test, even when we are having multiple vulnerabilities, all the vulnerabilities // are in the same cookies method, and it is expected to detect both even when the max operations is 1 const iastConfig = { @@ -18,6 +25,7 @@ describe('no HttpOnly cookie analyzer', () => { maxConcurrentRequests: 1, maxContextOperations: 1 } + prepareTestServerForIast('no HttpOnly cookie analyzer', (testThatRequestHasVulnerability, testThatRequestHasNoVulnerability) => { testThatRequestHasVulnerability((req, res) => { @@ -47,6 +55,12 @@ describe('no HttpOnly cookie analyzer', () => { res.setHeader('set-cookie', ['key=value; HttpOnly', 'key2=value2; Secure']) }, NO_HTTPONLY_COOKIE, 1) + testThatRequestHasVulnerability((req, res) => { + const cookieNamePrefix = '0'.repeat(32) + res.setHeader('set-cookie', [cookieNamePrefix + 'key1=value', cookieNamePrefix + 'key2=value2']) + }, NO_HTTPONLY_COOKIE, 1, undefined, undefined, + 'Should be detected as the same NO_HTTPONLY_COOKIE vulnerability when the cookie name is long') + testThatRequestHasNoVulnerability((req, res) => { res.setHeader('set-cookie', 'key=value; HttpOnly') }, NO_HTTPONLY_COOKIE) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/no-samesite-cookie-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/no-samesite-cookie-analyzer.spec.js index 03be8280795..0d7b1f26dc9 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/no-samesite-cookie-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/no-samesite-cookie-analyzer.spec.js @@ -3,6 +3,9 @@ const { prepareTestServerForIast } = require('../utils') const Analyzer = require('../../../../src/appsec/iast/analyzers/vulnerability-analyzer') const { NO_SAMESITE_COOKIE } = 
require('../../../../src/appsec/iast/vulnerabilities') +const CookieAnalyzer = require('../../../../src/appsec/iast/analyzers/cookie-analyzer') +const noSamesiteCookieAnalyzer = require('../../../../src/appsec/iast/analyzers/no-samesite-cookie-analyzer') + const analyzer = new Analyzer() describe('no SameSite cookie analyzer', () => { @@ -10,6 +13,10 @@ describe('no SameSite cookie analyzer', () => { expect(NO_SAMESITE_COOKIE).to.be.equals('NO_SAMESITE_COOKIE') }) + it('NoSamesiteCookieAnalyzer extends CookieAnalyzer', () => { + expect(CookieAnalyzer.isPrototypeOf(noSamesiteCookieAnalyzer.constructor)).to.be.true + }) + // In these test, even when we are having multiple vulnerabilities, all the vulnerabilities // are in the same cookies method, and it is expected to detect both even when the max operations is 1 const iastConfig = { @@ -59,6 +66,12 @@ describe('no SameSite cookie analyzer', () => { res.setHeader('set-cookie', 'key=value; SameSite=strict') }, NO_SAMESITE_COOKIE) + testThatRequestHasVulnerability((req, res) => { + const cookieNamePrefix = '0'.repeat(32) + res.setHeader('set-cookie', [cookieNamePrefix + 'key1=value', cookieNamePrefix + 'key2=value2']) + }, NO_SAMESITE_COOKIE, 1, undefined, undefined, + 'Should be detected as the same NO_SAMESITE_COOKIE vulnerability when the cookie name is long') + testThatRequestHasNoVulnerability((req, res) => { res.setHeader('set-cookie', 'key=') }, NO_SAMESITE_COOKIE) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/path-traversal-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/path-traversal-analyzer.spec.js index 5b46c193fbd..6c39799f916 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/path-traversal-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/path-traversal-analyzer.spec.js @@ -45,6 +45,14 @@ const InjectionAnalyzer = proxyquire('../../../../src/appsec/iast/analyzers/inje }) describe('path-traversal-analyzer', () => { + before(() => { + 
pathTraversalAnalyzer.enable() + }) + + after(() => { + pathTraversalAnalyzer.disable() + }) + it('Analyzer should be subscribed to proper channel', () => { expect(pathTraversalAnalyzer._subscriptions).to.have.lengthOf(1) expect(pathTraversalAnalyzer._subscriptions[0]._channel.name).to.equals('apm:fs:operation:start') diff --git a/packages/dd-trace/test/appsec/iast/index.spec.js b/packages/dd-trace/test/appsec/iast/index.spec.js index 7035296d8de..f770694ede4 100644 --- a/packages/dd-trace/test/appsec/iast/index.spec.js +++ b/packages/dd-trace/test/appsec/iast/index.spec.js @@ -7,6 +7,7 @@ const iastContextFunctions = require('../../../src/appsec/iast/iast-context') const overheadController = require('../../../src/appsec/iast/overhead-controller') const vulnerabilityReporter = require('../../../src/appsec/iast/vulnerability-reporter') const { testInRequest } = require('./utils') +const { IAST_MODULE } = require('../../../src/appsec/rasp/fs-plugin') describe('IAST Index', () => { beforeEach(() => { @@ -102,6 +103,8 @@ describe('IAST Index', () => { let mockVulnerabilityReporter let mockIast let mockOverheadController + let appsecFsPlugin + let analyzers const config = new Config({ experimental: { @@ -125,9 +128,18 @@ describe('IAST Index', () => { startGlobalContext: sinon.stub(), finishGlobalContext: sinon.stub() } + appsecFsPlugin = { + enable: sinon.stub(), + disable: sinon.stub() + } + analyzers = { + enableAllAnalyzers: sinon.stub() + } mockIast = proxyquire('../../../src/appsec/iast', { './vulnerability-reporter': mockVulnerabilityReporter, - './overhead-controller': mockOverheadController + './overhead-controller': mockOverheadController, + '../rasp/fs-plugin': appsecFsPlugin, + './analyzers': analyzers }) }) @@ -136,6 +148,22 @@ describe('IAST Index', () => { mockIast.disable() }) + describe('enable', () => { + it('should enable AppsecFsPlugin', () => { + mockIast.enable(config) + expect(appsecFsPlugin.enable).to.have.been.calledOnceWithExactly(IAST_MODULE) 
+ expect(analyzers.enableAllAnalyzers).to.have.been.calledAfter(appsecFsPlugin.enable) + }) + }) + + describe('disable', () => { + it('should disable AppsecFsPlugin', () => { + mockIast.enable(config) + mockIast.disable() + expect(appsecFsPlugin.disable).to.have.been.calledOnceWithExactly(IAST_MODULE) + }) + }) + describe('managing overhead controller global context', () => { it('should start global context refresher on iast enabled', () => { mockIast.enable(config) diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/resources/propagationFunctions.js b/packages/dd-trace/test/appsec/iast/taint-tracking/resources/propagationFunctions.js index 4028f265b3e..de37c351789 100644 --- a/packages/dd-trace/test/appsec/iast/taint-tracking/resources/propagationFunctions.js +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/resources/propagationFunctions.js @@ -12,6 +12,13 @@ function templateLiteralEndingWithNumberParams (str) { return `${str}Literal${num1}${num2}` } +function templateLiteralWithTaintedAtTheEnd (str) { + const num1 = 1 + const num2 = 2 + const hello = 'world' + return `Literal${num1}${num2}-${hello}-${str}` +} + function appendStr (str) { let pre = 'pre_' pre += str @@ -108,6 +115,7 @@ module.exports = { substrStr, substringStr, templateLiteralEndingWithNumberParams, + templateLiteralWithTaintedAtTheEnd, toLowerCaseStr, toUpperCaseStr, trimEndStr, diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/taint-tracking-impl.spec.js b/packages/dd-trace/test/appsec/iast/taint-tracking/taint-tracking-impl.spec.js index e0eb9fc580a..d356753d607 100644 --- a/packages/dd-trace/test/appsec/iast/taint-tracking/taint-tracking-impl.spec.js +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/taint-tracking-impl.spec.js @@ -26,6 +26,7 @@ const propagationFns = [ 'substrStr', 'substringStr', 'templateLiteralEndingWithNumberParams', + 'templateLiteralWithTaintedAtTheEnd', 'toLowerCaseStr', 'toUpperCaseStr', 'trimEndStr', @@ -137,7 +138,8 @@ 
describe('TaintTracking', () => { 'concatSuffix', 'concatTaintedStr', 'insertStr', - 'templateLiteralEndingWithNumberParams' + 'templateLiteralEndingWithNumberParams', + 'templateLiteralWithTaintedAtTheEnd' ] propagationFns.forEach((propFn) => { if (filtered.includes(propFn)) return diff --git a/packages/dd-trace/test/appsec/iast/utils.js b/packages/dd-trace/test/appsec/iast/utils.js index 23d9d73260a..2ef5a77ee30 100644 --- a/packages/dd-trace/test/appsec/iast/utils.js +++ b/packages/dd-trace/test/appsec/iast/utils.js @@ -112,9 +112,7 @@ function beforeEachIastTest (iastConfig) { beforeEach(() => { vulnerabilityReporter.clearCache() iast.enable(new Config({ - experimental: { - iast: iastConfig - } + iast: iastConfig })) }) } @@ -249,8 +247,8 @@ function prepareTestServerForIast (description, tests, iastConfig) { return agent.close({ ritmReset: false }) }) - function testThatRequestHasVulnerability (fn, vulnerability, occurrences, cb, makeRequest) { - it(`should have ${vulnerability} vulnerability`, function (done) { + function testThatRequestHasVulnerability (fn, vulnerability, occurrences, cb, makeRequest, description) { + it(description || `should have ${vulnerability} vulnerability`, function (done) { this.timeout(5000) app = fn checkVulnerabilityInRequest(vulnerability, occurrences, cb, makeRequest, config, done) diff --git a/packages/dd-trace/test/appsec/index.express.plugin.spec.js b/packages/dd-trace/test/appsec/index.express.plugin.spec.js index e8b0d4a50e4..c38d496623b 100644 --- a/packages/dd-trace/test/appsec/index.express.plugin.spec.js +++ b/packages/dd-trace/test/appsec/index.express.plugin.spec.js @@ -1,6 +1,8 @@ 'use strict' -const axios = require('axios') +const Axios = require('axios') +const { assert } = require('chai') +const getPort = require('get-port') const path = require('path') const agent = require('../plugins/agent') const appsec = require('../../src/appsec') @@ -9,8 +11,8 @@ const { json } = 
require('../../src/appsec/blocked_templates') const zlib = require('zlib') withVersions('express', 'express', version => { - describe('Suspicious request blocking - query', () => { - let port, server, requestBody + describe('Suspicious request blocking - path parameters', () => { + let server, paramCallbackSpy, axios before(() => { return agent.load(['express', 'http'], { client: false }) @@ -18,35 +20,41 @@ withVersions('express', 'express', version => { before((done) => { const express = require('../../../../versions/express').get() - const bodyParser = require('../../../../versions/body-parser').get() const app = express() - app.use(bodyParser.json()) - app.get('/', (req, res) => { - requestBody() - res.end('DONE') + app.get('/multiple-path-params/:parameter1/:parameter2', (req, res) => { + res.send('DONE') }) - app.post('/', (req, res) => { + const nestedRouter = express.Router({ mergeParams: true }) + nestedRouter.get('/:nestedDuplicatedParameter', (req, res) => { res.send('DONE') }) - app.post('/sendjson', (req, res) => { - res.send({ sendResKey: 'sendResValue' }) - }) + app.use('/nested/:nestedDuplicatedParameter', nestedRouter) - app.post('/jsonp', (req, res) => { - res.jsonp({ jsonpResKey: 'jsonpResValue' }) + app.get('/callback-path-param/:callbackedParameter', (req, res) => { + res.send('DONE') }) - app.post('/json', (req, res) => { - res.jsonp({ jsonResKey: 'jsonResValue' }) + const paramCallback = (req, res, next) => { + next() + } + + paramCallbackSpy = sinon.spy(paramCallback) + + app.param(() => { + return paramCallbackSpy }) - server = app.listen(port, () => { - port = server.address().port - done() + app.param('callbackedParameter') + + getPort().then((port) => { + server = app.listen(port, () => { + axios = Axios.create({ baseURL: `http://localhost:${port}` }) + done() + }) }) }) @@ -55,135 +63,330 @@ withVersions('express', 'express', version => { return agent.close({ ritmReset: false }) }) - describe('Blocking', () => { - beforeEach(async () => 
{ - requestBody = sinon.stub() - appsec.enable(new Config({ appsec: { enabled: true, rules: path.join(__dirname, 'express-rules.json') } })) + beforeEach(async () => { + appsec.enable(new Config({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'express-rules.json') + } + })) + }) + + afterEach(() => { + appsec.disable() + sinon.reset() + }) + + describe('route with multiple path parameters', () => { + it('should not block the request when attack is not detected', async () => { + const res = await axios.get('/multiple-path-params/safe_param/safe_param') + + assert.equal(res.status, 200) + assert.equal(res.data, 'DONE') + }) + + it('should block the request when attack is detected in both parameters', async () => { + try { + await axios.get('/multiple-path-params/testattack/testattack') + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + assert.equal(e.response.status, 403) + assert.deepEqual(e.response.data, JSON.parse(json)) + } + }) + + it('should block the request when attack is detected in the first parameter', async () => { + try { + await axios.get('/multiple-path-params/testattack/safe_param') + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + assert.equal(e.response.status, 403) + assert.deepEqual(e.response.data, JSON.parse(json)) + } + }) + + it('should block the request when attack is detected in the second parameter', async () => { + try { + await axios.get('/multiple-path-params/safe_param/testattack') + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + assert.equal(e.response.status, 403) + assert.deepEqual(e.response.data, JSON.parse(json)) + } + }) + }) + + describe('nested routers', () => { + it('should not block the request when attack is not detected', async () => { + const res = await axios.get('/nested/safe_param/safe_param') + + assert.equal(res.status, 200) + assert.equal(res.data, 'DONE') }) - afterEach(() => { - 
appsec.disable() + it('should block the request when attack is detected in the nested paremeter', async () => { + try { + await axios.get('/nested/safe_param/testattack') + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + assert.equal(e.response.status, 403) + assert.deepEqual(e.response.data, JSON.parse(json)) + } }) - it('should not block the request without an attack', async () => { - const res = await axios.get(`http://localhost:${port}/?key=value`) + it('should block the request when attack is detected in the parent paremeter', async () => { + try { + await axios.get('/nested/testattack/safe_param') - expect(requestBody).to.be.calledOnce - expect(res.data).to.be.equal('DONE') + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + assert.equal(e.response.status, 403) + assert.deepEqual(e.response.data, JSON.parse(json)) + } + }) + + it('should block the request when attack is detected both parameters', async () => { + try { + await axios.get('/nested/testattack/testattack') + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + assert.equal(e.response.status, 403) + assert.deepEqual(e.response.data, JSON.parse(json)) + } + }) + }) + + describe('path parameter callback', () => { + it('should not block the request when attack is not detected', async () => { + const res = await axios.get('/callback-path-param/safe_param') + assert.equal(res.status, 200) + assert.equal(res.data, 'DONE') + sinon.assert.calledOnce(paramCallbackSpy) }) it('should block the request when attack is detected', async () => { try { - await axios.get(`http://localhost:${port}/?key=testattack`) + await axios.get('/callback-path-param/testattack') return Promise.reject(new Error('Request should not return 200')) } catch (e) { - expect(e.response.status).to.be.equals(403) - expect(e.response.data).to.be.deep.equal(JSON.parse(json)) - expect(requestBody).not.to.be.called + 
assert.equal(e.response.status, 403) + assert.deepEqual(e.response.data, JSON.parse(json)) + sinon.assert.notCalled(paramCallbackSpy) } }) }) + }) + + describe('Suspicious request blocking - query', () => { + let server, requestBody, axios + + before(() => { + return agent.load(['express', 'http'], { client: false }) + }) - describe('Api Security', () => { - let config + before((done) => { + const express = require('../../../../versions/express').get() - beforeEach(() => { - config = new Config({ - appsec: { - enabled: true, - rules: path.join(__dirname, 'api_security_rules.json'), - apiSecurity: { - enabled: true - } - } + const app = express() + + app.get('/', (req, res) => { + requestBody() + res.end('DONE') + }) + + getPort().then((port) => { + server = app.listen(port, () => { + axios = Axios.create({ baseURL: `http://localhost:${port}` }) + done() }) }) + }) + + after(() => { + server.close() + return agent.close({ ritmReset: false }) + }) + + beforeEach(async () => { + requestBody = sinon.stub() + appsec.enable(new Config({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'express-rules.json') + } + })) + }) + + afterEach(() => { + appsec.disable() + }) + + it('should not block the request without an attack', async () => { + const res = await axios.get('/?key=value') + + assert.equal(res.status, 200) + assert.equal(res.data, 'DONE') + sinon.assert.calledOnce(requestBody) + }) + + it('should block the request when attack is detected', async () => { + try { + await axios.get('/?key=testattack') + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + assert.equal(e.response.status, 403) + assert.deepEqual(e.response.data, JSON.parse(json)) + sinon.assert.notCalled(requestBody) + } + }) + }) + + describe('Api Security', () => { + let config, server, axios + + before(() => { + return agent.load(['express', 'http'], { client: false }) + }) + + before((done) => { + const express = 
require('../../../../versions/express').get() + const bodyParser = require('../../../../versions/body-parser').get() + + const app = express() + app.use(bodyParser.json()) + + app.post('/', (req, res) => { + res.send('DONE') + }) + + app.post('/sendjson', (req, res) => { + res.send({ sendResKey: 'sendResValue' }) + }) - afterEach(() => { - appsec.disable() + app.post('/jsonp', (req, res) => { + res.jsonp({ jsonpResKey: 'jsonpResValue' }) }) - describe('with requestSampling 1.0', () => { - beforeEach(() => { - config.appsec.apiSecurity.requestSampling = 1.0 - appsec.enable(config) + app.post('/json', (req, res) => { + res.jsonp({ jsonResKey: 'jsonResValue' }) + }) + + getPort().then((port) => { + server = app.listen(port, () => { + axios = Axios.create({ baseURL: `http://localhost:${port}` }) + done() }) + }) + }) - function formatSchema (body) { - return zlib.gzipSync(JSON.stringify(body)).toString('base64') + after(() => { + server.close() + return agent.close({ ritmReset: false }) + }) + + beforeEach(() => { + config = new Config({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'api_security_rules.json'), + apiSecurity: { + enabled: true + } } + }) + }) - it('should get the request body schema', async () => { - const expectedRequestBodySchema = formatSchema([{ key: [8] }]) - const res = await axios.post(`http://localhost:${port}/`, { key: 'value' }) + afterEach(() => { + appsec.disable() + }) - await agent.use((traces) => { - const span = traces[0][0] - expect(span.meta).to.haveOwnProperty('_dd.appsec.s.req.body') - expect(span.meta).not.to.haveOwnProperty('_dd.appsec.s.res.body') - expect(span.meta['_dd.appsec.s.req.body']).to.be.equal(expectedRequestBodySchema) - }) + describe('with requestSampling 1.0', () => { + beforeEach(() => { + config.appsec.apiSecurity.requestSampling = 1.0 + appsec.enable(config) + }) - expect(res.status).to.be.equal(200) - expect(res.data).to.be.equal('DONE') - }) + function formatSchema (body) { + return 
zlib.gzipSync(JSON.stringify(body)).toString('base64') + } - it('should get the response body schema with res.send method with object', async () => { - const expectedResponseBodySchema = formatSchema([{ sendResKey: [8] }]) - const res = await axios.post(`http://localhost:${port}/sendjson`, { key: 'value' }) + it('should get the request body schema', async () => { + const expectedRequestBodySchema = formatSchema([{ key: [8] }]) - await agent.use((traces) => { - const span = traces[0][0] - expect(span.meta['_dd.appsec.s.res.body']).to.be.equal(expectedResponseBodySchema) - }) + const res = await axios.post('/', { key: 'value' }) - expect(res.status).to.be.equal(200) - expect(res.data).to.be.deep.equal({ sendResKey: 'sendResValue' }) + await agent.use((traces) => { + const span = traces[0][0] + assert.property(span.meta, '_dd.appsec.s.req.body') + assert.notProperty(span.meta, '_dd.appsec.s.res.body') + assert.equal(span.meta['_dd.appsec.s.req.body'], expectedRequestBodySchema) }) - it('should get the response body schema with res.json method', async () => { - const expectedResponseBodySchema = formatSchema([{ jsonResKey: [8] }]) - const res = await axios.post(`http://localhost:${port}/json`, { key: 'value' }) + assert.equal(res.status, 200) + assert.equal(res.data, 'DONE') + }) - await agent.use((traces) => { - const span = traces[0][0] - expect(span.meta['_dd.appsec.s.res.body']).to.be.equal(expectedResponseBodySchema) - }) + it('should get the response body schema with res.send method with object', async () => { + const expectedResponseBodySchema = formatSchema([{ sendResKey: [8] }]) + const res = await axios.post('/sendjson', { key: 'value' }) - expect(res.status).to.be.equal(200) - expect(res.data).to.be.deep.equal({ jsonResKey: 'jsonResValue' }) + await agent.use((traces) => { + const span = traces[0][0] + assert.equal(span.meta['_dd.appsec.s.res.body'], expectedResponseBodySchema) }) - it('should get the response body schema with res.jsonp method', async () => 
{ - const expectedResponseBodySchema = formatSchema([{ jsonpResKey: [8] }]) - const res = await axios.post(`http://localhost:${port}/jsonp`, { key: 'value' }) + assert.equal(res.status, 200) + assert.deepEqual(res.data, { sendResKey: 'sendResValue' }) + }) - await agent.use((traces) => { - const span = traces[0][0] - expect(span.meta['_dd.appsec.s.res.body']).to.be.equal(expectedResponseBodySchema) - }) + it('should get the response body schema with res.json method', async () => { + const expectedResponseBodySchema = formatSchema([{ jsonResKey: [8] }]) + const res = await axios.post('/json', { key: 'value' }) - expect(res.status).to.be.equal(200) - expect(res.data).to.be.deep.equal({ jsonpResKey: 'jsonpResValue' }) + await agent.use((traces) => { + const span = traces[0][0] + assert.equal(span.meta['_dd.appsec.s.res.body'], expectedResponseBodySchema) }) - }) - it('should not get the schema', async () => { - config.appsec.apiSecurity.requestSampling = 0 - appsec.enable(config) + assert.equal(res.status, 200) + assert.deepEqual(res.data, { jsonResKey: 'jsonResValue' }) + }) - const res = await axios.post(`http://localhost:${port}/`, { key: 'value' }) + it('should get the response body schema with res.jsonp method', async () => { + const expectedResponseBodySchema = formatSchema([{ jsonpResKey: [8] }]) + const res = await axios.post('/jsonp', { key: 'value' }) await agent.use((traces) => { const span = traces[0][0] - expect(span.meta).not.to.haveOwnProperty('_dd.appsec.s.req.body') - expect(span.meta).not.to.haveOwnProperty('_dd.appsec.s.res.body') + assert.equal(span.meta['_dd.appsec.s.res.body'], expectedResponseBodySchema) }) - expect(res.status).to.be.equal(200) - expect(res.data).to.be.equal('DONE') + assert.equal(res.status, 200) + assert.deepEqual(res.data, { jsonpResKey: 'jsonpResValue' }) + }) + }) + + it('should not get the schema', async () => { + config.appsec.apiSecurity.requestSampling = 0 + appsec.enable(config) + + const res = await axios.post('/', { 
key: 'value' }) + + await agent.use((traces) => { + const span = traces[0][0] + assert.notProperty(span.meta, '_dd.appsec.s.req.body') + assert.notProperty(span.meta, '_dd.appsec.s.res.body') }) + + assert.equal(res.status, 200) + assert.equal(res.data, 'DONE') }) }) }) diff --git a/packages/dd-trace/test/appsec/index.sequelize.plugin.spec.js b/packages/dd-trace/test/appsec/index.sequelize.plugin.spec.js index 07013a570d2..d444b82ec5e 100644 --- a/packages/dd-trace/test/appsec/index.sequelize.plugin.spec.js +++ b/packages/dd-trace/test/appsec/index.sequelize.plugin.spec.js @@ -30,7 +30,7 @@ describe('sequelize', () => { // close agent after(() => { appsec.disable() - return agent.close() + return agent.close({ ritmReset: false }) }) // init database diff --git a/packages/dd-trace/test/appsec/index.spec.js b/packages/dd-trace/test/appsec/index.spec.js index b8a41d840b5..4b8c6c0438c 100644 --- a/packages/dd-trace/test/appsec/index.spec.js +++ b/packages/dd-trace/test/appsec/index.spec.js @@ -10,8 +10,11 @@ const { cookieParser, incomingHttpRequestStart, incomingHttpRequestEnd, - queryParser, passportVerify, + queryParser, + nextBodyParsed, + nextQueryParsed, + expressProcessParams, responseBody, responseWriteHead, responseSetHeader @@ -167,8 +170,11 @@ describe('AppSec Index', function () { it('should subscribe to blockable channels', () => { expect(bodyParser.hasSubscribers).to.be.false expect(cookieParser.hasSubscribers).to.be.false - expect(queryParser.hasSubscribers).to.be.false expect(passportVerify.hasSubscribers).to.be.false + expect(queryParser.hasSubscribers).to.be.false + expect(nextBodyParsed.hasSubscribers).to.be.false + expect(nextQueryParsed.hasSubscribers).to.be.false + expect(expressProcessParams.hasSubscribers).to.be.false expect(responseWriteHead.hasSubscribers).to.be.false expect(responseSetHeader.hasSubscribers).to.be.false @@ -176,8 +182,11 @@ describe('AppSec Index', function () { expect(bodyParser.hasSubscribers).to.be.true 
expect(cookieParser.hasSubscribers).to.be.true - expect(queryParser.hasSubscribers).to.be.true expect(passportVerify.hasSubscribers).to.be.true + expect(queryParser.hasSubscribers).to.be.true + expect(nextBodyParsed.hasSubscribers).to.be.true + expect(nextQueryParsed.hasSubscribers).to.be.true + expect(expressProcessParams.hasSubscribers).to.be.true expect(responseWriteHead.hasSubscribers).to.be.true expect(responseSetHeader.hasSubscribers).to.be.true }) @@ -254,8 +263,11 @@ describe('AppSec Index', function () { expect(bodyParser.hasSubscribers).to.be.false expect(cookieParser.hasSubscribers).to.be.false - expect(queryParser.hasSubscribers).to.be.false expect(passportVerify.hasSubscribers).to.be.false + expect(queryParser.hasSubscribers).to.be.false + expect(nextBodyParsed.hasSubscribers).to.be.false + expect(nextQueryParsed.hasSubscribers).to.be.false + expect(expressProcessParams.hasSubscribers).to.be.false expect(responseWriteHead.hasSubscribers).to.be.false expect(responseSetHeader.hasSubscribers).to.be.false }) @@ -422,9 +434,6 @@ describe('AppSec Index', function () { route: { path: '/path/:c' }, - params: { - c: '3' - }, cookies: { d: '4', e: '5' @@ -446,7 +455,6 @@ describe('AppSec Index', function () { expect(waf.run).to.have.been.calledOnceWithExactly({ persistent: { 'server.request.body': { a: '1' }, - 'server.request.path_params': { c: '3' }, 'server.request.cookies': { d: '4', e: '5' }, 'server.request.query': { b: '2' } } @@ -1058,7 +1066,10 @@ describe('IP blocking', function () { beforeEach(() => { appsec.enable(new Config({ appsec: { - enabled: true + enabled: true, + rasp: { + enabled: false // disable rasp to not trigger lfi + } } })) diff --git a/packages/dd-trace/test/appsec/rasp/fs-plugin.spec.js b/packages/dd-trace/test/appsec/rasp/fs-plugin.spec.js new file mode 100644 index 00000000000..03b2a0acdd0 --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/fs-plugin.spec.js @@ -0,0 +1,251 @@ +'use strict' + +const proxyquire = 
require('proxyquire') +const { assert } = require('chai') +const path = require('path') +const dc = require('dc-polyfill') +const { storage } = require('../../../../datadog-core') +const { AppsecFsPlugin } = require('../../../src/appsec/rasp/fs-plugin') +const agent = require('../../plugins/agent') + +const opStartCh = dc.channel('apm:fs:operation:start') +const opFinishCh = dc.channel('apm:fs:operation:finish') + +describe('AppsecFsPlugin', () => { + let appsecFsPlugin + + beforeEach(() => { + appsecFsPlugin = new AppsecFsPlugin() + appsecFsPlugin.enable() + }) + + afterEach(() => { appsecFsPlugin.disable() }) + + describe('enable/disable', () => { + let fsPlugin, configure + + beforeEach(() => { + configure = sinon.stub() + class PluginClass { + addSub (channelName, handler) {} + + configure (config) { + configure(config) + } + } + + fsPlugin = proxyquire('../../../src/appsec/rasp/fs-plugin', { + '../../plugins/plugin': PluginClass + }) + }) + + afterEach(() => { sinon.restore() }) + + it('should require valid mod when calling enable', () => { + fsPlugin.enable('iast') + + sinon.assert.calledOnceWithExactly(configure, true) + }) + + it('should create only one instance', () => { + fsPlugin.enable('iast') + fsPlugin.enable('iast') + fsPlugin.enable('rasp') + + sinon.assert.calledOnceWithExactly(configure, true) + }) + + it('should discard unknown mods when enabled', () => { + fsPlugin.enable('unknown') + sinon.assert.notCalled(configure) + + fsPlugin.enable() + sinon.assert.notCalled(configure) + }) + + it('should not disable if there are still modules using the plugin', () => { + fsPlugin.enable('iast') + fsPlugin.enable('rasp') + + fsPlugin.disable('rasp') + + sinon.assert.calledOnce(configure) + }) + + it('should disable only if there are no more modules using the plugin', () => { + fsPlugin.enable('iast') + fsPlugin.enable('rasp') + + fsPlugin.disable('rasp') + fsPlugin.disable('iast') + + sinon.assert.calledTwice(configure) + 
assert.strictEqual(configure.secondCall.args[0], false) + }) + + it('should discard unknown mods when disabling', () => { + fsPlugin.disable('unknown') + sinon.assert.notCalled(configure) + + fsPlugin.disable() + sinon.assert.notCalled(configure) + }) + }) + + describe('_onFsOperationStart', () => { + it('should mark fs root', () => { + const origStore = {} + storage.enterWith(origStore) + + appsecFsPlugin._onFsOperationStart() + + let store = storage.getStore() + assert.property(store, 'fs') + assert.propertyVal(store.fs, 'parentStore', origStore) + assert.propertyVal(store.fs, 'root', true) + + appsecFsPlugin._onFsOperationFinishOrRenderEnd() + + store = storage.getStore() + assert.equal(store, origStore) + assert.notProperty(store, 'fs') + }) + + it('should mark fs children', () => { + const origStore = { orig: true } + storage.enterWith(origStore) + + appsecFsPlugin._onFsOperationStart() + + const rootStore = storage.getStore() + assert.property(rootStore, 'fs') + assert.propertyVal(rootStore.fs, 'parentStore', origStore) + assert.propertyVal(rootStore.fs, 'root', true) + + appsecFsPlugin._onFsOperationStart() + + let store = storage.getStore() + assert.property(store, 'fs') + assert.propertyVal(store.fs, 'parentStore', rootStore) + assert.propertyVal(store.fs, 'root', false) + assert.propertyVal(store, 'orig', true) + + appsecFsPlugin._onFsOperationFinishOrRenderEnd() + + store = storage.getStore() + assert.equal(store, rootStore) + + appsecFsPlugin._onFsOperationFinishOrRenderEnd() + store = storage.getStore() + assert.equal(store, origStore) + }) + }) + + describe('_onResponseRenderStart', () => { + it('should mark fs ops as excluded while response rendering', () => { + appsecFsPlugin.enable() + + const origStore = {} + storage.enterWith(origStore) + + appsecFsPlugin._onResponseRenderStart() + + let store = storage.getStore() + assert.property(store, 'fs') + assert.propertyVal(store.fs, 'parentStore', origStore) + assert.propertyVal(store.fs, 'opExcluded', 
true) + + appsecFsPlugin._onFsOperationFinishOrRenderEnd() + + store = storage.getStore() + assert.equal(store, origStore) + assert.notProperty(store, 'fs') + }) + }) + + describe('integration', () => { + describe('apm:fs:operation', () => { + let fs + + afterEach(() => agent.close({ ritmReset: false })) + + beforeEach(() => agent.load('fs', undefined, { flushInterval: 1 }).then(() => { + fs = require('fs') + })) + + it('should mark root operations', () => { + let count = 0 + const onStart = () => { + const store = storage.getStore() + assert.isNotNull(store.fs) + + count++ + assert.strictEqual(count === 1, store.fs.root) + } + + try { + const origStore = {} + storage.enterWith(origStore) + + opStartCh.subscribe(onStart) + + fs.readFileSync(path.join(__dirname, 'fs-plugin.spec.js')) + + assert.strictEqual(count, 4) + } finally { + opStartCh.unsubscribe(onStart) + } + }) + + it('should mark root even if op is excluded', () => { + let count = 0 + const onStart = () => { + const store = storage.getStore() + assert.isNotNull(store.fs) + + count++ + assert.isUndefined(store.fs.root) + } + + try { + const origStore = { + fs: { opExcluded: true } + } + storage.enterWith(origStore) + + opStartCh.subscribe(onStart) + + fs.readFileSync(path.join(__dirname, 'fs-plugin.spec.js')) + + assert.strictEqual(count, 4) + } finally { + opStartCh.unsubscribe(onStart) + } + }) + + it('should clean up store when finishing op', () => { + let count = 4 + const onFinish = () => { + const store = storage.getStore() + count-- + + if (count === 0) { + assert.isUndefined(store.fs) + } + } + try { + const origStore = {} + storage.enterWith(origStore) + + opFinishCh.subscribe(onFinish) + + fs.readFileSync(path.join(__dirname, 'fs-plugin.spec.js')) + + assert.strictEqual(count, 0) + } finally { + opFinishCh.unsubscribe(onFinish) + } + }) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/rasp/index.spec.js b/packages/dd-trace/test/appsec/rasp/index.spec.js index 0dae9c527e5..be6c602780a 
100644 --- a/packages/dd-trace/test/appsec/rasp/index.spec.js +++ b/packages/dd-trace/test/appsec/rasp/index.spec.js @@ -1,9 +1,12 @@ 'use strict' -const rasp = require('../../../src/appsec/rasp') +const proxyquire = require('proxyquire') const { handleUncaughtExceptionMonitor } = require('../../../src/appsec/rasp') +const { DatadogRaspAbortError } = require('../../../src/appsec/rasp/utils') describe('RASP', () => { + let rasp, subscribe, unsubscribe, block, blocked + beforeEach(() => { const config = { appsec: { @@ -15,6 +18,25 @@ describe('RASP', () => { } } + subscribe = sinon.stub() + unsubscribe = sinon.stub() + + block = sinon.stub() + + rasp = proxyquire('../../../src/appsec/rasp', { + '../blocking': { + block, + isBlocked: sinon.stub().callsFake(() => blocked) + }, + '../channels': { + expressMiddlewareError: { + subscribe, + unsubscribe, + hasSubscribers: true + } + } + }) + rasp.enable(config) }) @@ -31,4 +53,53 @@ describe('RASP', () => { handleUncaughtExceptionMonitor(err) }) }) + + describe('enable/disable', () => { + it('should subscribe to apm:express:middleware:error', () => { + sinon.assert.calledOnce(subscribe) + }) + + it('should unsubscribe to apm:express:middleware:error', () => { + rasp.disable() + + sinon.assert.calledOnce(unsubscribe) + }) + }) + + describe('blockOnDatadogRaspAbortError', () => { + let req, res, blockingAction + + beforeEach(() => { + req = {} + res = {} + blockingAction = {} + }) + + afterEach(() => { + sinon.restore() + }) + + it('should skip non DatadogRaspAbortError', () => { + rasp.blockOnDatadogRaspAbortError({ error: new Error() }) + + sinon.assert.notCalled(block) + }) + + it('should block DatadogRaspAbortError first time', () => { + rasp.blockOnDatadogRaspAbortError({ error: new DatadogRaspAbortError(req, res, blockingAction) }) + + sinon.assert.calledOnce(block) + }) + + it('should skip calling block if blocked before', () => { + rasp.blockOnDatadogRaspAbortError({ error: new DatadogRaspAbortError(req, res, 
blockingAction) }) + + blocked = true + + rasp.blockOnDatadogRaspAbortError({ error: new DatadogRaspAbortError(req, res, blockingAction) }) + rasp.blockOnDatadogRaspAbortError({ error: new DatadogRaspAbortError(req, res, blockingAction) }) + + sinon.assert.calledOnce(block) + }) + }) }) diff --git a/packages/dd-trace/test/appsec/rasp/lfi.express.plugin.spec.js b/packages/dd-trace/test/appsec/rasp/lfi.express.plugin.spec.js new file mode 100644 index 00000000000..b5b825cc628 --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/lfi.express.plugin.spec.js @@ -0,0 +1,469 @@ +'use strict' + +const Axios = require('axios') +const os = require('os') +const fs = require('fs') +const agent = require('../../plugins/agent') +const appsec = require('../../../src/appsec') +const Config = require('../../../src/config') +const path = require('path') +const { assert } = require('chai') +const { checkRaspExecutedAndNotThreat, checkRaspExecutedAndHasThreat } = require('./utils') + +describe('RASP - lfi', () => { + let axios + + async function testBlockingRequest (url = '/?file=/test.file', config = undefined, ruleEvalCount = 1) { + try { + await axios.get(url, config) + } catch (e) { + if (!e.response) { + throw e + } + + assert.strictEqual(e.response.status, 418) // a teapot + + return checkRaspExecutedAndHasThreat(agent, 'rasp-lfi-rule-id-1', ruleEvalCount) + } + + assert.fail('Request should be blocked') + } + + withVersions('express', 'express', expressVersion => { + let app, server + + before(() => { + return agent.load(['http', 'express'], { client: false }) + }) + + before((done) => { + const express = require(`../../../../../versions/express@${expressVersion}`).get() + const expressApp = express() + + expressApp.get('/', (req, res) => { + app(req, res) + }) + + appsec.enable(new Config({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'resources', 'lfi_rasp_rules.json'), + rasp: { enabled: true } + } + })) + + server = expressApp.listen(0, () => { + const port = 
server.address().port + axios = Axios.create({ + baseURL: `http://localhost:${port}` + }) + done() + }) + }) + + after(() => { + appsec.disable() + server.close() + return agent.close({ ritmReset: false }) + }) + + describe('lfi', () => { + function getApp (fn, args, options) { + return async (req, res) => { + try { + const result = await fn(args) + options.onfinish?.(result) + } catch (e) { + if (e.message === 'DatadogRaspAbortError') { + res.writeHead(418) + } + } + res.end('end') + } + } + + function getAppSync (fn, args, options) { + return (req, res) => { + try { + const result = fn(args) + options.onfinish?.(result) + } catch (e) { + if (e.message === 'DatadogRaspAbortError') { + res.writeHead(418) + } + } + res.end('end') + } + } + + function runFsMethodTest (description, options, fn, ...args) { + const { vulnerableIndex = 0, ruleEvalCount } = options + + describe(description, () => { + const getAppFn = options.getAppFn ?? getApp + + it('should block param from the request', async () => { + app = getAppFn(fn, args, options) + + const file = args[vulnerableIndex] + return testBlockingRequest(`/?file=${file}`, undefined, ruleEvalCount) + .then(span => { + assert(span.meta['_dd.appsec.json'].includes(file)) + }) + }) + + it('should not block if param not found in the request', async () => { + app = getAppFn(fn, args, options) + + await axios.get('/?file=/test.file') + + return checkRaspExecutedAndNotThreat(agent, false) + }) + }) + } + + function runFsMethodTestThreeWay (methodName, options = {}, ...args) { + let desc = `test ${methodName} ${options.desc ?? 
''}` + const { vulnerableIndex = 0 } = options + if (vulnerableIndex !== 0) { + desc += ` with vulnerable index ${vulnerableIndex}` + } + describe(desc, () => { + runFsMethodTest(`test fs.${methodName}Sync method`, { ...options, getAppFn: getAppSync }, (args) => { + return require('fs')[`${methodName}Sync`](...args) + }, ...args) + + runFsMethodTest(`test fs.${methodName} method`, options, (args) => { + return new Promise((resolve, reject) => { + require('fs')[methodName](...args, (err, res) => { + if (err) reject(err) + else resolve(res) + }) + }) + }, ...args) + + runFsMethodTest(`test fs.promises.${methodName} method`, options, async (args) => { + return require('fs').promises[methodName](...args) + }, ...args) + }) + } + + function unlink (...args) { + args.forEach(arg => { + try { + fs.unlinkSync(arg) + } catch (e) { + + } + }) + } + + describe('test access', () => { + runFsMethodTestThreeWay('access', undefined, __filename) + runFsMethodTestThreeWay('access', { desc: 'Buffer' }, Buffer.from(__filename)) + + // not supported by waf yet + // runFsMethodTestThreeWay('access', { desc: 'URL' }, new URL(`file://${__filename}`)) + }) + + describe('test appendFile', () => { + const filename = path.join(os.tmpdir(), 'test-appendfile') + + beforeEach(() => { + fs.writeFileSync(filename, '') + }) + + afterEach(() => { + fs.unlinkSync(filename) + }) + + runFsMethodTestThreeWay('appendFile', undefined, filename, 'test-content') + }) + + describe('test chmod', () => { + const filename = path.join(os.tmpdir(), 'test-chmod') + + beforeEach(() => { + fs.writeFileSync(filename, '') + }) + + afterEach(() => { + fs.unlinkSync(filename) + }) + runFsMethodTestThreeWay('chmod', undefined, filename, '666') + }) + + describe('test copyFile', () => { + const src = path.join(os.tmpdir(), 'test-copyFile-src') + const dest = path.join(os.tmpdir(), 'test-copyFile-dst') + + beforeEach(() => { + fs.writeFileSync(src, '') + }) + + afterEach(() => unlink(src, dest)) + + 
runFsMethodTestThreeWay('copyFile', { vulnerableIndex: 0, ruleEvalCount: 2 }, src, dest) + runFsMethodTestThreeWay('copyFile', { vulnerableIndex: 1, ruleEvalCount: 2 }, src, dest) + }) + + describe('test link', () => { + const src = path.join(os.tmpdir(), 'test-link-src') + const dest = path.join(os.tmpdir(), 'test-link-dst') + + beforeEach(() => { + fs.writeFileSync(src, '') + }) + + afterEach(() => unlink(src, dest)) + + runFsMethodTestThreeWay('copyFile', { vulnerableIndex: 0, ruleEvalCount: 2 }, src, dest) + runFsMethodTestThreeWay('copyFile', { vulnerableIndex: 1, ruleEvalCount: 2 }, src, dest) + }) + + describe('test lstat', () => { + runFsMethodTestThreeWay('lstat', undefined, __filename) + }) + + describe('test mkdir', () => { + const dirname = path.join(os.tmpdir(), 'test-mkdir') + + afterEach(() => { + try { + fs.rmdirSync(dirname) + } catch (e) { + // some ops are blocked + } + }) + runFsMethodTestThreeWay('mkdir', undefined, dirname) + }) + + describe('test mkdtemp', () => { + const dirname = path.join(os.tmpdir(), 'test-mkdtemp') + + runFsMethodTestThreeWay('mkdtemp', { + onfinish: (todelete) => { + try { + fs.rmdirSync(todelete) + } catch (e) { + // some ops are blocked + } + } + }, dirname) + }) + + describe('test open', () => { + runFsMethodTestThreeWay('open', { + onfinish: (fd) => { + if (fd && fd.close) { + fd.close() + } else { + fs.close(fd, () => {}) + } + } + }, __filename, 'r') + }) + + describe('test opendir', () => { + const dirname = path.join(os.tmpdir(), 'test-opendir') + + beforeEach(() => { + fs.mkdirSync(dirname) + }) + + afterEach(() => { + fs.rmdirSync(dirname) + }) + runFsMethodTestThreeWay('opendir', { + onfinish: (dir) => { + dir.close() + } + }, dirname) + }) + + describe('test readdir', () => { + const dirname = path.join(os.tmpdir(), 'test-opendir') + + beforeEach(() => { + fs.mkdirSync(dirname) + }) + + afterEach(() => { + fs.rmdirSync(dirname) + }) + runFsMethodTestThreeWay('readdir', undefined, dirname) + }) + + 
describe('test readFile', () => { + runFsMethodTestThreeWay('readFile', undefined, __filename) + }) + + describe('test readlink', () => { + const src = path.join(os.tmpdir(), 'test-readlink-src') + const dest = path.join(os.tmpdir(), 'test-readlink-dst') + + beforeEach(() => { + fs.writeFileSync(src, '') + fs.linkSync(src, dest) + }) + + afterEach(() => unlink(src, dest)) + + runFsMethodTestThreeWay('readlink', undefined, dest) + }) + + describe('test realpath', () => { + runFsMethodTestThreeWay('realpath', undefined, __filename) + + runFsMethodTest('test fs.realpath.native method', {}, (args) => { + return new Promise((resolve, reject) => { + require('fs').realpath.native(...args, (err, result) => { + if (err) reject(err) + else resolve(result) + }) + }) + }, __filename) + }) + + describe('test rename', () => { + const src = path.join(os.tmpdir(), 'test-rename-src') + const dest = path.join(os.tmpdir(), 'test-rename-dst') + + beforeEach(() => { + fs.writeFileSync(src, '') + }) + + afterEach(() => unlink(dest)) + + runFsMethodTestThreeWay('rename', { vulnerableIndex: 0, ruleEvalCount: 2 }, src, dest) + runFsMethodTestThreeWay('rename', { vulnerableIndex: 1, ruleEvalCount: 2 }, src, dest) + }) + + describe('test rmdir', () => { + const dirname = path.join(os.tmpdir(), 'test-rmdir') + + beforeEach(() => { + fs.mkdirSync(dirname) + }) + + afterEach(() => { + try { fs.rmdirSync(dirname) } catch (e) {} + }) + + runFsMethodTestThreeWay('rmdir', undefined, dirname) + }) + + describe('test stat', () => { + runFsMethodTestThreeWay('stat', undefined, __filename) + }) + + describe('test symlink', () => { + const src = path.join(os.tmpdir(), 'test-symlink-src') + const dest = path.join(os.tmpdir(), 'test-symlink-dst') + + beforeEach(() => { + fs.writeFileSync(src, '') + }) + + afterEach(() => { + unlink(src, dest) + }) + + runFsMethodTestThreeWay('symlink', { vulnerableIndex: 0, ruleEvalCount: 2 }, src, dest) + runFsMethodTestThreeWay('symlink', { vulnerableIndex: 1, 
ruleEvalCount: 2 }, src, dest) + }) + + describe('test truncate', () => { + const src = path.join(os.tmpdir(), 'test-truncate-src') + + beforeEach(() => { + fs.writeFileSync(src, 'aaaaaa') + }) + + afterEach(() => unlink(src)) + + runFsMethodTestThreeWay('truncate', undefined, src) + }) + + describe('test unlink', () => { + const src = path.join(os.tmpdir(), 'test-unlink-src') + + beforeEach(() => { + fs.writeFileSync(src, '') + }) + runFsMethodTestThreeWay('unlink', undefined, src) + }) + + describe('test writeFile', () => { + const src = path.join(os.tmpdir(), 'test-writeFile-src') + + afterEach(() => unlink(src)) + + runFsMethodTestThreeWay('writeFile', undefined, src, 'content') + }) + }) + }) + + describe('without express', () => { + let app, server + + before(() => { + return agent.load(['http'], { client: false }) + }) + + before((done) => { + const http = require('http') + server = http.createServer((req, res) => { + if (app) { + app(req, res) + } else { + res.end('end') + } + }) + + appsec.enable(new Config({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'resources', 'lfi_rasp_rules.json'), + rasp: { enabled: true } + } + })) + + server.listen(0, () => { + const port = server.address().port + axios = Axios.create({ + baseURL: `http://localhost:${port}` + }) + + done() + }) + }) + + after(() => { + appsec.disable() + server.close() + return agent.close({ ritmReset: false }) + }) + + it('Should detect threat but not block', async () => { + app = (req, res) => { + try { + require('fs').statSync(req.headers.file) + } catch (e) { + if (e.message === 'DatadogRaspAbortError') { + res.writeHead(500) + } else { + res.writeHead(418) + } + } + res.end('end') + } + + return testBlockingRequest('/', { + headers: { + file: '/test.file' + } + }) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/rasp/lfi.integration.express.plugin.spec.js b/packages/dd-trace/test/appsec/rasp/lfi.integration.express.plugin.spec.js new file mode 100644 index 
00000000000..45dc1cac46f --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/lfi.integration.express.plugin.spec.js @@ -0,0 +1,69 @@ +'use strict' + +const { createSandbox, FakeAgent, spawnProc } = require('../../../../../integration-tests/helpers') +const getPort = require('get-port') +const path = require('path') +const Axios = require('axios') +const { assert } = require('chai') + +describe('RASP - lfi - integration - sync', () => { + let axios, sandbox, cwd, appPort, appFile, agent, proc + + before(async function () { + this.timeout(60000) + sandbox = await createSandbox( + ['express', 'fs'], + false, + [path.join(__dirname, 'resources')]) + + appPort = await getPort() + cwd = sandbox.folder + appFile = path.join(cwd, 'resources', 'lfi-app', 'index.js') + + axios = Axios.create({ + baseURL: `http://localhost:${appPort}` + }) + }) + + after(async function () { + this.timeout(60000) + await sandbox.remove() + }) + + beforeEach(async () => { + agent = await new FakeAgent().start() + proc = await spawnProc(appFile, { + cwd, + env: { + DD_TRACE_AGENT_PORT: agent.port, + APP_PORT: appPort, + DD_APPSEC_ENABLED: true, + DD_APPSEC_RASP_ENABLED: true, + DD_APPSEC_RULES: path.join(cwd, 'resources', 'lfi_rasp_rules.json') + } + }) + }) + + afterEach(async () => { + proc.kill() + await agent.stop() + }) + + it('should block a sync endpoint getting the error from apm:express:middleware:error', async () => { + try { + await axios.get('/lfi/sync?file=/etc/passwd') + } catch (e) { + if (!e.response) { + throw e + } + + assert.strictEqual(e.response.status, 403) + return await agent.assertMessageReceived(({ headers, payload }) => { + assert.property(payload[0][0].meta, '_dd.appsec.json') + assert.include(payload[0][0].meta['_dd.appsec.json'], '"rasp-lfi-rule-id-1"') + }) + } + + throw new Error('Request should be blocked') + }) +}) diff --git a/packages/dd-trace/test/appsec/rasp/lfi.spec.js b/packages/dd-trace/test/appsec/rasp/lfi.spec.js new file mode 100644 index 
00000000000..405311ae0d3 --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/lfi.spec.js @@ -0,0 +1,144 @@ +'use strict' + +const proxyquire = require('proxyquire') +const { assert } = require('chai') +const { fsOperationStart, incomingHttpRequestStart } = require('../../../src/appsec/channels') +const { FS_OPERATION_PATH } = require('../../../src/appsec/addresses') +const { RASP_MODULE } = require('../../../src/appsec/rasp/fs-plugin') + +describe('RASP - lfi.js', () => { + let waf, datadogCore, lfi, web, blocking, appsecFsPlugin, config + + beforeEach(() => { + datadogCore = { + storage: { + getStore: sinon.stub() + } + } + + waf = { + run: sinon.stub() + } + + web = { + root: sinon.stub() + } + + blocking = { + block: sinon.stub() + } + + appsecFsPlugin = { + enable: sinon.stub(), + disable: sinon.stub() + } + + lfi = proxyquire('../../../src/appsec/rasp/lfi', { + '../../../../datadog-core': datadogCore, + '../waf': waf, + '../../plugins/util/web': web, + '../blocking': blocking, + './fs-plugin': appsecFsPlugin + }) + + config = { + appsec: { + stackTrace: { + enabled: true, + maxStackTraces: 2, + maxDepth: 42 + } + } + } + }) + + afterEach(() => { + sinon.restore() + lfi.disable() + }) + + describe('enable', () => { + it('should subscribe to first http req', () => { + const subscribe = sinon.stub(incomingHttpRequestStart, 'subscribe') + + lfi.enable(config) + + sinon.assert.calledOnce(subscribe) + }) + + it('should enable AppsecFsPlugin after the first request', () => { + const unsubscribe = sinon.stub(incomingHttpRequestStart, 'unsubscribe') + const fsOpSubscribe = sinon.stub(fsOperationStart, 'subscribe') + + lfi.enable(config) + + incomingHttpRequestStart.publish({}) + + sinon.assert.calledOnceWithExactly(appsecFsPlugin.enable, RASP_MODULE) + + assert(fsOpSubscribe.calledAfter(appsecFsPlugin.enable)) + + process.nextTick(() => { + sinon.assert.calledOnce(unsubscribe) + }) + }) + }) + + describe('disable', () => { + it('should disable AppsecFsPlugin', () => 
{ + lfi.enable(config) + + lfi.disable() + sinon.assert.calledOnceWithExactly(appsecFsPlugin.disable, RASP_MODULE) + }) + }) + + describe('analyzeLfi', () => { + const path = '/etc/passwd' + const ctx = { path } + const req = {} + + beforeEach(() => { + lfi.enable(config) + + incomingHttpRequestStart.publish({}) + }) + + it('should analyze lfi for root fs operations', () => { + const fs = { root: true } + datadogCore.storage.getStore.returns({ req, fs }) + + fsOperationStart.publish(ctx) + + const persistent = { [FS_OPERATION_PATH]: path } + sinon.assert.calledOnceWithExactly(waf.run, { persistent }, req, 'lfi') + }) + + it('should NOT analyze lfi for child fs operations', () => { + const fs = {} + datadogCore.storage.getStore.returns({ req, fs }) + + fsOperationStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + + it('should NOT analyze lfi for undefined fs (AppsecFsPlugin disabled)', () => { + const fs = undefined + datadogCore.storage.getStore.returns({ req, fs }) + + fsOperationStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + + it('should NOT analyze lfi for excluded operations', () => { + const fs = { opExcluded: true, root: true } + datadogCore.storage.getStore.returns({ req, fs }) + + fsOperationStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/rasp/resources/lfi-app/index.js b/packages/dd-trace/test/appsec/rasp/resources/lfi-app/index.js new file mode 100644 index 00000000000..1beb4d977cb --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/resources/lfi-app/index.js @@ -0,0 +1,28 @@ +'use strict' + +const tracer = require('dd-trace') +tracer.init({ + flushInterval: 0 +}) + +const express = require('express') +const { readFileSync } = require('fs') + +const app = express() +const port = process.env.APP_PORT || 3000 + +app.get('/lfi/sync', (req, res) => { + let result + try { + result = readFileSync(req.query.file) + } catch (e) { + if (e.message === 
'DatadogRaspAbortError') { + throw e + } + } + res.send(result) +}) + +app.listen(port, () => { + process.send({ port }) +}) diff --git a/packages/dd-trace/test/appsec/rasp/resources/lfi_rasp_rules.json b/packages/dd-trace/test/appsec/rasp/resources/lfi_rasp_rules.json new file mode 100644 index 00000000000..814f6c72236 --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/resources/lfi_rasp_rules.json @@ -0,0 +1,61 @@ +{ + "version": "2.2", + "metadata": { + "rules_version": "1.99.0" + }, + "rules": [ + { + "id": "rasp-lfi-rule-id-1", + "name": "Local file inclusion exploit", + "enabled": true, + "tags": { + "type": "lfi", + "category": "vulnerability_trigger", + "cwe": "22", + "capec": "1000/255/153/126", + "confidence": "0", + "module": "rasp" + }, + "conditions": [ + { + "parameters": { + "resource": [ + { + "address": "server.io.fs.file" + } + ], + "params": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ] + }, + "operator": "lfi_detector" + } + ], + "transformers": [], + "on_match": [ + "block", + "stack_trace" + ] + } + ] +} diff --git a/packages/dd-trace/test/appsec/rasp/sql_injection.mysql2.plugin.spec.js b/packages/dd-trace/test/appsec/rasp/sql_injection.mysql2.plugin.spec.js new file mode 100644 index 00000000000..2fe74e9f262 --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/sql_injection.mysql2.plugin.spec.js @@ -0,0 +1,229 @@ +'use strict' + +const agent = require('../../plugins/agent') +const appsec = require('../../../src/appsec') +const Config = require('../../../src/config') +const path = require('path') +const Axios = require('axios') +const { assert } = require('chai') +const { checkRaspExecutedAndNotThreat, checkRaspExecutedAndHasThreat } 
= require('./utils') + +describe('RASP - sql_injection', () => { + withVersions('mysql2', 'express', expressVersion => { + withVersions('mysql2', 'mysql2', mysql2Version => { + describe('sql injection with mysql2', () => { + const connectionData = { + host: '127.0.0.1', + user: 'root', + database: 'db' + } + let server, axios, app, mysql2 + + before(() => { + return agent.load(['express', 'http', 'mysql2'], { client: false }) + }) + + before(done => { + const express = require(`../../../../../versions/express@${expressVersion}`).get() + mysql2 = require(`../../../../../versions/mysql2@${mysql2Version}`).get() + const expressApp = express() + + expressApp.get('/', (req, res) => { + app(req, res) + }) + + appsec.enable(new Config({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'resources', 'rasp_rules.json'), + rasp: { enabled: true } + } + })) + + server = expressApp.listen(0, () => { + const port = server.address().port + axios = Axios.create({ + baseURL: `http://localhost:${port}` + }) + done() + }) + }) + + after(() => { + appsec.disable() + server.close() + return agent.close({ ritmReset: false }) + }) + + describe('Test using Connection', () => { + let connection + + beforeEach(() => { + connection = mysql2.createConnection(connectionData) + connection.connect() + }) + + afterEach((done) => { + connection.end(() => done()) + }) + + describe('query', () => { + it('Should not detect threat', async () => { + app = (req, res) => { + connection.query('SELECT ' + req.query.param, (err) => { + if (err) { + res.statusCode = 500 + } + + res.end() + }) + } + + axios.get('/?param=1') + + await checkRaspExecutedAndNotThreat(agent) + }) + + it('Should block query with callback', async () => { + app = (req, res) => { + connection.query(`SELECT * FROM users WHERE id='${req.query.param}'`, (err) => { + if (err?.name === 'DatadogRaspAbortError') { + res.statusCode = 500 + } + res.end() + }) + } + + try { + await axios.get('/?param=\' OR 1 = 1 --') + } catch (e) { + 
return await checkRaspExecutedAndHasThreat(agent, 'rasp-sqli-rule-id-2') + } + + assert.fail('Request should be blocked') + }) + }) + + describe('execute', () => { + it('Should not detect threat', async () => { + app = (req, res) => { + connection.execute('SELECT ' + req.query.param, (err) => { + if (err) { + res.statusCode = 500 + } + + res.end() + }) + } + + axios.get('/?param=1') + + await checkRaspExecutedAndNotThreat(agent) + }) + + it('Should block query with callback', async () => { + app = (req, res) => { + connection.execute(`SELECT * FROM users WHERE id='${req.query.param}'`, (err) => { + if (err?.name === 'DatadogRaspAbortError') { + res.statusCode = 500 + } + res.end() + }) + } + + try { + await axios.get('/?param=\' OR 1 = 1 --') + } catch (e) { + return await checkRaspExecutedAndHasThreat(agent, 'rasp-sqli-rule-id-2') + } + + assert.fail('Request should be blocked') + }) + }) + }) + + describe('Test using Pool', () => { + let pool + + beforeEach(() => { + pool = mysql2.createPool(connectionData) + }) + + describe('query', () => { + it('Should not detect threat', async () => { + app = (req, res) => { + pool.query('SELECT ' + req.query.param, (err) => { + if (err) { + res.statusCode = 500 + } + + res.end() + }) + } + + axios.get('/?param=1') + + await checkRaspExecutedAndNotThreat(agent) + }) + + it('Should block query with callback', async () => { + app = (req, res) => { + pool.query(`SELECT * FROM users WHERE id='${req.query.param}'`, (err) => { + if (err?.name === 'DatadogRaspAbortError') { + res.statusCode = 500 + } + res.end() + }) + } + + try { + await axios.get('/?param=\' OR 1 = 1 --') + } catch (e) { + return await checkRaspExecutedAndHasThreat(agent, 'rasp-sqli-rule-id-2') + } + + assert.fail('Request should be blocked') + }) + }) + + describe('execute', () => { + it('Should not detect threat', async () => { + app = (req, res) => { + pool.execute('SELECT ' + req.query.param, (err) => { + if (err) { + res.statusCode = 500 + } + + res.end() + }) 
+ } + + axios.get('/?param=1') + + await checkRaspExecutedAndNotThreat(agent) + }) + + it('Should block query with callback', async () => { + app = (req, res) => { + pool.execute(`SELECT * FROM users WHERE id='${req.query.param}'`, (err) => { + if (err?.name === 'DatadogRaspAbortError') { + res.statusCode = 500 + } + res.end() + }) + } + + try { + await axios.get('/?param=\' OR 1 = 1 --') + } catch (e) { + return await checkRaspExecutedAndHasThreat(agent, 'rasp-sqli-rule-id-2') + } + + assert.fail('Request should be blocked') + }) + }) + }) + }) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/rasp/sql_injection.spec.js b/packages/dd-trace/test/appsec/rasp/sql_injection.spec.js index 5467f7ef150..d713521e986 100644 --- a/packages/dd-trace/test/appsec/rasp/sql_injection.spec.js +++ b/packages/dd-trace/test/appsec/rasp/sql_injection.spec.js @@ -1,6 +1,6 @@ 'use strict' -const { pgQueryStart } = require('../../../src/appsec/channels') +const { pgQueryStart, mysql2OuterQueryStart } = require('../../../src/appsec/channels') const addresses = require('../../../src/appsec/addresses') const proxyquire = require('proxyquire') @@ -113,4 +113,69 @@ describe('RASP - sql_injection', () => { sinon.assert.notCalled(waf.run) }) }) + + describe('analyzeMysql2SqlInjection', () => { + it('should analyze sql injection', () => { + const ctx = { + sql: 'SELECT 1' + } + const req = {} + datadogCore.storage.getStore.returns({ req }) + + mysql2OuterQueryStart.publish(ctx) + + const persistent = { + [addresses.DB_STATEMENT]: 'SELECT 1', + [addresses.DB_SYSTEM]: 'mysql' + } + sinon.assert.calledOnceWithExactly(waf.run, { persistent }, req, 'sql_injection') + }) + + it('should not analyze sql injection if rasp is disabled', () => { + sqli.disable() + + const ctx = { + sql: 'SELECT 1' + } + const req = {} + datadogCore.storage.getStore.returns({ req }) + + mysql2OuterQueryStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + + it('should not analyze sql injection if no store', 
() => { + const ctx = { + sql: 'SELECT 1' + } + datadogCore.storage.getStore.returns(undefined) + + mysql2OuterQueryStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + + it('should not analyze sql injection if no req', () => { + const ctx = { + sql: 'SELECT 1' + } + datadogCore.storage.getStore.returns({}) + + mysql2OuterQueryStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + + it('should not analyze sql injection if no query', () => { + const ctx = { + sql: 'SELECT 1' + } + datadogCore.storage.getStore.returns({}) + + mysql2OuterQueryStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + }) }) diff --git a/packages/dd-trace/test/appsec/rasp/utils.js b/packages/dd-trace/test/appsec/rasp/utils.js index e9353d5d815..0d8a3e076a4 100644 --- a/packages/dd-trace/test/appsec/rasp/utils.js +++ b/packages/dd-trace/test/appsec/rasp/utils.js @@ -13,24 +13,28 @@ function getWebSpan (traces) { throw new Error('web span not found') } -function checkRaspExecutedAndNotThreat (agent) { +function checkRaspExecutedAndNotThreat (agent, checkRuleEval = true) { return agent.use((traces) => { const span = getWebSpan(traces) assert.notProperty(span.meta, '_dd.appsec.json') assert.notProperty(span.meta_struct || {}, '_dd.stack') - assert.equal(span.metrics['_dd.appsec.rasp.rule.eval'], 1) + if (checkRuleEval) { + assert.equal(span.metrics['_dd.appsec.rasp.rule.eval'], 1) + } }) } -function checkRaspExecutedAndHasThreat (agent, ruleId) { +function checkRaspExecutedAndHasThreat (agent, ruleId, ruleEvalCount = 1) { return agent.use((traces) => { const span = getWebSpan(traces) assert.property(span.meta, '_dd.appsec.json') assert(span.meta['_dd.appsec.json'].includes(ruleId)) - assert.equal(span.metrics['_dd.appsec.rasp.rule.eval'], 1) + assert.equal(span.metrics['_dd.appsec.rasp.rule.eval'], ruleEvalCount) assert(span.metrics['_dd.appsec.rasp.duration'] > 0) assert(span.metrics['_dd.appsec.rasp.duration_ext'] > 0) assert.property(span.meta_struct, '_dd.stack') + + 
return span }) } diff --git a/packages/dd-trace/test/appsec/remote_config/index.spec.js b/packages/dd-trace/test/appsec/remote_config/index.spec.js index fd923c9a92b..dbd710d6a4e 100644 --- a/packages/dd-trace/test/appsec/remote_config/index.spec.js +++ b/packages/dd-trace/test/appsec/remote_config/index.spec.js @@ -286,10 +286,18 @@ describe('Remote Config index', () => { .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SSRF, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SQLI, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_LFI, true) expect(rc.setProductHandler).to.have.been.calledWith('ASM_DATA') expect(rc.setProductHandler).to.have.been.calledWith('ASM_DD') @@ -320,10 +328,18 @@ describe('Remote Config index', () => { .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, true) + expect(rc.updateCapabilities) + 
.to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SSRF, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SQLI, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_LFI, true) expect(rc.setProductHandler).to.have.been.calledWith('ASM_DATA') expect(rc.setProductHandler).to.have.been.calledWith('ASM_DD') @@ -356,10 +372,18 @@ describe('Remote Config index', () => { .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SSRF, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SQLI, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_LFI, true) }) it('should not activate rasp capabilities if rasp is disabled', () => { @@ -387,10 +411,18 @@ describe('Remote Config index', () => { .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, true) + expect(rc.updateCapabilities) + 
.to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, true) expect(rc.updateCapabilities) .to.not.have.been.calledWith(RemoteConfigCapabilities.ASM_RASP_SSRF) expect(rc.updateCapabilities) .to.not.have.been.calledWith(RemoteConfigCapabilities.ASM_RASP_SQLI) + expect(rc.updateCapabilities) + .to.not.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_LFI) }) }) @@ -418,10 +450,18 @@ describe('Remote Config index', () => { .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, false) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, false) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, false) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, false) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, false) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SSRF, false) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SQLI, false) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_LFI, false) expect(rc.removeProductHandler).to.have.been.calledWith('ASM_DATA') expect(rc.removeProductHandler).to.have.been.calledWith('ASM_DD') diff --git a/packages/dd-trace/test/appsec/reporter.spec.js b/packages/dd-trace/test/appsec/reporter.spec.js index c683bdc59fe..0860b2c75ac 100644 --- a/packages/dd-trace/test/appsec/reporter.spec.js +++ b/packages/dd-trace/test/appsec/reporter.spec.js @@ -253,7 +253,7 @@ describe('reporter', () => { expect(Reporter.reportAttack('', params)).to.not.be.false 
expect(addTags.getCall(5).firstArg).to.have.property('manual.keep').that.equals('true') done() - }, 1e3) + }, 1020) }) it('should not overwrite origin tag', () => { @@ -314,20 +314,24 @@ describe('reporter', () => { }) }) - describe('reportSchemas', () => { + describe('reportDerivatives', () => { it('should not call addTags if parameter is undefined', () => { - Reporter.reportSchemas(undefined) + Reporter.reportDerivatives(undefined) expect(span.addTags).not.to.be.called }) it('should call addTags with an empty array', () => { - Reporter.reportSchemas([]) + Reporter.reportDerivatives([]) expect(span.addTags).to.be.calledOnceWithExactly({}) }) it('should call addTags', () => { const schemaValue = [{ key: [8] }] const derivatives = { + '_dd.appsec.fp.http.endpoint': 'endpoint_fingerprint', + '_dd.appsec.fp.http.header': 'header_fingerprint', + '_dd.appsec.fp.http.network': 'network_fingerprint', + '_dd.appsec.fp.session': 'session_fingerprint', '_dd.appsec.s.req.headers': schemaValue, '_dd.appsec.s.req.query': schemaValue, '_dd.appsec.s.req.params': schemaValue, @@ -336,10 +340,14 @@ describe('reporter', () => { 'custom.processor.output': schemaValue } - Reporter.reportSchemas(derivatives) + Reporter.reportDerivatives(derivatives) const schemaEncoded = zlib.gzipSync(JSON.stringify(schemaValue)).toString('base64') expect(span.addTags).to.be.calledOnceWithExactly({ + '_dd.appsec.fp.http.endpoint': 'endpoint_fingerprint', + '_dd.appsec.fp.http.header': 'header_fingerprint', + '_dd.appsec.fp.http.network': 'network_fingerprint', + '_dd.appsec.fp.session': 'session_fingerprint', '_dd.appsec.s.req.headers': schemaEncoded, '_dd.appsec.s.req.query': schemaEncoded, '_dd.appsec.s.req.params': schemaEncoded, diff --git a/packages/dd-trace/test/appsec/response_blocking.spec.js b/packages/dd-trace/test/appsec/response_blocking.spec.js index 2868a42b05b..03541858955 100644 --- a/packages/dd-trace/test/appsec/response_blocking.spec.js +++ 
b/packages/dd-trace/test/appsec/response_blocking.spec.js @@ -52,7 +52,10 @@ describe('HTTP Response Blocking', () => { appsec.enable(new Config({ appsec: { enabled: true, - rules: path.join(__dirname, 'response_blocking_rules.json') + rules: path.join(__dirname, 'response_blocking_rules.json'), + rasp: { + enabled: false // disable rasp to not trigger waf.run executions due to lfi + } } })) }) diff --git a/packages/dd-trace/test/appsec/sdk/track_event.spec.js b/packages/dd-trace/test/appsec/sdk/track_event.spec.js index acc5db1e905..e3739488b81 100644 --- a/packages/dd-trace/test/appsec/sdk/track_event.spec.js +++ b/packages/dd-trace/test/appsec/sdk/track_event.spec.js @@ -4,6 +4,7 @@ const proxyquire = require('proxyquire') const agent = require('../../plugins/agent') const axios = require('axios') const tracer = require('../../../../../index') +const { LOGIN_SUCCESS, LOGIN_FAILURE } = require('../../../src/appsec/addresses') describe('track_event', () => { describe('Internal API', () => { @@ -14,6 +15,7 @@ describe('track_event', () => { let setUserTags let trackUserLoginSuccessEvent, trackUserLoginFailureEvent, trackCustomEvent, trackEvent let sample + let waf beforeEach(() => { log = { @@ -30,6 +32,10 @@ describe('track_event', () => { sample = sinon.stub() + waf = { + run: sinon.spy() + } + const trackEvents = proxyquire('../../../src/appsec/sdk/track_event', { '../../log': log, './utils': { @@ -40,7 +46,8 @@ describe('track_event', () => { }, '../standalone': { sample - } + }, + '../waf': waf }) trackUserLoginSuccessEvent = trackEvents.trackUserLoginSuccessEvent @@ -49,6 +56,10 @@ describe('track_event', () => { trackEvent = trackEvents.trackEvent }) + afterEach(() => { + sinon.restore() + }) + describe('trackUserLoginSuccessEvent', () => { it('should log warning when passed invalid user', () => { trackUserLoginSuccessEvent(tracer, null, { key: 'value' }) @@ -106,6 +117,16 @@ describe('track_event', () => { '_dd.appsec.events.users.login.success.sdk': 'true' 
}) }) + + it('should call waf run with login success address', () => { + const user = { id: 'user_id' } + + trackUserLoginSuccessEvent(tracer, user) + sinon.assert.calledOnceWithExactly( + waf.run, + { persistent: { [LOGIN_SUCCESS]: null } } + ) + }) }) describe('trackUserLoginFailureEvent', () => { @@ -182,6 +203,14 @@ describe('track_event', () => { 'appsec.events.users.login.failure.usr.exists': 'true' }) }) + + it('should call waf run with login failure address', () => { + trackUserLoginFailureEvent(tracer, 'user_id') + sinon.assert.calledOnceWithExactly( + waf.run, + { persistent: { [LOGIN_FAILURE]: null } } + ) + }) }) describe('trackCustomEvent', () => { diff --git a/packages/dd-trace/test/appsec/waf/index.spec.js b/packages/dd-trace/test/appsec/waf/index.spec.js index 816b3fe89c6..b0c16647872 100644 --- a/packages/dd-trace/test/appsec/waf/index.spec.js +++ b/packages/dd-trace/test/appsec/waf/index.spec.js @@ -48,7 +48,7 @@ describe('WAF Manager', () => { sinon.stub(Reporter, 'reportMetrics') sinon.stub(Reporter, 'reportAttack') sinon.stub(Reporter, 'reportWafUpdate') - sinon.stub(Reporter, 'reportSchemas') + sinon.stub(Reporter, 'reportDerivatives') webContext = {} sinon.stub(web, 'getContext').returns(webContext) @@ -404,7 +404,29 @@ describe('WAF Manager', () => { ddwafContext.run.returns(result) wafContextWrapper.run(params) - expect(Reporter.reportSchemas).to.be.calledOnceWithExactly(result.derivatives) + expect(Reporter.reportDerivatives).to.be.calledOnceWithExactly(result.derivatives) + }) + + it('should report fingerprints when ddwafContext returns fingerprints in results derivatives', () => { + const result = { + totalRuntime: 1, + durationExt: 1, + derivatives: { + '_dd.appsec.s.req.body': [8], + '_dd.appsec.fp.http.endpoint': 'http-post-abcdefgh-12345678-abcdefgh', + '_dd.appsec.fp.http.network': 'net-1-0100000000', + '_dd.appsec.fp.http.headers': 'hdr-0110000110-abcdefgh-5-12345678' + } + } + + ddwafContext.run.returns(result) + + 
wafContextWrapper.run({ + persistent: { + 'server.request.body': 'foo' + } + }) + sinon.assert.calledOnceWithExactly(Reporter.reportDerivatives, result.derivatives) }) }) }) diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index ca4d8b142d3..6558485b529 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -219,6 +219,7 @@ describe('Config', () => { expect(config).to.have.property('reportHostname', false) expect(config).to.have.property('scope', undefined) expect(config).to.have.property('logLevel', 'debug') + expect(config).to.have.nested.property('codeOriginForSpans.enabled', false) expect(config).to.have.property('dynamicInstrumentationEnabled', false) expect(config).to.have.property('traceId128BitGenerationEnabled', true) expect(config).to.have.property('traceId128BitLoggingEnabled', false) @@ -291,6 +292,7 @@ describe('Config', () => { { name: 'appsec.wafTimeout', value: 5e3, origin: 'default' }, { name: 'clientIpEnabled', value: false, origin: 'default' }, { name: 'clientIpHeader', value: null, origin: 'default' }, + { name: 'codeOriginForSpans.enabled', value: false, origin: 'default' }, { name: 'dbmPropagationMode', value: 'disabled', origin: 'default' }, { name: 'dogstatsd.hostname', value: '127.0.0.1', origin: 'calculated' }, { name: 'dogstatsd.port', value: '8125', origin: 'default' }, @@ -305,6 +307,7 @@ describe('Config', () => { { name: 'gitMetadataEnabled', value: true, origin: 'default' }, { name: 'headerTags', value: [], origin: 'default' }, { name: 'hostname', value: '127.0.0.1', origin: 'default' }, + { name: 'iast.cookieFilterPattern', value: '.{32,}', origin: 'default' }, { name: 'iast.deduplicationEnabled', value: true, origin: 'default' }, { name: 'iast.enabled', value: false, origin: 'default' }, { name: 'iast.maxConcurrentRequests', value: 2, origin: 'default' }, @@ -410,6 +413,7 @@ describe('Config', () => { }) it('should initialize from environment 
variables', () => { + process.env.DD_CODE_ORIGIN_FOR_SPANS_ENABLED = 'true' process.env.DD_TRACE_AGENT_HOSTNAME = 'agent' process.env.DD_TRACE_AGENT_PORT = '6218' process.env.DD_DOGSTATSD_HOSTNAME = 'dsd-agent' @@ -475,6 +479,7 @@ describe('Config', () => { process.env.DD_IAST_REQUEST_SAMPLING = '40' process.env.DD_IAST_MAX_CONCURRENT_REQUESTS = '3' process.env.DD_IAST_MAX_CONTEXT_OPERATIONS = '4' + process.env.DD_IAST_COOKIE_FILTER_PATTERN = '.*' process.env.DD_IAST_DEDUPLICATION_ENABLED = false process.env.DD_IAST_REDACTION_ENABLED = false process.env.DD_IAST_REDACTION_NAME_PATTERN = 'REDACTION_NAME_PATTERN' @@ -509,6 +514,7 @@ describe('Config', () => { expect(config).to.have.property('clientIpHeader', 'x-true-client-ip') expect(config).to.have.property('runtimeMetrics', true) expect(config).to.have.property('reportHostname', true) + expect(config).to.have.nested.property('codeOriginForSpans.enabled', true) expect(config).to.have.property('dynamicInstrumentationEnabled', true) expect(config).to.have.property('env', 'test') expect(config).to.have.property('sampleRate', 0.5) @@ -574,6 +580,7 @@ describe('Config', () => { expect(config).to.have.nested.property('iast.requestSampling', 40) expect(config).to.have.nested.property('iast.maxConcurrentRequests', 3) expect(config).to.have.nested.property('iast.maxContextOperations', 4) + expect(config).to.have.nested.property('iast.cookieFilterPattern', '.*') expect(config).to.have.nested.property('iast.deduplicationEnabled', false) expect(config).to.have.nested.property('iast.redactionEnabled', false) expect(config).to.have.nested.property('iast.redactionNamePattern', 'REDACTION_NAME_PATTERN') @@ -604,6 +611,7 @@ describe('Config', () => { { name: 'appsec.wafTimeout', value: '42', origin: 'env_var' }, { name: 'clientIpEnabled', value: true, origin: 'env_var' }, { name: 'clientIpHeader', value: 'x-true-client-ip', origin: 'env_var' }, + { name: 'codeOriginForSpans.enabled', value: true, origin: 'env_var' }, { name: 
'dogstatsd.hostname', value: 'dsd-agent', origin: 'env_var' }, { name: 'dogstatsd.port', value: '5218', origin: 'env_var' }, { name: 'dynamicInstrumentationEnabled', value: true, origin: 'env_var' }, @@ -612,6 +620,7 @@ describe('Config', () => { { name: 'experimental.exporter', value: 'log', origin: 'env_var' }, { name: 'experimental.runtimeId', value: true, origin: 'env_var' }, { name: 'hostname', value: 'agent', origin: 'env_var' }, + { name: 'iast.cookieFilterPattern', value: '.*', origin: 'env_var' }, { name: 'iast.deduplicationEnabled', value: false, origin: 'env_var' }, { name: 'iast.enabled', value: true, origin: 'env_var' }, { name: 'iast.maxConcurrentRequests', value: '3', origin: 'env_var' }, @@ -733,6 +742,9 @@ describe('Config', () => { env: 'test', clientIpEnabled: true, clientIpHeader: 'x-true-client-ip', + codeOriginForSpans: { + enabled: false + }, sampleRate: 0.5, rateLimit: 1000, samplingRules, @@ -776,6 +788,7 @@ describe('Config', () => { requestSampling: 50, maxConcurrentRequests: 4, maxContextOperations: 5, + cookieFilterPattern: '.*', deduplicationEnabled: false, redactionEnabled: false, redactionNamePattern: 'REDACTION_NAME_PATTERN', @@ -820,6 +833,7 @@ describe('Config', () => { expect(config).to.have.property('reportHostname', true) expect(config).to.have.property('plugins', false) expect(config).to.have.property('logLevel', logLevel) + expect(config).to.have.nested.property('codeOriginForSpans.enabled', false) expect(config).to.have.property('traceId128BitGenerationEnabled', true) expect(config).to.have.property('traceId128BitLoggingEnabled', true) expect(config).to.have.property('spanRemoveIntegrationFromService', true) @@ -841,6 +855,7 @@ describe('Config', () => { expect(config).to.have.nested.property('iast.requestSampling', 50) expect(config).to.have.nested.property('iast.maxConcurrentRequests', 4) expect(config).to.have.nested.property('iast.maxContextOperations', 5) + 
expect(config).to.have.nested.property('iast.cookieFilterPattern', '.*') expect(config).to.have.nested.property('iast.deduplicationEnabled', false) expect(config).to.have.nested.property('iast.redactionEnabled', false) expect(config).to.have.nested.property('iast.redactionNamePattern', 'REDACTION_NAME_PATTERN') @@ -874,6 +889,7 @@ describe('Config', () => { { name: 'appsec.standalone.enabled', value: true, origin: 'code' }, { name: 'clientIpEnabled', value: true, origin: 'code' }, { name: 'clientIpHeader', value: 'x-true-client-ip', origin: 'code' }, + { name: 'codeOriginForSpans.enabled', value: false, origin: 'code' }, { name: 'dogstatsd.hostname', value: 'agent-dsd', origin: 'code' }, { name: 'dogstatsd.port', value: '5218', origin: 'code' }, { name: 'dynamicInstrumentationEnabled', value: true, origin: 'code' }, @@ -884,6 +900,7 @@ describe('Config', () => { { name: 'flushInterval', value: 5000, origin: 'code' }, { name: 'flushMinSpans', value: 500, origin: 'code' }, { name: 'hostname', value: 'agent', origin: 'code' }, + { name: 'iast.cookieFilterPattern', value: '.*', origin: 'code' }, { name: 'iast.deduplicationEnabled', value: false, origin: 'code' }, { name: 'iast.enabled', value: true, origin: 'code' }, { name: 'iast.maxConcurrentRequests', value: 4, origin: 'code' }, @@ -1081,6 +1098,7 @@ describe('Config', () => { process.env.DD_API_SECURITY_REQUEST_SAMPLE_RATE = 0.5 process.env.DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS = 11 process.env.DD_IAST_ENABLED = 'false' + process.env.DD_IAST_COOKIE_FILTER_PATTERN = '.*' process.env.DD_IAST_REDACTION_NAME_PATTERN = 'name_pattern_to_be_overriden_by_options' process.env.DD_IAST_REDACTION_VALUE_PATTERN = 'value_pattern_to_be_overriden_by_options' process.env.DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED = 'true' @@ -1155,12 +1173,16 @@ describe('Config', () => { }, iast: { enabled: true, + cookieFilterPattern: '.{10,}', redactionNamePattern: 'REDACTION_NAME_PATTERN', redactionValuePattern: 'REDACTION_VALUE_PATTERN' }, 
remoteConfig: { pollInterval: 42 }, + codeOriginForSpans: { + enabled: false + }, traceId128BitGenerationEnabled: false, traceId128BitLoggingEnabled: false }) @@ -1177,6 +1199,7 @@ describe('Config', () => { expect(config).to.have.property('flushMinSpans', 500) expect(config).to.have.property('service', 'test') expect(config).to.have.property('version', '1.0.0') + expect(config).to.have.nested.property('codeOriginForSpans.enabled', false) expect(config).to.have.property('dynamicInstrumentationEnabled', false) expect(config).to.have.property('env', 'development') expect(config).to.have.property('clientIpEnabled', true) @@ -1218,6 +1241,7 @@ describe('Config', () => { expect(config).to.have.nested.property('iast.maxConcurrentRequests', 2) expect(config).to.have.nested.property('iast.maxContextOperations', 2) expect(config).to.have.nested.property('iast.deduplicationEnabled', true) + expect(config).to.have.nested.property('iast.cookieFilterPattern', '.{10,}') expect(config).to.have.nested.property('iast.redactionEnabled', true) expect(config).to.have.nested.property('iast.redactionNamePattern', 'REDACTION_NAME_PATTERN') expect(config).to.have.nested.property('iast.redactionValuePattern', 'REDACTION_VALUE_PATTERN') @@ -1251,6 +1275,7 @@ describe('Config', () => { requestSampling: 15, maxConcurrentRequests: 3, maxContextOperations: 4, + cookieFilterPattern: '.*', deduplicationEnabled: false, redactionEnabled: false, redactionNamePattern: 'REDACTION_NAME_PATTERN', @@ -1284,6 +1309,7 @@ describe('Config', () => { requestSampling: 25, maxConcurrentRequests: 6, maxContextOperations: 7, + cookieFilterPattern: '.{10,}', deduplicationEnabled: true, redactionEnabled: true, redactionNamePattern: 'IGNORED_REDACTION_NAME_PATTERN', @@ -1332,6 +1358,7 @@ describe('Config', () => { requestSampling: 15, maxConcurrentRequests: 3, maxContextOperations: 4, + cookieFilterPattern: '.*', deduplicationEnabled: false, redactionEnabled: false, redactionNamePattern: 'REDACTION_NAME_PATTERN', 
diff --git a/packages/dd-trace/test/datastreams/schemas/schema_builder.spec.js b/packages/dd-trace/test/datastreams/schemas/schema_builder.spec.js index db602ef83aa..134724b593a 100644 --- a/packages/dd-trace/test/datastreams/schemas/schema_builder.spec.js +++ b/packages/dd-trace/test/datastreams/schemas/schema_builder.spec.js @@ -24,7 +24,7 @@ describe('SchemaBuilder', () => { const shouldExtractAddress = builder.shouldExtractSchema('address', 1) const shouldExtractPerson2 = builder.shouldExtractSchema('person', 0) const shouldExtractTooDeep = builder.shouldExtractSchema('city', 11) - const schema = builder.build() + const schema = SchemaBuilder.getSchemaDefinition(builder.build()) const expectedSchema = { components: { diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/complex-types.spec.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/complex-types.spec.js new file mode 100644 index 00000000000..22036e4c60a --- /dev/null +++ b/packages/dd-trace/test/debugger/devtools_client/snapshot/complex-types.spec.js @@ -0,0 +1,323 @@ +'use strict' + +require('../../../setup/mocha') + +const { session, getTargetCodePath, enable, teardown, setAndTriggerBreakpoint } = require('./utils') +const { getLocalStateForCallFrame } = require('../../../../src/debugger/devtools_client/snapshot') + +const NODE_20_PLUS = require('semver').gte(process.version, '20.0.0') +const target = getTargetCodePath(__filename) + +describe('debugger -> devtools client -> snapshot.getLocalStateForCallFrame', function () { + describe('complex types', function () { + let state + + beforeEach(enable(__filename)) + + afterEach(teardown) + + beforeEach(async function () { + let resolve + const localState = new Promise((_resolve) => { resolve = _resolve }) + + session.once('Debugger.paused', async ({ params }) => { + expect(params.hitBreakpoints.length).to.eq(1) + + resolve((await getLocalStateForCallFrame(params.callFrames[0]))()) + }) + + await setAndTriggerBreakpoint(target, 
10) + + state = await localState + }) + + it('should contain expected properties from closure scope', function () { + expect(Object.keys(state).length).to.equal(28) + + // from block scope + // ... tested individually in the remaining it-blocks inside this describe-block + + // from closure scope + expect(state).to.have.deep.property('ref', { + type: 'Object', + fields: { + wmo1: { type: 'Object', fields: { a: { type: 'number', value: '1' } } }, + wmo2: { type: 'Object', fields: { b: { type: 'number', value: '3' } } }, + wso1: { type: 'Object', fields: { a: { type: 'number', value: '1' } } }, + wso2: { type: 'Object', fields: { a: { type: 'number', value: '2' } } }, + wso3: { type: 'Object', fields: { a: { type: 'number', value: '3' } } } + } + }) + expect(state).to.have.deep.property('get', { + type: 'Function', + fields: { + length: { type: 'number', value: '0' }, + name: { type: 'string', value: 'get' } + } + }) + }) + + it('object literal', function () { + expect(state).to.have.deep.property('oblit', { + type: 'Object', + fields: { + a: { type: 'number', value: '1' }, + b_b: { type: 'number', value: '2' }, + 'Symbol(c)': { type: 'number', value: '3' }, + d: { type: 'getter' }, + e: { type: 'getter' }, + f: { type: 'setter' }, + g: { type: 'getter/setter' } + } + }) + }) + + it('custom object from class', function () { + expect(state).to.have.deep.property('obnew', { + type: 'MyClass', + fields: { + foo: { type: 'number', value: '42' }, + '#secret': { type: 'number', value: '42' } + } + }) + }) + + it('Array', function () { + expect(state).to.have.deep.property('arr', { + type: 'Array', + elements: [ + { type: 'number', value: '1' }, + { type: 'number', value: '2' }, + { type: 'number', value: '3' } + ] + }) + }) + + it('RegExp', function () { + expect(state).to.have.deep.property('regex', { type: 'RegExp', value: '/foo/' }) + }) + + it('Date', function () { + expect(state).to.have.deep.property('date', { + type: 'Date', + value: '2024-09-20T07:22:59Z' // 
missing milliseconds due to API limitation (should have been `998`) + }) + }) + + it('Map', function () { + expect(state).to.have.deep.property('map', { + type: 'Map', + entries: [ + [{ type: 'number', value: '1' }, { type: 'number', value: '2' }], + [{ type: 'number', value: '3' }, { type: 'number', value: '4' }] + ] + }) + }) + + it('Set', function () { + expect(state).to.have.deep.property('set', { + type: 'Set', + elements: [ + { + type: 'Array', + elements: [ + { type: 'number', value: '1' }, + { type: 'number', value: '2' } + ] + }, + { type: 'number', value: '3' }, + { type: 'number', value: '4' } + ] + }) + }) + + it('WeakMap', function () { + expect(state).to.have.property('wmap') + expect(state.wmap).to.have.keys('type', 'entries') + expect(state.wmap.entries).to.be.an('array') + state.wmap.entries = state.wmap.entries.sort((a, b) => a[1].value - b[1].value) + expect(state).to.have.deep.property('wmap', { + type: 'WeakMap', + entries: [[ + { type: 'Object', fields: { a: { type: 'number', value: '1' } } }, + { type: 'number', value: '2' } + ], [ + { type: 'Object', fields: { b: { type: 'number', value: '3' } } }, + { type: 'number', value: '4' } + ]] + }) + }) + + it('WeakSet', function () { + expect(state).to.have.property('wset') + expect(state.wset).to.have.keys('type', 'elements') + expect(state.wset.elements).to.be.an('array') + state.wset.elements = state.wset.elements.sort((a, b) => a.fields.a.value - b.fields.a.value) + expect(state).to.have.deep.property('wset', { + type: 'WeakSet', + elements: [ + { type: 'Object', fields: { a: { type: 'number', value: '1' } } }, + { type: 'Object', fields: { a: { type: 'number', value: '2' } } }, + { type: 'Object', fields: { a: { type: 'number', value: '3' } } } + ] + }) + }) + + it('Generator', function () { + expect(state).to.have.deep.property('gen', { + type: 'generator', + fields: { foo: { type: 'number', value: '42' } } + }) + }) + + it('Error', function () { + expect(state).to.have.property('err') + 
expect(state.err).to.have.keys('type', 'fields') + expect(state.err).to.have.property('type', 'CustomError') + expect(state.err.fields).to.be.an('object') + expect(state.err.fields).to.have.keys('stack', 'message', 'foo') + expect(state.err.fields).to.deep.include({ + message: { type: 'string', value: 'boom!' }, + foo: { type: 'number', value: '42' } + }) + expect(state.err.fields.stack).to.have.keys('type', 'value', 'truncated', 'size') + expect(state.err.fields.stack.value).to.be.a('string') + expect(state.err.fields.stack.value).to.match(/^Error: boom!/) + expect(state.err.fields.stack.size).to.be.a('number') + expect(state.err.fields.stack.size).to.above(255) + expect(state.err.fields.stack).to.deep.include({ + type: 'string', + truncated: true + }) + }) + + it('Function', function () { + expect(state).to.have.deep.property('fn', { + type: 'Function', + fields: { + foo: { + type: 'Object', + fields: { bar: { type: 'number', value: '42' } } + }, + length: { type: 'number', value: '2' }, + name: { type: 'string', value: 'fnWithProperties' } + } + }) + }) + + it('Bound function', function () { + expect(state).to.have.deep.property('bfn', { + type: 'Function', + fields: { + length: { type: 'number', value: '0' }, + name: { type: 'string', value: 'bound fnWithProperties' } + } + }) + }) + + it('Arrow function', function () { + expect(state).to.have.deep.property('afn', { + type: 'Function', + fields: { + length: { type: 'number', value: '0' }, + name: { type: 'string', value: 'afn' } + } + }) + }) + + it('Class', function () { + expect(state).to.have.deep.property('cls', { type: 'class MyClass' }) + }) + + it('Anonymous class', function () { + expect(state).to.have.deep.property('acls', { type: 'class' }) + }) + + it('Proxy for object literal', function () { + expect(state).to.have.deep.property('prox', { + type: NODE_20_PLUS ? 
'Proxy(Object)' : 'Proxy', + fields: { + target: { type: 'boolean', value: 'true' } + } + }) + }) + + it('Proxy for custom class', function () { + expect(state).to.have.deep.property('custProx', { + type: NODE_20_PLUS ? 'Proxy(MyClass)' : 'Proxy', + fields: { + foo: { type: 'number', value: '42' } + } + }) + }) + + it('Promise: Pending', function () { + expect(state).to.have.deep.property('pPen', { + type: 'Promise', + fields: { + '[[PromiseState]]': { type: 'string', value: 'pending' }, + '[[PromiseResult]]': { type: 'undefined' } + } + }) + }) + + it('Promise: Resolved', function () { + expect(state).to.have.deep.property('pRes', { + type: 'Promise', + fields: { + '[[PromiseState]]': { type: 'string', value: 'fulfilled' }, + '[[PromiseResult]]': { type: 'string', value: 'resolved value' } + } + }) + }) + + it('Promise: Rejected', function () { + expect(state).to.have.deep.property('pRej', { + type: 'Promise', + fields: { + '[[PromiseState]]': { type: 'string', value: 'rejected' }, + '[[PromiseResult]]': { type: 'string', value: 'rejected value' } + } + }) + }) + + it('TypedArray', function () { + expect(state).to.have.deep.property('tarr', { + type: 'Int8Array', + elements: [ + { type: 'number', value: '72' }, + { type: 'number', value: '65' }, + { type: 'number', value: '76' } + ] + }) + }) + + it('ArrayBuffer', function () { + expect(state).to.have.deep.property('ab', { + type: 'ArrayBuffer', + value: 'HAL' + }) + }) + + it('SharedArrayBuffer', function () { + expect(state).to.have.deep.property('sab', { + type: 'SharedArrayBuffer', + value: 'hello\x01\x02\x03world' + }) + }) + + it('circular reference in object', function () { + expect(state).to.have.property('circular') + expect(state.circular).to.have.property('type', 'Object') + expect(state.circular).to.have.property('fields') + // For the circular field, just check that at least one of the expected properties are present + expect(state.circular.fields).to.deep.include({ + regex: { type: 'RegExp', value: 
'/foo/' } + }) + }) + + it('non-enumerable property', function () { + expect(state).to.have.deep.property('hidden', { type: 'string', value: 'secret' }) + }) + }) +}) diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/max-reference-depth.spec.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/max-reference-depth.spec.js new file mode 100644 index 00000000000..4c5971969fb --- /dev/null +++ b/packages/dd-trace/test/debugger/devtools_client/snapshot/max-reference-depth.spec.js @@ -0,0 +1,124 @@ +'use strict' + +require('../../../setup/mocha') + +const { getTargetCodePath, enable, teardown, assertOnBreakpoint, setAndTriggerBreakpoint } = require('./utils') + +const target = getTargetCodePath(__filename) + +describe('debugger -> devtools client -> snapshot.getLocalStateForCallFrame', function () { + describe('maxReferenceDepth', function () { + beforeEach(enable(__filename)) + + afterEach(teardown) + + it('should return expected object for nested objects with maxReferenceDepth: 1', function (done) { + assertOnBreakpoint(done, { maxReferenceDepth: 1 }, (state) => { + expect(Object.keys(state).length).to.equal(1) + + expect(state).to.have.property('myNestedObj') + expect(state.myNestedObj).to.have.property('type', 'Object') + expect(state.myNestedObj).to.have.property('fields') + expect(Object.keys(state.myNestedObj).length).to.equal(2) + + expect(state.myNestedObj.fields).to.have.deep.property('deepObj', { + type: 'Object', notCapturedReason: 'depth' + }) + + expect(state.myNestedObj.fields).to.have.deep.property('deepArr', { + type: 'Array', notCapturedReason: 'depth' + }) + }) + + setAndTriggerBreakpoint(target, 9) + }) + + it('should return expected object for nested objects with maxReferenceDepth: 5', function (done) { + assertOnBreakpoint(done, { maxReferenceDepth: 5 }, (state) => { + expect(Object.entries(state).length).to.equal(1) + + expect(state).to.have.property('myNestedObj') + expect(state.myNestedObj).to.have.property('type', 
'Object') + expect(state.myNestedObj).to.have.property('fields') + expect(Object.entries(state.myNestedObj).length).to.equal(2) + + expect(state.myNestedObj.fields).to.have.deep.property('deepObj', { + type: 'Object', + fields: { + foo: { + type: 'Object', + fields: { + foo: { + type: 'Object', + fields: { + foo: { + type: 'Object', + fields: { + foo: { type: 'Object', notCapturedReason: 'depth' } + } + } + } + } + } + } + } + }) + + expect(state.myNestedObj.fields).to.have.deep.property('deepArr', { + type: 'Array', + elements: [{ + type: 'Array', + elements: [{ + type: 'Array', + elements: [{ + type: 'Array', + elements: [{ type: 'Array', notCapturedReason: 'depth' }] + }] + }] + }] + }) + }) + + setAndTriggerBreakpoint(target, 9) + }) + + it('should return expected object for nested objects if maxReferenceDepth is missing', function (done) { + assertOnBreakpoint(done, (state) => { + expect(Object.entries(state).length).to.equal(1) + + expect(state).to.have.property('myNestedObj') + expect(state.myNestedObj).to.have.property('type', 'Object') + expect(state.myNestedObj).to.have.property('fields') + expect(Object.entries(state.myNestedObj).length).to.equal(2) + + expect(state.myNestedObj.fields).to.have.deep.property('deepObj', { + type: 'Object', + fields: { + foo: { + type: 'Object', + fields: { + foo: { + type: 'Object', + notCapturedReason: 'depth' + } + } + } + } + }) + + expect(state.myNestedObj.fields).to.have.deep.property('deepArr', { + type: 'Array', + elements: [{ + type: 'Array', + elements: [{ + type: 'Array', + notCapturedReason: 'depth' + }] + }] + }) + }) + + setAndTriggerBreakpoint(target, 9) + }) + }) +}) diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/primitives.spec.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/primitives.spec.js new file mode 100644 index 00000000000..a01203fe48f --- /dev/null +++ b/packages/dd-trace/test/debugger/devtools_client/snapshot/primitives.spec.js @@ -0,0 +1,30 @@ +'use strict' + 
+require('../../../setup/mocha') + +const { getTargetCodePath, enable, teardown, assertOnBreakpoint, setAndTriggerBreakpoint } = require('./utils') + +const target = getTargetCodePath(__filename) + +describe('debugger -> devtools client -> snapshot.getLocalStateForCallFrame', function () { + describe('primitives', function () { + beforeEach(enable(__filename)) + + afterEach(teardown) + + it('should return expected object for primitives', function (done) { + assertOnBreakpoint(done, (state) => { + expect(Object.keys(state).length).to.equal(7) + expect(state).to.have.deep.property('undef', { type: 'undefined' }) + expect(state).to.have.deep.property('nil', { type: 'null', isNull: true }) + expect(state).to.have.deep.property('bool', { type: 'boolean', value: 'true' }) + expect(state).to.have.deep.property('num', { type: 'number', value: '42' }) + expect(state).to.have.deep.property('bigint', { type: 'bigint', value: '18014398509481982' }) + expect(state).to.have.deep.property('str', { type: 'string', value: 'foo' }) + expect(state).to.have.deep.property('sym', { type: 'symbol', value: 'Symbol(foo)' }) + }) + + setAndTriggerBreakpoint(target, 13) + }) + }) +}) diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/scopes.spec.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/scopes.spec.js new file mode 100644 index 00000000000..d02093a4b01 --- /dev/null +++ b/packages/dd-trace/test/debugger/devtools_client/snapshot/scopes.spec.js @@ -0,0 +1,29 @@ +'use strict' + +require('../../../setup/mocha') + +const { getTargetCodePath, enable, teardown, assertOnBreakpoint, setAndTriggerBreakpoint } = require('./utils') + +const target = getTargetCodePath(__filename) + +describe('debugger -> devtools client -> snapshot.getLocalStateForCallFrame', function () { + beforeEach(enable(__filename)) + + afterEach(teardown) + + describe('scopes', function () { + it('should capture expected scopes', function (done) { + assertOnBreakpoint(done, (state) => { + 
expect(Object.entries(state).length).to.equal(5) + + expect(state).to.have.deep.property('a1', { type: 'number', value: '1' }) + expect(state).to.have.deep.property('a2', { type: 'number', value: '2' }) + expect(state).to.have.deep.property('total', { type: 'number', value: '0' }) + expect(state).to.have.deep.property('i', { type: 'number', value: '0' }) + expect(state).to.have.deep.property('inc', { type: 'number', value: '2' }) + }) + + setAndTriggerBreakpoint(target, 13) + }) + }) +}) diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/complex-types.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/complex-types.js new file mode 100644 index 00000000000..65e3e7fac48 --- /dev/null +++ b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/complex-types.js @@ -0,0 +1,127 @@ +'use strict' + +function run () { + /* eslint-disable no-unused-vars */ + const { + oblit, obnew, arr, regex, date, map, set, wmap, wset, gen, err, fn, bfn, afn, cls, acls, prox, custProx, pPen, + pRes, pRej, tarr, ab, sab, circular, hidden + } = get() + /* eslint-enable no-unused-vars */ + return 'my return value' // breakpoint at this line +} + +// WARNING: Breakpoints present above this line - Any changes to the lines above might influence tests! + +// References to objects used in WeakMap/WeakSet objects to ensure that they are not garbage collected during testing +const ref = { + wmo1: { a: 1 }, + wmo2: { b: 3 }, + wso1: { a: 1 }, + wso2: { a: 2 }, + wso3: { a: 3 } +} + +// warp it all in a single function to avoid spamming the closure scope with a lot of variables (makes testing simpler) +function get () { + let e, g + const oblit = { + a: 1, + 'b.b': 2, + [Symbol('c')]: 3, + // Has no side-effect + // TODO: At some point it would be great to detect this and get the value, + // though currently we can neither detect it, nor execute the getter. 
+ get d () { + return 4 + }, + // Has side-effect: We should never try to fetch this! + get e () { + e = Math.random() + return e + }, + // Only setter + set f (v) {}, // eslint-disable-line accessor-pairs + // Both getter and setter + get g () { return g }, + set g (x) { g = x } + } + + function fnWithProperties (a, b) {} + fnWithProperties.foo = { bar: 42 } + + class MyClass { + #secret = 42 + constructor () { + this.foo = this.#secret + } + } + + function * makeIterator () { + yield 1 + yield 2 + } + const gen = makeIterator() + gen.foo = 42 + + class CustomError extends Error { + constructor (...args) { + super(...args) + this.foo = 42 + } + } + const err = new CustomError('boom!') + + const buf1 = Buffer.from('IBM') + const buf2 = Buffer.from('hello\x01\x02\x03world') + + const arrayBuffer = new ArrayBuffer(buf1.length) + const sharedArrayBuffer = new SharedArrayBuffer(buf2.length) + + const typedArray = new Int8Array(arrayBuffer) + for (let i = 0; i < buf1.length; i++) typedArray[i] = buf1[i] - 1 + + const sharedTypedArray = new Int8Array(sharedArrayBuffer) + for (let i = 0; i < buf2.length; i++) sharedTypedArray[i] = buf2[i] + + const complexTypes = { + oblit, + obnew: new MyClass(), + arr: [1, 2, 3], + regex: /foo/, + date: new Date('2024-09-20T07:22:59.998Z'), + map: new Map([[1, 2], [3, 4]]), + set: new Set([[1, 2], 3, 4]), + wmap: new WeakMap([[ref.wmo1, 2], [ref.wmo2, 4]]), + wset: new WeakSet([ref.wso1, ref.wso2, ref.wso3]), + gen, + err, + fn: fnWithProperties, + bfn: fnWithProperties.bind(new MyClass(), 1, 2), + afn: () => { return 42 }, + cls: MyClass, + acls: class + {}, // eslint-disable-line indent, brace-style + prox: new Proxy({ target: true }, { get () { return false } }), + custProx: new Proxy(new MyClass(), { get () { return false } }), + pPen: new Promise(() => {}), + pRes: Promise.resolve('resolved value'), + pRej: Promise.reject('rejected value'), // eslint-disable-line prefer-promise-reject-errors + tarr: typedArray, // TODO: Should we 
test other TypedArray's? + ab: arrayBuffer, + sab: sharedArrayBuffer + } + + complexTypes.circular = complexTypes + + Object.defineProperty(complexTypes, 'hidden', { + value: 'secret', + enumerable: false + }) + + // ensure we don't get an unhandled promise rejection error + complexTypes.pRej.catch(() => {}) + + return complexTypes +} + +module.exports = { run } diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-reference-depth.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-reference-depth.js new file mode 100644 index 00000000000..4c80d2098f9 --- /dev/null +++ b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-reference-depth.js @@ -0,0 +1,12 @@ +'use strict' + +function run () { + // eslint-disable-next-line no-unused-vars + const myNestedObj = { + deepObj: { foo: { foo: { foo: { foo: { foo: true } } } } }, + deepArr: [[[[[42]]]]] + } + return 'my return value' // breakpoint at this line +} + +module.exports = { run } diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/primitives.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/primitives.js new file mode 100644 index 00000000000..eba86269a4d --- /dev/null +++ b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/primitives.js @@ -0,0 +1,16 @@ +'use strict' + +function run () { + /* eslint-disable no-unused-vars */ + const undef = undefined + const nil = null + const bool = true + const num = 42 + const bigint = BigInt(Number.MAX_SAFE_INTEGER) * 2n + const str = 'foo' + const sym = Symbol('foo') + /* eslint-enable no-unused-vars */ + return 'my return value' // breakpoint at this line +} + +module.exports = { run } diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/scopes.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/scopes.js new file mode 100644 index 00000000000..e9f771f7226 --- /dev/null +++ 
b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/scopes.js @@ -0,0 +1,17 @@ +'use strict' + +/* eslint-disable no-unused-vars */ +const foo = 'foo' +const bar = 'bar' +/* eslint-enable no-unused-vars */ + +function run (a1 = 1, a2 = 2) { + let total = 0 + for (let i = 0; i < 3; i++) { + const inc = 2 + // eslint-disable-next-line no-unused-vars + total += inc // breakpoint at this line + } +} + +module.exports = { run } diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/utils.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/utils.js new file mode 100644 index 00000000000..215b93a4002 --- /dev/null +++ b/packages/dd-trace/test/debugger/devtools_client/snapshot/utils.js @@ -0,0 +1,92 @@ +'use strict' + +const { join, basename } = require('path') + +const inspector = require('../../../../src/debugger/devtools_client/inspector_promises_polyfill') +const session = new inspector.Session() +session.connect() + +session['@noCallThru'] = true +proxyquire('../src/debugger/devtools_client/snapshot/collector', { + '../session': session +}) + +const { getLocalStateForCallFrame } = require('../../../../src/debugger/devtools_client/snapshot') + +module.exports = { + session, + getTargetCodePath, + enable, + teardown, + setAndTriggerBreakpoint, + assertOnBreakpoint +} + +/** + * @param {string} caller - The filename of the calling spec file (hint: `__filename`) + */ +function getTargetCodePath (caller) { + // Convert /path/to/file.spec.js to /path/to/target-code/file.js + const filename = basename(caller) + return caller.replace(filename, join('target-code', filename.replace('.spec', ''))) +} + +/** + * @param {string} caller - The filename of the calling spec file (hint: `__filename`) + */ +function enable (caller) { + const path = getTargetCodePath(caller) + + // The beforeEach hook + return async () => { + // The scriptIds are resolved asynchronously, so to ensure we have an easy way to get them for each script, we + // 
store a promise on the script that will resolve to its id once it's emitted by Debugger.scriptParsed. + let pResolve = null + const p = new Promise((resolve) => { + pResolve = resolve + }) + p.resolve = pResolve + require(path).scriptId = p + + session.on('Debugger.scriptParsed', ({ params }) => { + if (params.url.endsWith(path)) { + require(path).scriptId.resolve(params.scriptId) + } + }) + + await session.post('Debugger.enable') + } +} + +async function teardown () { + session.removeAllListeners('Debugger.scriptParsed') + session.removeAllListeners('Debugger.paused') + await session.post('Debugger.disable') +} + +async function setAndTriggerBreakpoint (path, line) { + const { run, scriptId } = require(path) + await session.post('Debugger.setBreakpoint', { + location: { + scriptId: await scriptId, + lineNumber: line - 1 // Beware! lineNumber is zero-indexed + } + }) + run() +} + +function assertOnBreakpoint (done, config, callback) { + if (typeof config === 'function') { + callback = config + config = undefined + } + + session.once('Debugger.paused', ({ params }) => { + expect(params.hitBreakpoints.length).to.eq(1) + + getLocalStateForCallFrame(params.callFrames[0], config).then((process) => { + callback(process()) + done() + }).catch(done) + }) +} diff --git a/packages/dd-trace/test/debugger/devtools_client/status.spec.js b/packages/dd-trace/test/debugger/devtools_client/status.spec.js index 728279c7eca..41433f453c5 100644 --- a/packages/dd-trace/test/debugger/devtools_client/status.spec.js +++ b/packages/dd-trace/test/debugger/devtools_client/status.spec.js @@ -1,12 +1,12 @@ 'use strict' -require('../../setup/tap') +require('../../setup/mocha') const ddsource = 'dd_debugger' const service = 'my-service' const runtimeId = 'my-runtime-id' -describe('diagnostic message http request caching', () => { +describe('diagnostic message http request caching', function () { let statusproxy, request const acks = [ @@ -16,7 +16,7 @@ describe('diagnostic message http request 
caching', () => { ['ackError', 'ERROR', new Error('boom')] ] - beforeEach(() => { + beforeEach(function () { request = sinon.spy() request['@noCallThru'] = true @@ -27,10 +27,10 @@ describe('diagnostic message http request caching', () => { }) for (const [ackFnName, status, err] of acks) { - describe(ackFnName, () => { + describe(ackFnName, function () { let ackFn, exception - beforeEach(() => { + beforeEach(function () { if (err) { ackFn = statusproxy[ackFnName].bind(null, err) // Use `JSON.stringify` to remove any fields that are `undefined` @@ -45,7 +45,7 @@ describe('diagnostic message http request caching', () => { } }) - it('should only call once if no change', () => { + it('should only call once if no change', function () { ackFn({ id: 'foo', version: 0 }) expect(request).to.have.been.calledOnce assertRequestData(request, { probeId: 'foo', version: 0, status, exception }) @@ -54,7 +54,7 @@ describe('diagnostic message http request caching', () => { expect(request).to.have.been.calledOnce }) - it('should call again if version changes', () => { + it('should call again if version changes', function () { ackFn({ id: 'foo', version: 0 }) expect(request).to.have.been.calledOnce assertRequestData(request, { probeId: 'foo', version: 0, status, exception }) @@ -64,7 +64,7 @@ describe('diagnostic message http request caching', () => { assertRequestData(request, { probeId: 'foo', version: 1, status, exception }) }) - it('should call again if probeId changes', () => { + it('should call again if probeId changes', function () { ackFn({ id: 'foo', version: 0 }) expect(request).to.have.been.calledOnce assertRequestData(request, { probeId: 'foo', version: 0, status, exception }) diff --git a/packages/dd-trace/test/plugins/agent.js b/packages/dd-trace/test/plugins/agent.js index dc87f18dc3a..cb6f241e7d3 100644 --- a/packages/dd-trace/test/plugins/agent.js +++ b/packages/dd-trace/test/plugins/agent.js @@ -210,10 +210,10 @@ function runCallback (callback, options, handlers) { 
function handler () { try { - callback.apply(null, arguments) + const result = callback.apply(null, arguments) handlers.delete(handlerPayload) clearTimeout(timeout) - deferred.resolve() + deferred.resolve(result) } catch (e) { if (options && options.rejectFirst) { clearTimeout(timeout) diff --git a/packages/dd-trace/test/plugins/externals.json b/packages/dd-trace/test/plugins/externals.json index eddbe0f887c..78373b16daa 100644 --- a/packages/dd-trace/test/plugins/externals.json +++ b/packages/dd-trace/test/plugins/externals.json @@ -95,6 +95,16 @@ "versions": ["5", ">=6"] } ], + "mysql2": [ + { + "name": "mysql2", + "versions": ["1.3.3"] + }, + { + "name": "express", + "versions": [">=4"] + } + ], "fastify": [ { "name": "fastify", @@ -331,6 +341,10 @@ { "name": "express", "versions": [">=4.16.2"] + }, + { + "name": "body-parser", + "versions": ["1.20.1"] } ], "pg": [ diff --git a/packages/dd-trace/test/plugins/util/fixtures/github_event_payload.json b/packages/dd-trace/test/plugins/util/fixtures/github_event_payload.json new file mode 100644 index 00000000000..64828fe2b7b --- /dev/null +++ b/packages/dd-trace/test/plugins/util/fixtures/github_event_payload.json @@ -0,0 +1,70 @@ +{ + "action": "synchronize", + "after": "df289512a51123083a8e6931dd6f57bb3883d4c4", + "before": "f659d2fdd7bedffb40d9ab223dbde6afa5eadc32", + "number": 1, + "pull_request": { + "_links": {}, + "active_lock_reason": null, + "additions": 2, + "assignee": null, + "assignees": [], + "author_association": "OWNER", + "auto_merge": null, + "base": { + "label": "datadog:main", + "ref": "main", + "repo": {}, + "sha": "52e0974c74d41160a03d59ddc73bb9f5adab054b", + "user": {} + }, + "body": "# What Does This Do\r\n\r\n# Motivation\r\n\r\n# Additional Notes\r\n", + "changed_files": 3, + "closed_at": null, + "comments": 0, + "comments_url": "", + "commits": 2, + "commits_url": "", + "created_at": "2024-09-11T15:08:02Z", + "deletions": 0, + "diff_url": "", + "draft": false, + "head": { + "label": 
"forked_org:test-branch", + "ref": "test-branch", + "repo": {}, + "sha": "df289512a51123083a8e6931dd6f57bb3883d4c4", + "user": {} + }, + "html_url": "", + "id": 2066570986, + "issue_url": "", + "labels": [], + "locked": false, + "maintainer_can_modify": false, + "merge_commit_sha": "d9a3212d0d5d1483426dbbdf0beea32ee50abcde", + "mergeable": null, + "mergeable_state": "unknown", + "merged": false, + "merged_at": null, + "merged_by": null, + "milestone": null, + "node_id": "PR_kwDOIvpGAs57LV7q", + "number": 1, + "patch_url": "", + "rebaseable": null, + "requested_reviewers": [], + "requested_teams": [], + "review_comment_url": "", + "review_comments": 0, + "review_comments_url": "", + "state": "open", + "statuses_url": "", + "title": "Test commit", + "updated_at": "2024-09-11T15:12:26Z", + "url": "", + "user": {} + }, + "repository": {}, + "sender": {} +} diff --git a/packages/dd-trace/test/plugins/util/fixtures/github_event_payload_malformed.json b/packages/dd-trace/test/plugins/util/fixtures/github_event_payload_malformed.json new file mode 100644 index 00000000000..0967ef424bc --- /dev/null +++ b/packages/dd-trace/test/plugins/util/fixtures/github_event_payload_malformed.json @@ -0,0 +1 @@ +{} diff --git a/packages/dd-trace/test/plugins/util/stacktrace.spec.js b/packages/dd-trace/test/plugins/util/stacktrace.spec.js new file mode 100644 index 00000000000..3fefc2b29ef --- /dev/null +++ b/packages/dd-trace/test/plugins/util/stacktrace.spec.js @@ -0,0 +1,68 @@ +'use strict' + +const { isAbsolute } = require('path') + +require('../../setup/tap') + +const { + getCallSites, + getUserLandFrames +} = require('../../../src/plugins/util/stacktrace') + +describe('stacktrace utils', () => { + it('should get callsites array from getCallsites', () => { + const callsites = getCallSites() + expect(callsites).to.be.an('array') + expect(callsites.length).to.be.gt(0) + callsites.forEach((callsite) => { + expect(callsite).to.be.an.instanceof(Object) + 
expect(callsite.constructor.name).to.equal('CallSite') + expect(callsite.getFileName).to.be.an.instanceof(Function) + }) + }) + + describe('getUserLandFrames', () => { + it('should return array of frame objects', function helloWorld () { + function someFunction () { + const frames = getUserLandFrames(someFunction) + + expect(frames).to.be.an('array') + expect(frames.length).to.be.gt(1) + frames.forEach((frame) => { + expect(frame).to.be.an.instanceof(Object) + expect(frame).to.have.all.keys('file', 'line', 'column', 'method', 'type') + expect(frame.file).to.be.a('string') + expect(frame.line).to.be.gt(0) + expect(frame.column).to.be.gt(0) + expect(typeof frame.method).to.be.oneOf(['string', 'undefined']) + expect(typeof frame.type).to.be.oneOf(['string', 'undefined']) + expect(isAbsolute(frame.file)).to.be.true + }) + + const frame = frames[0] + expect(frame.file).to.equal(__filename) + expect(frame.line).to.equal(lineNumber) + expect(frame.method).to.equal('helloWorld') + expect(frame.type).to.equal('Test') + } + + const lineNumber = getNextLineNumber() + someFunction() + }) + + it('should respect limit', function helloWorld () { + (function someFunction () { + const frames = getUserLandFrames(someFunction, 1) + expect(frames.length).to.equal(1) + const frame = frames[0] + expect(frame.file).to.equal(__filename) + expect(frame.method).to.equal('helloWorld') + expect(frame.type).to.equal('Test') + })() + }) + }) +}) + +function getNextLineNumber () { + return Number(new Error().stack.split('\n')[2].match(/:(\d+):/)[1]) + 1 +} diff --git a/packages/dd-trace/test/plugins/util/test-environment.spec.js b/packages/dd-trace/test/plugins/util/test-environment.spec.js index 01f9b4914a4..63726622fb5 100644 --- a/packages/dd-trace/test/plugins/util/test-environment.spec.js +++ b/packages/dd-trace/test/plugins/util/test-environment.spec.js @@ -9,7 +9,13 @@ const proxyquire = require('proxyquire') const execFileSyncStub = sinon.stub().returns('') const { getCIMetadata } = 
require('../../../src/plugins/util/ci') -const { CI_ENV_VARS, CI_NODE_LABELS } = require('../../../src/plugins/util/tags') +const { + CI_ENV_VARS, + CI_NODE_LABELS, + GIT_PULL_REQUEST_BASE_BRANCH, + GIT_PULL_REQUEST_BASE_BRANCH_SHA, + GIT_COMMIT_HEAD_SHA +} = require('../../../src/plugins/util/tags') const { getGitMetadata } = proxyquire('../../../src/plugins/util/git', { child_process: { @@ -36,6 +42,44 @@ describe('test environment data', () => { const ciProviders = fs.readdirSync(path.join(__dirname, 'ci-env')) ciProviders.forEach(ciProvider => { const assertions = require(path.join(__dirname, 'ci-env', ciProvider)) + if (ciProvider === 'github.json') { + // We grab the first assertion because we only need to test one + const [env] = assertions[0] + it('can read pull request data from GitHub Actions', () => { + process.env = env + process.env.GITHUB_BASE_REF = 'datadog:main' + process.env.GITHUB_EVENT_PATH = path.join(__dirname, 'fixtures', 'github_event_payload.json') + const { + [GIT_PULL_REQUEST_BASE_BRANCH]: pullRequestBaseBranch, + [GIT_PULL_REQUEST_BASE_BRANCH_SHA]: pullRequestBaseBranchSha, + [GIT_COMMIT_HEAD_SHA]: headCommitSha + } = getTestEnvironmentMetadata() + + expect({ + pullRequestBaseBranch, + pullRequestBaseBranchSha, + headCommitSha + }).to.eql({ + pullRequestBaseBranch: 'datadog:main', + pullRequestBaseBranchSha: '52e0974c74d41160a03d59ddc73bb9f5adab054b', + headCommitSha: 'df289512a51123083a8e6931dd6f57bb3883d4c4' + }) + }) + it('does not crash if GITHUB_EVENT_PATH is not a valid JSON file', () => { + process.env = env + process.env.GITHUB_BASE_REF = 'datadog:main' + process.env.GITHUB_EVENT_PATH = path.join(__dirname, 'fixtures', 'github_event_payload_malformed.json') + const { + [GIT_PULL_REQUEST_BASE_BRANCH]: pullRequestBaseBranch, + [GIT_PULL_REQUEST_BASE_BRANCH_SHA]: pullRequestBaseBranchSha, + [GIT_COMMIT_HEAD_SHA]: headCommitSha + } = getTestEnvironmentMetadata() + + expect(pullRequestBaseBranch).to.equal('datadog:main') + 
expect(pullRequestBaseBranchSha).to.be.undefined + expect(headCommitSha).to.be.undefined + }) + } assertions.forEach(([env, expectedSpanTags], index) => { it(`reads env info for spec ${index} from ${ciProvider}`, () => { diff --git a/packages/dd-trace/test/plugins/util/test.spec.js b/packages/dd-trace/test/plugins/util/test.spec.js index ee321f3c0e0..f79ab8fd34d 100644 --- a/packages/dd-trace/test/plugins/util/test.spec.js +++ b/packages/dd-trace/test/plugins/util/test.spec.js @@ -15,7 +15,8 @@ const { resetCoverage, removeInvalidMetadata, parseAnnotations, - getIsFaultyEarlyFlakeDetection + getIsFaultyEarlyFlakeDetection, + getNumFromKnownTests } = require('../../../src/plugins/util/test') const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA, CI_PIPELINE_URL } = require('../../../src/plugins/util/tags') @@ -335,3 +336,32 @@ describe('getIsFaultyEarlyFlakeDetection', () => { expect(isFaulty).to.be.true }) }) + +describe('getNumFromKnownTests', () => { + it('calculates the number of tests from the known tests', () => { + const knownTests = { + testModule: { + 'test1.spec.js': ['test1', 'test2'], + 'test2.spec.js': ['test3'] + } + } + + const numTests = getNumFromKnownTests(knownTests) + expect(numTests).to.equal(3) + }) + + it('does not crash with empty dictionaries', () => { + const knownTests = {} + + const numTests = getNumFromKnownTests(knownTests) + expect(numTests).to.equal(0) + }) + + it('does not crash if known tests is undefined or null', () => { + const numTestsUndefined = getNumFromKnownTests(undefined) + expect(numTestsUndefined).to.equal(0) + + const numTestsNull = getNumFromKnownTests(null) + expect(numTestsNull).to.equal(0) + }) +}) diff --git a/packages/dd-trace/test/profiling/profiler.spec.js b/packages/dd-trace/test/profiling/profiler.spec.js index dc94061ff1f..d99eb6135ea 100644 --- a/packages/dd-trace/test/profiling/profiler.spec.js +++ b/packages/dd-trace/test/profiling/profiler.spec.js @@ -181,6 +181,21 @@ describe('profiler', function () { }) it('should 
stop when capturing failed', async () => { + wallProfiler.profile.throws(new Error('boom')) + + await profiler._start({ profilers, exporters, logger }) + + clock.tick(interval) + + sinon.assert.calledOnce(wallProfiler.stop) + sinon.assert.calledOnce(spaceProfiler.stop) + sinon.assert.calledOnce(consoleLogger.error) + sinon.assert.notCalled(wallProfiler.encode) + sinon.assert.notCalled(spaceProfiler.encode) + sinon.assert.notCalled(exporter.export) + }) + + it('should not stop when encoding failed', async () => { const rejected = Promise.reject(new Error('boom')) wallProfiler.encode.returns(rejected) @@ -190,9 +205,25 @@ describe('profiler', function () { await rejected.catch(() => {}) - sinon.assert.calledOnce(wallProfiler.stop) - sinon.assert.calledOnce(spaceProfiler.stop) + sinon.assert.notCalled(wallProfiler.stop) + sinon.assert.notCalled(spaceProfiler.stop) sinon.assert.calledOnce(consoleLogger.error) + sinon.assert.calledOnce(exporter.export) + }) + + it('should not stop when exporting failed', async () => { + const rejected = Promise.reject(new Error('boom')) + exporter.export.returns(rejected) + + await profiler._start({ profilers, exporters, logger }) + + clock.tick(interval) + + await rejected.catch(() => {}) + + sinon.assert.notCalled(wallProfiler.stop) + sinon.assert.notCalled(spaceProfiler.stop) + sinon.assert.calledOnce(exporter.export) }) it('should flush when the interval is reached', async () => { @@ -270,17 +301,6 @@ describe('profiler', function () { sinon.assert.calledWithMatch(submit, 'Submitted profiles') }) - it('should skip submit with no profiles', async () => { - const start = new Date() - const end = new Date() - try { - await profiler._submit({}, start, end) - throw new Error('should have got exception from _submit') - } catch (err) { - expect(err.message).to.equal('No profiles to submit') - } - }) - it('should have a new start time for each capture', async () => { await profiler._start({ profilers, exporters }) diff --git a/yarn.lock 
b/yarn.lock index 6f0e4159c46..e5c88856acd 100644 --- a/yarn.lock +++ b/yarn.lock @@ -263,10 +263,10 @@ dependencies: node-gyp-build "^3.9.0" -"@datadog/native-iast-rewriter@2.4.1": - version "2.4.1" - resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.4.1.tgz#e8211f78c818906513fb96a549374da0382c7623" - integrity sha512-j3auTmyyn63e2y+SL28CGNy/l+jXQyh+pxqoGTacWaY5FW/dvo5nGQepAismgJ3qJ8VhQfVWRdxBSiT7wu9clw== +"@datadog/native-iast-rewriter@2.5.0": + version "2.5.0" + resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.5.0.tgz#b613defe86e78168f750d1f1662d4ffb3cf002e6" + integrity sha512-WRu34A3Wwp6oafX8KWNAbedtDaaJO+nzfYQht7pcJKjyC2ggfPeF7SoP+eDo9wTn4/nQwEOscSR4hkJqTRlpXQ== dependencies: lru-cache "^7.14.0" node-gyp-build "^4.5.0" @@ -534,16 +534,6 @@ "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" -"@jsep-plugin/assignment@^1.2.1": - version "1.2.1" - resolved "https://registry.yarnpkg.com/@jsep-plugin/assignment/-/assignment-1.2.1.tgz#07277bdd7862451a865d391e2142efba33f46c9b" - integrity sha512-gaHqbubTi29aZpVbBlECRpmdia+L5/lh2BwtIJTmtxdbecEyyX/ejAOg7eQDGNvGOUmPY7Z2Yxdy9ioyH/VJeA== - -"@jsep-plugin/regex@^1.0.3": - version "1.0.3" - resolved "https://registry.yarnpkg.com/@jsep-plugin/regex/-/regex-1.0.3.tgz#3aeaa2e5fa45d89de116aeafbfa41c95935b7f6d" - integrity sha512-XfZgry4DwEZvSFtS/6Y+R48D7qJYJK6R9/yJFyUFHCIUMEEHuJ4X95TDgJp5QkmzfLYvapMPzskV5HpIDrREug== - "@nodelib/fs.scandir@2.1.5": version "2.1.5" resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" @@ -635,40 +625,40 @@ resolved "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz" integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw== -"@sinonjs/commons@^2.0.0": - version "2.0.0" - resolved "https://registry.npmjs.org/@sinonjs/commons/-/commons-2.0.0.tgz" - integrity "sha1-/UylsGNVQwfoMntFZL1W07c5JKM= 
sha512-uLa0j859mMrg2slwQYdO/AkrOfmH+X6LTVmNTS9CqexuE2IvVORIkSpJLqePAbEnKJ77aMmCwr1NUZ57120Xcg==" - dependencies: - type-detect "4.0.8" - -"@sinonjs/commons@^3.0.0": - version "3.0.0" - resolved "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.0.tgz" - integrity "sha1-vrQ0/oddllJl4EcizPwh3391XXI= sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA==" +"@sinonjs/commons@^3.0.0", "@sinonjs/commons@^3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-3.0.1.tgz#1029357e44ca901a615585f6d27738dbc89084cd" + integrity sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ== dependencies: type-detect "4.0.8" -"@sinonjs/fake-timers@^10.0.2", "@sinonjs/fake-timers@^10.3.0": +"@sinonjs/fake-timers@^10.3.0": version "10.3.0" - resolved "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz" - integrity "sha1-Vf3/Hsq581QBkSna9N8N1Nkj6mY= sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz#55fdff1ecab9f354019129daf4df0dd4d923ea66" + integrity sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA== dependencies: "@sinonjs/commons" "^3.0.0" +"@sinonjs/fake-timers@^11.2.2": + version "11.3.1" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-11.3.1.tgz#51d6e8d83ca261ff02c0ab0e68e9db23d5cd5999" + integrity sha512-EVJO7nW5M/F5Tur0Rf2z/QoMo+1Ia963RiMtapiQrEWvY0iBUvADo8Beegwjpnle5BHkyHuoxSTW3jF43H1XRA== + dependencies: + "@sinonjs/commons" "^3.0.1" + "@sinonjs/samsam@^8.0.0": - version "8.0.0" - resolved "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-8.0.0.tgz" - integrity "sha1-DUiMke+z+hRC4mq+qBdZ38i1rGA= sha512-Bp8KUVlLp8ibJZrnvq2foVhP0IVX2CIprMJPK0vqGqgrDa0OHVKeZyBykqskkrdxV6yKBPmGasO8LVjAKR3Gew==" + version "8.0.2" + resolved 
"https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-8.0.2.tgz#e4386bf668ff36c95949e55a38dc5f5892fc2689" + integrity sha512-v46t/fwnhejRSFTGqbpn9u+LQ9xJDse10gNnPgAcxgdoCDMXj/G2asWAC/8Qs+BAZDicX+MNZouXT1A7c83kVw== dependencies: - "@sinonjs/commons" "^2.0.0" + "@sinonjs/commons" "^3.0.1" lodash.get "^4.4.2" - type-detect "^4.0.8" + type-detect "^4.1.0" -"@sinonjs/text-encoding@^0.7.1": - version "0.7.2" - resolved "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.2.tgz" - integrity sha512-sXXKG+uL9IrKqViTtao2Ws6dy0znu9sOaP1di/jKGW1M6VssO8vlpXCQcpZ+jisQ1tTFAC5Jo/EOzFbggBagFQ== +"@sinonjs/text-encoding@^0.7.2": + version "0.7.3" + resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.3.tgz#282046f03e886e352b2d5f5da5eb755e01457f3f" + integrity sha512-DE427ROAphMQzU4ENbliGYrBSYPXF+TtLg9S8vzeA+OF4ZKzoDdzfL8sxuMUGS/lgRhM6j1URSk9ghf7Xo1tyA== "@types/json5@^0.0.29": version "0.0.29" @@ -1021,10 +1011,10 @@ bind-obj-methods@^3.0.0: resolved "https://registry.npmjs.org/bind-obj-methods/-/bind-obj-methods-3.0.0.tgz" integrity sha512-nLEaaz3/sEzNSyPWRsN9HNsqwk1AUyECtGj+XwGdIi3xABnEqecvXtIJ0wehQXuuER5uZ/5fTs2usONgYjG+iw== -body-parser@1.20.2, body-parser@^1.20.2: - version "1.20.2" - resolved "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz" - integrity "sha1-b+sOIcRyTQbef/ONo22tT1enR/0= sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==" +body-parser@1.20.3, body-parser@^1.20.3: + version "1.20.3" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.3.tgz#1953431221c6fb5cd63c4b36d53fab0928e548c6" + integrity sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g== dependencies: bytes "3.1.2" content-type "~1.0.5" @@ -1034,7 +1024,7 @@ body-parser@1.20.2, body-parser@^1.20.2: http-errors "2.0.0" iconv-lite "0.4.24" on-finished "2.4.1" - qs "6.11.0" + qs "6.13.0" raw-body "2.5.2" type-is "~1.6.18" unpipe "1.0.0" 
@@ -1429,10 +1419,10 @@ cookie-signature@1.0.6: resolved "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz" integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== -cookie@0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051" - integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw== +cookie@0.7.1: + version "0.7.1" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.1.tgz#2f73c42142d5d5cf71310a74fc4ae61670e5dbc9" + integrity sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w== core-util-is@~1.0.0: version "1.0.3" @@ -1613,9 +1603,9 @@ diff@^4.0.1, diff@^4.0.2: integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== diff@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/diff/-/diff-5.1.0.tgz" - integrity sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw== + version "5.2.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-5.2.0.tgz#26ded047cd1179b78b9537d5ef725503ce1ae531" + integrity sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A== doctrine@^2.1.0: version "2.1.0" @@ -1656,6 +1646,11 @@ encodeurl@~1.0.2: resolved "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== +encodeurl@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" + integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== + es-abstract@^1.22.1, es-abstract@^1.22.3, es-abstract@^1.23.0, es-abstract@^1.23.2: version "1.23.3" resolved 
"https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.23.3.tgz#8f0c5a35cd215312573c5a27c87dfd6c881a0aa0" @@ -2034,36 +2029,36 @@ events@1.1.1: integrity "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ= sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==" express@^4.18.2: - version "4.19.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465" - integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q== + version "4.21.1" + resolved "https://registry.yarnpkg.com/express/-/express-4.21.1.tgz#9dae5dda832f16b4eec941a4e44aa89ec481b281" + integrity sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ== dependencies: accepts "~1.3.8" array-flatten "1.1.1" - body-parser "1.20.2" + body-parser "1.20.3" content-disposition "0.5.4" content-type "~1.0.4" - cookie "0.6.0" + cookie "0.7.1" cookie-signature "1.0.6" debug "2.6.9" depd "2.0.0" - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" etag "~1.8.1" - finalhandler "1.2.0" + finalhandler "1.3.1" fresh "0.5.2" http-errors "2.0.0" - merge-descriptors "1.0.1" + merge-descriptors "1.0.3" methods "~1.1.2" on-finished "2.4.1" parseurl "~1.3.3" - path-to-regexp "0.1.7" + path-to-regexp "0.1.10" proxy-addr "~2.0.7" - qs "6.11.0" + qs "6.13.0" range-parser "~1.2.1" safe-buffer "5.2.1" - send "0.18.0" - serve-static "1.15.0" + send "0.19.0" + serve-static "1.16.2" setprototypeof "1.2.0" statuses "2.0.1" type-is "~1.6.18" @@ -2114,13 +2109,13 @@ fill-range@^7.1.1: dependencies: to-regex-range "^5.0.1" -finalhandler@1.2.0: - version "1.2.0" - resolved "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz" - integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== +finalhandler@1.3.1: + version "1.3.1" + resolved 
"https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.3.1.tgz#0c575f1d1d324ddd1da35ad7ece3df7d19088019" + integrity sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ== dependencies: debug "2.6.9" - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" on-finished "2.4.1" parseurl "~1.3.3" @@ -2843,11 +2838,6 @@ is-windows@^1.0.2: resolved "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== -isarray@0.0.1: - version "0.0.1" - resolved "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" - integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== - isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" @@ -2967,11 +2957,6 @@ js-yaml@^3.13.1: argparse "^1.0.7" esprima "^4.0.0" -jsep@^1.3.8: - version "1.3.8" - resolved "https://registry.yarnpkg.com/jsep/-/jsep-1.3.8.tgz#facb6eb908d085d71d950bd2b24b757c7b8a46d7" - integrity sha512-qofGylTGgYj9gZFsHuyWAN4jr35eJ66qJCK4eKDnldohuUoQFbU3iZn2zjvEbd9wOAhP9Wx5DsAAduTyE1PSWQ== - jsesc@^2.5.1: version "2.5.2" resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz" @@ -3004,15 +2989,6 @@ json5@^2.2.3: resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== -jsonpath-plus@^9.0.0: - version "9.0.0" - resolved "https://registry.yarnpkg.com/jsonpath-plus/-/jsonpath-plus-9.0.0.tgz#bb8703ee481531142bca8dee9a42fe72b8358a7f" - integrity sha512-bqE77VIDStrOTV/czspZhTn+o27Xx9ZJRGVkdVShEtPoqsIx5yALv3lWVU6y+PqYvWPJNWE7ORCQheQkEe0DDA== - dependencies: - "@jsep-plugin/assignment" "^1.2.1" - "@jsep-plugin/regex" "^1.0.3" - jsep "^1.3.8" - jszip@^3.5.0: version "3.10.1" resolved 
"https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz" @@ -3023,10 +2999,10 @@ jszip@^3.5.0: readable-stream "~2.3.6" setimmediate "^1.0.5" -just-extend@^4.0.2: - version "4.2.1" - resolved "https://registry.npmjs.org/just-extend/-/just-extend-4.2.1.tgz" - integrity sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg== +just-extend@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-6.2.0.tgz#b816abfb3d67ee860482e7401564672558163947" + integrity sha512-cYofQu2Xpom82S6qD778jBDpwvvy39s1l/hrYij2u9AMdQcGRpaBu6kY4mVhuno5kJVi1DAz4aiphA2WI1/OAw== knex@^2.4.2: version "2.4.2" @@ -3113,7 +3089,7 @@ lodash.flattendeep@^4.4.0: lodash.get@^4.4.2: version "4.4.2" - resolved "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz" + resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" integrity sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ== lodash.merge@^4.6.2: @@ -3187,7 +3163,12 @@ media-typer@0.3.0: resolved "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz" integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== -merge-descriptors@1.0.1, merge-descriptors@~1.0.0: +merge-descriptors@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.3.tgz#d80319a65f3c7935351e5cfdac8f9318504dbed5" + integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ== + +merge-descriptors@~1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz" integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== @@ -3356,15 +3337,15 @@ negotiator@0.6.3: integrity 
sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== nise@^5.1.4: - version "5.1.4" - resolved "https://registry.npmjs.org/nise/-/nise-5.1.4.tgz" - integrity "sha1-SRzn5zB9TsVG9aZZsu/pShi0u8A= sha512-8+Ib8rRJ4L0o3kfmyVCL7gzrohyDe0cMFTBa2d364yIrEGMEoetznKJx899YxjybU6bL9SQkYPSBBs1gyYs8Xg==" + version "5.1.9" + resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.9.tgz#0cb73b5e4499d738231a473cd89bd8afbb618139" + integrity sha512-qOnoujW4SV6e40dYxJOb3uvuoPHtmLzIk4TFo+j0jPJoC+5Z9xja5qH5JZobEPsa8+YYphMrOSwnrshEhG2qww== dependencies: - "@sinonjs/commons" "^2.0.0" - "@sinonjs/fake-timers" "^10.0.2" - "@sinonjs/text-encoding" "^0.7.1" - just-extend "^4.0.2" - path-to-regexp "^1.7.0" + "@sinonjs/commons" "^3.0.0" + "@sinonjs/fake-timers" "^11.2.2" + "@sinonjs/text-encoding" "^0.7.2" + just-extend "^6.2.0" + path-to-regexp "^6.2.1" nock@^11.3.3: version "11.9.1" @@ -3654,22 +3635,15 @@ path-parse@^1.0.7: resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" - integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== - -path-to-regexp@^0.1.10: +path-to-regexp@0.1.10, path-to-regexp@^0.1.10: version "0.1.10" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.10.tgz#67e9108c5c0551b9e5326064387de4763c4d5f8b" integrity sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w== -path-to-regexp@^1.7.0: - version "1.8.0" - resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz" - integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== - dependencies: - isarray "0.0.1" +path-to-regexp@^6.2.1: + version "6.3.0" + 
resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.3.0.tgz#2b6a26a337737a8e1416f9272ed0766b1c0389f4" + integrity sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ== pathval@^1.1.1: version "1.1.1" @@ -3795,12 +3769,12 @@ punycode@^2.0.0, punycode@^2.1.0: resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz" integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== -qs@6.11.0: - version "6.11.0" - resolved "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz" - integrity "sha1-/Q2WNEb3pl4TZ+AavYVClFPww3o= sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==" +qs@6.13.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906" + integrity sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg== dependencies: - side-channel "^1.0.4" + side-channel "^1.0.6" querystring@0.2.0: version "0.2.0" @@ -4068,10 +4042,10 @@ semver@^7.0.0, semver@^7.5.3, semver@^7.5.4: resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143" integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A== -send@0.18.0: - version "0.18.0" - resolved "https://registry.npmjs.org/send/-/send-0.18.0.tgz" - integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== +send@0.19.0: + version "0.19.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.19.0.tgz#bbc5a388c8ea6c048967049dbeac0e4a3f09d7f8" + integrity sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw== dependencies: debug "2.6.9" depd "2.0.0" @@ -4094,15 +4068,15 @@ serialize-javascript@6.0.0: dependencies: randombytes "^2.1.0" -serve-static@1.15.0: - version "1.15.0" - resolved 
"https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz" - integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== +serve-static@1.16.2: + version "1.16.2" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.2.tgz#b6a5343da47f6bdd2673848bf45754941e803296" + integrity sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw== dependencies: - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" parseurl "~1.3.3" - send "0.18.0" + send "0.19.0" set-blocking@^2.0.0: version "2.0.0" @@ -4167,6 +4141,16 @@ side-channel@^1.0.4: get-intrinsic "^1.0.2" object-inspect "^1.9.0" +side-channel@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2" + integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA== + dependencies: + call-bind "^1.0.7" + es-errors "^1.3.0" + get-intrinsic "^1.2.4" + object-inspect "^1.13.1" + signal-exit@^3.0.2, signal-exit@^3.0.4, signal-exit@^3.0.6: version "3.0.7" resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz" @@ -4177,10 +4161,10 @@ sinon-chai@^3.7.0: resolved "https://registry.npmjs.org/sinon-chai/-/sinon-chai-3.7.0.tgz" integrity sha512-mf5NURdUaSdnatJx3uhoBOrY9dtL19fiOtAdT1Azxg3+lNJFiuN0uzaU3xX1LeAfL17kHQhTAJgpsfhbMJMY2g== -sinon@^15.2.0: - version "15.2.0" - resolved "https://registry.npmjs.org/sinon/-/sinon-15.2.0.tgz" - integrity "sha1-XkTUvFqbXZk4cRN/01YL6/rCdWU= sha512-nPS85arNqwBXaIsFCkolHjGIkFo+Oxu9vbgmBJizLAhqe6P2o3Qmj3KCUoRkfhHtvgDhZdWD3risLHAUJ8npjw==" +sinon@^16.1.3: + version "16.1.3" + resolved "https://registry.yarnpkg.com/sinon/-/sinon-16.1.3.tgz#b760ddafe785356e2847502657b4a0da5501fba8" + integrity sha512-mjnWWeyxcAf9nC0bXcPmiDut+oE8HYridTNzBbF98AYVLmWwGRp2ISEpyhYflG1ifILT+eNn3BmKUJPxjXUPlA== dependencies: "@sinonjs/commons" "^3.0.0" 
"@sinonjs/fake-timers" "^10.3.0" @@ -4518,11 +4502,16 @@ type-check@^0.4.0, type-check@~0.4.0: dependencies: prelude-ls "^1.2.1" -type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.5, type-detect@^4.0.8: +type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.5: version "4.0.8" resolved "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== +type-detect@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.1.0.tgz#deb2453e8f08dcae7ae98c626b13dddb0155906c" + integrity sha512-Acylog8/luQ8L7il+geoSxhEkazvkslg7PSNKOX59mbB9cOveP5aq9h74Y7YU8yDpJwetzQQrfIwtf4Wp4LKcw== + type-fest@^0.12.0: version "0.12.0" resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.12.0.tgz"