[DI] Drop snapshot if JSON payload is too large (#4818)
The log track has a 1MB limit on the JSON payload. The client is not
notified if the payload is too large; the snapshot is simply never indexed.

This is a very crude approach. In the future, a more sophisticated
algorithm will be implemented that reduces the size of the snapshot
instead of removing it completely.
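In essence, the check added by this commit serializes the payload, measures its byte length, and, if it would exceed the 1MB limit, replaces the captured locals with a notCapturedReason marker before re-serializing. Below is a minimal standalone sketch of that idea; the helper name pruneSnapshotIfTooLarge is illustrative only, and the actual change lands in send.js further down.

'use strict'

const MAX_PAYLOAD_SIZE = 1024 * 1024 // 1MB log track limit

// Illustrative sketch only (not the commit's code verbatim): return the JSON
// to upload, dropping the captured locals when the payload would be too large.
function pruneSnapshotIfTooLarge (payload) {
  let json = JSON.stringify(payload)

  if (Buffer.byteLength(json) > MAX_PAYLOAD_SIZE) {
    // Replace each captured line's locals with a small marker object
    for (const line of Object.values(payload['debugger.snapshot'].captures.lines)) {
      line.locals = {
        notCapturedReason: 'Snapshot was too large',
        size: Object.keys(line.locals).length // number of locals that were dropped
      }
    }
    json = JSON.stringify(payload)
  }

  return json
}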
watson authored Oct 30, 2024
1 parent 1c0958e commit 111c14a
Showing 5 changed files with 115 additions and 6 deletions.
43 changes: 43 additions & 0 deletions integration-tests/debugger/snapshot-pruning.spec.js
@@ -0,0 +1,43 @@
'use strict'

const { assert } = require('chai')
const { setup, getBreakpointInfo } = require('./utils')

const { line } = getBreakpointInfo()

describe('Dynamic Instrumentation', function () {
  const t = setup()

  describe('input messages', function () {
    describe('with snapshot', function () {
      beforeEach(t.triggerBreakpoint)

      it('should prune snapshot if payload is too large', function (done) {
        t.agent.on('debugger-input', ({ payload }) => {
          assert.isBelow(Buffer.byteLength(JSON.stringify(payload)), 1024 * 1024) // 1MB
          assert.deepEqual(payload['debugger.snapshot'].captures, {
            lines: {
              [line]: {
                locals: {
                  notCapturedReason: 'Snapshot was too large',
                  size: 6
                }
              }
            }
          })
          done()
        })

        t.agent.addRemoteConfig(t.generateRemoteConfig({
          captureSnapshot: true,
          capture: {
            // ensure we get a large snapshot
            maxCollectionSize: Number.MAX_SAFE_INTEGER,
            maxFieldCount: Number.MAX_SAFE_INTEGER,
            maxLength: Number.MAX_SAFE_INTEGER
          }
        }))
      })
    })
  })
})
41 changes: 41 additions & 0 deletions integration-tests/debugger/target-app/snapshot-pruning.js
@@ -0,0 +1,41 @@
'use strict'

require('dd-trace/init')

const { randomBytes } = require('crypto')
const Fastify = require('fastify')

const fastify = Fastify()

const TARGET_SIZE = 1024 * 1024 // 1MB
const LARGE_STRING = randomBytes(1024).toString('hex')

fastify.get('/:name', function handler (request) {
  // eslint-disable-next-line no-unused-vars
  const obj = generateObjectWithJSONSizeLargerThan1MB()

  return { hello: request.params.name } // BREAKPOINT
})

fastify.listen({ port: process.env.APP_PORT }, (err) => {
  if (err) {
    fastify.log.error(err)
    process.exit(1)
  }
  process.send({ port: process.env.APP_PORT })
})

function generateObjectWithJSONSizeLargerThan1MB () {
  const obj = {}
  let i = 0

  while (++i) {
    if (i % 100 === 0) {
      const size = JSON.stringify(obj).length
      if (size > TARGET_SIZE) break
    }
    obj[i] = LARGE_STRING
  }

  return obj
}
19 changes: 15 additions & 4 deletions integration-tests/debugger/utils.js
@@ -13,12 +13,13 @@ const pollInterval = 1
 
 module.exports = {
   pollInterval,
-  setup
+  setup,
+  getBreakpointInfo
 }
 
 function setup () {
   let sandbox, cwd, appPort, proc
-  const breakpoint = getBreakpointInfo()
+  const breakpoint = getBreakpointInfo(1) // `1` to disregard the `setup` function
   const t = {
     breakpoint,
     axios: null,
@@ -103,11 +104,21 @@ function setup () {
   return t
 }
 
-function getBreakpointInfo () {
-  const testFile = new Error().stack.split('\n')[3].split(' (')[1].slice(0, -1).split(':')[0] // filename of caller
+function getBreakpointInfo (stackIndex = 0) {
+  // First, get the filename of the file that called this function
+  const testFile = new Error().stack
+    .split('\n')[stackIndex + 2] // +2 to skip this function + the first line, which is the error message
+    .split(' (')[1]
+    .slice(0, -1)
+    .split(':')[0]
+
+  // Then, find the corresponding file in which the breakpoint exists
   const filename = basename(testFile).replace('.spec', '')
+
+  // Finally, find the line number of the breakpoint
   const line = readFileSync(join(__dirname, 'target-app', filename), 'utf8')
     .split('\n')
     .findIndex(line => line.includes('// BREAKPOINT')) + 1
+
   return { file: `debugger/target-app/${filename}`, line }
 }
16 changes: 15 additions & 1 deletion packages/dd-trace/src/debugger/devtools_client/send.js
@@ -9,6 +9,8 @@ const { GIT_COMMIT_SHA, GIT_REPOSITORY_URL } = require('../../plugins/util/tags'
 
 module.exports = send
 
+const MAX_PAYLOAD_SIZE = 1024 * 1024 // 1MB
+
 const ddsource = 'dd_debugger'
 const hostname = getHostname()
 const service = config.service
@@ -37,5 +39,17 @@ function send (message, logger, snapshot, cb) {
     'debugger.snapshot': snapshot
   }
 
-  request(JSON.stringify(payload), opts, cb)
+  let json = JSON.stringify(payload)
+
+  if (Buffer.byteLength(json) > MAX_PAYLOAD_SIZE) {
+    // TODO: This is a very crude way to handle large payloads. Proper pruning will be implemented later (DEBUG-2624)
+    const line = Object.values(payload['debugger.snapshot'].captures.lines)[0]
+    line.locals = {
+      notCapturedReason: 'Snapshot was too large',
+      size: Object.keys(line.locals).length
+    }
+    json = JSON.stringify(payload)
+  }
+
+  request(json, opts, cb)
 }
@@ -23,7 +23,7 @@ describe('debugger -> devtools client -> snapshot.getLocalStateForCallFrame', fu
     session.once('Debugger.paused', async ({ params }) => {
       expect(params.hitBreakpoints.length).to.eq(1)
 
-      resolve((await getLocalStateForCallFrame(params.callFrames[0], { maxFieldCount: Infinity }))())
+      resolve((await getLocalStateForCallFrame(params.callFrames[0], { maxFieldCount: Number.MAX_SAFE_INTEGER }))())
     })
 
     await setAndTriggerBreakpoint(target, 10)
