
Commit 94b268d
ci: Capture overhead in node app (#17420)
This adds a simple node express app that is run on CI to get very rough overhead measurements of using Sentry. **THIS IS NOT NECESSARILY REPRESENTATIVE OF USING SENTRY IN A REAL APP!** The app is very synthetic and real-world numbers may vary wildly.

We run 3 endpoints with 3 scenarios each:

1. Minimal GET request:

```js
app.get('/test-get', function (req, res) {
  res.send({ version: 'v1' });
});
```

2. POST request with a bit of compute work:

```js
app.post('/test-post', function (req, res) {
  const body = req.body;
  res.send(generateResponse(body));
});

function generateResponse(body) {
  const bodyStr = JSON.stringify(body);
  const RES_BODY_SIZE = 10000;
  const bodyLen = bodyStr.length;
  let resBody = '';
  for (let i = 0; i < RES_BODY_SIZE; i++) {
    resBody += `${i}${bodyStr[i % bodyLen]}-`;
  }
  return { version: 'v1', length: bodyLen, resBody };
}
```

3. GET request with MySQL query:

```js
app.get('/test-mysql', function (_req, res) {
  pool.query('SELECT * from users').then(([users]) => {
    res.send({ version: 'v1', users });
  });
});
```

Each of these is run for 10s (after warmup) via [autocannon](https://github.com/mcollina/autocannon), and we capture the average number of requests per second (see the sketch below).

The scenarios are:

1. Plain, no Sentry/instrumentation
2. With "full" Sentry instrumentation:

```js
Sentry.init({
  dsn: process.env.E2E_TEST_DSN,
  tracesSampleRate: 1,
});
```

3. With errors-only Sentry instrumentation:

```js
Sentry.init({ dsn: process.env.E2E_TEST_DSN });
```
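The measurement harness itself lives in `lib/getOverheadMeasurements.mjs`, which is not part of this diff. For illustration only, a minimal sketch of how one such measurement could look with autocannon's programmatic API; the port, connection count, and warmup pass are assumptions, not the action's actual settings:

```js
import autocannon from 'autocannon';

// Hypothetical sketch — the real harness is in lib/ and may differ.
async function measure(url) {
  // Short warmup pass so connection setup and JIT do not skew the timed run (assumed approach).
  await autocannon({ url, connections: 10, duration: 3 });

  // The measured run: 10 seconds, matching the commit message.
  const result = await autocannon({ url, connections: 10, duration: 10 });

  // autocannon reports a per-second request histogram; `requests.average`
  // is the average number of requests per second.
  return result.requests.average;
}

const reqPerSec = await measure('http://localhost:3000/test-get');
console.log(`avg req/s: ${reqPerSec}`);
```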
Parent: d269f1c

19 files changed: +1100 −21 lines

.github/workflows/build.yml

Lines changed: 34 additions & 0 deletions

```diff
@@ -200,6 +200,9 @@ jobs:
       changed_node:
         ${{ needs.job_get_metadata.outputs.changed_ci == 'true' || contains(steps.checkForAffected.outputs.affected,
         '@sentry/node') }}
+      changed_node_overhead_action:
+        ${{ needs.job_get_metadata.outputs.changed_ci == 'true' || contains(steps.checkForAffected.outputs.affected,
+        '@sentry-internal/node-overhead-gh-action') }}
       changed_deno:
         ${{ needs.job_get_metadata.outputs.changed_ci == 'true' || contains(steps.checkForAffected.outputs.affected,
         '@sentry/deno') }}
@@ -253,6 +256,37 @@ jobs:
           # Only run comparison against develop if this is a PR
           comparison_branch: ${{ (github.event_name == 'pull_request' && github.base_ref) || ''}}

+  job_node_overhead_check:
+    name: Node Overhead Check
+    needs: [job_get_metadata, job_build]
+    timeout-minutes: 15
+    runs-on: ubuntu-24.04
+    if:
+      (needs.job_build.outputs.changed_node == 'true' && github.event_name == 'pull_request') ||
+      (needs.job_build.outputs.changed_node_overhead_action == 'true' && github.event_name == 'pull_request') ||
+      needs.job_get_metadata.outputs.is_base_branch == 'true' || needs.job_get_metadata.outputs.is_release == 'true'
+    steps:
+      - name: Check out current commit (${{ needs.job_get_metadata.outputs.commit_label }})
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.HEAD_COMMIT }}
+      - name: Set up Node
+        uses: actions/setup-node@v4
+        with:
+          node-version-file: 'package.json'
+      - name: Restore caches
+        uses: ./.github/actions/restore-cache
+        with:
+          dependency_cache_key: ${{ needs.job_build.outputs.dependency_cache_key }}
+      - name: Check node overhead
+        uses: ./dev-packages/node-overhead-gh-action
+        env:
+          DEBUG: '1'
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          # Only run comparison against develop if this is a PR
+          comparison_branch: ${{ (github.event_name == 'pull_request' && github.base_ref) || ''}}
+
   job_lint:
     name: Lint
     # Even though the linter only checks source code, not built code, it needs the built code in order check that all
```
Lines changed: 15 additions & 0 deletions

ESLint config for the new package (extends the repo root config):

```js
module.exports = {
  env: {
    node: true,
  },
  extends: ['../../.eslintrc.js'],
  overrides: [
    {
      files: ['**/*.mjs'],
      parserOptions: {
        project: ['tsconfig.json'],
        sourceType: 'module',
      },
    },
  ],
};
```
Lines changed: 3 additions & 0 deletions

The package README:

```md
# node-overhead-gh-action

Capture the overhead of Sentry in a node app.
```
Lines changed: 17 additions & 0 deletions

The action manifest (action.yml):

```yaml
name: 'node-overhead-gh-action'
description: 'Run node overhead comparison'
inputs:
  github_token:
    required: true
    description: 'a github access token'
  comparison_branch:
    required: false
    default: ''
    description: 'If set, compare the current branch with this branch'
  threshold:
    required: false
    default: '3'
    description: 'The percentage threshold for size changes before posting a comment'
runs:
  using: 'node24'
  main: 'index.mjs'
```
Lines changed: 25 additions & 0 deletions

MySQL init script (mounted into the container via `db/init` in the compose file below):

```sql
CREATE DATABASE mydb;
USE mydb;

-- SQL script to create the 'users' table and insert initial data.

-- 1. Create the 'users' table
-- This table stores basic user information.
-- 'id' is the primary key and will automatically increment for each new record.
-- 'name' stores the user's name, up to 255 characters.
-- 'age' stores the user's age as an integer.

CREATE TABLE users (
  id INT PRIMARY KEY AUTO_INCREMENT,
  name VARCHAR(255) NOT NULL,
  age INT
);

-- 2. Insert 5 rows into the 'users' table
-- Populating the table with some sample data.

INSERT INTO users (name, age) VALUES ('Alice Johnson', 28);
INSERT INTO users (name, age) VALUES ('Bob Smith', 45);
INSERT INTO users (name, age) VALUES ('Charlie Brown', 32);
INSERT INTO users (name, age) VALUES ('Diana Prince', 25);
INSERT INTO users (name, age) VALUES ('Ethan Hunt', 41);
```
Lines changed: 12 additions & 0 deletions

Compose file for the MySQL service:

```yaml
services:
  db:
    image: mysql:8
    restart: always
    container_name: node-overhead-gh-action-mysql
    ports:
      - '3306:3306'
    environment:
      MYSQL_ROOT_PASSWORD: password
    volumes:
      # - ./db/data:/var/lib/mysql
      - ./db/init:/docker-entrypoint-initdb.d/:ro
```
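The `/test-mysql` endpoint's `pool` presumably connects to this container. A minimal sketch, assuming `mysql2/promise` (which matches the `pool.query(...).then(([users]) => ...)` destructuring in the commit message); the connection values mirror the compose file and init script, but the app's actual config is not shown in this diff:

```js
import mysql from 'mysql2/promise';

// Values mirror docker-compose.yml (root/password on port 3306) and the
// init script's `CREATE DATABASE mydb` — assumptions, not the app's code.
const pool = mysql.createPool({
  host: 'localhost',
  port: 3306,
  user: 'root',
  password: 'password',
  database: 'mydb',
});

// mysql2/promise resolves to a [rows, fields] tuple, hence the
// `([users])` destructuring used by the /test-mysql endpoint.
const [users] = await pool.query('SELECT * from users');
console.log(users.length); // 5 seeded rows
```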
Lines changed: 236 additions & 0 deletions

The action entry point (`index.mjs`, per `main` in the action manifest above):

```js
import { promises as fs } from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { DefaultArtifactClient } from '@actions/artifact';
import * as core from '@actions/core';
import { exec } from '@actions/exec';
import { context, getOctokit } from '@actions/github';
import * as glob from '@actions/glob';
import * as io from '@actions/io';
import { markdownTable } from 'markdown-table';
import { getArtifactsForBranchAndWorkflow } from './lib/getArtifactsForBranchAndWorkflow.mjs';
import { getAveragedOverheadMeasurements } from './lib/getOverheadMeasurements.mjs';
import { formatResults, hasChanges } from './lib/markdown-table-formatter.mjs';

const NODE_OVERHEAD_HEADING = '## node-overhead report 🧳';
const ARTIFACT_NAME = 'node-overhead-action';
const RESULTS_FILE = 'node-overhead-results.json';

function getResultsFilePath() {
  const __dirname = path.dirname(fileURLToPath(import.meta.url));
  return path.resolve(__dirname, RESULTS_FILE);
}

const { getInput, setFailed } = core;

async function fetchPreviousComment(octokit, repo, pr) {
  const { data: commentList } = await octokit.rest.issues.listComments({
    ...repo,
    issue_number: pr.number,
  });

  return commentList.find(comment => comment.body.startsWith(NODE_OVERHEAD_HEADING));
}

async function run() {
  const __dirname = path.dirname(fileURLToPath(import.meta.url));

  try {
    const { payload, repo } = context;
    const pr = payload.pull_request;

    const comparisonBranch = getInput('comparison_branch');
    const githubToken = getInput('github_token');
    const threshold = getInput('threshold') || 1;

    if (comparisonBranch && !pr) {
      throw new Error('No PR found. Only pull_request workflows are supported.');
    }

    const octokit = getOctokit(githubToken);
    const resultsFilePath = getResultsFilePath();

    // If we have no comparison branch, we just run overhead check & store the result as artifact
    if (!comparisonBranch) {
      return runNodeOverheadOnComparisonBranch();
    }

    // Else, we run overhead check for the current branch, AND fetch it for the comparison branch
    let base;
    let current;
    let baseIsNotLatest = false;
    let baseWorkflowRun;

    try {
      const workflowName = `${process.env.GITHUB_WORKFLOW || ''}`;
      core.startGroup(`getArtifactsForBranchAndWorkflow - workflow:"${workflowName}", branch:"${comparisonBranch}"`);
      const artifacts = await getArtifactsForBranchAndWorkflow(octokit, {
        ...repo,
        artifactName: ARTIFACT_NAME,
        branch: comparisonBranch,
        workflowName,
      });
      core.endGroup();

      if (!artifacts) {
        throw new Error('No artifacts found');
      }

      baseWorkflowRun = artifacts.workflowRun;

      await downloadOtherWorkflowArtifact(octokit, {
        ...repo,
        artifactName: ARTIFACT_NAME,
        artifactId: artifacts.artifact.id,
        downloadPath: __dirname,
      });

      base = JSON.parse(await fs.readFile(resultsFilePath, { encoding: 'utf8' }));

      if (!artifacts.isLatest) {
        baseIsNotLatest = true;
        core.info('Base artifact is not the latest one. This may lead to incorrect results.');
      }
    } catch (error) {
      core.startGroup('Warning, unable to find base results');
      core.error(error);
      core.endGroup();
    }

    core.startGroup('Getting current overhead measurements');
    try {
      current = await getAveragedOverheadMeasurements();
    } catch (error) {
      core.error('Error getting current overhead measurements');
      core.endGroup();
      throw error;
    }
    core.debug(`Current overhead measurements: ${JSON.stringify(current, null, 2)}`);
    core.endGroup();

    const thresholdNumber = Number(threshold);

    const nodeOverheadComment = await fetchPreviousComment(octokit, repo, pr);

    if (nodeOverheadComment) {
      core.debug('Found existing node overhead comment, updating it instead of creating a new one...');
    }

    const shouldComment = isNaN(thresholdNumber) || hasChanges(base, current, thresholdNumber) || nodeOverheadComment;

    if (shouldComment) {
      const bodyParts = [
        NODE_OVERHEAD_HEADING,
        'Note: This is a synthetic benchmark with a minimal express app and does not necessarily reflect the real-world performance impact in an application.',
      ];

      if (baseIsNotLatest) {
        bodyParts.push(
          '⚠️ **Warning:** Base artifact is not the latest one, because the latest workflow run is not done yet. This may lead to incorrect results. Try to re-run all tests to get up to date results.',
        );
      }
      try {
        bodyParts.push(markdownTable(formatResults(base, current)));
      } catch (error) {
        core.error('Error generating markdown table');
        throw error;
      }

      if (baseWorkflowRun) {
        bodyParts.push('');
        bodyParts.push(`[View base workflow run](${baseWorkflowRun.html_url})`);
      }

      const body = bodyParts.join('\r\n');

      try {
        if (!nodeOverheadComment) {
          await octokit.rest.issues.createComment({
            ...repo,
            issue_number: pr.number,
            body,
          });
        } else {
          await octokit.rest.issues.updateComment({
            ...repo,
            comment_id: nodeOverheadComment.id,
            body,
          });
        }
      } catch (error) {
        core.error(
          "Error updating comment. This can happen for PR's originating from a fork without write permissions.",
        );
      }
    } else {
      core.debug('Skipping comment because there are no changes.');
    }
  } catch (error) {
    core.error(error);
    setFailed(error.message);
  }
}

async function runNodeOverheadOnComparisonBranch() {
  const __dirname = path.dirname(fileURLToPath(import.meta.url));
  const resultsFilePath = getResultsFilePath();

  const artifactClient = new DefaultArtifactClient();

  const result = await getAveragedOverheadMeasurements();

  try {
    await fs.writeFile(resultsFilePath, JSON.stringify(result), 'utf8');
  } catch (error) {
    core.error('Error parsing node overhead output. The output should be a json.');
    throw error;
  }

  const globber = await glob.create(resultsFilePath, {
    followSymbolicLinks: false,
  });
  const files = await globber.glob();

  await artifactClient.uploadArtifact(ARTIFACT_NAME, files, __dirname);
}

run();

/**
 * Use GitHub API to fetch artifact download url, then
 * download and extract artifact to `downloadPath`
 */
async function downloadOtherWorkflowArtifact(octokit, { owner, repo, artifactId, artifactName, downloadPath }) {
  const artifact = await octokit.rest.actions.downloadArtifact({
    owner,
    repo,
    artifact_id: artifactId,
    archive_format: 'zip',
  });

  // Make sure output path exists
  try {
    await io.mkdirP(downloadPath);
  } catch {
    // ignore errors
  }

  const downloadFile = path.resolve(downloadPath, `${artifactName}.zip`);

  await exec('wget', [
    '-nv',
    '--retry-connrefused',
    '--waitretry=1',
    '--read-timeout=20',
    '--timeout=15',
    '-t',
    '0',
    '-O',
    downloadFile,
    artifact.url,
  ]);

  await exec('unzip', ['-q', '-d', downloadPath, downloadFile], {
    silent: true,
  });
}
```
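`getAveragedOverheadMeasurements`, `formatResults`, and `hasChanges` come from `lib/`, which this page does not show, so the results shape is unknown. For illustration only, a sketch of how a percentage-threshold check like `hasChanges(base, current, threshold)` could work, assuming results map a scenario name to an average requests-per-second number:

```js
// Hypothetical sketch — the real lib/markdown-table-formatter.mjs may differ.
// Assumed shape: { 'GET /test-get | plain': 12345, 'GET /test-get | full': 9876, ... }
export function hasChanges(base, current, thresholdPercent) {
  // Without base results there is nothing to compare, so always report.
  if (!base) {
    return true;
  }

  return Object.entries(current).some(([scenario, reqPerSec]) => {
    const baseValue = base[scenario];
    if (typeof baseValue !== 'number' || baseValue === 0) {
      return true; // a new or unmeasurable scenario counts as a change
    }
    const changePercent = Math.abs((reqPerSec - baseValue) / baseValue) * 100;
    return changePercent > thresholdPercent;
  });
}
```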
