perf(cmd-api-server): add demonstration of continuous benchmarking
Primary change:
---------------

This is the ice-breaker for some work that got stuck related to this issue:
https://github.com/hyperledger/cacti/issues/2672

The benchmarking library used (benchmark.js) is old but solid and has
almost no dependencies, which means that we'll be in the clear longer term
when it comes to CVEs popping up.

The benchmarks added here are very simple and measure the throughput of
the API server's endpoints that serve the OpenAPI spec.
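
For illustration, the core pattern of each benchmark looks roughly like the
sketch below (a minimal, hypothetical example: callGetOpenApiSpecV1() stands
in for the real API client call; the complete script is part of this commit):

import * as Benchmark from "benchmark";

// Hypothetical stand-in for the real request, e.g. httpApi.getOpenApiSpecV1()
declare function callGetOpenApiSpecV1(): Promise<unknown>;

const suite = new Benchmark.Suite();

suite
  .add("cmd-api-server_HTTP_GET_getOpenApiSpecV1", {
    defer: true, // the measured function is asynchronous
    minSamples: 100,
    fn: async (deferred: Benchmark.Deferred) => {
      await callGetOpenApiSpecV1();
      deferred.resolve(); // ends the timed iteration
    },
  })
  .on("cycle", (event: Benchmark.Event) => console.log(String(event.target)))
  .run();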

The GitHub Action that we use is designed to do regression detection and
reporting, but this is hard to verify before actually putting it in place
because we cannot faithfully simulate the CI environment on a local machine.

The hope is that if someone proposes a code change that significantly
lowers performance, we can catch it at the review stage instead of
having to find out later.

Secondary change:
-----------------

1. Started using tsx in favor of ts-node because it appears to be about
5% faster when looking at benchmark execution. It also claims to have
fewer problems with ESM compared to ts-node, so if this initial trial
goes well we could later decide to swap out ts-node for it project-wide.

Signed-off-by: Peter Somogyvari <peter.somogyvari@accenture.com>
petermetz committed Jan 31, 2024
1 parent 3f36d58 commit 8a0ac09
Showing 6 changed files with 618 additions and 7 deletions.
13 changes: 7 additions & 6 deletions .cspell.json
@@ -4,21 +4,21 @@
"minWordLength": 4,
"allowCompoundWords": true,
"words": [
"outsh",
"adminpw",
"Albertirsa",
"ALLFORTX",
"Anoncreds",
"anoncreds",
"Anoncreds",
"ANYFORTX",
"APIV",
"Apim",
"APIV",
"approveformyorg",
"Askar",
"askar",
"Askar",
"Authz",
"authzn",
"AWSSM",
"benchmarkjs",
"Besu",
"Bools",
"brioux",
@@ -47,8 +47,8 @@
"data",
"dclm",
"DHTAPI",
"Dids",
"dids",
"Dids",
"DockerOde",
"ealen",
"ecparams",
@@ -80,8 +80,8 @@
"ipaddress",
"ipfs",
"IPFSHTTP",
"IPLD",
"ipld",
"IPLD",
"Iroha",
"Irohad",
"isready",
@@ -127,6 +127,7 @@
"organisation",
"Orgs",
"ossp",
"outsh",
"parameterizable",
"Postgres",
"proto",
32 changes: 32 additions & 0 deletions .github/workflows/ci.yaml
@@ -386,6 +386,38 @@ jobs:
${{ runner.os }}-yarn-${{ hashFiles('./yarn.lock') }}
- run: ./tools/ci.sh

- name: Ensure .tmp Directory Exists
run: mkdir -p .tmp/benchmark-results/cmd-api-server/

# Download previous benchmark result from cache (if exists)
- name: Download previous benchmark data
uses: actions/cache@v3.3.1
with:
path: .tmp/benchmark-results/cmd-api-server/
key: ${{ runner.os }}-benchmark

- name: Run Benchmarks
working-directory: ./packages/cactus-cmd-api-server/
run: yarn run benchmark

- name: Store benchmark result
uses: benchmark-action/github-action-benchmark@v1.19.2
with:
tool: 'benchmarkjs'
output-file-path: .tmp/benchmark-results/cmd-api-server/run-cmd-api-server-benchmark.ts.log
github-token: ${{ secrets.GITHUB_TOKEN }}

# Only push the benchmark results to gh-pages website if we are running on the main branch
# We do not want to clutter the benchmark results with intermediate results from PRs that could be drafts
auto-push: ${{ github.ref == 'refs/heads/main' }}

# Show alert with commit comment on detecting possible performance regression
alert-threshold: '5%'
comment-on-alert: true
fail-on-alert: true
alert-comment-cc-users: '@petermetz'

cactus-cmd-socketio-server:
continue-on-error: false
env:
2 changes: 2 additions & 0 deletions package.json
@@ -112,6 +112,7 @@
"@lerna-lite/version": "3.1.0",
"@openapitools/openapi-generator-cli": "2.7.0",
"@types/adm-zip": "0.5.0",
"@types/benchmark": "2.1.5",
"@types/fs-extra": "9.0.13",
"@types/jest": "29.5.3",
"@types/node": "16.18.41",
@@ -123,6 +124,7 @@
"@typescript-eslint/eslint-plugin": "6.4.0",
"@typescript-eslint/parser": "6.4.0",
"adm-zip": "0.5.10",
"benchmark": "2.1.4",
"buffer": "6.0.3",
"cpy-cli": "4.2.0",
"cross-env": "7.0.3",
6 changes: 5 additions & 1 deletion packages/cactus-cmd-api-server/package.json
@@ -43,6 +43,7 @@
"dist/*"
],
"scripts": {
"benchmark": "tsx ./src/test/typescript/benchmark/run-cmd-api-server-benchmark.ts .tmp/benchmark-results/cmd-api-server/run-cmd-api-server-benchmark.ts.log",
"codegen": "run-p 'codegen:*'",
"codegen:openapi": "npm run generate-sdk",
"codegen:proto": "run-s proto:openapi proto:protoc-gen-ts",
@@ -97,6 +98,7 @@
"@hyperledger/cactus-plugin-keychain-vault": "2.0.0-alpha.2",
"@hyperledger/cactus-test-tooling": "2.0.0-alpha.2",
"@openapitools/openapi-generator-cli": "2.7.0",
"@types/benchmark": "2.1.5",
"@types/compression": "1.7.4",
"@types/convict": "6.1.1",
"@types/cors": "2.8.12",
@@ -114,11 +116,13 @@
"@types/semver": "7.3.8",
"@types/uuid": "8.3.4",
"@types/xml2js": "0.4.9",
"benchmark": "2.1.4",
"google-protobuf": "3.18.0-rc.2",
"grpc-tools": "1.12.4",
"grpc_tools_node_protoc_ts": "5.3.3",
"http-status-codes": "2.1.4",
"protobufjs": "7.2.5"
"protobufjs": "7.2.5",
"tsx": "4.7.0"
},
"engines": {
"node": ">=18",
251 changes: 251 additions & 0 deletions packages/cactus-cmd-api-server/src/test/typescript/benchmark/run-cmd-api-server-benchmark.ts
@@ -0,0 +1,251 @@
import path from "path";
import { EOL } from "os";

import * as Benchmark from "benchmark";
import { v4 as uuidv4 } from "uuid";
import type { AuthorizeOptions as SocketIoJwtOptions } from "@thream/socketio-jwt";
import type { Params as ExpressJwtOptions } from "express-jwt";
import { SignJWT, exportSPKI, generateKeyPair } from "jose";
import { RuntimeError } from "run-time-error-cjs";
import * as grpc from "@grpc/grpc-js";
import fse from "fs-extra";

import { LogLevelDesc } from "@hyperledger/cactus-common";

import { IJoseFittingJwtParams } from "@hyperledger/cactus-common";
import { PluginRegistry } from "@hyperledger/cactus-core";
import { Constants } from "@hyperledger/cactus-core-api";

import {
ApiServer,
ApiServerApiClient,
ApiServerApiClientConfiguration,
AuthorizationProtocol,
ConfigService,
IAuthorizationConfig,
} from "../../../main/typescript/public-api";

import { default_service, empty } from "../../../main/typescript/public-api";

const LOG_TAG =
"[packages/cactus-cmd-api-server/src/test/typescript/benchmark/run-cmd-api-server-benchmark.ts]";

const createTestInfrastructure = async (opts: {
readonly logLevel: LogLevelDesc;
}): Promise<{
readonly httpApi: ApiServerApiClient;
readonly grpcCredentials: grpc.ChannelCredentials;
readonly grpcHost: string;
readonly apiServer: ApiServer;
}> => {
const logLevel = opts.logLevel || "DEBUG";

const jwtKeyPair = await generateKeyPair("RS256", { modulusLength: 4096 });
const jwtPublicKey = await exportSPKI(jwtKeyPair.publicKey);
const expressJwtOptions: ExpressJwtOptions & IJoseFittingJwtParams = {
algorithms: ["RS256"],
secret: jwtPublicKey,
audience: uuidv4(),
issuer: uuidv4(),
};
const socketIoJwtOptions: SocketIoJwtOptions = {
secret: jwtPublicKey,
algorithms: ["RS256"],
};

const authorizationConfig: IAuthorizationConfig = {
unprotectedEndpointExemptions: [],
expressJwtOptions,
socketIoJwtOptions,
socketIoPath: Constants.SocketIoConnectionPathV1,
};

const pluginsPath = path.join(
__dirname,
"../../../../../../", // walk back up to the project root
"packages/cactus-cmd-api-server/src/test/typescript/benchmark/run-cmd-api-server-benchmark/", // the dir path from the root
uuidv4(), // then a random directory to ensure proper isolation
);
const pluginManagerOptionsJson = JSON.stringify({ pluginsPath });

const pluginRegistry = new PluginRegistry({ logLevel });

const configService = new ConfigService();

const apiSrvOpts = await configService.newExampleConfig();
apiSrvOpts.logLevel = logLevel;
apiSrvOpts.pluginManagerOptionsJson = pluginManagerOptionsJson;
apiSrvOpts.authorizationProtocol = AuthorizationProtocol.JSON_WEB_TOKEN;
apiSrvOpts.authorizationConfigJson = authorizationConfig;
apiSrvOpts.configFile = "";
apiSrvOpts.apiCorsDomainCsv = "*";
apiSrvOpts.apiPort = 0;
apiSrvOpts.cockpitPort = 0;
apiSrvOpts.grpcPort = 0;
apiSrvOpts.apiTlsEnabled = false;
apiSrvOpts.grpcMtlsEnabled = false;
apiSrvOpts.plugins = [];

const config = await configService.newExampleConfigConvict(apiSrvOpts);

const apiServer = new ApiServer({
config: config.getProperties(),
pluginRegistry,
});

apiServer.initPluginRegistry({ pluginRegistry });
const startResponsePromise = apiServer.start();

const { addressInfoApi, addressInfoGrpc } = await startResponsePromise;
const protocol = apiSrvOpts.apiTlsEnabled ? "https" : "http";
const { address, port } = addressInfoApi;
const apiHost = `${protocol}://${address}:${port}`;

const grpcHost = `${addressInfoGrpc.address}:${addressInfoGrpc.port}`;

const jwtPayload = { name: "Peter", location: "Albertirsa" };
const validJwt = await new SignJWT(jwtPayload)
.setProtectedHeader({ alg: "RS256" })
.setIssuer(expressJwtOptions.issuer)
.setAudience(expressJwtOptions.audience)
.sign(jwtKeyPair.privateKey);

const validBearerToken = `Bearer ${validJwt}`;

const apiClient = new ApiServerApiClient(
new ApiServerApiClientConfiguration({
basePath: apiHost,
baseOptions: { headers: { Authorization: validBearerToken } },
logLevel,
}),
);

const grpcCredentials = grpc.credentials.createInsecure();

return {
grpcCredentials,
httpApi: apiClient,
grpcHost,
apiServer,
};
};

const main = async (opts: { readonly argv: Readonly<Array<string>> }) => {
const logLevel: LogLevelDesc = "WARN";

const gitRootPath = path.join(
__dirname,
"../../../../../../", // walk back up to the project root
);

console.log("%s gitRootPath=%s", LOG_TAG, gitRootPath);

const DEFAULT_OUTPUT_FILE_RELATIVE_PATH =
".tmp/benchmark-results/cmd-api-server/run-cmd-api-server-benchmark.ts.log";

const relativeOutputFilePath =
opts.argv[2] === undefined
? DEFAULT_OUTPUT_FILE_RELATIVE_PATH
: opts.argv[2];

console.log(
"%s DEFAULT_OUTPUT_FILE_RELATIVE_PATH=%s",
LOG_TAG,
DEFAULT_OUTPUT_FILE_RELATIVE_PATH,
);

console.log("%s opts.argv[2]=%s", LOG_TAG, opts.argv[2]);

console.log("%s relativeOutputFilePath=%s", LOG_TAG, relativeOutputFilePath);

const absoluteOutputFilePath = path.join(gitRootPath, relativeOutputFilePath);

console.log("%s absoluteOutputFilePath=%s", LOG_TAG, absoluteOutputFilePath);

const absoluteOutputDirPath = path.dirname(absoluteOutputFilePath);
console.log("%s absoluteOutputDirPath=%s", LOG_TAG, absoluteOutputDirPath);

await fse.mkdirp(absoluteOutputDirPath);
console.log("%s mkdir -p OK: %s", LOG_TAG, absoluteOutputDirPath);

const { apiServer, httpApi, grpcHost, grpcCredentials } =
await createTestInfrastructure({ logLevel });

const minSamples = 100;
const suite = new Benchmark.Suite({});

const cycles: string[] = [];

await new Promise((resolve, reject) => {
suite
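// Each case below runs with defer=true, so Benchmark.js ends an iteration
// only when deferred.resolve() is called; minSamples=100 makes the reported
// ops/sec figure rest on a reasonably large sample.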
.add("cmd-api-server_HTTP_GET_getOpenApiSpecV1", {
defer: true,
minSamples,
fn: async function (deferred: Benchmark.Deferred) {
await httpApi.getOpenApiSpecV1();
deferred.resolve();
},
})
.add("cmd-api-server_gRPC_GetOpenApiSpecV1", {
defer: true,
minSamples,
fn: async function (deferred: Benchmark.Deferred) {
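// A fresh gRPC client is constructed and closed on every iteration, so
// channel setup and teardown are included in the measured time.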
const grpcClient =
new default_service.org.hyperledger.cactus.cmd_api_server.DefaultServiceClient(
grpcHost,
grpcCredentials,
);

await new Promise<default_service.org.hyperledger.cactus.cmd_api_server.GetOpenApiSpecV1Response>(
(resolve, reject) => {
const req = new empty.google.protobuf.Empty();
grpcClient.GetOpenApiSpecV1(req, (err3, value) => {
if (err3) {
reject(err3);
} else if (value) {
resolve(value);
} else {
reject(
new RuntimeError("Response object received is falsy."),
);
}
});
},
);

grpcClient.close();
deferred.resolve();
},
})
.on("cycle", (event: { target: unknown }) => {
// Output benchmark result by converting benchmark result to string
// Example line on stdout:
// cmd-api-server_HTTP_GET_getOpenApiSpecV1 x 1,020 ops/sec ±2.25% (177 runs sampled)
const cycle = String(event.target);
console.log("%s Benchmark.js CYCLE: %s", LOG_TAG, cycle);
cycles.push(cycle);
})
.on("complete", function () {
console.log("%s Benchmark.js COMPLETE.", LOG_TAG);
resolve(suite);
})
.on("error", (ex: unknown) => {
console.log("%s Benchmark.js ERROR: %o", LOG_TAG, ex);
reject(ex);
})
.run();
});

const data = cycles.join(EOL);
console.log("%s Writing results...", LOG_TAG);
await fse.writeFile(absoluteOutputFilePath, data, { encoding: "utf-8" });
console.log("%s Wrote results to %s", LOG_TAG, absoluteOutputFilePath);

await apiServer.shutdown();
console.log("%s Shut down API server OK", LOG_TAG);
};

main({ argv: process.argv }).catch((ex: unknown) => {
console.error("%s process crashed with:", LOG_TAG, ex);
process.exit(1);
});