feat: re-enable logevent compression (#441)
* feat: re-enable logevent compression

* chore: fix err

* chore: lint

* chore: beacon compress

* chore: disable compression for test

* chore: attach param

* chore: use stream

* chore: add flag and test

* chore: save test

* chore: centralize checks encoding and compression

* test: verify compression works on large payloads

* chore: up min bundles build size

* chore: avoid spreading Uint8Array

---------

Co-authored-by: tore-statsig <74584483+tore-statsig@users.noreply.github.com>
Co-authored-by: Kat <kat@statsig.com>
3 people authored Feb 12, 2025
1 parent 8f4a8f9 commit 8d28738
Showing 15 changed files with 250 additions and 60 deletions.
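
At its core, the change gzips the JSON bodies of log-event requests with the browser-native CompressionStream API before they are sent. The sketch below is a minimal, self-contained illustration of that technique — not the SDK's exact code, and it assumes an environment where CompressionStream and TextEncoder are available — including the manual chunk concatenation that avoids spreading a large Uint8Array:

// Gzip a JSON payload into a single Uint8Array using CompressionStream.
async function gzipJson(payload: Record<string, unknown>): Promise<Uint8Array> {
  const bytes = new TextEncoder().encode(JSON.stringify(payload));

  const stream = new CompressionStream('gzip');
  const writer = stream.writable.getWriter();
  writer.write(bytes).catch(console.error);
  writer.close().catch(console.error);

  // Drain the readable side chunk by chunk.
  const reader = stream.readable.getReader();
  const chunks: Uint8Array[] = [];
  let result: ReadableStreamReadResult<Uint8Array>;
  while (!(result = await reader.read()).done) {
    chunks.push(result.value);
  }

  // Concatenate manually; spreading a very large Uint8Array into a call
  // can exceed the argument limit, which is why the commit avoids it.
  const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0);
  const combined = new Uint8Array(totalLength);
  let offset = 0;
  for (const chunk of chunks) {
    combined.set(chunk, offset);
    offset += chunk.length;
  }
  return combined;
}
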
2 changes: 1 addition & 1 deletion .size-limit.js
@@ -15,7 +15,7 @@ module.exports = [
},
{
name: 'statsig-js-client + session-replay',
limit: '34.0 kB',
limit: '35.0 kB',
path: 'dist/packages/combo/build/js-client/statsig-js-client+session-replay.min.js',
import: '{ StatsigClient }',
ignore: ['rrwebRecord'],
148 changes: 99 additions & 49 deletions packages/client-core/src/NetworkCore.ts
@@ -13,6 +13,7 @@ import {
FallbackResolverArgs,
NetworkFallbackResolver,
} from './NetworkFallbackResolver';
import { SDKFlags } from './SDKFlags';
import { SDKType } from './SDKType';
import { _getWindowSafe } from './SafeJs';
import { SessionID } from './SessionID';
@@ -47,12 +48,15 @@ type RequestArgs = {
>;
};

type DataFlags = {
isStatsigEncodable?: boolean;
isCompressable?: boolean;
};

export type RequestArgsWithData = Flatten<
RequestArgs & {
data: Record<string, unknown>;
isStatsigEncodable?: boolean;
isCompressable?: boolean;
}
} & DataFlags
>;

type BeaconRequestArgs = Pick<
@@ -65,7 +69,7 @@ type RequestArgsInternal = Flatten<
FallbackResolverArgs & {
method: 'POST' | 'GET';
body?: BodyInit;
}
} & DataFlags
>;

type NetworkResponse = {
@@ -133,25 +137,18 @@ export class NetworkCore {
}

const argsInternal = this._getInternalRequestArgs('POST', args);
const body: BodyInit = await this._getPopulatedBody(
argsInternal,
args.data,
);
await this._tryToCompressBody(argsInternal);

const url = await this._getPopulatedURL(argsInternal);
const nav = navigator;
return nav.sendBeacon.bind(nav)(url, body);
return nav.sendBeacon.bind(nav)(url, argsInternal.body);
}

async post(args: RequestArgsWithData): Promise<NetworkResponse | null> {
const argsInternal = this._getInternalRequestArgs('POST', args);

argsInternal.body = await this._getPopulatedBody(argsInternal, args.data);
if (args.isStatsigEncodable) {
argsInternal.body = this._attemptToEncodeString(
argsInternal,
argsInternal.body,
);
}
this._tryEncodeBody(argsInternal);
await this._tryToCompressBody(argsInternal);

return this._sendRequest(argsInternal);
}
@@ -332,67 +329,99 @@ export class NetworkCore {
return `${url}${query ? `?${query}` : ''}`;
}

private async _getPopulatedBody(
args: RequestArgsInternal,
data: Record<string, unknown>,
): Promise<string> {
const { sdkKey, fallbackUrl } = args;
const stableID = StableID.get(sdkKey);
const sessionID = SessionID.get(sdkKey);
const sdkType = SDKType._get(sdkKey);

return JSON.stringify({
...data,
statsigMetadata: {
...StatsigMetadataProvider.get(),
stableID,
sessionID,
sdkType,
fallbackUrl,
},
});
}

private _attemptToEncodeString(
args: RequestArgsInternal,
input: string,
): string {
private _tryEncodeBody(args: RequestArgsInternal): void {
const win = _getWindowSafe();
const body = args.body;
if (
!args.isStatsigEncodable ||
this._options.disableStatsigEncoding ||
typeof body !== 'string' ||
_getStatsigGlobalFlag('no-encode') != null ||
!win?.btoa
) {
return input;
return;
}

try {
const result = win.btoa(input).split('').reverse().join('') ?? input;
args.body = win.btoa(body).split('').reverse().join('');
args.params = {
...(args.params ?? {}),
[NetworkParam.StatsigEncoded]: '1',
};
return result;
} catch {
Log.warn(`Request encoding failed for ${args.urlConfig.getUrl()}`);
return input;
} catch (e) {
Log.warn(`Request encoding failed for ${args.urlConfig.getUrl()}`, e);
}
}

private async _tryToCompressBody(args: RequestArgsInternal): Promise<void> {
const body = args.body;
if (
!args.isCompressable ||
this._options.disableCompression ||
typeof body !== 'string' ||
SDKFlags.get(args.sdkKey, 'enable_log_event_compression') != true ||
_getStatsigGlobalFlag('no-compress') != null ||
typeof CompressionStream === 'undefined' ||
typeof TextEncoder === 'undefined'
) {
return;
}

try {
const bytes = new TextEncoder().encode(body);
const stream = new CompressionStream('gzip');
const writer = stream.writable.getWriter();

writer.write(bytes).catch(Log.error);
writer.close().catch(Log.error);

const reader = stream.readable.getReader();
const chunks: Uint8Array[] = [];

let result: ReadableStreamReadResult<Uint8Array>;
// eslint-disable-next-line no-await-in-loop
while (!(result = await reader.read()).done) {
chunks.push(result.value);
}

const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0);
const combined = new Uint8Array(totalLength);
let offset = 0;
for (const chunk of chunks) {
combined.set(chunk, offset);
offset += chunk.length;
}

args.body = combined;
args.params = {
...(args.params ?? {}),
[NetworkParam.IsGzipped]: '1',
};
} catch (e) {
Log.warn(`Request compression failed for ${args.urlConfig.getUrl()}`, e);
}
}

private _getInternalRequestArgs(
method: 'GET' | 'POST',
args: RequestArgs,
args: RequestArgs | RequestArgsWithData,
): RequestArgsInternal {
const fallbackUrl = this._fallbackResolver.getActiveFallbackUrl(
args.sdkKey,
args.urlConfig,
);

return {
const result = {
...args,
method,
fallbackUrl,
};

if ('data' in args) {
_populateRequestBody(result, args.data);
}

return result;
}
}

@@ -404,6 +433,27 @@ const _ensureValidSdkKey = (args: RequestArgs) => {
return true;
};

const _populateRequestBody = (
args: RequestArgsInternal,
data: Record<string, unknown>,
) => {
const { sdkKey, fallbackUrl } = args;
const stableID = StableID.get(sdkKey);
const sessionID = SessionID.get(sdkKey);
const sdkType = SDKType._get(sdkKey);

args.body = JSON.stringify({
...data,
statsigMetadata: {
...StatsigMetadataProvider.get(),
stableID,
sessionID,
sdkType,
fallbackUrl,
},
});
};

function _getErrorMessage(
controller: AbortController | null,
error: unknown,
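
For callers, compression is opt-in per request via the new isCompressable flag, and it only fires when every gate passes: the disableCompression option is off, the enable_log_event_compression SDK flag is on, no global no-compress flag is set, and CompressionStream and TextEncoder exist. A hedged sketch of the opt-in call, where the key and payload are placeholders:

import { Endpoint, NetworkCore, UrlConfiguration } from '@statsig/client-core';

const network = new NetworkCore({ disableCompression: false });

// When the gates above pass, NetworkCore turns the JSON body into a gzipped
// Uint8Array and appends the gz=1 query param before sending.
const response = await network.post({
  sdkKey: 'client-xxx', // placeholder key
  urlConfig: new UrlConfiguration(Endpoint._rgstr, null, null, null),
  data: { events: [] }, // placeholder log-event payload
  isCompressable: true,
});
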
7 changes: 7 additions & 0 deletions packages/client-core/src/StatsigOptionsCommon.ts
@@ -16,6 +16,13 @@ export type StatsigRuntimeMutableOptions = {
* Note: caching will not work if storage is disabled
*/
disableStorage?: boolean;

/**
* Whether or not Statsig should compress JSON bodies for network requests where possible.
*
* default: `false`
*/
disableCompression?: boolean;
};

export type NetworkConfigCommon = {
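
disableCompression joins the other runtime-mutable options, so it should be settable wherever those options are accepted. A hedged sketch at client construction — the key and user are placeholders, and this assumes StatsigOptions surfaces the field the same way as disableStorage:

import { StatsigClient } from '@statsig/js-client';

const client = new StatsigClient(
  'client-xxx', // placeholder key
  { userID: 'a-user' },
  { disableCompression: true }, // keep log event bodies as plain JSON strings
);

await client.initializeAsync();
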
130 changes: 130 additions & 0 deletions packages/combo/src/__tests__/LogEventCompression.test.ts
@@ -0,0 +1,130 @@
import 'jest-fetch-mock';
import { CompressionStream, DecompressionStream } from 'node:stream/web';
import { TextDecoder, TextEncoder } from 'util';

import {
Endpoint,
NetworkCore,
SDKFlags,
UrlConfiguration,
} from '@statsig/client-core';

Object.assign(global, {
CompressionStream,
TextEncoder,
});

const SDK_KEY = 'client-key';
const BODY = {
sdkKey: SDK_KEY,
data: {
values: [1, 2, 3],
},
urlConfig: new UrlConfiguration(Endpoint._rgstr, null, null, null),
isCompressable: true,
};

describe('Log Event Compression', () => {
const setCompressionFlag = (flag: boolean) => {
SDKFlags.setFlags(SDK_KEY, {
enable_log_event_compression: flag,
});
};

let network: NetworkCore;

beforeEach(() => {
fetchMock.enableMocks();
fetchMock.mockResponseOnce(JSON.stringify({ success: true }));
fetchMock.mock.calls = [];

__STATSIG__ = {} as any;

network = new NetworkCore({});
});

it('should compress the body if the flag is enabled', async () => {
setCompressionFlag(true);
BODY.isCompressable = true;

await network.post(BODY);
const [url, options] = fetchMock.mock.calls[0];

expect(url).toContain('gz=1');
expect(options?.body?.constructor.name).toBe('Uint8Array');
});

it('should not compress the body if the flag is disabled', async () => {
setCompressionFlag(false);
BODY.isCompressable = true;

await network.post(BODY);
const [url, options] = fetchMock.mock.calls[0];

expect(url).not.toContain('gz=1');
expect(options?.body?.constructor.name).toBe('String');
});

it('should not compress the body if the body is not compressable', async () => {
setCompressionFlag(true);
BODY.isCompressable = false;

await network.post(BODY);
const [url, options] = fetchMock.mock.calls[0];

expect(url).not.toContain('gz=1');
expect(options?.body?.constructor.name).toBe('String');
});

it('should not compress the body when no-compress is set', async () => {
setCompressionFlag(true);
BODY.isCompressable = true;

(__STATSIG__ as any)['no-compress'] = 1;

await network.post(BODY);
const [url, options] = fetchMock.mock.calls[0];

expect(url).not.toContain('gz=1');
expect(options?.body?.constructor.name).toBe('String');
});

it('can compress very large bodies', async () => {
setCompressionFlag(true);
BODY.isCompressable = true;
const largeData = {
values: Array.from({ length: 200000 }, (_, i) => i),
};

await network.post({ ...BODY, data: largeData });
const [url, options] = fetchMock.mock.calls[0];

expect(url).toContain('gz=1');
expect(options?.body?.constructor.name).toBe('Uint8Array');

const decoded = await decompress(options?.body as any);
expect(JSON.parse(decoded).values).toEqual(largeData.values);
});
});

async function decompress(compressed: Uint8Array) {
const stream = new DecompressionStream('gzip');
const writer = stream.writable.getWriter();
writer.write(compressed).catch(() => {
throw new Error('Failed to write to decompression stream');
});
writer.close().catch(() => {
throw new Error('Failed to close decompression stream');
});

const reader = stream.readable.getReader();
const chunks = [];

let result;
while (!(result = await reader.read()).done) {
chunks.push(...result.value);
}

const concatenated = new Uint8Array(chunks);
return new TextDecoder().decode(concatenated);
}
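
Besides the option and the SDK flag, the test also exercises the global escape hatch that _tryToCompressBody checks through _getStatsigGlobalFlag('no-compress'). A hedged sketch of flipping it on a page, assuming the __STATSIG__ global either already exists or is safe to create:

// Setting the global flag makes NetworkCore skip gzip for all requests,
// without touching StatsigOptions.
(window as any).__STATSIG__ = {
  ...((window as any).__STATSIG__ ?? {}),
  'no-compress': 1,
};
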
2 changes: 1 addition & 1 deletion packages/combo/webpack[js-client+web-analytics].config.js
@@ -2,7 +2,7 @@ const { createStatsigWebpackBundle } = require('./statsig-webpack-bundler');

module.exports = createStatsigWebpackBundle({
bundleFile: 'js-client+web-analytics',
maxByteSize: 58_000,
maxByteSize: 59_000,
dependencies: [
'@statsig/client-core',
'@statsig/js-client',
2 changes: 1 addition & 1 deletion packages/combo/webpack[js-client].config.js
@@ -2,7 +2,7 @@ const { createStatsigWebpackBundle } = require('./statsig-webpack-bundler');

module.exports = createStatsigWebpackBundle({
bundleFile: 'js-client',
maxByteSize: 49_000,
maxByteSize: 50_000,
dependencies: ['@statsig/client-core', '@statsig/js-client'],
client: 'js-client',
});