-
Notifications
You must be signed in to change notification settings - Fork 4
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: re-enable logevent compression (#441)
* feat: re-enable logevent compression * chore: fix err * chore: lint * chore: beacon compress * chore: disable compression for test * chore: attach param * chore: use stream * chore: add flag and test * chore: save test * chore: centralize checks encoding and compression * test: verify compression works on large payloads * chore: up min bundles build size * chore: avoid spreading Uint8Array --------- Co-authored-by: tore-statsig <74584483+tore-statsig@users.noreply.github.com> Co-authored-by: Kat <kat@statsig.com>
- Loading branch information
1 parent
8f4a8f9
commit 8d28738
Showing
15 changed files
with
250 additions
and
60 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
130 changes: 130 additions & 0 deletions
130
packages/combo/src/__tests__/LogEventCompression.test.ts
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,130 @@ | ||
import 'jest-fetch-mock'; | ||
import { CompressionStream, DecompressionStream } from 'node:stream/web'; | ||
import { TextDecoder, TextEncoder } from 'util'; | ||
|
||
import { | ||
Endpoint, | ||
NetworkCore, | ||
SDKFlags, | ||
UrlConfiguration, | ||
} from '@statsig/client-core'; | ||
|
||
// Jest's node test environment does not expose the web-streams compression
// globals by default, so install the two the code under test needs:
// CompressionStream (to gzip request bodies) and TextEncoder (to produce
// bytes). DecompressionStream is only used locally by the test helper below,
// so it is not installed globally.
Object.assign(global, {
  CompressionStream,
  TextEncoder,
});
|
||
const SDK_KEY = 'client-key';

// Shared request fixture passed to NetworkCore.post().
// NOTE(review): this object is deliberately mutable — each test below flips
// `isCompressable` in place before posting, and beforeEach does NOT reset it,
// so every test sets it explicitly.
const BODY = {
  sdkKey: SDK_KEY,
  data: {
    values: [1, 2, 3],
  },
  // Endpoint._rgstr — presumably the log-event ("register") endpoint; the
  // three nulls leave the URL overrides unset. TODO confirm against
  // @statsig/client-core's UrlConfiguration signature.
  urlConfig: new UrlConfiguration(Endpoint._rgstr, null, null, null),
  isCompressable: true,
};
|
||
// Verifies the gzip compression path of NetworkCore.post(): compression is
// applied only when (a) the `enable_log_event_compression` SDK flag is on,
// (b) the request is marked `isCompressable`, and (c) the global
// `no-compress` escape hatch is not set. A compressed request is observable
// as a `gz=1` URL param plus a Uint8Array (rather than string) body.
describe('Log Event Compression', () => {
  // Flips the SDK flag that gates compression for this SDK key.
  const setCompressionFlag = (flag: boolean) => {
    SDKFlags.setFlags(SDK_KEY, {
      enable_log_event_compression: flag,
    });
  };

  let network: NetworkCore;

  beforeEach(() => {
    // One queued success response is sufficient: each test makes exactly one
    // post() call. Calls are cleared manually so index [0] below is always
    // the request made by the current test.
    fetchMock.enableMocks();
    fetchMock.mockResponseOnce(JSON.stringify({ success: true }));
    fetchMock.mock.calls = [];

    // Reset the global Statsig object so per-test switches such as
    // `no-compress` (set in one test below) do not leak across tests.
    __STATSIG__ = {} as any;

    network = new NetworkCore({});
  });

  it('should compress the body if the flag is enabled', async () => {
    setCompressionFlag(true);
    BODY.isCompressable = true;

    await network.post(BODY);
    const [url, options] = fetchMock.mock.calls[0];

    // Compressed request: gz marker in the URL and raw bytes as the body.
    expect(url).toContain('gz=1');
    expect(options?.body?.constructor.name).toBe('Uint8Array');
  });

  it('should not compress the body if the flag is disabled', async () => {
    setCompressionFlag(false);
    BODY.isCompressable = true;

    await network.post(BODY);
    const [url, options] = fetchMock.mock.calls[0];

    // Flag off: plain JSON string body, no gz marker.
    expect(url).not.toContain('gz=1');
    expect(options?.body?.constructor.name).toBe('String');
  });

  it('should not compress the body if the body is not compressable', async () => {
    setCompressionFlag(true);
    BODY.isCompressable = false;

    await network.post(BODY);
    const [url, options] = fetchMock.mock.calls[0];

    // Flag on but request opted out: still uncompressed.
    expect(url).not.toContain('gz=1');
    expect(options?.body?.constructor.name).toBe('String');
  });

  it('should not compress the body when no-compress is set', async () => {
    setCompressionFlag(true);
    BODY.isCompressable = true;

    // Global kill switch: overrides both the flag and isCompressable.
    (__STATSIG__ as any)['no-compress'] = 1;

    await network.post(BODY);
    const [url, options] = fetchMock.mock.calls[0];

    expect(url).not.toContain('gz=1');
    expect(options?.body?.constructor.name).toBe('String');
  });

  it('can compress very large bodies', async () => {
    setCompressionFlag(true);
    BODY.isCompressable = true;
    // 200k integers — large enough to force multiple stream chunks, which is
    // the case the commit ("avoid spreading Uint8Array") guards against.
    const largeData = {
      values: Array.from({ length: 200000 }, (_, i) => i),
    };

    await network.post({ ...BODY, data: largeData });
    const [url, options] = fetchMock.mock.calls[0];

    expect(url).toContain('gz=1');
    expect(options?.body?.constructor.name).toBe('Uint8Array');

    // Round-trip: gunzip the captured body and check it matches the input.
    const decoded = await decompress(options?.body as any);
    expect(JSON.parse(decoded).values).toEqual(largeData.values);
  });
});
|
||
async function decompress(compressed: Uint8Array) { | ||
const stream = new DecompressionStream('gzip'); | ||
const writer = stream.writable.getWriter(); | ||
writer.write(compressed).catch(() => { | ||
throw new Error('Failed to write to decompression stream'); | ||
}); | ||
writer.close().catch(() => { | ||
throw new Error('Failed to close decompression stream'); | ||
}); | ||
|
||
const reader = stream.readable.getReader(); | ||
const chunks = []; | ||
|
||
let result; | ||
while (!(result = await reader.read()).done) { | ||
chunks.push(...result.value); | ||
} | ||
|
||
const concatenated = new Uint8Array(chunks); | ||
return new TextDecoder().decode(concatenated); | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.