test_runner: fix test deserialize edge cases
MoLow committed May 21, 2023
1 parent cd016a0 commit 1b32632
Showing 3 changed files with 147 additions and 14 deletions.
10 changes: 8 additions & 2 deletions lib/internal/test_runner/reporter/v8-serializer.js
@@ -1,12 +1,17 @@
'use strict';

const {
TypedArrayPrototypeGetLength,
} = primordials;
const { DefaultSerializer } = require('v8');
const { Buffer } = require('buffer');
const { serializeError } = require('internal/error_serdes');


module.exports = async function* v8Reporter(source) {
const serializer = new DefaultSerializer();
serializer.writeHeader();
const headerLength = TypedArrayPrototypeGetLength(serializer.releaseBuffer());

for await (const item of source) {
const originalError = item.data.details?.error;
@@ -16,6 +21,7 @@ module.exports = async function* v8Reporter(source) {
// Error is restored after serialization.
item.data.details.error = serializeError(originalError);
}
serializer.writeHeader();
// Add 4 bytes, to later populate with message length
serializer.writeRawBytes(Buffer.allocUnsafe(4));
serializer.writeHeader();
@@ -26,14 +32,14 @@ module.exports = async function* v8Reporter(source) {
}

const serializedMessage = serializer.releaseBuffer();
const serializedMessageLength = serializedMessage.length - 4;
const serializedMessageLength = serializedMessage.length - (4 + headerLength);

serializedMessage.set([
serializedMessageLength >> 24 & 0xFF,
serializedMessageLength >> 16 & 0xFF,
serializedMessageLength >> 8 & 0xFF,
serializedMessageLength & 0xFF,
], 0);
], headerLength);
yield serializedMessage;
}
};
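
Below, for orientation, is the wire format the reporter above produces. Each yielded message is laid out as [v8 header][4-byte big-endian size][v8 header][serialized item], and the size field counts everything after the first header and the size field itself, i.e. serializedMessage.length - (4 + headerLength). The following is a minimal sketch of how such a frame could be read back, assuming exactly one serialized item per yielded chunk (which is what the loop above produces); frameSize and decodeFrame are illustrative helpers, not part of the runner, whose real parser (runner.js below) additionally copes with partial and interleaved chunks.

```js
'use strict';
// Illustrative reader for the frame layout written above (a sketch, not the
// runner's actual parser).
const { DefaultSerializer, DefaultDeserializer } = require('node:v8');

// Measure the v8 serialization header length, the same way v8-serializer.js does.
const probe = new DefaultSerializer();
probe.writeHeader();
const headerLength = probe.releaseBuffer().length;

// Given a buffer that starts at a frame boundary, return the total byte length
// of the first frame, or null if the frame is not complete yet.
function frameSize(buffer) {
  if (buffer.length < headerLength + 4) return null;
  const size = buffer.readUInt32BE(headerLength);
  const total = size + headerLength + 4;
  return buffer.length >= total ? total : null;
}

// Decode one complete frame: skip the first header and the size field, then let
// DefaultDeserializer consume the second header and the serialized value.
function decodeFrame(frame) {
  const deserializer = new DefaultDeserializer(frame.subarray(headerLength + 4));
  deserializer.readHeader();
  return deserializer.readValue();
}

// Round-trip check, framing a value the same way the reporter does.
const s = new DefaultSerializer();
s.writeHeader();
s.writeRawBytes(Buffer.alloc(4));
s.writeHeader();
s.writeValue({ hello: 'world' });
const frame = s.releaseBuffer();
frame.writeUInt32BE(frame.length - (4 + headerLength), headerLength);
console.log(frameSize(frame) === frame.length, decodeFrame(frame)); // true { hello: 'world' }
```
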
48 changes: 36 additions & 12 deletions lib/internal/test_runner/runner.js
@@ -163,7 +163,8 @@ function getRunArgs({ path, inspectPort, testNamePatterns }) {
const serializer = new DefaultSerializer();
serializer.writeHeader();
const v8Header = serializer.releaseBuffer();
const kSerializedSizeHeader = 4;
const kV8headerLength = TypedArrayPrototypeGetLength(v8Header);
const kSerializedSizeHeader = 4 + kV8headerLength;

class FileTest extends Test {
// This class maintains two buffers:
@@ -236,22 +237,42 @@ class FileTest extends Test {
this.#handleReportItem(item);
}
reportStarted() {}
report() {
drain() {
this.#drainRawBuffer();
this.#drainReportBuffer();
}
report() {
this.drain();
const skipReporting = this.#skipReporting();
if (!skipReporting) {
super.reportStarted();
super.report();
}
}
parseMessage(readData) {
const dataLength = TypedArrayPrototypeGetLength(readData);
let dataLength = TypedArrayPrototypeGetLength(readData);
if (dataLength === 0) return;
const partialV8Header = readData[dataLength - 1] === v8Header[0];

if (partialV8Header) {
// This will break if the v8Header length (2 bytes) is changed.
// However, it is covered by tests.
readData = TypedArrayPrototypeSubarray(readData, 0, dataLength - 1);
dataLength--;
}

ArrayPrototypePush(this.#rawBuffer, readData);
if (this.#rawBuffer[0] && TypedArrayPrototypeGetLength(this.#rawBuffer[0]) < kSerializedSizeHeader) {
this.#rawBuffer[0] = Buffer.concat([this.#rawBuffer[0], readData]);
} else {
ArrayPrototypePush(this.#rawBuffer, readData);
}
this.#rawBufferSize += dataLength;
this.#proccessRawBuffer();

if (partialV8Header) {
ArrayPrototypePush(this.#rawBuffer, TypedArrayPrototypeSubarray(v8Header, 0, 1));
this.#rawBufferSize++;
}
}
#drainRawBuffer() {
while (this.#rawBuffer.length > 0) {
@@ -264,16 +285,16 @@ class FileTest extends Test {
let headerIndex = bufferHead.indexOf(v8Header);
let nonSerialized = Buffer.alloc(0);

while (bufferHead && headerIndex !== kSerializedSizeHeader) {
while (bufferHead && headerIndex !== 0) {
const nonSerializedData = headerIndex === -1 ?
bufferHead :
bufferHead.slice(0, headerIndex - kSerializedSizeHeader);
bufferHead.slice(0, headerIndex);
nonSerialized = Buffer.concat([nonSerialized, nonSerializedData]);
this.#rawBufferSize -= TypedArrayPrototypeGetLength(nonSerializedData);
if (headerIndex === -1) {
ArrayPrototypeShift(this.#rawBuffer);
} else {
this.#rawBuffer[0] = bufferHead.subarray(headerIndex - kSerializedSizeHeader);
this.#rawBuffer[0] = TypedArrayPrototypeSubarray(bufferHead, headerIndex);
}
bufferHead = this.#rawBuffer[0];
headerIndex = bufferHead?.indexOf(v8Header);
@@ -295,10 +316,10 @@ class FileTest extends Test {
// We inline the equivalent of `readUInt32BE` here, because this is faster than first
// converting the data to a buffer and calling `readUInt32BE` on that.
const fullMessageSize = (
bufferHead[0] << 24 |
bufferHead[1] << 16 |
bufferHead[2] << 8 |
bufferHead[3]
bufferHead[kV8headerLength] << 24 |
bufferHead[kV8headerLength + 1] << 16 |
bufferHead[kV8headerLength + 2] << 8 |
bufferHead[kV8headerLength + 3]
) + kSerializedSizeHeader;

if (this.#rawBufferSize < fullMessageSize) break;
@@ -475,4 +496,7 @@ function run(options) {
return root.reporter;
}

module.exports = { run };
module.exports = {
FileTest,
run,
};
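
Two of the edge cases fixed above are worth spelling out. First, the size prefix itself can be split across chunks, which is why parseMessage now concatenates incoming data into #rawBuffer[0] until at least kSerializedSizeHeader bytes are available there. Second, a chunk can end on the first byte of the two-byte v8 header; in that case the dangling byte is trimmed before processing and re-queued so it can be matched together with the next chunk. A minimal sketch of that second case, with holdTrailingHeaderByte as a hypothetical helper that is not part of the runner:

```js
'use strict';
const { DefaultSerializer } = require('node:v8');

// Sketch of the "chunk ends on a partial v8 header" case handled by
// parseMessage() above. If the last byte of a chunk equals the first byte of
// the header, indexOf(v8Header) on that chunk alone cannot find the header,
// and the bytes before it could be misclassified as plain, non-serialized
// output. The fix: process the chunk without its final byte, then carry that
// byte over so it is matched together with the following chunk.
function holdTrailingHeaderByte(chunk, v8Header) {
  const last = chunk.length - 1;
  if (chunk.length > 0 && chunk[last] === v8Header[0]) {
    return {
      usable: chunk.subarray(0, last), // processed immediately
      carry: chunk.subarray(last),     // pushed back onto the raw buffer
    };
  }
  return { usable: chunk, carry: null };
}

const s = new DefaultSerializer();
s.writeHeader();
const v8Header = s.releaseBuffer();

// A chunk whose last byte happens to be the first byte of the header:
const chunk = Buffer.concat([Buffer.from('partial output'), v8Header.subarray(0, 1)]);
console.log(holdTrailingHeaderByte(chunk, v8Header));
```
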
103 changes: 103 additions & 0 deletions test/parallel/test-runner-v8-deserializer.mjs
@@ -0,0 +1,103 @@
// Flags: --expose-internals --no-warnings

import '../common/index.mjs';
import { describe, it, beforeEach } from 'node:test';
import assert from 'node:assert';
import { finished } from 'node:stream/promises';
import { DefaultSerializer } from 'node:v8';
import serializer from 'internal/test_runner/reporter/v8-serializer';
import runner from 'internal/test_runner/runner';

async function toArray(chunks) {
const arr = [];
for await (const i of chunks) arr.push(i);
return arr;
}

const chunks = await toArray(serializer([
{ type: 'test:diagnostic', data: { nesting: 0, details: {}, message: 'diagnostic' } },
]));
const defaultSerializer = new DefaultSerializer();
defaultSerializer.writeHeader();
const headerLength = defaultSerializer.releaseBuffer().length;

describe('v8 deserializer', () => {
let fileTest;
let reported;
beforeEach(() => {
reported = [];
fileTest = new runner.FileTest({ name: 'filetest' });
fileTest.reporter.on('data', (data) => reported.push(data));
assert(fileTest.isClearToSend());
});

async function collectReported(chunks) {
chunks.forEach((chunk) => fileTest.parseMessage(chunk));
fileTest.drain();
fileTest.reporter.end();
await finished(fileTest.reporter);
return reported;
}

it('should do nothing when no chunks', async () => {
const reported = await collectReported([]);
assert.deepStrictEqual(reported, []);
});

it('should deserialize a chunk with no serialization', async () => {
const reported = await collectReported([Buffer.from('unknown')]);
assert.deepStrictEqual(reported, [
{ data: { __proto__: null, file: 'filetest', nesting: 0, message: 'unknown' }, type: 'test:diagnostic' },
]);
});

it('should deserialize a serialized chunk', async () => {
const reported = await collectReported(chunks);
assert.deepStrictEqual(reported, [
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
]);
});

it('should deserialize a serialized chunk after a non-serialized chunk', async () => {
const reported = await collectReported([Buffer.concat([Buffer.from('unknown'), ...chunks])]);
assert.deepStrictEqual(reported, [
{ data: { __proto__: null, file: 'filetest', nesting: 0, message: 'unknown' }, type: 'test:diagnostic' },
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
]);
});

it('should deserialize a serialized chunk before non-serialized output', async () => {
const reported = await collectReported([Buffer.concat([...chunks, Buffer.from('unknown')])]);
assert.deepStrictEqual(reported, [
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
{ data: { __proto__: null, file: 'filetest', nesting: 0, message: 'unknown' }, type: 'test:diagnostic' },
]);
});

const headerPosition = headerLength * 2 + 4;
for (let i = 0; i < headerPosition + 5; i++) {
const message = `should deserialize a serialized message split into two chunks {...${i},${i + 1}...}`;
it(message, async () => {
const data = chunks[0];
const reported = await collectReported([data.subarray(0, i), data.subarray(i)]);
assert.deepStrictEqual(reported, [
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
]);
});

it(`${message} wrapped by non-serialized data`, async () => {
const data = chunks[0];
const reported = await collectReported([
Buffer.concat([Buffer.from('unknown'), data.subarray(0, i)]),
Buffer.concat([data.subarray(i), Buffer.from('unknown')]),
]);
assert.deepStrictEqual(reported, [
{ data: { __proto__: null, file: 'filetest', nesting: 0, message: 'unknown' }, type: 'test:diagnostic' },
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
{ data: { __proto__: null, file: 'filetest', nesting: 0, message: 'unknown' }, type: 'test:diagnostic' },
]);
}
);
}

});
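
The split loop's bound, headerPosition + 5, follows from the frame layout: headerPosition = headerLength * 2 + 4 is the offset at which the serialized payload begins, so cutting the message at every offset up to a few bytes past it covers splits inside the leading header, inside the 4-byte size field, inside the second header, and just into the payload. A small sketch mapping each split offset to the region it lands in (regionOfSplit is an illustrative helper, not part of the test):

```js
// Which part of a frame does each split offset used by the loop above fall into?
// Assumes the frame layout produced by v8-serializer.js.
import { DefaultSerializer } from 'node:v8';

const probe = new DefaultSerializer();
probe.writeHeader();
const headerLength = probe.releaseBuffer().length;
const headerPosition = headerLength * 2 + 4;

function regionOfSplit(i) {
  if (i === 0) return 'before any frame bytes (first chunk is empty)';
  if (i < headerLength) return 'inside the first v8 header';
  if (i < headerLength + 4) return 'inside the 4-byte size field';
  if (i < headerPosition) return 'inside the second v8 header';
  return 'inside the serialized payload';
}

for (let i = 0; i < headerPosition + 5; i++) {
  console.log(i, regionOfSplit(i));
}
```
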
