diff --git a/README.md b/README.md
index 3ae0261..47e09a7 100644
--- a/README.md
+++ b/README.md
@@ -87,6 +87,9 @@ It aims to pass all tests, although it allows some exceptions for practical reas
   * The [test for the prototype of `ReadableStream`'s async iterator][wpt-async-iterator-prototype].
     Retrieving the correct `%AsyncIteratorPrototype%` requires using an async generator (`async function* () {}`), which is invalid syntax before ES2018.
     Instead, the polyfill [creates its own version][stub-async-iterator-prototype] which is functionally equivalent to the real prototype.
+  * The tests [with patched globals][wpt-rs-patched-global] and [with `Object.prototype.then`][wpt-then-interception].
+    These tests are meant for browsers to ensure user-land modifications cannot affect the internal logic of `pipeTo()` and `tee()`.
+    However, it's not reasonable or desirable for a user-land polyfill to try and isolate itself completely from using the global `Object`.
 * The ES5 variant passes the same tests as the ES2015 variant, except for various tests about specific characteristics of the constructors, properties and methods.
   These test failures do not affect the run-time behavior of the polyfill.
   For example:
@@ -122,4 +125,6 @@ Thanks to these people for their work on [the original polyfill][creatorrr-polyf
 [issue-3]: https://github.com/MattiasBuelens/web-streams-polyfill/issues/3
 [wpt-async-iterator-prototype]: https://github.com/web-platform-tests/wpt/blob/87a4c80598aee5178c385628174f1832f5a28ad6/streams/readable-streams/async-iterator.any.js#L24
 [stub-async-iterator-prototype]: https://github.com/MattiasBuelens/web-streams-polyfill/blob/v4.0.0-beta.1/src/lib/readable-stream/async-iterator.ts#L153-L161
+[wpt-rs-patched-global]: https://github.com/web-platform-tests/wpt/blob/887350c2f46def5b01c4dd1f8d2eee35dfb9c5bb/streams/readable-streams/patched-global.any.js
+[wpt-then-interception]: https://github.com/web-platform-tests/wpt/blob/cf33f00596af295ee0f207c88e23b5f8b0791307/streams/piping/then-interception.any.js
 [creatorrr-polyfill]: https://github.com/creatorrr/web-streams-polyfill
diff --git a/src/lib/readable-stream/tee.ts b/src/lib/readable-stream/tee.ts
index a3e25f7..7989411 100644
--- a/src/lib/readable-stream/tee.ts
+++ b/src/lib/readable-stream/tee.ts
@@ -1,41 +1,15 @@
-import type { ReadableByteStream, ReadableStream, ReadableStreamReader } from '../readable-stream';
-import {
-  CreateReadableByteStream,
-  CreateReadableStream,
-  IsReadableStream,
-  ReadableStreamCancel
+import type {
+  ReadableByteStream,
+  ReadableStreamBYOBReader,
+  ReadableStreamDefaultReader,
+  ReadableStreamReader
 } from '../readable-stream';
-import { ReadableStreamReaderGenericRelease } from './generic-reader';
-import type { ReadRequest } from './default-reader';
-import {
-  AcquireReadableStreamDefaultReader,
-  IsReadableStreamDefaultReader,
-  ReadableStreamDefaultReaderRead
-} from './default-reader';
-import type { ReadIntoRequest } from './byob-reader';
-import {
-  AcquireReadableStreamBYOBReader,
-  IsReadableStreamBYOBReader,
-  ReadableStreamBYOBReaderRead
-} from './byob-reader';
+import { IsReadableStream, ReadableStream } from '../readable-stream';
 import assert from '../../stub/assert';
-import { newPromise, promiseResolvedWith, queueMicrotask, uponRejection } from '../helpers/webidl';
+import { newPromise, promiseResolvedWith, uponPromise, uponRejection } from '../helpers/webidl';
 import type { ReadableStreamDefaultController } from './default-controller';
-import {
-  ReadableStreamDefaultControllerClose,
-  ReadableStreamDefaultControllerEnqueue,
-  ReadableStreamDefaultControllerError
-} from './default-controller';
-import {
-  IsReadableByteStreamController,
-  ReadableByteStreamControllerClose,
-  ReadableByteStreamControllerEnqueue,
-  ReadableByteStreamControllerError,
-  ReadableByteStreamControllerGetBYOBRequest,
-  ReadableByteStreamControllerRespond,
-  ReadableByteStreamControllerRespondWithNewView
-} from './byte-stream-controller';
-import { CreateArrayFromList } from '../abstract-ops/ecmascript';
+import type { ReadableByteStreamController } from './byte-stream-controller';
+import { IsReadableByteStreamController } from './byte-stream-controller';
 import { CloneAsUint8Array } from '../abstract-ops/miscellaneous';
 
 export function ReadableStreamTee(stream: ReadableStream,
@@ -54,18 +28,18 @@ export function ReadableStreamDefaultTee(stream: ReadableStream,
   assert(IsReadableStream(stream));
   assert(typeof cloneForBranch2 === 'boolean');
 
-  const reader = AcquireReadableStreamDefaultReader(stream);
+  const reader = stream.getReader();
 
   let reading = false;
   let canceled1 = false;
   let canceled2 = false;
   let reason1: any;
   let reason2: any;
-  let branch1: ReadableStream;
-  let branch2: ReadableStream;
+  let controller1: ReadableStreamDefaultController;
+  let controller2: ReadableStreamDefaultController;
 
-  let resolveCancelPromise: (value: undefined | Promise) => void;
-  const cancelPromise = newPromise(resolve => {
+  let resolveCancelPromise: (value: void | Promise) => void;
+  const cancelPromise = newPromise(resolve => {
     resolveCancelPromise = resolve;
   });
 
@@ -76,55 +50,44 @@ export function ReadableStreamDefaultTee(stream: ReadableStream,
 
     reading = true;
 
-    const readRequest: ReadRequest = {
-      _chunkSteps: chunk => {
-        // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using
-        // reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let
-        // successful synchronously-available reads get ahead of asynchronously-available errors.
-        queueMicrotask(() => {
-          reading = false;
-          const chunk1 = chunk;
-          const chunk2 = chunk;
-
-          // There is no way to access the cloning code right now in the reference implementation.
-          // If we add one then we'll need an implementation for serializable objects.
-          // if (!canceled2 && cloneForBranch2) {
-          //   chunk2 = StructuredDeserialize(StructuredSerialize(chunk2));
-          // }
-
-          if (!canceled1) {
-            ReadableStreamDefaultControllerEnqueue(
-              branch1._readableStreamController as ReadableStreamDefaultController,
-              chunk1
-            );
-          }
+    uponPromise(reader.read(), result => {
+      reading = false;
 
-          if (!canceled2) {
-            ReadableStreamDefaultControllerEnqueue(
-              branch2._readableStreamController as ReadableStreamDefaultController,
-              chunk2
-            );
-          }
-        });
-      },
-      _closeSteps: () => {
-        reading = false;
+      if (result.done) {
         if (!canceled1) {
-          ReadableStreamDefaultControllerClose(branch1._readableStreamController as ReadableStreamDefaultController);
+          controller1.close();
         }
         if (!canceled2) {
-          ReadableStreamDefaultControllerClose(branch2._readableStreamController as ReadableStreamDefaultController);
+          controller2.close();
         }
         if (!canceled1 || !canceled2) {
           resolveCancelPromise(undefined);
         }
-      },
-      _errorSteps: () => {
-        reading = false;
+        return null;
       }
-    };
-    ReadableStreamDefaultReaderRead(reader, readRequest);
+
+      const chunk = result.value;
+      const chunk1 = chunk;
+      const chunk2 = chunk;
+
+      // There is no way to access the cloning code right now in the reference implementation.
+      // If we add one then we'll need an implementation for serializable objects.
+      // if (!canceled2 && cloneForBranch2) {
+      //   chunk2 = StructuredDeserialize(StructuredSerialize(chunk2));
+      // }
+
+      if (!canceled1) {
+        controller1.enqueue(chunk1);
+      }
+      if (!canceled2) {
+        controller2.enqueue(chunk2);
+      }
+      return null;
+    }, () => {
+      reading = false;
+      return null;
+    });
 
     return promiseResolvedWith(undefined);
   }
@@ -133,8 +96,8 @@ export function ReadableStreamDefaultTee(stream: ReadableStream,
     canceled1 = true;
     reason1 = reason;
     if (canceled2) {
-      const compositeReason = CreateArrayFromList([reason1, reason2]);
-      const cancelResult = ReadableStreamCancel(stream, compositeReason);
+      const compositeReason = [reason1, reason2];
+      const cancelResult = reader.cancel(compositeReason);
       resolveCancelPromise(cancelResult);
     }
     return cancelPromise;
   }
@@ -144,23 +107,32 @@ export function ReadableStreamDefaultTee(stream: ReadableStream,
     canceled2 = true;
     reason2 = reason;
     if (canceled1) {
-      const compositeReason = CreateArrayFromList([reason1, reason2]);
-      const cancelResult = ReadableStreamCancel(stream, compositeReason);
+      const compositeReason = [reason1, reason2];
+      const cancelResult = reader.cancel(compositeReason);
       resolveCancelPromise(cancelResult);
     }
     return cancelPromise;
   }
 
-  function startAlgorithm() {
-    // do nothing
-  }
+  const branch1 = new ReadableStream({
+    start(c) {
+      controller1 = c;
+    },
+    pull: pullAlgorithm,
+    cancel: cancel1Algorithm
+  });
 
-  branch1 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel1Algorithm);
-  branch2 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel2Algorithm);
+  const branch2 = new ReadableStream({
+    start(c) {
+      controller2 = c;
+    },
+    pull: pullAlgorithm,
+    cancel: cancel2Algorithm
+  });
 
-  uponRejection(reader._closedPromise, (r: any) => {
-    ReadableStreamDefaultControllerError(branch1._readableStreamController as ReadableStreamDefaultController, r);
-    ReadableStreamDefaultControllerError(branch2._readableStreamController as ReadableStreamDefaultController, r);
+  uponRejection(reader.closed, (r: any) => {
+    controller1.error(r);
+    controller2.error(r);
     if (!canceled1 || !canceled2) {
       resolveCancelPromise(undefined);
     }
@@ -174,27 +146,28 @@ export function ReadableByteStreamTee(stream: ReadableByteStream): [ReadableByte
   assert(IsReadableStream(stream));
   assert(IsReadableByteStreamController(stream._readableStreamController));
 
-  let reader: ReadableStreamReader = AcquireReadableStreamDefaultReader(stream);
+  let reader: ReadableStreamReader = stream.getReader();
+  let isByobReader = false;
   let reading = false;
   let canceled1 = false;
   let canceled2 = false;
   let reason1: any;
   let reason2: any;
-  let branch1: ReadableByteStream;
-  let branch2: ReadableByteStream;
+  let controller1: ReadableByteStreamController;
+  let controller2: ReadableByteStreamController;
 
-  let resolveCancelPromise: (value: undefined | Promise) => void;
+  let resolveCancelPromise: (value: void | Promise) => void;
   const cancelPromise = newPromise(resolve => {
     resolveCancelPromise = resolve;
   });
 
   function forwardReaderError(thisReader: ReadableStreamReader) {
-    uponRejection(thisReader._closedPromise, r => {
+    uponRejection(thisReader.closed, r => {
       if (thisReader !== reader) {
         return null;
       }
-      ReadableByteStreamControllerError(branch1._readableStreamController, r);
-      ReadableByteStreamControllerError(branch2._readableStreamController, r);
+      controller1.error(r);
+      controller2.error(r);
       if (!canceled1 || !canceled2) {
         resolveCancelPromise(undefined);
       }
@@ -203,143 +176,118 @@ export function ReadableByteStreamTee(stream: ReadableByteStream): [ReadableByte
   }
 
   function pullWithDefaultReader() {
-    if (IsReadableStreamBYOBReader(reader)) {
-      assert(reader._readIntoRequests.length === 0);
-      ReadableStreamReaderGenericRelease(reader);
-
-      reader = AcquireReadableStreamDefaultReader(stream);
+    if (isByobReader) {
+      reader.releaseLock();
+      reader = stream.getReader();
       forwardReaderError(reader);
+      isByobReader = false;
     }
 
-    const readRequest: ReadRequest = {
-      _chunkSteps: chunk => {
-        // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using
-        // reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let
-        // successful synchronously-available reads get ahead of asynchronously-available errors.
-        queueMicrotask(() => {
-          reading = false;
-
-          const chunk1 = chunk;
-          let chunk2 = chunk;
-          if (!canceled1 && !canceled2) {
-            try {
-              chunk2 = CloneAsUint8Array(chunk);
-            } catch (cloneE) {
-              ReadableByteStreamControllerError(branch1._readableStreamController, cloneE);
-              ReadableByteStreamControllerError(branch2._readableStreamController, cloneE);
-              resolveCancelPromise(ReadableStreamCancel(stream, cloneE));
-              return;
-            }
-          }
+    uponPromise((reader as ReadableStreamDefaultReader).read(), result => {
+      reading = false;
 
-          if (!canceled1) {
-            ReadableByteStreamControllerEnqueue(branch1._readableStreamController, chunk1);
-          }
-          if (!canceled2) {
-            ReadableByteStreamControllerEnqueue(branch2._readableStreamController, chunk2);
-          }
-        });
-      },
-      _closeSteps: () => {
-        reading = false;
+      if (result.done) {
         if (!canceled1) {
-          ReadableByteStreamControllerClose(branch1._readableStreamController);
+          controller1.close();
         }
         if (!canceled2) {
-          ReadableByteStreamControllerClose(branch2._readableStreamController);
-        }
-        if (branch1._readableStreamController._pendingPullIntos.length > 0) {
-          ReadableByteStreamControllerRespond(branch1._readableStreamController, 0);
-        }
-        if (branch2._readableStreamController._pendingPullIntos.length > 0) {
-          ReadableByteStreamControllerRespond(branch2._readableStreamController, 0);
+          controller2.close();
         }
+        controller1.byobRequest?.respond(0);
+        controller2.byobRequest?.respond(0);
         if (!canceled1 || !canceled2) {
           resolveCancelPromise(undefined);
         }
-      },
-      _errorSteps: () => {
-        reading = false;
+        return null;
+      }
+
+      const chunk = result.value;
+      const chunk1 = chunk;
+      let chunk2 = chunk;
+      if (!canceled1 && !canceled2) {
+        try {
+          chunk2 = CloneAsUint8Array(chunk);
+        } catch (cloneE) {
+          controller1.error(cloneE);
+          controller2.error(cloneE);
+          resolveCancelPromise(reader.cancel(cloneE));
+          return null;
+        }
+      }
+      if (!canceled1) {
+        controller1.enqueue(chunk1);
       }
-    };
-    ReadableStreamDefaultReaderRead(reader, readRequest);
+      if (!canceled2) {
+        controller2.enqueue(chunk2);
+      }
+      return null;
+    }, () => {
+      reading = false;
+      return null;
+    });
   }
 
   function pullWithBYOBReader(view: ArrayBufferView, forBranch2: boolean) {
-    if (IsReadableStreamDefaultReader(reader)) {
-      assert(reader._readRequests.length === 0);
-      ReadableStreamReaderGenericRelease(reader);
-
-      reader = AcquireReadableStreamBYOBReader(stream);
+    if (!isByobReader) {
+      reader.releaseLock();
+      reader = stream.getReader({ mode: 'byob' });
       forwardReaderError(reader);
+      isByobReader = true;
    }
 
-    const byobBranch = forBranch2 ? branch2 : branch1;
-    const otherBranch = forBranch2 ? branch1 : branch2;
-
-    const readIntoRequest: ReadIntoRequest = {
-      _chunkSteps: chunk => {
-        // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using
-        // reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let
-        // successful synchronously-available reads get ahead of asynchronously-available errors.
-        queueMicrotask(() => {
-          reading = false;
-
-          const byobCanceled = forBranch2 ? canceled2 : canceled1;
-          const otherCanceled = forBranch2 ? canceled1 : canceled2;
-
-          if (!otherCanceled) {
-            let clonedChunk;
-            try {
-              clonedChunk = CloneAsUint8Array(chunk);
-            } catch (cloneE) {
-              ReadableByteStreamControllerError(byobBranch._readableStreamController, cloneE);
-              ReadableByteStreamControllerError(otherBranch._readableStreamController, cloneE);
-              resolveCancelPromise(ReadableStreamCancel(stream, cloneE));
-              return;
-            }
-            if (!byobCanceled) {
-              ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);
-            }
-            ReadableByteStreamControllerEnqueue(otherBranch._readableStreamController, clonedChunk);
-          } else if (!byobCanceled) {
-            ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);
-          }
-        });
-      },
-      _closeSteps: chunk => {
-        reading = false;
-
-        const byobCanceled = forBranch2 ? canceled2 : canceled1;
-        const otherCanceled = forBranch2 ? canceled1 : canceled2;
+    const byobController = forBranch2 ? controller2 : controller1;
+    const otherController = forBranch2 ? controller1 : controller2;
+    uponPromise((reader as ReadableStreamBYOBReader).read(view), result => {
+      reading = false;
+      const byobCanceled = forBranch2 ? canceled2 : canceled1;
+      const otherCanceled = forBranch2 ? canceled1 : canceled2;
+      if (result.done) {
         if (!byobCanceled) {
-          ReadableByteStreamControllerClose(byobBranch._readableStreamController);
+          byobController.close();
         }
         if (!otherCanceled) {
-          ReadableByteStreamControllerClose(otherBranch._readableStreamController);
+          otherController.close();
        }
-
+        const chunk = result.value;
         if (chunk !== undefined) {
           assert(chunk.byteLength === 0);
-
           if (!byobCanceled) {
-            ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);
+            byobController.byobRequest!.respondWithNewView(chunk);
           }
-          if (!otherCanceled && otherBranch._readableStreamController._pendingPullIntos.length > 0) {
-            ReadableByteStreamControllerRespond(otherBranch._readableStreamController, 0);
+          if (!otherCanceled) {
+            otherController.byobRequest?.respond(0);
           }
         }
-
         if (!byobCanceled || !otherCanceled) {
           resolveCancelPromise(undefined);
         }
-      },
-      _errorSteps: () => {
-        reading = false;
+        return null;
      }
-    };
-    ReadableStreamBYOBReaderRead(reader, view, readIntoRequest);
+
+      const chunk = result.value;
+      if (!otherCanceled) {
+        let clonedChunk;
+        try {
+          clonedChunk = CloneAsUint8Array(chunk);
+        } catch (cloneE) {
+          byobController.error(cloneE);
+          otherController.error(cloneE);
+          resolveCancelPromise(reader.cancel(cloneE));
+          return null;
+        }
+        if (!byobCanceled) {
+          byobController.byobRequest!.respondWithNewView(chunk);
+        }
+        otherController.enqueue(clonedChunk);
+      } else if (!byobCanceled) {
+        byobController.byobRequest!.respondWithNewView(chunk);
+      }
+      return null;
+    }, () => {
+      reading = false;
+      return null;
+    });
   }
 
   function pull1Algorithm(): Promise {
@@ -349,11 +297,11 @@ export function ReadableByteStreamTee(stream: ReadableByteStream): [ReadableByte
 
     reading = true;
 
-    const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch1._readableStreamController);
+    const byobRequest = controller1.byobRequest;
     if (byobRequest === null) {
       pullWithDefaultReader();
     } else {
-      pullWithBYOBReader(byobRequest._view!, false);
+      pullWithBYOBReader(byobRequest.view!, false);
     }
 
     return promiseResolvedWith(undefined);
@@ -366,11 +314,11 @@ export function ReadableByteStreamTee(stream: ReadableByteStream): [ReadableByte
 
     reading = true;
 
-    const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch2._readableStreamController);
+    const byobRequest = controller2.byobRequest;
     if (byobRequest === null) {
       pullWithDefaultReader();
     } else {
-      pullWithBYOBReader(byobRequest._view!, true);
+      pullWithBYOBReader(byobRequest.view!, true);
     }
 
     return promiseResolvedWith(undefined);
@@ -380,8 +328,8 @@ export function ReadableByteStreamTee(stream: ReadableByteStream): [ReadableByte
     canceled1 = true;
     reason1 = reason;
     if (canceled2) {
-      const compositeReason = CreateArrayFromList([reason1, reason2]);
-      const cancelResult = ReadableStreamCancel(stream, compositeReason);
+      const compositeReason = [reason1, reason2];
+      const cancelResult = reader.cancel(compositeReason);
       resolveCancelPromise(cancelResult);
     }
     return cancelPromise;
   }
@@ -391,19 +339,30 @@ export function ReadableByteStreamTee(stream: ReadableByteStream): [ReadableByte
     canceled2 = true;
     reason2 = reason;
     if (canceled1) {
-      const compositeReason = CreateArrayFromList([reason1, reason2]);
-      const cancelResult = ReadableStreamCancel(stream, compositeReason);
+      const compositeReason = [reason1, reason2];
+      const cancelResult = reader.cancel(compositeReason);
       resolveCancelPromise(cancelResult);
     }
     return cancelPromise;
   }
 
-  function startAlgorithm(): void {
-    return;
-  }
-
-  branch1 = CreateReadableByteStream(startAlgorithm, pull1Algorithm, cancel1Algorithm);
-  branch2 = CreateReadableByteStream(startAlgorithm, pull2Algorithm, cancel2Algorithm);
+  const branch1 = new ReadableStream({
+    type: 'bytes',
+    start(c) {
+      controller1 = c;
+    },
+    pull: pull1Algorithm,
+    cancel: cancel1Algorithm
+  }) as ReadableByteStream;
+
+  const branch2 = new ReadableStream({
+    type: 'bytes',
+    start(c) {
+      controller2 = c;
+    },
+    pull: pull2Algorithm,
+    cancel: cancel2Algorithm
+  }) as ReadableByteStream;
 
   forwardReaderError(reader);
 
diff --git a/test/wpt/browser/run.js b/test/wpt/browser/run.js
index f2f84a8..1c93d99 100644
--- a/test/wpt/browser/run.js
+++ b/test/wpt/browser/run.js
@@ -14,6 +14,7 @@ const { FilteringReporter } = require('../shared/filtering-reporter.js');
 const {
   excludedTestsBase,
   mergeIgnoredFailures,
+  skippedTests,
   ignoredFailuresBase,
   ignoredFailuresMinified,
   ignoredFailuresES5
@@ -93,7 +94,8 @@ async function runTests({ entryFile, includedTests, excludedTests, ignoredFailur
     return includeMatcher(testPath) && !excludeMatcher(testPath);
   });
 
-  const reporter = new FilteringReporter(consoleReporter, ignoredFailures);
+  const skippedAndIgnoredFailures = mergeIgnoredFailures(skippedTests, ignoredFailures);
+  const reporter = new FilteringReporter(consoleReporter, skippedAndIgnoredFailures);
 
   console.log(`>>> ${entryFile}`);
 
@@ -103,22 +105,14 @@ async function runTests({ entryFile, includedTests, excludedTests, ignoredFailur
     await context.addInitScript({ path: entryPath });
     await context.route(`${urlPrefix}/resources/testharnessreport.js`, route => {
       route.fulfill({
-        body: `
-          window.fetch_tests_from_worker = () => undefined;
-          window.add_result_callback(({ name, status, message, stack }) => {
-            window.__wptResultCallback({ name, status, message, stack });
-          });
-          window.add_completion_callback((tests, { status, message, stack }) => {
-            window.__wptCompletionCallback({ status, message, stack });
-          });
-        `
+        path: path.resolve(__dirname, 'testharnessreport.js')
       });
     });
     for (const testPath of testPaths) {
       reporter.startSuite(testPath);
       const page = await context.newPage();
       const testUrl = `${urlPrefix}${testsBase}${testPath}`;
-      await runTest(page, testUrl, reporter);
+      await runTest(page, testUrl, skippedTests[testPath], reporter);
       await page.close();
     }
   } finally {
@@ -136,13 +130,17 @@ async function runTests({ entryFile, includedTests, excludedTests, ignoredFailur
   return { entryFile, failures, testResults };
 }
 
-async function runTest(page, testUrl, reporter) {
+async function runTest(page, testUrl, skippedTestsForPath, reporter) {
   let hasFailed = false;
   let resolveDone;
   const donePromise = new Promise(resolve => {
     resolveDone = resolve;
   });
 
+  await page.addInitScript(skipped => {
+    window.__wptSkippedTests = skipped;
+  }, skippedTestsForPath);
+
   await page.exposeFunction('__wptResultCallback', test => {
     if (test.status === 0) {
       reporter.pass(test.name);
diff --git a/test/wpt/browser/testharnessreport.js b/test/wpt/browser/testharnessreport.js
new file mode 100644
index 0000000..8ba2b8d
--- /dev/null
+++ b/test/wpt/browser/testharnessreport.js
@@ -0,0 +1,30 @@
+window.fetch_tests_from_worker = () => undefined;
+
+window.add_result_callback(({ name, status, message, stack }) => {
+  window.__wptResultCallback({ name, status, message, stack });
+});
+window.add_completion_callback((tests, { status, message, stack }) => {
+  window.__wptCompletionCallback({ status, message, stack });
+});
+
+const skippedTests = window.__wptSkippedTests;
+const originalPromiseTest = window.promise_test;
+window.promise_test = function (func, name, ...extras) {
+  if (skippedTests && skippedTests.some(test => matches(test, name))) {
+    // Replace the actual test with one that always fails
+    func = async () => {
+      window.assert_implements_optional(false, 'skipped');
+    };
+  }
+  return originalPromiseTest(func, name, ...extras);
+};
+
+function matches(test, input) {
+  if (typeof test === 'string') {
+    return input.includes(test);
+  }
+  if (test instanceof RegExp) {
+    return test.test(input);
+  }
+  return false;
+}
diff --git a/test/wpt/node/run.js b/test/wpt/node/run.js
index d1f0a2f..1201452 100644
--- a/test/wpt/node/run.js
+++ b/test/wpt/node/run.js
@@ -13,6 +13,7 @@ const allSettled = require('@ungap/promise-all-settled');
 const {
   excludedTestsNonES2018,
   excludedTestsBase,
+  skippedTests,
   ignoredFailuresBase,
   ignoredFailuresMinified,
   ignoredFailuresES5,
@@ -72,7 +73,11 @@ async function main() {
   process.exitCode = failures;
 }
 
-async function runTests(entryFile, { includedTests = ['**/*.html'], excludedTests = [], ignoredFailures = {} } = {}) {
+async function runTests(entryFile, {
+  includedTests = ['**/*.html'],
+  excludedTests = [],
+  ignoredFailures = {}
+} = {}) {
   const entryPath = path.resolve(__dirname, `../../../dist/${entryFile}`);
   const wptPath = path.resolve(__dirname, '../../web-platform-tests');
   const testsPath = path.resolve(wptPath, 'streams');
@@ -81,7 +86,8 @@ async function runTests(entryFile, { includedTests = ['**/*.html'], excludedTest
   const excludeMatcher = micromatch.matcher(excludedTests);
   const workerTestPattern = /\.(?:dedicated|shared|service)worker(?:\.https)?\.html$/;
 
-  const reporter = new FilteringReporter(consoleReporter, ignoredFailures);
+  const skippedAndIgnoredFailures = mergeIgnoredFailures(skippedTests, ignoredFailures);
+  const reporter = new FilteringReporter(consoleReporter, skippedAndIgnoredFailures);
 
   const bundledJS = await readFileAsync(entryPath, { encoding: 'utf8' });
 
@@ -106,15 +112,27 @@ async function runTests(entryFile, { includedTests = ['**/*.html'], excludedTest
         };
       };
       window.eval(bundledJS);
+
+      const testPath = window.location.pathname.replace('/streams/', '');
+      hookIntoTestHarnessReport(window, () => {
+        const originalPromiseTest = window.promise_test;
+        window.promise_test = function (func, name, ...extras) {
+          if (skippedTests[testPath] && skippedTests[testPath].some(test => matches(test, name))) {
+            // Replace the actual test with one that always fails
+            func = async () => {
+              window.assert_implements_optional(false, 'skipped');
+            };
+          }
+          return originalPromiseTest(func, name, ...extras);
+        };
+      });
     },
     filter(testPath) {
       // Ignore the worker versions
       if (workerTestPattern.test(testPath)) {
        return false;
      }
-
-      return includeMatcher(testPath) &&
-        !excludeMatcher(testPath);
+      return includeMatcher(testPath) && !excludeMatcher(testPath);
     }
   });
 
@@ -147,3 +165,32 @@ function runtimeSupportsAsyncGenerators() {
     return false;
   }
 }
+
+function hookIntoTestHarnessReport(window, callback) {
+  // HACK: wpt-runner defines a window.__setupJSDOMReporter() function,
+  // which it calls when it loads testharnessreport.js
+  // We patch it before wpt-runner sets this variable,
+  // so we can run extra code when testharnessreport.js is loaded.
+  let __setupJSDOMReporter;
+  Object.defineProperty(window, '__setupJSDOMReporter', {
+    get() {
+      return __setupJSDOMReporter;
+    },
+    set(value) {
+      __setupJSDOMReporter = function () {
+        callback();
+        value();
+      };
+    }
+  });
+}
+
+function matches(test, input) {
+  if (typeof test === 'string') {
+    return input.includes(test);
+  }
+  if (test instanceof RegExp) {
+    return test.test(input);
+  }
+  return false;
+}
diff --git a/test/wpt/shared/exclusions.js b/test/wpt/shared/exclusions.js
index 21071a1..a408ac5 100644
--- a/test/wpt/shared/exclusions.js
+++ b/test/wpt/shared/exclusions.js
@@ -7,7 +7,12 @@ const excludedTestsBase = [
   // Disable tests for different size functions per realm, since they need a working