diff --git a/index.bs b/index.bs index 3f04a8a4..9f2acc3e 100644 --- a/index.bs +++ b/index.bs @@ -1701,23 +1701,26 @@ partial interface MLGraphBuilder { The {{MLGraph}} interface represents a compiled computational graph. A compiled graph once constructed is immutable and cannot be subsequently changed. @@ -1742,24 +1745,23 @@ interface MLGraph {
- : compute(inputs, outputs) + : compute(inputs, outputNames) :: - Issue a compute request of the {{MLGraph}} given {{MLNamedInputs}} and optional {{MLNamedOutputs}}. The returned {{Promise}} resolves when the results in {{MLNamedOutputs}} are ready to be consumed. + Issue a compute request of the {{MLGraph}} given {{MLNamedInputs}} and optional [=sequence=]<{{DOMString}}>. Return {{MLNamedOutputs}}.
**Called on:** {{MLGraph}} |this|. **Arguments:** -
+            
                 |inputs|: a {{MLNamedInputs}}. The data and optional dimensions of inputs for the compute request.
-                |outputs|: an optional {{MLNamedOutputs}}. The names and pre-allocated resources of required outputs for the compute request. Default to be an empty [=record=] which means that the compute request is for all outputs.
+                |outputNames|: an optional [=sequence=]<{{DOMString}}>. The names of required outputs for the compute request. Defaults to an empty [=sequence=], which means that the compute request is for all outputs.
             
- **Returns:** {{Promise}}<{{MLNamedOutputs}}>. The dimensions and data of outputs returned by the compute request. + **Returns:** {{MLNamedOutputs}}. - 1. Let |promise| be [=a new promise=]. - 1. If any of the following requirements are unmet, then [=reject=] |promise| with a {{TypeError}} and stop. + 1. If any of the following requirements are unmet, then throw a {{TypeError}} and stop.
1. For each |key| -> |value| of |inputs|: @@ -1774,22 +1776,22 @@ interface MLGraph { 1. Let |dimension| be |value|.{{MLInput/dimensions}}[|i|]. 1. |dimension| must be greater than 0. 1. If |inputOperand|.{{MLOperandDescriptor/dimensions}}[|i|] is greater than 0, then |dimension| must be equal to |inputOperand|.{{MLOperandDescriptor/dimensions}}[|i|]. - 1. Set |i| to |i| + 1. + 1. Increment |i| by 1. 1. If |i| if equal to the length of |value|.{{MLInput/dimensions}}, then break. 1. Else: 1. For each |dimension| of |inputOperand|.{{MLOperandDescriptor/dimensions}}: 1. The value of |dimension| must be greater than 0. - 1. If |outputs| was not an empty [=record=], then: - 1. For each |key| -> |value| of |outputs|: - 1. |this|.{{MLGraph/[[outputOperands]]}}[|key|] must exist. - 1. If |value|.{{MLOutput/data}} was given, then the kind of |value|.{{MLOutput/data}} must be compatible to |this|.{{MLGraph/[[outputOperands]]}}[|key|] according to [this table](#appendices-mloperandtype-arraybufferview-compatibility). + 1. If |outputNames| was not an empty [=sequence=], then: + 1. For each |name| of |outputNames|: + 1. |this|.{{MLGraph/[[outputOperands]]}}[|name|] must exist. +
1. Let |requiredOutputNames| be a new [=ordered set=]<{{DOMString}}>. - 1. If |outputs| was not an empty [=record=], then: - 1. For each |key| -> |value| of |outputs|: - 1. Append |key| to |requiredOutputNames|. + 1. If |outputNames| was not an empty [=sequence=], then: + 1. For each |name| of |outputNames|: + 1. Append |name| to |requiredOutputNames|. 1. Else: 1. For each |key| -> |value| of |this|.{{MLGraph/[[outputOperands]]}}: 1. Append |key| to |requiredOutputNames|. @@ -1804,45 +1806,22 @@ interface MLGraph { 1. Set |copiedInputs|[key] to |copiedInputs|. 1. Let |results| be a new {{MLNamedOutputs}}. - 1. Let |remainingOutputNames| be a new [=ordered set=]<{{DOMString}}>. - 1. Set the content of |remainingOutputNames| to the content of |requiredOutputNames|. 1. Issue the following steps on the [=Device timeline=] of |this|.{{MLGraph/[[implementation]]}}:
1. For each |outputName| of |requiredOutputNames|: 1. Issue a compute request of |this|.{{MLGraph/[[implementation]]}} for output whose name is |outputName| with given |copiedInputs|. - 1. When the compute request is completed, issue the following steps on the appropriate [=Queue timeline=]: -
- 1. If there is an error returned by |this|.{{MLGraph/[[implementation]]}}, then: - 1. [=reject=] |promise| with an {{OperationError}} and stop. - 1. Else: - 1. Let |outputRank| be a {{unsigned long}}. - 1. Set |outputRank| to the rank of output tensor returned by |this|.{{MLGraph/[[implementation]]}}. - 1. Let |outputDemisions| be a new [=sequence=]<{{long}}> of size |outputRank|. - 1. Let |i| be 0. - 1. Let |outputSize| to 1. - 1. While true: - 1. Set |outputDimensions|[|i|] to the dimension at |i|th axis of output tensor returned by |this|.{{MLGraph/[[implementation]]}}. - 1. Set |outputSize| to |outputSize| * |outputDimensions|[|i|]. - 1. Set |i| to |i| + 1. - 1. If |i| is equal to |outputRank|, then break. - 1. Set |results|[|outputName|].{{MLOutput/dimensions}} to |outputDemisions|. - 1. If |this|.{{MLGraph/[[context]]}} is created from {{MLContextOptions}}, then: - 1. If |outputs|[|outputName|].{{MLOutput/data}} was given, then: - 1. If outputs|[|outputName|].{{MLOutput/data}} is not an {{ArrayBufferView}}, then [=reject=] |promise| with an {{TypeError}} and stop. - 1. If the kind of |outputs|[|outputName|].{{MLOutput/data}} is not compatible to output tensor according to [this table](#appendices-mloperandtype-arraybufferview-compatibility), then [=reject=] |promise| with a {{TypeError}} and stop. - 1. If the length of |outputs|[|outputName|].{{MLOutput/data}} is less than |outputSize|, then [=reject=] |promise| with a {{TypeError}} and stop. - 1. Set the content of |outputs|[|outputName|].{{MLOutput/data}} to the content of output tensor returned by |this|.{{MLGraph/[[implementation]]}}. - 1. Else: - 1. Let |results|[|outputName|].{{MLOutput/data}} be a new {{ArrayBufferView}} of size |outputSize| and kind that is compatible to output tensor according to [this table](#appendices-mloperandtype-arraybufferview-compatibility). - 1. 
Set the content of |results|[|outputName|].{{MLOutput/data}} to the content of output tensor returned by |this|.{{MLGraph/[[implementation]]}}. - 1. Remove |outputName| from |remainingOutputNames|. - 1. If |remainingOutputNames| is empty, then resolve |promise| with |results| and stop. -
+ 1. If there is an error returned by |this|.{{MLGraph/[[implementation]]}}, then: + 1. Throw an {{OperationError}} and stop. + 1. Else: + 1. Let |output| be a new {{MLOutput}}. + 1. Associate |output| with the output tensor returned by |this|.{{MLGraph/[[implementation]]}}. + 1. Set |results|[|outputName|] to |output|.
- - 1. Return |promise|. + 1. Return |results|. Issue: Describe the algorithm steps for |this|.{{MLGraph/[[context]]}} created from {{WebGLRenderingContext}} and {{GPUDevice}}. + + Issue: Describe the algorithm steps for {{MLOutput}}.
@@ -1860,7 +1839,7 @@ const a = builder.input('a', descA); const descB = {type: 'float32', dimensions: [4, -1]}; const b = builder.input('b', descB); const c = builder.matmul(a, b); -const graph = await builder.build({c}); +const graph = await builder.build({'c': c}); async function compute(shapeA, shapeB) { const bufferA = new Float32Array(sizeOfShape(shapeA)).fill(0.5); @@ -1871,8 +1850,8 @@ async function compute(shapeA, shapeB) { 'a': {data: bufferA, dimensions: shapeA}, 'b': {data: bufferB, dimensions: shapeB}, }; - const outputs = await graph.compute(inputs); - console.log(`shape: [${outputs.c.dimensions}], values: ${outputs.c.data}`); + const outputs = graph.compute(inputs); + console.log(`shape: [${outputs.c.dimensions()}], values: ${await outputs.c.data()}`); } await compute([3, 4], [4, 3]); @@ -1895,14 +1874,15 @@ const descB = {type: 'float32', dimensions: [4, 3]}; const bufferB = new Float32Array(sizeOfShape(descB.dimensions)).fill(0.5); const b = builder.constant(descB, bufferB); const c = builder.matmul(a, b); -const graph = await builder.build({c}); +const graph = await builder.build({'c': c}); const bufferA = new Float32Array(sizeOfShape(descA.dimensions)).fill(0.5); const inputs = {'a': {data: bufferA}}; // Pre-allocate output buffer for c. 
-const outputs = {'c': {data: new Float32Array(sizeOfShape([3, 3]))}}; -await graph.compute(inputs, outputs); -console.log(`values: ${outputs.c.data}`); +const bufferC = new Float32Array(sizeOfShape([3, 3])); +const outputs = graph.compute(inputs); +await outputs.c.data(bufferC); +console.log(`values: ${bufferC}`); @@ -1923,24 +1903,52 @@ const bufferC = new Float32Array(sizeOfShape(descC.dimensions)).fill(1); const c = builder.constant(descC, bufferC); const d = builder.matmul(a, b); const e = builder.add(d, c); -const graph = await builder.build({d, e}); +const graph = await builder.build({'d': d, 'e': e}); const bufferA = new Float32Array(sizeOfShape(descA.dimensions)).fill(0.5); const inputs = {'a': {data: bufferA}}; // Compute both d and e. -let outputs = await graph.compute(inputs); +let outputs = graph.compute(inputs); console.log(`outputs include ${Object.keys(outputs)}`); // Compute d. -outputs = await graph.compute(inputs, {d}); +outputs = graph.compute(inputs, ['d']); console.log(`outputs include ${Object.keys(outputs)}`); -console.log(`shape: [${outputs.d.dimensions}], values: ${outputs.d.data}`); +console.log(`shape: [${outputs.d.dimensions()}], values: ${await outputs.d.data()}`); // Compute e. -outputs = await graph.compute(inputs, {e}); +outputs = graph.compute(inputs, ['e']); console.log(`outputs include ${Object.keys(outputs)}`); -console.log(`shape: [${outputs.e.dimensions}], values: ${outputs.e.data}`); +console.log(`shape: [${outputs.e.dimensions()}], values: ${await outputs.e.data()}`); + + + +
+The following code showcases the computation of multiple graphs without accessing the intermediate results. +
+const context = navigator.ml.createContext();
+const builder = new MLGraphBuilder(context);
+
+async function buildConv2d(inputShape, filterShape) {
+  const input = builder.input('input', {type: 'float32', dimensions: inputShape});
+  const filter = builder.constant({type: 'float32', dimensions: filterShape},
+                                  new Float32Array(sizeOfShape(filterShape)).fill(0.5));
+  const output = builder.conv2d(input, filter);
+  return await builder.build({'output': output});
+}
+
+// Build three graphs that each one contains a conv2d op.
+const conv2dOp1 = await buildConv2d([1, 1, 9, 9], [1, 1, 3, 3]);
+const conv2dOp2 = await buildConv2d([1, 1, 7, 7], [1, 1, 3, 3]);
+const conv2dOp3 = await buildConv2d([1, 1, 5, 5], [1, 1, 3, 3]);
+
+// Compute the graphs and access the final result.
+const inputBuffer = new Float32Array(9*9).fill(0.5);
+const output1 = conv2dOp1.compute({'input': {data: inputBuffer}}).output;
+const output2 = conv2dOp2.compute({'input': output1}).output;
+const output3 = conv2dOp3.compute({'input': output2}).output;
+console.log(`shape: [${output3.dimensions()}], values: ${await output3.data()}`);