Skip to content

Commit

Permalink
Cancel method memory leak test (#245)
Browse files Browse the repository at this point in the history
* Add unit test for memory allocation in predict function
  • Loading branch information
thekevinscott authored Mar 4, 2022
1 parent 1673e23 commit fdcbdff
Show file tree
Hide file tree
Showing 7 changed files with 256 additions and 69 deletions.
5 changes: 1 addition & 4 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -118,11 +118,8 @@
"typescript": "^3.9.7",
"uglify": "^0.1.5",
"webpack": "^5.24.3",
"yargs": "^16.2.0"
},
"dependencies": {
"chokidar": "^3.5.3",
"tensor-as-base64": "^0.1.1"
"yargs": "^16.2.0"
},
"packageManager": "yarn@3.1.0"
}
3 changes: 1 addition & 2 deletions packages/upscalerjs/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -78,8 +78,7 @@
"@tensorflow/tfjs": "^3.13.0"
},
"dependencies": {
"isomorphic-fetch": "^3.0.0",
"tensor-as-base64": "^0.1.1"
"isomorphic-fetch": "^3.0.0"
},
"gitHead": "cccae63a0a7aeccaa78d30e55a8e12ec72f5c19f",
"devDependencies": {
Expand Down
90 changes: 89 additions & 1 deletion packages/upscalerjs/src/upscale.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ import {
WARNING_PROGRESS_WITHOUT_PATCH_SIZE,
WARNING_UNDEFINED_PADDING,
} from './upscale';
import { wrapGenerator } from './utils';
import { wrapGenerator, isTensor } from './utils';
import * as tensorAsBase from 'tensor-as-base64';
import * as image from './image.generated';
import { IModelDefinition, } from './types';
Expand Down Expand Up @@ -1383,6 +1383,94 @@ describe('predict', () => {
);
expect(console.warn).toHaveBeenCalledWith(WARNING_PROGRESS_WITHOUT_PATCH_SIZE);
});

describe('memory cleanup in predict', () => {
  it('should clear up all memory while running predict without patch size', async () => {
    // 1x4x4x3 tensor of ones; built inside tidy so only the final tensor survives.
    const img: tf.Tensor4D = tf.tidy(() => tf.ones([4, 4, 3,]).expandDims(0));
    const startingTensors = tf.memory().numTensors;
    const scale = 2;
    const patchSize = 2;
    // Stub model: ignores spatial content and returns a constant-filled tensor
    // of the upscaled patch shape, wrapped in tidy so it leaks nothing itself.
    const model = {
      predict: (pixel: any) => tf.tidy(() => tf
        .fill([patchSize * scale, patchSize * scale, 3,], pixel.dataSync()[0])
        .expandDims(0)),
    } as unknown as tf.LayersModel;
    const gen = predict(img, {}, { model, modelDefinition: { scale, } as IModelDefinition });
    // Without a patch size, predict completes in a single generator step.
    const { value, done } = await gen.next();
    expect(done).toEqual(true);
    expect(Array.isArray(value)).toEqual(false);
    expect((value as tf.Tensor).dataSync()).toEqual(img.dataSync());
    (value as tf.Tensor).dispose();
    // Once the returned tensor is disposed, no intermediates should remain.
    expect(tf.memory().numTensors).toEqual(startingTensors);
    img.dispose();
  });

  it('should clear up all memory while running predict with patch size', async () => {
    const IMG_SIZE = 4;
    const img: tf.Tensor4D = tf.tidy(() => tf.ones([IMG_SIZE, IMG_SIZE, 3,]).expandDims(0));
    const startingTensors = tf.memory().numTensors;
    const scale = 2;
    const patchSize = 2;
    // Same constant-output stub model as the no-patch-size test above.
    const model = {
      predict: (pixel: any) => tf.tidy(() => tf
        .fill([patchSize * scale, patchSize * scale, 3,], pixel.dataSync()[0])
        .expandDims(0)),
    } as unknown as tf.LayersModel;
    const gen = predict(img, {
      patchSize,
    }, { model, modelDefinition: { scale, } as IModelDefinition });

    let count = 0;
    // Expected live-tensor counts yielded per column iteration: the column
    // loop briefly holds a sliced patch / prediction alongside the two
    // accumulator tensors, then drops back down when they are disposed.
    const getColExpectations = () => ([
      {count: startingTensors + 2 },
      {count: startingTensors + 3 },
      {count: startingTensors + 3 },
      {count: startingTensors + 3 },
      {count: startingTensors + 3 },
      {count: startingTensors + 2 },
    ]);
    // One row = the row-level colTensor allocation, two column passes
    // (IMG_SIZE / patchSize = 2 columns), then colTensor folded into the
    // upscaled accumulator and disposed.
    const getRowExpectations = () => ([
      // for row loop, row = 0
      {count: startingTensors + 2 },
      // for col loop, row = 0, col = 0
      ...getColExpectations(),
      // for col loop, row = 0, col = 1
      ...getColExpectations(),
      // for row loop, row = 0
      {count: startingTensors + 1 },
    ]);
    const expectations: Array<{count: number, shouldDispose?: boolean}> = [
      {count: startingTensors, },
      {count: startingTensors + 1 },

      // for row loop, row = 0
      ...getRowExpectations(),

      // for row loop, row = 1
      ...getRowExpectations(),
    ];
    let result = await gen.next();
    while (!result.done) {
      const expectation = expectations[count];
      expect(tf.memory().numTensors).toEqual(expectation.count);
      if (expectation.shouldDispose) {
        if (Array.isArray(result.value)) {
          result.value.forEach(t => t.dispose());
        } else if (isTensor(result.value)) {
          result.value.dispose();
        }
      }
      count++;
      result = await gen.next();
    }
    (result.value as tf.Tensor).dispose();
    // FIX: the original `expect(count === expectations.length)` asserted
    // nothing (a bare boolean passed to expect never fails). Assert the
    // generator yielded exactly the expected number of intermediate steps.
    expect(count).toEqual(expectations.length);

    // Every intermediate tensor must have been cleaned up by predict.
    expect(tf.memory().numTensors).toEqual(startingTensors);
    img.dispose();
  });
});
});

describe('upscale', () => {
Expand Down
71 changes: 49 additions & 22 deletions packages/upscalerjs/src/upscale.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,23 @@
import { tf, } from './dependencies.generated';
import type { UpscaleArgs, IModelDefinition, ProcessFn, ResultFormat, UpscaleResponse, Progress, MultiArgProgress, } from './types';
import type {
UpscaleArgs,
IModelDefinition,
ProcessFn,
ResultFormat,
UpscaleResponse,
Progress,
MultiArgProgress,
} from './types';
import { getImageAsTensor, } from './image.generated';
import tensorAsBase64 from 'tensor-as-base64';
import { wrapGenerator, warn, isTensor, isProgress, isMultiArgTensorProgress, isAborted, } from './utils';
import {
wrapGenerator,
warn,
isTensor,
isProgress,
isMultiArgTensorProgress,
isAborted,
} from './utils';
import type { GetImageAsTensorInput, } from './image.generated';

export class AbortError extends Error {
Expand Down Expand Up @@ -229,7 +244,7 @@ export async function* predict<P extends Progress<O, PO>, O extends ResultFormat
model,
modelDefinition,
}: UpscaleInternalArgs
): AsyncGenerator<undefined | tf.Tensor3D> {
): AsyncGenerator<YieldedIntermediaryValue, tf.Tensor3D> {
const scale = modelDefinition.scale;

if (originalPatchSize && padding === undefined) {
Expand All @@ -251,13 +266,13 @@ export async function* predict<P extends Progress<O, PO>, O extends ResultFormat
width,
padding,
});
yield;
let upscaledTensor: tf.Tensor4D = tf.zeros([
1,
0,
originalSize[1] * scale * columns,
channels,
]);
yield upscaledTensor;
const total = rows * columns;
for (let row = 0; row < rows; row++) {
let colTensor: tf.Tensor4D = tf.zeros([
Expand All @@ -266,6 +281,7 @@ export async function* predict<P extends Progress<O, PO>, O extends ResultFormat
0,
channels,
]);
yield [colTensor, upscaledTensor];
for (let col = 0; col < columns; col++) {
const { origin, size, sliceOrigin, sliceSize, } = getTensorDimensions({
row,
Expand All @@ -275,19 +291,21 @@ export async function* predict<P extends Progress<O, PO>, O extends ResultFormat
height,
width,
});
yield;
yield [upscaledTensor, colTensor];
const slicedPixels = pixels.slice(
[0, origin[0], origin[1],],
[-1, size[0], size[1],],
);
yield [upscaledTensor, colTensor, slicedPixels];
const prediction = model.predict(slicedPixels) as tf.Tensor4D;
yield;
slicedPixels.dispose();
yield [upscaledTensor, colTensor, prediction];
const slicedPrediction = prediction.slice(
[0, sliceOrigin[0] * scale, sliceOrigin[1] * scale,],
[-1, sliceSize[0] * scale, sliceSize[1] * scale,],
);
prediction.dispose();
yield [upscaledTensor, colTensor, slicedPrediction];

if (progress !== undefined && isProgress(progress)) {
const index = row * columns + col + 1;
Expand All @@ -297,26 +315,26 @@ export async function* predict<P extends Progress<O, PO>, O extends ResultFormat
} else {
const squeezedTensor: tf.Tensor3D = slicedPrediction.squeeze();
if (isMultiArgTensorProgress(progress, output, progressOutput)) {
// if we are returning a tensor, we can not safely dispose of the tensor
// because we are returning a tensor, we cannot safely dispose of it
(<MultiArgProgress<'tensor'>>progress)(percent, squeezedTensor);
} else {
const sliceSrc = await tensorAsBase64(squeezedTensor);
yield;
// if we are returning a string, we can safely dispose of the tensor
// because we are returning a string, we can safely dispose of our tensor
const src = await tensorAsBase64(squeezedTensor);
squeezedTensor.dispose();
(<MultiArgProgress<'src'>>progress)(percent, sliceSrc);
(<MultiArgProgress<'src'>>progress)(percent, src);
}
}
yield;
}
yield [upscaledTensor, colTensor, slicedPrediction];

colTensor = concatTensors<tf.Tensor4D>([colTensor, slicedPrediction,], 2);
slicedPrediction.dispose();
yield;
yield [upscaledTensor, colTensor];
}

upscaledTensor = concatTensors<tf.Tensor4D>([upscaledTensor, colTensor,], 1);
colTensor.dispose();
yield [upscaledTensor];
}
/* eslint-disable @typescript-eslint/no-unnecessary-type-assertion */
const squeezedTensor = upscaledTensor.squeeze() as tf.Tensor3D;
Expand Down Expand Up @@ -350,7 +368,7 @@ export function getProcessedPixels<T extends tf.Tensor3D | tf.Tensor4D>(
// what input is in which format
export const getCopyOfInput = (input: GetImageAsTensorInput) => isTensor(input) ? input.clone() : input;

type YieldedIntermediaryValue = undefined | tf.Tensor4D | tf.Tensor3D;
type YieldedIntermediaryValue = undefined | tf.Tensor4D | tf.Tensor3D | Array<tf.Tensor3D | tf.Tensor4D>;

export async function* upscale<P extends Progress<O, PO>, O extends ResultFormat = 'src', PO extends ResultFormat = undefined>(
input: GetImageAsTensorInput,
Expand All @@ -376,15 +394,20 @@ export async function* upscale<P extends Progress<O, PO>, O extends ResultFormat
modelDefinition,
}
);
let { value: upscaledPixels, done } = await gen.next();
yield upscaledPixels;
while (done === false) {
const genResult = await gen.next();
upscaledPixels = genResult.value;
done = genResult.done;
yield upscaledPixels;
let result = await gen.next();
yield result.value;
while (!result.done) {
result = await gen.next();
if (Array.isArray(result.value)) {
yield [...result.value, preprocessedPixels];
} else if (isTensor(result.value)) {
yield [result.value, preprocessedPixels];
} else {
yield preprocessedPixels;
}
}
preprocessedPixels.dispose();
const upscaledPixels: tf.Tensor3D = result.value;

const postprocessedPixels = getProcessedPixels<tf.Tensor3D>(
upscaledPixels,
Expand Down Expand Up @@ -414,7 +437,11 @@ export async function cancellableUpscale<P extends Progress<O, PO>, O extends Re
const tick = async (result?: YieldedIntermediaryValue) => {
await tf.nextFrame();
if (isAborted(signal)) {
if (isTensor(result)) {
// only dispose tensor if we are aborting; if aborted, the called function will have
// no opportunity to dispose of its memory
if (Array.isArray(result)) {
result.forEach(r => r.dispose());
} else if (isTensor(result)) {
result.dispose();
}
throw new AbortError();
Expand Down
12 changes: 11 additions & 1 deletion packages/upscalerjs/src/utils.test.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,15 @@
import * as tf from '@tensorflow/tfjs';
import { wrapGenerator, isSingleArgProgress, isMultiArgTensorProgress, isString, isFourDimensionalTensor, isThreeDimensionalTensor, isTensor, warn, isAborted, } from './utils';
import {
wrapGenerator,
isSingleArgProgress,
isMultiArgTensorProgress,
isString,
isFourDimensionalTensor,
isThreeDimensionalTensor,
isTensor,
warn,
isAborted,
} from './utils';

describe('isAborted', () => {
it('handles an undefined signal', () => {
Expand Down
14 changes: 8 additions & 6 deletions scripts/test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -90,12 +90,14 @@ const getRunner = (runner?: string): 'local' | 'browserstack' => {
await buildUpscaler(platform);
}
const yarnArgs = [
'yarn',
'jest',
'--config',
`test/jestconfig.${platform}.${runner}.js`,
'--detectOpenHandles',
argv.watch ? '--watch' : undefined,
'jest',
// 'node',
// '--expose-gc',
// './node_modules/.bin/jest',
'--config',
`test/jestconfig.${platform}.${runner}.js`,
'--detectOpenHandles',
argv.watch ? '--watch' : undefined,
...argv._,
].filter(Boolean).map(arg => `${arg}`);
const code = await runProcess(yarnArgs[0], yarnArgs.slice(1));
Expand Down
Loading

0 comments on commit fdcbdff

Please sign in to comment.