Allow frame release to be controlled outside FrameProcessor.

Adds a method to FrameProcessor.Listener that is called when an output
frame becomes available, and a releaseOutputFrame method to
FrameProcessor that lets the caller trigger release of the oldest
available output frame at a given timestamp. Late frames or frames
with unset release times are dropped in the
FinalMatrixTransformationProcessorWrapper.

Multiple output frames can become available before they are released
if the penultimate GlTextureProcessor is capable of producing multiple
output frames. Processing continues while waiting for
releaseOutputFrame to be called, and frame release tasks are
prioritized over other tasks.

PiperOrigin-RevId: 468473072
Googler authored and marcbaechinger committed Oct 19, 2022
1 parent 32ee448 commit 2c06354
Showing 6 changed files with 590 additions and 117 deletions.
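
The commit message describes the new controlled-release API. Below is a minimal usage sketch under stated assumptions: the Listener callbacks and the releaseOutputFrame(long) signature are taken from the tests in this commit, while the ControlledReleaseListener class name, the setFrameProcessor wiring, and the fixed 10 ms release delay are illustrative and not part of the change.

import androidx.annotation.Nullable;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;

/** Sketch of a listener that drives controlled frame release (not part of this commit). */
final class ControlledReleaseListener implements FrameProcessor.Listener {

  // Assumption: the FrameProcessor is created with releaseFramesAutomatically set to false
  // and handed to this listener after Factory.create() returns.
  @Nullable private volatile FrameProcessor frameProcessor;

  public void setFrameProcessor(FrameProcessor frameProcessor) {
    this.frameProcessor = frameProcessor;
  }

  @Override
  public void onOutputSizeChanged(int width, int height) {
    // Provide or resize the output surface here, e.g. via FrameProcessor.setOutputSurfaceInfo.
  }

  @Override
  public void onOutputFrameAvailable(long presentationTimeNs) {
    FrameProcessor frameProcessor = this.frameProcessor;
    if (frameProcessor == null) {
      return;
    }
    // Release the oldest available output frame roughly 10 ms from now (illustrative delay).
    // Passing a release time in the past, or C.TIME_UNSET, drops the frame instead.
    frameProcessor.releaseOutputFrame(System.nanoTime() + 10_000_000L);
  }

  @Override
  public void onFrameProcessingError(FrameProcessingException exception) {
    // Surface the error to the application.
  }

  @Override
  public void onFrameProcessingEnded() {}
}

The tests below follow the same pattern with an anonymous Listener that references the frame processor through a field assigned once Factory.create() returns, and they cover both automatic and controlled release.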
@@ -0,0 +1,361 @@
/*
 * Copyright 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package androidx.media3.effect;

import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;

import android.graphics.PixelFormat;
import android.media.Image;
import android.media.ImageReader;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Util;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;

/** Tests for frame release in {@link GlEffectsFrameProcessor}. */
@RunWith(AndroidJUnit4.class)
public final class GlEffectsFrameProcessorFrameReleaseTest {

  private static final int WIDTH = 200;
  private static final int HEIGHT = 100;
  private static final long FRAME_PROCESSING_WAIT_MS = 5000L;
  private static final long MILLIS_TO_NANOS = 1_000_000L;
  private static final long MICROS_TO_NANOS = 1000L;

  private final AtomicReference<FrameProcessingException> frameProcessingException =
      new AtomicReference<>();
  private final Queue<Long> outputReleaseTimesNs = new ConcurrentLinkedQueue<>();

  private @MonotonicNonNull GlEffectsFrameProcessor glEffectsFrameProcessor;
  private volatile @MonotonicNonNull Runnable produceBlankFramesTask;

  @After
  public void release() {
    if (glEffectsFrameProcessor != null) {
      glEffectsFrameProcessor.release();
    }
  }

  @Test
  public void automaticFrameRelease_withOneFrame_reusesInputTimestamp() throws Exception {
    long originalPresentationTimeUs = 1234;
    AtomicLong actualPresentationTimeNs = new AtomicLong();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
        /* onFrameAvailableListener= */ actualPresentationTimeNs::set,
        /* releaseFramesAutomatically= */ true);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    assertThat(actualPresentationTimeNs.get())
        .isEqualTo(MICROS_TO_NANOS * originalPresentationTimeUs);
    assertThat(outputReleaseTimesNs).containsExactly(MICROS_TO_NANOS * originalPresentationTimeUs);
  }

  @Test
  public void automaticFrameRelease_withThreeFrames_reusesInputTimestamps() throws Exception {
    long[] originalPresentationTimesUs = new long[] {1234, 3456, 4567};
    ArrayList<Long> actualPresentationTimesNs = new ArrayList<>();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        originalPresentationTimesUs,
        /* onFrameAvailableListener= */ actualPresentationTimesNs::add,
        /* releaseFramesAutomatically= */ true);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    assertThat(actualPresentationTimesNs)
        .containsExactly(
            MICROS_TO_NANOS * originalPresentationTimesUs[0],
            MICROS_TO_NANOS * originalPresentationTimesUs[1],
            MICROS_TO_NANOS * originalPresentationTimesUs[2])
        .inOrder();
    assertThat(outputReleaseTimesNs).containsExactlyElementsIn(actualPresentationTimesNs).inOrder();
  }

  @Test
  public void controlledFrameRelease_withOneFrame_usesGivenTimestamp() throws Exception {
    long originalPresentationTimeUs = 1234;
    long releaseTimesNs = System.nanoTime() + MILLIS_TO_NANOS * FRAME_PROCESSING_WAIT_MS + 345678;
    AtomicLong actualPresentationTimeNs = new AtomicLong();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
        /* onFrameAvailableListener= */ presentationTimeNs -> {
          actualPresentationTimeNs.set(presentationTimeNs);
          checkNotNull(glEffectsFrameProcessor).releaseOutputFrame(releaseTimesNs);
        },
        /* releaseFramesAutomatically= */ false);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    assertThat(actualPresentationTimeNs.get())
        .isEqualTo(MICROS_TO_NANOS * originalPresentationTimeUs);
    assertThat(outputReleaseTimesNs).containsExactly(releaseTimesNs);
  }

  @Test
  public void controlledFrameRelease_withLateFrame_dropsFrame() throws Exception {
    long originalPresentationTimeUs = 1234;
    long releaseTimeBeforeCurrentTimeNs = System.nanoTime() - 345678;
    AtomicLong actualPresentationTimeNs = new AtomicLong();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
        /* onFrameAvailableListener= */ presentationTimeNs -> {
          actualPresentationTimeNs.set(presentationTimeNs);
          checkNotNull(glEffectsFrameProcessor).releaseOutputFrame(releaseTimeBeforeCurrentTimeNs);
        },
        /* releaseFramesAutomatically= */ false);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    assertThat(actualPresentationTimeNs.get())
        .isEqualTo(MICROS_TO_NANOS * originalPresentationTimeUs);
    assertThat(outputReleaseTimesNs).isEmpty();
  }

  @Test
  public void controlledFrameRelease_withUnsetReleaseTime_dropsFrame() throws Exception {
    long originalPresentationTimeUs = 1234;
    AtomicLong actualPresentationTimeNs = new AtomicLong();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
        /* onFrameAvailableListener= */ presentationTimeNs -> {
          actualPresentationTimeNs.set(presentationTimeNs);
          checkNotNull(glEffectsFrameProcessor)
              .releaseOutputFrame(/* releaseTimeNs= */ C.TIME_UNSET);
        },
        /* releaseFramesAutomatically= */ false);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    assertThat(actualPresentationTimeNs.get())
        .isEqualTo(MICROS_TO_NANOS * originalPresentationTimeUs);
    assertThat(outputReleaseTimesNs).isEmpty();
  }

  @Test
  public void controlledFrameRelease_withThreeIndividualFrames_usesGivenTimestamps()
      throws Exception {
    long[] originalPresentationTimesUs = new long[] {1234, 3456, 4567};
    long offsetNs = System.nanoTime() + MILLIS_TO_NANOS * FRAME_PROCESSING_WAIT_MS;
    long[] releaseTimesNs = new long[] {offsetNs + 123456, offsetNs + 234567, offsetNs + 345678};
    ArrayList<Long> actualPresentationTimesNs = new ArrayList<>();
    AtomicInteger frameIndex = new AtomicInteger();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        /* inputPresentationTimesUs= */ originalPresentationTimesUs,
        /* onFrameAvailableListener= */ presentationTimeNs -> {
          actualPresentationTimesNs.add(presentationTimeNs);
          checkNotNull(glEffectsFrameProcessor)
              .releaseOutputFrame(releaseTimesNs[frameIndex.getAndIncrement()]);
        },
        /* releaseFramesAutomatically= */ false);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    assertThat(actualPresentationTimesNs)
        .containsExactly(
            MICROS_TO_NANOS * originalPresentationTimesUs[0],
            MICROS_TO_NANOS * originalPresentationTimesUs[1],
            MICROS_TO_NANOS * originalPresentationTimesUs[2])
        .inOrder();
    assertThat(frameIndex.get()).isEqualTo(originalPresentationTimesUs.length);
    assertThat(outputReleaseTimesNs)
        .containsExactly(releaseTimesNs[0], releaseTimesNs[1], releaseTimesNs[2])
        .inOrder();
  }

  @Test
  public void controlledFrameRelease_withThreeFramesAtOnce_usesGivenTimestamps() throws Exception {
    long[] originalPresentationTimesUs = new long[] {1234, 3456, 4567};
    long offsetNs = System.nanoTime() + MILLIS_TO_NANOS * 2 * FRAME_PROCESSING_WAIT_MS;
    long[] releaseTimesNs = new long[] {offsetNs + 123456, offsetNs + 234567, offsetNs + 345678};
    ArrayList<Long> actualPresentationTimesNs = new ArrayList<>();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        /* inputPresentationTimesUs= */ originalPresentationTimesUs,
        /* onFrameAvailableListener= */ actualPresentationTimesNs::add,
        /* releaseFramesAutomatically= */ false);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);
    glEffectsFrameProcessor.releaseOutputFrame(releaseTimesNs[0]);
    glEffectsFrameProcessor.releaseOutputFrame(releaseTimesNs[1]);
    glEffectsFrameProcessor.releaseOutputFrame(releaseTimesNs[2]);
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    assertThat(actualPresentationTimesNs)
        .containsExactly(
            MICROS_TO_NANOS * originalPresentationTimesUs[0],
            MICROS_TO_NANOS * originalPresentationTimesUs[1],
            MICROS_TO_NANOS * originalPresentationTimesUs[2])
        .inOrder();
    assertThat(outputReleaseTimesNs)
        .containsExactly(releaseTimesNs[0], releaseTimesNs[1], releaseTimesNs[2])
        .inOrder();
  }

  private interface OnFrameAvailableListener {
    void onFrameAvailable(long presentationTimeNs);
  }

  @EnsuresNonNull("glEffectsFrameProcessor")
  private void setupGlEffectsFrameProcessorWithBlankFrameProducer(
      long[] inputPresentationTimesUs,
      OnFrameAvailableListener onFrameAvailableListener,
      boolean releaseFramesAutomatically)
      throws Exception {
    glEffectsFrameProcessor =
        checkNotNull(
            new GlEffectsFrameProcessor.Factory()
                .create(
                    getApplicationContext(),
                    new FrameProcessor.Listener() {
                      @Override
                      public void onOutputSizeChanged(int width, int height) {
                        ImageReader outputImageReader =
                            ImageReader.newInstance(
                                width,
                                height,
                                PixelFormat.RGBA_8888,
                                /* maxImages= */ inputPresentationTimesUs.length);
                        checkNotNull(glEffectsFrameProcessor)
                            .setOutputSurfaceInfo(
                                new SurfaceInfo(outputImageReader.getSurface(), width, height));
                        outputImageReader.setOnImageAvailableListener(
                            imageReader -> {
                              try (Image image = imageReader.acquireNextImage()) {
                                outputReleaseTimesNs.add(image.getTimestamp());
                              }
                            },
                            Util.createHandlerForCurrentOrMainLooper());
                      }

                      @Override
                      public void onOutputFrameAvailable(long presentationTimeNs) {
                        onFrameAvailableListener.onFrameAvailable(presentationTimeNs);
                      }

                      @Override
                      public void onFrameProcessingError(FrameProcessingException exception) {
                        frameProcessingException.set(exception);
                      }

                      @Override
                      public void onFrameProcessingEnded() {}
                    },
                    ImmutableList.of(
                        (GlEffect)
                            (context, useHdr) ->
                                new BlankFrameProducer(inputPresentationTimesUs, useHdr)),
                    DebugViewProvider.NONE,
                    ColorInfo.SDR_BT709_LIMITED,
                    releaseFramesAutomatically));

    glEffectsFrameProcessor.setInputFrameInfo(
        new FrameInfo(WIDTH, HEIGHT, /* pixelWidthHeightRatio= */ 1, /* streamOffsetUs= */ 0));
    // A frame needs to be registered despite not queuing any external input to ensure that the
    // frame processor knows about the stream offset.
    glEffectsFrameProcessor.registerInputFrame();
  }

  /** Produces blank frames with the given timestamps. */
  private final class BlankFrameProducer implements GlTextureProcessor {

    private final TextureInfo blankTexture;
    private final long[] presentationTimesUs;

    public BlankFrameProducer(long[] presentationTimesUs, boolean useHdr)
        throws FrameProcessingException {
      this.presentationTimesUs = presentationTimesUs;
      try {
        int texId = GlUtil.createTexture(WIDTH, HEIGHT, useHdr);
        int fboId = GlUtil.createFboForTexture(texId);
        blankTexture = new TextureInfo(texId, fboId, WIDTH, HEIGHT);
        GlUtil.focusFramebufferUsingCurrentContext(fboId, WIDTH, HEIGHT);
        GlUtil.clearOutputFrame();
      } catch (GlUtil.GlException e) {
        throw new FrameProcessingException(e);
      }
    }

    @Override
    public void setInputListener(InputListener inputListener) {}

    @Override
    public void setOutputListener(OutputListener outputListener) {
      produceBlankFramesTask =
          () -> {
            for (long presentationTimeUs : presentationTimesUs) {
              outputListener.onOutputFrameAvailable(blankTexture, presentationTimeUs);
            }
          };
    }

    @Override
    public void setErrorListener(ErrorListener errorListener) {}

    @Override
    public void queueInputFrame(TextureInfo inputTexture, long presentationTimeUs) {
      // No input is queued in these tests. The BlankFrameProducer is used to produce frames.
      throw new UnsupportedOperationException();
    }

    @Override
    public void releaseOutputFrame(TextureInfo outputTexture) {}

    @Override
    public void signalEndOfCurrentInputStream() {
      // The tests don't end the input stream.
      throw new UnsupportedOperationException();
    }

    @Override
    public void release() {
      // Do nothing as destroying the OpenGL context destroys the texture.
    }
  }
}
@@ -476,6 +476,11 @@ public void onOutputSizeChanged(int width, int height) {
                                new SurfaceInfo(outputImageReader.getSurface(), width, height));
                      }

                      @Override
                      public void onOutputFrameAvailable(long presentationTimeNs) {
                        // Do nothing as frames are released automatically.
                      }

                      @Override
                      public void onFrameProcessingError(FrameProcessingException exception) {
                        frameProcessingException.set(exception);
@@ -488,7 +493,8 @@ public void onFrameProcessingEnded() {
                    },
                    effects,
                    DebugViewProvider.NONE,
                    ColorInfo.SDR_BT709_LIMITED));
                    ColorInfo.SDR_BT709_LIMITED,
                    /* releaseFramesAutomatically= */ true));
    glEffectsFrameProcessor.setInputFrameInfo(
        new FrameInfo(inputWidth, inputHeight, pixelWidthHeightRatio, /* streamOffsetUs= */ 0));
    glEffectsFrameProcessor.registerInputFrame();