Commit aedf625: Fix review comment

claincly committed May 3, 2024
1 parent e009c3b commit aedf625
Showing 14 changed files with 98 additions and 76 deletions.
@@ -16,6 +16,7 @@

package androidx.media3.common;

import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
import androidx.media3.common.util.UnstableApi;

@@ -73,20 +74,23 @@ interface Listener {
* <p>An underlying {@link VideoFrameProcessor} is created every time this method is
* called.
*
* <p>All inputs must be registered before rendering frames to the underlying
* {@link #getProcessor(int) VideoFrameProcessor}.
*
* <p>If the method throws, the caller must call {@link #release}.
*
* @param sequenceIndex The sequence index of the input which can aid ordering of the inputs. The
* index must start from 0.
* @param inputIndex The index of the input which could be used to order the inputs.
* The index must start from 0.
*/
void registerInput(int sequenceIndex) throws VideoFrameProcessingException;
void registerInput(@IntRange(from = 0) int inputIndex) throws VideoFrameProcessingException;

/**
* Returns the {@link VideoFrameProcessor} that handles the processing for an input registered via
* {@link #registerInput(int)}. If the {@code sequenceIndex} is not {@linkplain
* {@link #registerInput(int)}. If the {@code inputIndex} has not been {@linkplain
* #registerInput(int) registered}, this method throws an {@link
* IllegalStateException}.
*/
VideoFrameProcessor getProcessor(int sequenceIndex);
VideoFrameProcessor getProcessor(int inputIndex);

/**
* Sets the output surface and supporting information.
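For context, a minimal sketch of how a caller might drive the renamed API (illustrative only, not part of this commit; it assumes a graph implementation that supports multiple inputs, such as MultipleInputVideoGraph, and that construction, initialization, and listener wiring have already happened):

// Illustrative only: register every input before rendering frames to any returned processor.
void setUpTwoInputs(VideoGraph videoGraph) throws VideoFrameProcessingException {
  videoGraph.registerInput(/* inputIndex= */ 0);
  videoGraph.registerInput(/* inputIndex= */ 1);

  // Safe now: both indices were registered above.
  VideoFrameProcessor primaryProcessor = videoGraph.getProcessor(/* inputIndex= */ 0);
  VideoFrameProcessor overlayProcessor = videoGraph.getProcessor(/* inputIndex= */ 1);

  // getProcessor(/* inputIndex= */ 2) would throw IllegalStateException, because index 2
  // was never registered.
}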
@@ -18,6 +18,7 @@
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.common.util.Util.contains;
import static java.lang.Math.abs;
import static java.lang.Math.max;

@@ -78,7 +79,8 @@ public final class DefaultVideoCompositor implements VideoCompositor {

private static final String THREAD_NAME = "Effect:DefaultVideoCompositor:GlThread";
private static final String TAG = "DefaultVideoCompositor";
private static final int PRIMARY_INPUT_ID = 0;
// TODO: b/338579287: Use the first registered index instead of a constant value.
private static final int PRIMARY_INPUT_INDEX = 0;

private final VideoCompositor.Listener listener;
private final GlTextureProducer.Listener textureOutputListener;
@@ -142,32 +144,35 @@ public DefaultVideoCompositor(
}

@Override
public synchronized void registerInputSource(int sequenceIndex) {
inputSources.put(sequenceIndex, new InputSource());
public synchronized void registerInputSource(@IntRange(from = 0) int inputIndex) {
checkState(!contains(inputSources, inputIndex));
inputSources.put(inputIndex, new InputSource());
}

@Override
public synchronized void signalEndOfInputSource(int inputId) {
inputSources.get(inputId).isInputEnded = true;
public synchronized void signalEndOfInputSource(int inputIndex) {
checkState(contains(inputSources, inputIndex));
inputSources.get(inputIndex).isInputEnded = true;
boolean allInputsEnded = true;
for (int i = 0; i < inputSources.size(); i++) {
if (!inputSources.get(inputSources.keyAt(i)).isInputEnded) {
if (!inputSources.valueAt(i).isInputEnded) {
allInputsEnded = false;
break;
}
}

this.allInputsEnded = allInputsEnded;
if (inputSources.get(PRIMARY_INPUT_ID).frameInfos.isEmpty()) {
if (inputId == PRIMARY_INPUT_ID) {
if (inputSources.get(PRIMARY_INPUT_INDEX).frameInfos.isEmpty()) {
if (inputIndex == PRIMARY_INPUT_INDEX) {
releaseExcessFramesInAllSecondaryStreams();
}
if (allInputsEnded) {
listener.onEnded();
return;
}
}
if (inputId != PRIMARY_INPUT_ID && inputSources.get(inputId).frameInfos.size() == 1) {
if (inputIndex != PRIMARY_INPUT_INDEX
&& inputSources.get(inputIndex).frameInfos.size() == 1) {
// When a secondary stream ends input, composite if there was only one pending frame in the
// stream.
videoFrameProcessingTaskExecutor.submit(this::maybeComposite);
@@ -176,12 +181,13 @@ public synchronized void signalEndOfInputSource(int inputId) {

@Override
public synchronized void queueInputTexture(
int inputId,
int inputIndex,
GlTextureProducer textureProducer,
GlTextureInfo inputTexture,
ColorInfo colorInfo,
long presentationTimeUs) {
InputSource inputSource = inputSources.get(inputId);
checkState(contains(inputSources, inputIndex));
InputSource inputSource = inputSources.get(inputIndex);
checkState(!inputSource.isInputEnded);
checkState(!ColorInfo.isTransferHdr(colorInfo), "HDR input is not supported.");
if (configuredColorInfo == null) {
@@ -195,10 +201,10 @@ public synchronized void queueInputTexture(
textureProducer,
inputTexture,
presentationTimeUs,
settings.getOverlaySettings(inputId, presentationTimeUs));
settings.getOverlaySettings(inputIndex, presentationTimeUs));
inputSource.frameInfos.add(inputFrameInfo);

if (inputId == PRIMARY_INPUT_ID) {
if (inputIndex == PRIMARY_INPUT_INDEX) {
releaseExcessFramesInAllSecondaryStreams();
} else {
releaseExcessFramesInSecondaryStream(inputSource);
@@ -224,11 +230,11 @@ public void releaseOutputTexture(long presentationTimeUs) {
}

private synchronized void releaseExcessFramesInAllSecondaryStreams() {
for (int i = 0; i < inputSources.size(); i++) {
if (i == PRIMARY_INPUT_ID) {
for (int inputIndex = 0; inputIndex < inputSources.size(); inputIndex++) {
if (inputIndex == PRIMARY_INPUT_INDEX) {
continue;
}
releaseExcessFramesInSecondaryStream(inputSources.get(inputSources.keyAt(i)));
releaseExcessFramesInSecondaryStream(inputSources.valueAt(inputIndex));
}
}

@@ -240,7 +246,7 @@ private synchronized void releaseExcessFramesInAllSecondaryStreams() {
* began.
*/
private synchronized void releaseExcessFramesInSecondaryStream(InputSource secondaryInputSource) {
InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_INDEX);
// If the primary stream output is ended, all secondary frames can be released.
if (primaryInputSource.frameInfos.isEmpty() && primaryInputSource.isInputEnded) {
releaseFrames(
@@ -291,7 +297,7 @@ private synchronized void maybeComposite()
return;
}

InputFrameInfo primaryInputFrame = framesToComposite.get(PRIMARY_INPUT_ID);
InputFrameInfo primaryInputFrame = framesToComposite.get(PRIMARY_INPUT_INDEX);

ImmutableList.Builder<Size> inputSizes = new ImmutableList.Builder<>();
for (int i = 0; i < framesToComposite.size(); i++) {
@@ -312,7 +318,7 @@ private synchronized void maybeComposite()
textureOutputListener.onTextureRendered(
/* textureProducer= */ this, outputTexture, outputPresentationTimestampUs, syncObject);

InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_INDEX);
releaseFrames(primaryInputSource, /* numberOfFramesToRelease= */ 1);
releaseExcessFramesInAllSecondaryStreams();

@@ -332,18 +338,18 @@ private synchronized ImmutableList<InputFrameInfo> getFramesToComposite() {
if (outputTexturePool.freeTextureCount() == 0) {
return ImmutableList.of();
}
for (int inputId = 0; inputId < inputSources.size(); inputId++) {
if (inputSources.get(inputSources.keyAt(inputId)).frameInfos.isEmpty()) {
for (int i = 0; i < inputSources.size(); i++) {
if (inputSources.valueAt(i).frameInfos.isEmpty()) {
return ImmutableList.of();
}
}
ImmutableList.Builder<InputFrameInfo> framesToComposite = new ImmutableList.Builder<>();
InputFrameInfo primaryFrameToComposite =
inputSources.get(PRIMARY_INPUT_ID).frameInfos.element();
inputSources.get(PRIMARY_INPUT_INDEX).frameInfos.element();
framesToComposite.add(primaryFrameToComposite);

for (int inputId = 0; inputId < inputSources.size(); inputId++) {
if (inputId == PRIMARY_INPUT_ID) {
for (int i = 0; i < inputSources.size(); i++) {
if (i == PRIMARY_INPUT_INDEX) {
continue;
}
// Select the secondary streams' frame that would be composited next. The frame selected is
@@ -352,7 +358,7 @@ private synchronized ImmutableList<InputFrameInfo> getFramesToComposite() {
// 2. Two or more frames, and at least one frame has timestamp greater than the target
// timestamp.
// The smaller timestamp is taken if two timestamps have the same distance from the primary.
InputSource secondaryInputSource = inputSources.get(inputSources.keyAt(inputId));
InputSource secondaryInputSource = inputSources.valueAt(i);
if (secondaryInputSource.frameInfos.size() == 1 && !secondaryInputSource.isInputEnded) {
return ImmutableList.of();
}
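The selection rule described in the comment above (take the secondary frame whose timestamp is closest to the primary frame's, preferring the smaller timestamp on a tie) can be sketched in isolation. The helper below is illustrative only and is not part of DefaultVideoCompositor:

// Illustrative only: returns the timestamp in secondaryTimesUs closest to primaryTimeUs,
// preferring the smaller timestamp when two candidates are equally close.
static long selectClosestTimestampUs(java.util.List<Long> secondaryTimesUs, long primaryTimeUs) {
  long selected = secondaryTimesUs.get(0);
  long minDistance = Long.MAX_VALUE;
  for (long timeUs : secondaryTimesUs) {
    long distance = Math.abs(timeUs - primaryTimeUs);
    if (distance < minDistance || (distance == minDistance && timeUs < selected)) {
      minDistance = distance;
      selected = timeUs;
    }
  }
  return selected;
}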
@@ -33,6 +33,7 @@
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.util.SparseArray;
import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
@@ -211,9 +212,11 @@ public void onEnded() {
}

@Override
public void registerInput(int sequenceIndex) throws VideoFrameProcessingException {
public void registerInput(@IntRange(from = 0) int inputIndex)
throws VideoFrameProcessingException {
checkStateNotNull(videoCompositor);
videoCompositor.registerInputSource(sequenceIndex);
checkState(!contains(preProcessors, inputIndex));
videoCompositor.registerInputSource(inputIndex);
// Creating a new VideoFrameProcessor for the input.
VideoFrameProcessor preProcessor =
videoFrameProcessorFactory
@@ -222,7 +225,7 @@ public void registerInput(int sequenceIndex) throws VideoFrameProcessingExceptio
// Texture output to compositor.
(textureProducer, texture, presentationTimeUs, syncObject) ->
queuePreProcessingOutputToCompositor(
sequenceIndex, textureProducer, texture, presentationTimeUs),
inputIndex, textureProducer, texture, presentationTimeUs),
PRE_COMPOSITOR_TEXTURE_OUTPUT_CAPACITY)
.build()
.create(
@@ -253,16 +256,16 @@ public void onError(VideoFrameProcessingException exception) {

@Override
public void onEnded() {
onPreProcessingVideoFrameProcessorEnded(sequenceIndex);
onPreProcessingVideoFrameProcessorEnded(inputIndex);
}
});
preProcessors.put(sequenceIndex, preProcessor);
preProcessors.put(inputIndex, preProcessor);
}

@Override
public VideoFrameProcessor getProcessor(int sequenceIndex) {
checkState(contains(preProcessors, sequenceIndex));
return preProcessors.get(sequenceIndex);
public VideoFrameProcessor getProcessor(int inputIndex) {
checkState(contains(preProcessors, inputIndex));
return preProcessors.get(inputIndex);
}

@Override
@@ -16,10 +16,12 @@

package androidx.media3.effect;

import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;

import android.content.Context;
import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
@@ -99,7 +101,9 @@ public void initialize() {
}

@Override
public void registerInput(int sequenceIndex) throws VideoFrameProcessingException {
public void registerInput(@IntRange(from = 0, to = 0) int inputIndex)
throws VideoFrameProcessingException {
checkArgument(inputIndex == SINGLE_INPUT_INDEX);
checkStateNotNull(videoFrameProcessor == null && !released);

videoFrameProcessor =
@@ -162,7 +166,8 @@ public void onEnded() {
}

@Override
public VideoFrameProcessor getProcessor(int sequenceIndex) {
public VideoFrameProcessor getProcessor(int inputIndex) {
checkArgument(inputIndex == SINGLE_INPUT_INDEX);
return checkStateNotNull(videoFrameProcessor);
}

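The effect of the new argument check can be illustrated with a short sketch (hypothetical caller; per the @IntRange(from = 0, to = 0) annotation, the only valid index is assumed to be 0):

// Illustrative only: SingleInputVideoGraph accepts exactly one input index.
void registerSingleInput(SingleInputVideoGraph videoGraph) throws VideoFrameProcessingException {
  videoGraph.registerInput(/* inputIndex= */ 0); // Accepted: the only valid index.
  // videoGraph.registerInput(/* inputIndex= */ 1); // Would fail
  //     checkArgument(inputIndex == SINGLE_INPUT_INDEX).
}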
@@ -15,6 +15,7 @@
*/
package androidx.media3.effect;

import androidx.annotation.IntRange;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;
@@ -48,29 +49,33 @@ interface Listener {
/**
* Registers a new input source.
*
* @param sequenceIndex The sequence index of the input source which is used to determine the
* order of the input sources. The same index should to be used in {@link #queueInputTexture}.
* @param inputIndex The index of the input source which could be used to determine the order of
* the input sources. The same index should be used in {@link #queueInputTexture}. The
* index must start from 0. All inputs must be registered before
* {@linkplain #queueInputTexture(int, GlTextureProducer, GlTextureInfo, ColorInfo, long) queueing}
* textures.
*/
void registerInputSource(int sequenceIndex);
void registerInputSource(@IntRange(from = 0) int inputIndex);

/**
* Signals that no more frames will come from the upstream {@link GlTextureProducer.Listener}.
*
* @param inputId The identifier for an input source, returned from {@link #registerInputSource}.
* @param inputIndex The index of the input source.
*/
void signalEndOfInputSource(int inputId);
void signalEndOfInputSource(int inputIndex);

/**
* Queues an input texture to be composited.
*
* @param inputId The identifier for an input source, returned from {@link #registerInputSource}.
* @param inputIndex The index of the input source, the same index used when {@linkplain
* #registerInputSource(int) registering the input source}.
* @param textureProducer The source from where the {@code inputTexture} is produced.
* @param inputTexture The {@link GlTextureInfo} to composite.
* @param colorInfo The {@link ColorInfo} of {@code inputTexture}.
* @param presentationTimeUs The presentation time of {@code inputTexture}, in microseconds.
*/
void queueInputTexture(
int inputId,
int inputIndex,
GlTextureProducer textureProducer,
GlTextureInfo inputTexture,
ColorInfo colorInfo,
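To tie the interface methods together, here is a minimal sketch of a hypothetical two-input caller (illustrative only, not part of this commit; the producers and textures are assumed to come from upstream VideoFrameProcessor instances, and ColorInfo.SRGB_BT709_FULL is used as in the test further below):

// Illustrative only: register all inputs first, then queue textures, then signal end of input.
void compositeTwoSources(
    VideoCompositor videoCompositor,
    GlTextureProducer primaryProducer,
    GlTextureInfo primaryTexture,
    GlTextureProducer secondaryProducer,
    GlTextureInfo secondaryTexture) {
  videoCompositor.registerInputSource(/* inputIndex= */ 0);
  videoCompositor.registerInputSource(/* inputIndex= */ 1);

  videoCompositor.queueInputTexture(
      /* inputIndex= */ 0,
      primaryProducer,
      primaryTexture,
      ColorInfo.SRGB_BT709_FULL,
      /* presentationTimeUs= */ 0);
  videoCompositor.queueInputTexture(
      /* inputIndex= */ 1,
      secondaryProducer,
      secondaryTexture,
      ColorInfo.SRGB_BT709_FULL,
      /* presentationTimeUs= */ 0);

  // Signal end of input per index once no more frames will be queued for it.
  videoCompositor.signalEndOfInputSource(/* inputIndex= */ 0);
  videoCompositor.signalEndOfInputSource(/* inputIndex= */ 1);
}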
@@ -429,7 +429,6 @@ private VideoFrameProcessor initialize(Format sourceFormat, Clock clock)
outputColorInfo =
inputColorInfo.buildUpon().setColorTransfer(C.COLOR_TRANSFER_ST2084).build();
}
int videoGraphInputId;
try {
videoGraph =
previewingVideoGraphFactory.create(
@@ -445,12 +444,12 @@ private VideoFrameProcessor initialize(Format sourceFormat, Clock clock)
Size size = currentSurfaceAndSize.second;
maybeSetOutputSurfaceInfo(surface, size.getWidth(), size.getHeight());
}
videoGraphInputId = videoGraph.registerInput();
videoGraph.registerInput(/* inputIndex= */ 0);
} catch (VideoFrameProcessingException e) {
throw new VideoSink.VideoSinkException(e, sourceFormat);
}
state = STATE_INITIALIZED;
return videoGraph.getProcessor(videoGraphInputId);
return videoGraph.getProcessor(/* inputIndex= */ 0);
}

private boolean isInitialized() {
@@ -560,8 +559,6 @@ public VideoSinkImpl(Context context) {
// reduces decoder timeouts, and consider restoring.
videoFrameProcessorMaxPendingFrameCount =
Util.getMaxPendingFramesCountForMediaCodecDecoders(context);
videoGraph.registerInput(/* sequenceIndex= */ 0);
videoFrameProcessor = videoGraph.getProcessor(/* sequenceIndex= */ 0);

videoEffects = new ArrayList<>();
finalBufferPresentationTimeUs = C.TIME_UNSET;
@@ -856,8 +856,8 @@ private static VideoFrameProcessorTestRunner.Builder createVideoFrameProcessorTe
VideoCompositor videoCompositor,
@Nullable ExecutorService executorService,
GlObjectsProvider glObjectsProvider) {
int sequenceIndex = 0;
videoCompositor.registerInputSource(sequenceIndex);
int inputIndex = 0;
videoCompositor.registerInputSource(inputIndex);
DefaultVideoFrameProcessor.Factory.Builder defaultVideoFrameProcessorFactoryBuilder =
new DefaultVideoFrameProcessor.Factory.Builder()
.setGlObjectsProvider(glObjectsProvider)
@@ -871,7 +871,7 @@ private static VideoFrameProcessorTestRunner.Builder createVideoFrameProcessorTe
textureBitmapReader.readBitmapUnpremultipliedAlpha(
outputTexture, presentationTimeUs);
videoCompositor.queueInputTexture(
sequenceIndex,
inputIndex,
outputTextureProducer,
outputTexture,
ColorInfo.SRGB_BT709_FULL,
@@ -885,7 +885,7 @@ private static VideoFrameProcessorTestRunner.Builder createVideoFrameProcessorTe
.setTestId(testId)
.setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactoryBuilder.build())
.setBitmapReader(textureBitmapReader)
.setOnEndedListener(() -> videoCompositor.signalEndOfInputSource(sequenceIndex));
.setOnEndedListener(() -> videoCompositor.signalEndOfInputSource(inputIndex));
}
}
