Commit

Fix review comment
claincly committed May 8, 2024
1 parent 0403e58 commit 766ff44
Showing 15 changed files with 108 additions and 83 deletions.
@@ -16,6 +16,7 @@

package androidx.media3.common;

import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
import androidx.media3.common.util.UnstableApi;

@@ -73,20 +74,23 @@ interface Listener {
* <p>An underlying processing {@link VideoFrameProcessor} is created every time this method is
* called.
*
* <p>All inputs must be registered before rendering frames to the underlying
* {@link #getProcessor(int) VideoFrameProcessor}.
*
* <p>If the method throws, the caller must call {@link #release}.
*
* @param sequenceIndex The sequence index of the input which can aid ordering of the inputs. The
* index must start from 0.
* @param inputIndex The index of the input which could be used to order the inputs.
* The index must start from 0.
*/
void registerInput(int sequenceIndex) throws VideoFrameProcessingException;
void registerInput(@IntRange(from = 0) int inputIndex) throws VideoFrameProcessingException;

/**
* Returns the {@link VideoFrameProcessor} that handles the processing for an input registered via
* {@link #registerInput(int)}. If the {@code sequenceIndex} is not {@linkplain
* {@link #registerInput(int)}. If the {@code inputIndex} is not {@linkplain
* #registerInput(int) registered} before, this method will throw an {@link
* IllegalStateException}.
*/
VideoFrameProcessor getProcessor(int sequenceIndex);
VideoFrameProcessor getProcessor(int inputIndex);

/**
* Sets the output surface and supporting information.
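For orientation, here is a minimal, hypothetical usage sketch of the renamed methods. The setUpInputs helper, the two-input count, and the variable names are assumptions for illustration; only registerInput and getProcessor come from the interface above, the graph is assumed to support multiple inputs, and imports from androidx.media3.common are omitted.

// Sketch only: `videoGraph` is an already-constructed multi-input VideoGraph.
void setUpInputs(VideoGraph videoGraph) throws VideoFrameProcessingException {
  // Indices start from 0, and every input must be registered before frames are
  // rendered to any of the underlying processors.
  videoGraph.registerInput(/* inputIndex= */ 0);
  videoGraph.registerInput(/* inputIndex= */ 1);
  // Each registered index maps to its own VideoFrameProcessor.
  VideoFrameProcessor primaryProcessor = videoGraph.getProcessor(/* inputIndex= */ 0);
  VideoFrameProcessor overlayProcessor = videoGraph.getProcessor(/* inputIndex= */ 1);
}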
@@ -18,6 +18,7 @@
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.common.util.Util.contains;
import static java.lang.Math.abs;
import static java.lang.Math.max;

@@ -78,7 +79,8 @@ public final class DefaultVideoCompositor implements VideoCompositor {

private static final String THREAD_NAME = "Effect:DefaultVideoCompositor:GlThread";
private static final String TAG = "DefaultVideoCompositor";
private static final int PRIMARY_INPUT_ID = 0;
// TODO: b/338579287: Use the first registered index instead of a constant value.
private static final int PRIMARY_INPUT_INDEX = 0;

private final VideoCompositor.Listener listener;
private final GlTextureProducer.Listener textureOutputListener;
@@ -142,32 +144,35 @@ public DefaultVideoCompositor(
}

@Override
public synchronized void registerInputSource(int sequenceIndex) {
inputSources.put(sequenceIndex, new InputSource());
public synchronized void registerInputSource(@IntRange(from = 0) int inputIndex) {
checkState(!contains(inputSources, inputIndex));
inputSources.put(inputIndex, new InputSource());
}

@Override
public synchronized void signalEndOfInputSource(int inputId) {
inputSources.get(inputId).isInputEnded = true;
public synchronized void signalEndOfInputSource(int inputIndex) {
checkState(contains(inputSources, inputIndex));
inputSources.get(inputIndex).isInputEnded = true;
boolean allInputsEnded = true;
for (int i = 0; i < inputSources.size(); i++) {
if (!inputSources.get(inputSources.keyAt(i)).isInputEnded) {
if (!inputSources.valueAt(i).isInputEnded) {
allInputsEnded = false;
break;
}
}

this.allInputsEnded = allInputsEnded;
if (inputSources.get(PRIMARY_INPUT_ID).frameInfos.isEmpty()) {
if (inputId == PRIMARY_INPUT_ID) {
if (inputSources.get(PRIMARY_INPUT_INDEX).frameInfos.isEmpty()) {
if (inputIndex == PRIMARY_INPUT_INDEX) {
releaseExcessFramesInAllSecondaryStreams();
}
if (allInputsEnded) {
listener.onEnded();
return;
}
}
if (inputId != PRIMARY_INPUT_ID && inputSources.get(inputId).frameInfos.size() == 1) {
if (inputIndex != PRIMARY_INPUT_INDEX
&& inputSources.get(inputIndex).frameInfos.size() == 1) {
// When a secondary stream ends input, composite if there was only one pending frame in the
// stream.
videoFrameProcessingTaskExecutor.submit(this::maybeComposite);
@@ -176,12 +181,13 @@ public synchronized void signalEndOfInputSource(int inputId) {

@Override
public synchronized void queueInputTexture(
int inputId,
int inputIndex,
GlTextureProducer textureProducer,
GlTextureInfo inputTexture,
ColorInfo colorInfo,
long presentationTimeUs) {
InputSource inputSource = inputSources.get(inputId);
checkState(contains(inputSources, inputIndex));
InputSource inputSource = inputSources.get(inputIndex);
checkState(!inputSource.isInputEnded);
checkState(!ColorInfo.isTransferHdr(colorInfo), "HDR input is not supported.");
if (configuredColorInfo == null) {
@@ -195,10 +201,10 @@ public synchronized void queueInputTexture(
textureProducer,
inputTexture,
presentationTimeUs,
settings.getOverlaySettings(inputId, presentationTimeUs));
settings.getOverlaySettings(inputIndex, presentationTimeUs));
inputSource.frameInfos.add(inputFrameInfo);

if (inputId == PRIMARY_INPUT_ID) {
if (inputIndex == PRIMARY_INPUT_INDEX) {
releaseExcessFramesInAllSecondaryStreams();
} else {
releaseExcessFramesInSecondaryStream(inputSource);
@@ -224,11 +230,11 @@ public void releaseOutputTexture(long presentationTimeUs) {
}

private synchronized void releaseExcessFramesInAllSecondaryStreams() {
for (int i = 0; i < inputSources.size(); i++) {
if (i == PRIMARY_INPUT_ID) {
for (int inputIndex = 0; inputIndex < inputSources.size(); inputIndex++) {
if (inputIndex == PRIMARY_INPUT_INDEX) {
continue;
}
releaseExcessFramesInSecondaryStream(inputSources.get(inputSources.keyAt(i)));
releaseExcessFramesInSecondaryStream(inputSources.valueAt(inputIndex));
}
}

@@ -240,7 +246,7 @@ private synchronized void releaseExcessFramesInAllSecondaryStreams() {
* began.
*/
private synchronized void releaseExcessFramesInSecondaryStream(InputSource secondaryInputSource) {
InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_INDEX);
// If the primary stream output is ended, all secondary frames can be released.
if (primaryInputSource.frameInfos.isEmpty() && primaryInputSource.isInputEnded) {
releaseFrames(
@@ -291,7 +297,7 @@ private synchronized void maybeComposite()
return;
}

InputFrameInfo primaryInputFrame = framesToComposite.get(PRIMARY_INPUT_ID);
InputFrameInfo primaryInputFrame = framesToComposite.get(PRIMARY_INPUT_INDEX);

ImmutableList.Builder<Size> inputSizes = new ImmutableList.Builder<>();
for (int i = 0; i < framesToComposite.size(); i++) {
@@ -312,7 +318,7 @@ private synchronized void maybeComposite()
textureOutputListener.onTextureRendered(
/* textureProducer= */ this, outputTexture, outputPresentationTimestampUs, syncObject);

InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_INDEX);
releaseFrames(primaryInputSource, /* numberOfFramesToRelease= */ 1);
releaseExcessFramesInAllSecondaryStreams();

@@ -332,18 +338,18 @@ private synchronized ImmutableList<InputFrameInfo> getFramesToComposite() {
if (outputTexturePool.freeTextureCount() == 0) {
return ImmutableList.of();
}
for (int inputId = 0; inputId < inputSources.size(); inputId++) {
if (inputSources.get(inputSources.keyAt(inputId)).frameInfos.isEmpty()) {
for (int i = 0; i < inputSources.size(); i++) {
if (inputSources.valueAt(i).frameInfos.isEmpty()) {
return ImmutableList.of();
}
}
ImmutableList.Builder<InputFrameInfo> framesToComposite = new ImmutableList.Builder<>();
InputFrameInfo primaryFrameToComposite =
inputSources.get(PRIMARY_INPUT_ID).frameInfos.element();
inputSources.get(PRIMARY_INPUT_INDEX).frameInfos.element();
framesToComposite.add(primaryFrameToComposite);

for (int inputId = 0; inputId < inputSources.size(); inputId++) {
if (inputId == PRIMARY_INPUT_ID) {
for (int i = 0; i < inputSources.size(); i++) {
if (i == PRIMARY_INPUT_INDEX) {
continue;
}
// Select the secondary streams' frame that would be composited next. The frame selected is
Expand All @@ -352,7 +358,7 @@ private synchronized ImmutableList<InputFrameInfo> getFramesToComposite() {
// 2. Two or more frames, and at least one frame has timestamp greater than the target
// timestamp.
// The smaller timestamp is taken if two timestamps have the same distance from the primary.
InputSource secondaryInputSource = inputSources.get(inputSources.keyAt(inputId));
InputSource secondaryInputSource = inputSources.valueAt(i);
if (secondaryInputSource.frameInfos.size() == 1 && !secondaryInputSource.isInputEnded) {
return ImmutableList.of();
}
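The secondary-frame selection rule described in the getFramesToComposite comment above can be shown with a small self-contained sketch. This is illustrative only, not the class's actual implementation; it assumes pending timestamps are queued in ascending order, which is what makes ties resolve to the smaller timestamp.

import java.util.List;

final class ClosestTimestampExample {
  // Picks the pending secondary timestamp closest to the primary frame's timestamp.
  static long selectClosestTimestampUs(List<Long> secondaryTimestampsUs, long primaryTimestampUs) {
    long selectedTimestampUs = secondaryTimestampsUs.get(0);
    long minDistanceUs = Long.MAX_VALUE;
    for (long timestampUs : secondaryTimestampsUs) {
      long distanceUs = Math.abs(timestampUs - primaryTimestampUs);
      // A strictly smaller distance wins, so on a tie the earlier (smaller)
      // timestamp, seen first in ascending order, is kept.
      if (distanceUs < minDistanceUs) {
        minDistanceUs = distanceUs;
        selectedTimestampUs = timestampUs;
      }
    }
    return selectedTimestampUs;
  }
}

For example, with pending secondary timestamps 0, 40,000 and 80,000 µs and a primary frame at 60,000 µs, both 40,000 and 80,000 are 20,000 µs away, so the smaller one, 40,000, is selected.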
@@ -33,6 +33,7 @@
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.util.SparseArray;
import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
@@ -211,9 +212,10 @@ public void onEnded() {
}

@Override
public void registerInput(int sequenceIndex) throws VideoFrameProcessingException {
checkStateNotNull(videoCompositor);
videoCompositor.registerInputSource(sequenceIndex);
public void registerInput(@IntRange(from = 0) int inputIndex)
throws VideoFrameProcessingException {
checkState(!contains(preProcessors, inputIndex));
checkNotNull(videoCompositor).registerInputSource(inputIndex);
// Creating a new VideoFrameProcessor for the input.
VideoFrameProcessor preProcessor =
videoFrameProcessorFactory
@@ -222,7 +224,7 @@ public void registerInput(int sequenceIndex) throws VideoFrameProcessingException
// Texture output to compositor.
(textureProducer, texture, presentationTimeUs, syncObject) ->
queuePreProcessingOutputToCompositor(
sequenceIndex, textureProducer, texture, presentationTimeUs),
inputIndex, textureProducer, texture, presentationTimeUs),
PRE_COMPOSITOR_TEXTURE_OUTPUT_CAPACITY)
.build()
.create(
@@ -253,16 +255,16 @@ public void onError(VideoFrameProcessingException exception) {

@Override
public void onEnded() {
onPreProcessingVideoFrameProcessorEnded(sequenceIndex);
onPreProcessingVideoFrameProcessorEnded(inputIndex);
}
});
preProcessors.put(sequenceIndex, preProcessor);
preProcessors.put(inputIndex, preProcessor);
}

@Override
public VideoFrameProcessor getProcessor(int sequenceIndex) {
checkState(contains(preProcessors, sequenceIndex));
return preProcessors.get(sequenceIndex);
public VideoFrameProcessor getProcessor(int inputIndex) {
checkState(contains(preProcessors, inputIndex));
return preProcessors.get(inputIndex);
}

@Override
@@ -109,6 +109,6 @@ private PreviewingSingleInputVideoGraph(

@Override
public void renderOutputFrame(long renderTimeNs) {
getProcessor(SINGLE_INPUT_INDEX).renderOutputFrame(renderTimeNs);
getProcessor(getInputIndex()).renderOutputFrame(renderTimeNs);
}
}
@@ -16,11 +16,13 @@

package androidx.media3.effect;

import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;

import android.content.Context;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
@@ -38,9 +40,6 @@
@UnstableApi
public abstract class SingleInputVideoGraph implements VideoGraph {

/** The index of the only {@linkplain #registerInput(int) registered} input. */
public static final int SINGLE_INPUT_INDEX = 0;

private final Context context;
private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
private final ColorInfo outputColorInfo;
Expand All @@ -56,6 +55,7 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
private boolean isEnded;
private boolean released;
private volatile boolean hasProducedFrameWithTimestampZero;
private int inputIndex;

/**
* Creates an instance.
@@ -86,6 +86,7 @@ public SingleInputVideoGraph(
this.renderFramesAutomatically = renderFramesAutomatically;
this.presentation = presentation;
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
this.inputIndex = C.INDEX_UNSET;
}

/**
@@ -99,9 +100,11 @@ public void initialize() {
}

@Override
public void registerInput(int sequenceIndex) throws VideoFrameProcessingException {
public void registerInput(int inputIndex) throws VideoFrameProcessingException {
checkStateNotNull(videoFrameProcessor == null && !released);
checkState(this.inputIndex == C.INDEX_UNSET);

this.inputIndex = inputIndex;
videoFrameProcessor =
videoFrameProcessorFactory.create(
context,
@@ -162,7 +165,8 @@ public void onEnded() {
}

@Override
public VideoFrameProcessor getProcessor(int sequenceIndex) {
public VideoFrameProcessor getProcessor(int inputIndex) {
checkArgument(this.inputIndex != C.INDEX_UNSET && this.inputIndex == inputIndex);
return checkStateNotNull(videoFrameProcessor);
}

@@ -192,6 +196,10 @@ public void release() {
released = true;
}

protected int getInputIndex() {
return inputIndex;
}

protected long getInitialTimestampOffsetUs() {
return initialTimestampOffsetUs;
}
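With the fixed SINGLE_INPUT_INDEX constant removed, the single-input graph now latches whichever index is registered first and rejects any other. A hypothetical sketch of that usage follows; the helper method, graph variable, and index value are illustrative, and imports are omitted.

// Sketch only: `graph` is an already-constructed SingleInputVideoGraph subclass.
void registerSingleInput(SingleInputVideoGraph graph) throws VideoFrameProcessingException {
  graph.registerInput(/* inputIndex= */ 5);
  // Only the latched index is accepted; calling getProcessor with any other index throws.
  VideoFrameProcessor processor = graph.getProcessor(/* inputIndex= */ 5);
}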
@@ -15,6 +15,7 @@
*/
package androidx.media3.effect;

import androidx.annotation.IntRange;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;
@@ -48,29 +49,33 @@ interface Listener {
/**
* Registers a new input source.
*
* @param sequenceIndex The sequence index of the input source which is used to determine the
* order of the input sources. The same index should be used in {@link #queueInputTexture}.
* @param inputIndex The index of the input source which could be used to determine the order of
* the input sources. The same index should be used in {@link #queueInputTexture}. The
* index must start from 0. All inputs must be registered before
* {@linkplain #queueInputTexture(int, GlTextureProducer, GlTextureInfo, ColorInfo, long) queueing}
* textures.
*/
void registerInputSource(int sequenceIndex);
void registerInputSource(@IntRange(from = 0) int inputIndex);

/**
* Signals that no more frames will come from the upstream {@link GlTextureProducer.Listener}.
*
* @param inputId The identifier for an input source, returned from {@link #registerInputSource}.
* @param inputIndex The index of the input source.
*/
void signalEndOfInputSource(int inputId);
void signalEndOfInputSource(int inputIndex);

/**
* Queues an input texture to be composited.
*
* @param inputId The identifier for an input source, returned from {@link #registerInputSource}.
* @param inputIndex The index of the input source, the same index used when {@linkplain
* #registerInputSource(int) registering the input source}.
* @param textureProducer The source from where the {@code inputTexture} is produced.
* @param inputTexture The {@link GlTextureInfo} to composite.
* @param colorInfo The {@link ColorInfo} of {@code inputTexture}.
* @param presentationTimeUs The presentation time of {@code inputTexture}, in microseconds.
*/
void queueInputTexture(
int inputId,
int inputIndex,
GlTextureProducer textureProducer,
GlTextureInfo inputTexture,
ColorInfo colorInfo,
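To make the documented contract concrete, a hypothetical call sequence for a two-input compositor is sketched below. The helper method, producer, textures, ColorInfo, and timestamps are placeholders rather than real values, and imports are omitted; the method signatures match the interface above.

// Sketch only: shows the intended ordering of the VideoCompositor calls.
void feedCompositor(
    VideoCompositor videoCompositor,
    GlTextureProducer textureProducer,
    GlTextureInfo primaryTexture,
    GlTextureInfo overlayTexture,
    ColorInfo colorInfo) {
  // 1. Register every input before queueing any texture; indices start from 0.
  videoCompositor.registerInputSource(/* inputIndex= */ 0);
  videoCompositor.registerInputSource(/* inputIndex= */ 1);
  // 2. Queue textures against the same indices used at registration.
  videoCompositor.queueInputTexture(
      /* inputIndex= */ 0, textureProducer, primaryTexture, colorInfo, /* presentationTimeUs= */ 0);
  videoCompositor.queueInputTexture(
      /* inputIndex= */ 1, textureProducer, overlayTexture, colorInfo, /* presentationTimeUs= */ 0);
  // 3. Signal end of each input once no more frames will be produced for it.
  videoCompositor.signalEndOfInputSource(/* inputIndex= */ 0);
  videoCompositor.signalEndOfInputSource(/* inputIndex= */ 1);
}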