Skip to content

Commit

Permalink
CompositionPlayer: skip decode-only frames upstream of the VideoGraph
Browse files Browse the repository at this point in the history
This is necessary for prewarming. With prewarming, in a sequence of 2
videos, the second renderer is enabled before the first one is disabled,
and decode-only frames should be skipped before the second renderer is
started. The problem is that the second renderer will forward frames to
a BufferingVideoSink before it is started, which will delay the frame
handling and therefore not skip the frame before the renderer is
started.

PiperOrigin-RevId: 721032049
  • Loading branch information
kim-vde authored and copybara-github committed Jan 29, 2025
1 parent 3c0e2ee commit 9af43c7
Show file tree
Hide file tree
Showing 6 changed files with 28 additions and 43 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -173,8 +173,7 @@ public void setVideoEffects(List<Effect> videoEffects) {
}

@Override
public void setStreamTimestampInfo(
long streamStartPositionUs, long bufferTimestampAdjustmentUs, long lastResetPositionUs) {
public void setStreamTimestampInfo(long streamStartPositionUs, long bufferTimestampAdjustmentUs) {
if (streamStartPositionUs != this.streamStartPositionUs) {
videoFrameRenderControl.onStreamStartPositionChanged(streamStartPositionUs);
this.streamStartPositionUs = streamStartPositionUs;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -814,9 +814,7 @@ protected void onPositionReset(long positionUs, boolean joining) throws ExoPlayb
videoSink.flush(/* resetPosition= */ true);
}
videoSink.setStreamTimestampInfo(
getOutputStreamStartPositionUs(),
getBufferTimestampAdjustmentUs(),
getLastResetPositionUs());
getOutputStreamStartPositionUs(), getBufferTimestampAdjustmentUs());
pendingVideoSinkInputStreamChange = true;
}
super.onPositionReset(positionUs, joining);
Expand Down Expand Up @@ -1480,6 +1478,11 @@ protected boolean processOutputBuffer(
long presentationTimeUs = bufferPresentationTimeUs - outputStreamOffsetUs;

if (videoSink != null) {
// Skip decode-only buffers, e.g. after seeking, immediately.
if (isDecodeOnlyBuffer && !isLastBuffer) {
skipOutputBuffer(codec, bufferIndex, presentationTimeUs);
return true;
}
long framePresentationTimeUs = bufferPresentationTimeUs + getBufferTimestampAdjustmentUs();
return videoSink.handleInputFrame(
framePresentationTimeUs,
Expand Down Expand Up @@ -1631,9 +1634,7 @@ protected void onProcessedStreamChange() {
// Signaling end of the previous stream.
videoSink.signalEndOfCurrentInputStream();
videoSink.setStreamTimestampInfo(
getOutputStreamStartPositionUs(),
getBufferTimestampAdjustmentUs(),
getLastResetPositionUs());
getOutputStreamStartPositionUs(), getBufferTimestampAdjustmentUs());
} else {
videoFrameReleaseControl.onProcessedStreamChange();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -441,7 +441,7 @@ public void onOutputFrameAvailableForRendering(long framePresentationTimeUs) {
if (newOutputStreamStartPositionUs != null
&& newOutputStreamStartPositionUs != outputStreamStartPositionUs) {
defaultVideoSink.setStreamTimestampInfo(
newOutputStreamStartPositionUs, bufferTimestampAdjustmentUs, /* unused */ C.TIME_UNSET);
newOutputStreamStartPositionUs, bufferTimestampAdjustmentUs);
outputStreamStartPositionUs = newOutputStreamStartPositionUs;
}
boolean isLastFrame =
Expand Down Expand Up @@ -588,8 +588,7 @@ private void flush(boolean resetPosition) {
if (streamStartPositionsUs.size() == 1) {
long lastStartPositionUs = checkNotNull(streamStartPositionsUs.pollFirst());
// defaultVideoSink should use the latest startPositionUs if none is passed after flushing.
defaultVideoSink.setStreamTimestampInfo(
lastStartPositionUs, bufferTimestampAdjustmentUs, /* unused */ C.TIME_UNSET);
defaultVideoSink.setStreamTimestampInfo(lastStartPositionUs, bufferTimestampAdjustmentUs);
}
lastOutputBufferPresentationTimeUs = C.TIME_UNSET;
finalBufferPresentationTimeUs = C.TIME_UNSET;
Expand All @@ -611,7 +610,7 @@ private void setPlaybackSpeed(float speed) {
private void setBufferTimestampAdjustment(long bufferTimestampAdjustmentUs) {
this.bufferTimestampAdjustmentUs = bufferTimestampAdjustmentUs;
defaultVideoSink.setStreamTimestampInfo(
outputStreamStartPositionUs, bufferTimestampAdjustmentUs, /* unused */ C.TIME_UNSET);
outputStreamStartPositionUs, bufferTimestampAdjustmentUs);
}

private boolean shouldRenderToInputVideoSink() {
Expand All @@ -638,7 +637,6 @@ private final class InputVideoSink implements VideoSink, PlaybackVideoGraphWrapp
@Nullable private Format inputFormat;
private @InputType int inputType;
private long inputBufferTimestampAdjustmentUs;
private long lastResetPositionUs;

/**
* The buffer presentation timestamp, in microseconds, of the most recently registered frame.
Expand Down Expand Up @@ -796,7 +794,7 @@ public void setVideoEffects(List<Effect> videoEffects) {

@Override
public void setStreamTimestampInfo(
long streamStartPositionUs, long bufferTimestampAdjustmentUs, long lastResetPositionUs) {
long streamStartPositionUs, long bufferTimestampAdjustmentUs) {
// Input timestamps should always be positive because they are offset by ExoPlayer. Adding a
// position to the queue with timestamp 0 should therefore always apply it as long as it is
// the only position in the queue.
Expand All @@ -809,7 +807,6 @@ public void setStreamTimestampInfo(
// directly at the output of the VideoGraph because no frame has been input yet following the
// flush.
PlaybackVideoGraphWrapper.this.setBufferTimestampAdjustment(inputBufferTimestampAdjustmentUs);
this.lastResetPositionUs = lastResetPositionUs;
}

@Override
Expand Down Expand Up @@ -837,26 +834,10 @@ public void enableMayRenderStartOfStream() {
public boolean handleInputFrame(
long framePresentationTimeUs, boolean isLastFrame, VideoFrameHandler videoFrameHandler) {
checkState(isInitialized());

if (!shouldRenderToInputVideoSink()) {
return false;
}

// The sink takes in frames with monotonically increasing, non-offset frame
// timestamps. That is, with two ten-second long videos, the first frame of the second video
// should bear a timestamp of 10s seen from VideoFrameProcessor; while in ExoPlayer, the
// timestamp of the said frame would be 0s, but the streamOffset is incremented by 10s to
// include the duration of the first video. Thus this correction is needed to account for the
// different handling of presentation timestamps in ExoPlayer and VideoFrameProcessor.
//
// inputBufferTimestampAdjustmentUs adjusts the frame presentation time (which is relative to
// the start of a composition) to the buffer timestamp (that corresponds to the player
// position).
long bufferPresentationTimeUs = framePresentationTimeUs - inputBufferTimestampAdjustmentUs;
if (bufferPresentationTimeUs < lastResetPositionUs && !isLastFrame) {
videoFrameHandler.skip();
return true;
}

if (checkStateNotNull(videoFrameProcessor).getPendingInputFrameCount()
>= videoFrameProcessorMaxPendingFrameCount) {
return false;
Expand All @@ -865,7 +846,17 @@ public boolean handleInputFrame(
return false;
}

lastBufferPresentationTimeUs = bufferPresentationTimeUs;
// The sink takes in frames with monotonically increasing, non-offset frame
// timestamps. That is, with two 10s long videos, the first frame of the second video should
// bear a timestamp of 10s seen from VideoFrameProcessor; while in ExoPlayer, the timestamp of
// the said frame would be 0s, but the streamOffset is incremented by 10s to include the
// duration of the first video. Thus this correction is needed to account for the different
// handling of presentation timestamps in ExoPlayer and VideoFrameProcessor.
//
// inputBufferTimestampAdjustmentUs adjusts the frame presentation time (which is relative to
// the start of a composition) to the buffer timestamp (that corresponds to the player
// position).
lastBufferPresentationTimeUs = framePresentationTimeUs - inputBufferTimestampAdjustmentUs;
// Use the frame presentation time as render time so that the SurfaceTexture is accompanied
// by this timestamp. Setting a realtime based release time is only relevant when rendering to
// a SurfaceView, but we render to a surface in this case.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -217,10 +217,8 @@ interface VideoFrameHandler {
* current stream, in microseconds.
* @param bufferTimestampAdjustmentUs The timestamp adjustment to add to the buffer presentation
* timestamps to convert them to frame presentation timestamps, in microseconds.
* @param lastResetPositionUs The renderer last reset position, in microseconds.
*/
void setStreamTimestampInfo(
long streamStartPositionUs, long bufferTimestampAdjustmentUs, long lastResetPositionUs);
void setStreamTimestampInfo(long streamStartPositionUs, long bufferTimestampAdjustmentUs);

/** Sets the output surface info. */
void setOutputSurfaceInfo(Surface outputSurface, Size outputResolution);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -193,12 +193,10 @@ public void setVideoEffects(List<Effect> videoEffects) {
}

@Override
public void setStreamTimestampInfo(
long streamStartPositionUs, long bufferTimestampAdjustmentUs, long lastResetPositionUs) {
public void setStreamTimestampInfo(long streamStartPositionUs, long bufferTimestampAdjustmentUs) {
executeOrDelay(
videoSink ->
videoSink.setStreamTimestampInfo(
streamStartPositionUs, bufferTimestampAdjustmentUs, lastResetPositionUs));
videoSink.setStreamTimestampInfo(streamStartPositionUs, bufferTimestampAdjustmentUs));
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -505,9 +505,7 @@ protected boolean processOutputBuffer(
if (inputStreamPending) {
checkState(streamStartPositionUs != C.TIME_UNSET);
videoSink.setStreamTimestampInfo(
streamStartPositionUs,
/* bufferTimestampAdjustmentUs= */ offsetToCompositionTimeUs,
getLastResetPositionUs());
streamStartPositionUs, /* bufferTimestampAdjustmentUs= */ offsetToCompositionTimeUs);
videoSink.onInputStreamChanged(
VideoSink.INPUT_TYPE_BITMAP,
new Format.Builder()
Expand Down

0 comments on commit 9af43c7

Please sign in to comment.