diff --git a/cc/layers/video_frame_provider.h b/cc/layers/video_frame_provider.h index e7619a3fb7a84..a6789c11166f9 100644 --- a/cc/layers/video_frame_provider.h +++ b/cc/layers/video_frame_provider.h @@ -43,8 +43,6 @@ class CC_EXPORT VideoFrameProvider { virtual void StopRendering() = 0; // Notifies the client that GetCurrentFrame() will return new data. - // TODO(dalecurtis): Nuke this once VideoFrameProviderClientImpl is using a - // BeginFrameObserver based approach. http://crbug.com/336733 virtual void DidReceiveFrame() = 0; protected: @@ -75,9 +73,6 @@ class CC_EXPORT VideoFrameProvider { // // Clients should call this in response to UpdateCurrentFrame() returning true // or in response to a DidReceiveFrame() call. - // - // TODO(dalecurtis): Remove text about DidReceiveFrame() once the old path - // has been removed. http://crbug.com/439548 virtual scoped_refptr GetCurrentFrame() = 0; // Called in response to DidReceiveFrame() or a return value of true from diff --git a/chromecast/renderer/media/cma_renderer.cc b/chromecast/renderer/media/cma_renderer.cc index 2db1906e6ee76..0cdd1aa6f2d86 100644 --- a/chromecast/renderer/media/cma_renderer.cc +++ b/chromecast/renderer/media/cma_renderer.cc @@ -378,7 +378,7 @@ void CmaRenderer::OnStatisticsUpdated( void CmaRenderer::OnNaturalSizeChanged(const gfx::Size& size) { DCHECK(thread_checker_.CalledOnValidThread()); - video_renderer_sink_->PaintFrameUsingOldRenderingPath( + video_renderer_sink_->PaintSingleFrame( video_overlay_factory_->CreateFrame(size)); client_->OnVideoNaturalSizeChange(size); } diff --git a/media/base/null_video_sink.cc b/media/base/null_video_sink.cc index f8471056c99c8..e96fdda240dcb 100644 --- a/media/base/null_video_sink.cc +++ b/media/base/null_video_sink.cc @@ -88,8 +88,7 @@ void NullVideoSink::CallRender() { delay); } -void NullVideoSink::PaintFrameUsingOldRenderingPath( - const scoped_refptr& frame) { +void NullVideoSink::PaintSingleFrame(const scoped_refptr& frame) { 
new_frame_cb_.Run(frame); } diff --git a/media/base/null_video_sink.h b/media/base/null_video_sink.h index ca298b61fbaf1..3c458744b1390 100644 --- a/media/base/null_video_sink.h +++ b/media/base/null_video_sink.h @@ -35,8 +35,7 @@ class MEDIA_EXPORT NullVideoSink : public VideoRendererSink { // VideoRendererSink implementation. void Start(RenderCallback* callback) override; void Stop() override; - void PaintFrameUsingOldRenderingPath( - const scoped_refptr& frame) override; + void PaintSingleFrame(const scoped_refptr& frame) override; void set_tick_clock_for_testing(base::TickClock* tick_clock) { tick_clock_ = tick_clock; diff --git a/media/base/null_video_sink_unittest.cc b/media/base/null_video_sink_unittest.cc index 494dd4e6f464f..aa0603be7ba65 100644 --- a/media/base/null_video_sink_unittest.cc +++ b/media/base/null_video_sink_unittest.cc @@ -74,7 +74,7 @@ TEST_F(NullVideoSinkTest, BasicFunctionality) { // The sink shouldn't have to be started to use the paint method. EXPECT_CALL(*this, FrameReceived(test_frame)); - sink->PaintFrameUsingOldRenderingPath(test_frame); + sink->PaintSingleFrame(test_frame); { SCOPED_TRACE("Waiting for sink startup."); diff --git a/media/base/video_renderer_sink.h b/media/base/video_renderer_sink.h index 9dd62d87e6af3..f8e64bfc19f27 100644 --- a/media/base/video_renderer_sink.h +++ b/media/base/video_renderer_sink.h @@ -53,16 +53,12 @@ class MEDIA_EXPORT VideoRendererSink { // liberally if no new frames are expected. virtual void Stop() = 0; - // Instead of using a callback driven rendering path, allow clients to paint - // frames as they see fit without regard for the compositor. - // TODO(dalecurtis): This should be nuked once experiments show the new path - // is amazing and the old path is not! 
http://crbug.com/439548 - virtual void PaintFrameUsingOldRenderingPath( - const scoped_refptr& frame) = 0; - - // TODO(dalecurtis): We may need OnSizeChanged() and OnOpacityChanged() - // methods on this interface if background rendering is handled inside of - // the media layer instead of by VideoFrameCompositor. + // Instead of using a callback driven rendering path, allow clients to paint a + // single frame as they see fit without regard for the compositor; this is + // useful for painting poster images or hole frames without having to issue a + // Start() -> Render() -> Stop(). Clients are free to mix usage of Render() + // based painting and PaintSingleFrame(). + virtual void PaintSingleFrame(const scoped_refptr& frame) = 0; virtual ~VideoRendererSink() {} }; diff --git a/media/blink/video_frame_compositor.cc b/media/blink/video_frame_compositor.cc index 36392a34b0e2b..3cf2a1cb22d85 100644 --- a/media/blink/video_frame_compositor.cc +++ b/media/blink/video_frame_compositor.cc @@ -131,13 +131,12 @@ void VideoFrameCompositor::Stop() { base::Unretained(this), false)); } -void VideoFrameCompositor::PaintFrameUsingOldRenderingPath( +void VideoFrameCompositor::PaintSingleFrame( const scoped_refptr& frame) { if (!compositor_task_runner_->BelongsToCurrentThread()) { compositor_task_runner_->PostTask( - FROM_HERE, - base::Bind(&VideoFrameCompositor::PaintFrameUsingOldRenderingPath, - base::Unretained(this), frame)); + FROM_HERE, base::Bind(&VideoFrameCompositor::PaintSingleFrame, + base::Unretained(this), frame)); return; } @@ -172,7 +171,7 @@ base::TimeDelta VideoFrameCompositor::GetCurrentFrameTimestamp() const { // When the VFC is stopped, |callback_| is cleared; this synchronously // prevents CallRender() from invoking ProcessNewFrame(), and so // |current_frame_| won't change again until after Start(). (Assuming that - // PaintFrameUsingOldRenderingPath() is not also called while stopped.) + // PaintSingleFrame() is not also called while stopped.) 
if (!current_frame_) return base::TimeDelta(); return current_frame_->timestamp(); diff --git a/media/blink/video_frame_compositor.h b/media/blink/video_frame_compositor.h index b116e4f69883d..68a4d79fe173a 100644 --- a/media/blink/video_frame_compositor.h +++ b/media/blink/video_frame_compositor.h @@ -75,8 +75,7 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor // same thread (typically the media thread). void Start(RenderCallback* callback) override; void Stop() override; - void PaintFrameUsingOldRenderingPath( - const scoped_refptr& frame) override; + void PaintSingleFrame(const scoped_refptr& frame) override; // Returns |current_frame_| if |client_| is set. If no |client_| is set, // |is_background_rendering_| is true, and |callback_| is set, it requests a @@ -94,7 +93,7 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor // Returns the timestamp of the current (possibly stale) frame, or // base::TimeDelta() if there is no current frame. This method may be called // from the media thread as long as the VFC is stopped. (Assuming that - // PaintFrameUsingOldRenderingPath() is not also called while stopped.) + // PaintSingleFrame() is not also called while stopped.) base::TimeDelta GetCurrentFrameTimestamp() const; void set_tick_clock_for_testing(std::unique_ptr tick_clock) { diff --git a/media/blink/video_frame_compositor_unittest.cc b/media/blink/video_frame_compositor_unittest.cc index 5d901f4ab0287..74d42d0e85488 100644 --- a/media/blink/video_frame_compositor_unittest.cc +++ b/media/blink/video_frame_compositor_unittest.cc @@ -102,12 +102,12 @@ TEST_F(VideoFrameCompositorTest, InitialValues) { EXPECT_FALSE(compositor()->GetCurrentFrame().get()); } -TEST_F(VideoFrameCompositorTest, PaintFrameUsingOldRenderingPath) { +TEST_F(VideoFrameCompositorTest, PaintSingleFrame) { scoped_refptr expected = VideoFrame::CreateEOSFrame(); // Should notify compositor synchronously. 
EXPECT_EQ(0, did_receive_frame_count()); - compositor()->PaintFrameUsingOldRenderingPath(expected); + compositor()->PaintSingleFrame(expected); scoped_refptr actual = compositor()->GetCurrentFrame(); EXPECT_EQ(expected, actual); EXPECT_EQ(1, did_receive_frame_count()); diff --git a/media/blink/webmediaplayer_impl.cc b/media/blink/webmediaplayer_impl.cc index 6391207e4b56e..0b6354a9f296e 100644 --- a/media/blink/webmediaplayer_impl.cc +++ b/media/blink/webmediaplayer_impl.cc @@ -913,7 +913,7 @@ void WebMediaPlayerImpl::OnPipelineSuspended() { if (isRemote()) { scoped_refptr frame = cast_impl_.GetCastingBanner(); if (frame) - compositor_->PaintFrameUsingOldRenderingPath(frame); + compositor_->PaintSingleFrame(frame); } #endif @@ -1199,7 +1199,7 @@ void WebMediaPlayerImpl::SuspendForRemote() { if (pipeline_controller_.IsPipelineSuspended()) { scoped_refptr frame = cast_impl_.GetCastingBanner(); if (frame) - compositor_->PaintFrameUsingOldRenderingPath(frame); + compositor_->PaintSingleFrame(frame); } UpdatePlayState(); diff --git a/media/filters/video_renderer_algorithm.h b/media/filters/video_renderer_algorithm.h index 24e648411da85..b5ce7932c223d 100644 --- a/media/filters/video_renderer_algorithm.h +++ b/media/filters/video_renderer_algorithm.h @@ -136,6 +136,8 @@ class MEDIA_EXPORT VideoRendererAlgorithm { size_t frames_queued() const { return frame_queue_.size(); } + scoped_refptr first_frame() { return frame_queue_.front().frame; } + // Returns the average of the duration of all frames in |frame_queue_| // as measured in wall clock (not media) time. 
base::TimeDelta average_frame_duration() const { diff --git a/media/mojo/services/mojo_renderer_impl.cc b/media/mojo/services/mojo_renderer_impl.cc index 564941378a7ef..6f028c877e71c 100644 --- a/media/mojo/services/mojo_renderer_impl.cc +++ b/media/mojo/services/mojo_renderer_impl.cc @@ -190,7 +190,7 @@ void MojoRendererImpl::OnVideoNaturalSizeChange(mojo::SizePtr size) { DVLOG(2) << __FUNCTION__ << ": " << new_size.ToString(); DCHECK(task_runner_->BelongsToCurrentThread()); - video_renderer_sink_->PaintFrameUsingOldRenderingPath( + video_renderer_sink_->PaintSingleFrame( video_overlay_factory_->CreateFrame(new_size)); client_->OnVideoNaturalSizeChange(new_size); } diff --git a/media/renderers/video_renderer_impl.cc b/media/renderers/video_renderer_impl.cc index e216f2a8f0816..cb92e45dcc62a 100644 --- a/media/renderers/video_renderer_impl.cc +++ b/media/renderers/video_renderer_impl.cc @@ -57,8 +57,6 @@ VideoRendererImpl::VideoRendererImpl( tick_clock_(new base::DefaultTickClock()), was_background_rendering_(false), time_progressing_(false), - render_first_frame_and_stop_(false), - posted_maybe_stop_after_first_paint_(false), last_video_memory_usage_(0), have_renderered_frames_(false), last_frame_opaque_(false), @@ -139,8 +137,6 @@ void VideoRendererImpl::Initialize( DCHECK(!init_cb.is_null()); DCHECK(!wall_clock_time_cb.is_null()); DCHECK_EQ(kUninitialized, state_); - DCHECK(!render_first_frame_and_stop_); - DCHECK(!posted_maybe_stop_after_first_paint_); DCHECK(!was_background_rendering_); DCHECK(!time_progressing_); DCHECK(!have_renderered_frames_); @@ -182,40 +178,12 @@ scoped_refptr VideoRendererImpl::Render( // we've had a proper startup sequence. DCHECK(result); - // Notify client of size and opacity changes if this is the first frame - // or if those have changed from the last frame. 
- if (!have_renderered_frames_ || - (last_frame_natural_size_ != result->natural_size())) { - last_frame_natural_size_ = result->natural_size(); - task_runner_->PostTask( - FROM_HERE, - base::Bind(&VideoRendererImpl::OnVideoNaturalSizeChange, - weak_factory_.GetWeakPtr(), last_frame_natural_size_)); - } - if (!have_renderered_frames_ || - (last_frame_opaque_ != IsOpaque(result->format()))) { - last_frame_opaque_ = IsOpaque(result->format()); - task_runner_->PostTask( - FROM_HERE, base::Bind(&VideoRendererImpl::OnVideoOpacityChange, - weak_factory_.GetWeakPtr(), last_frame_opaque_)); - } - // Declare HAVE_NOTHING if we reach a state where we can't progress playback // any further. We don't want to do this if we've already done so, reached // end of stream, or have frames available. We also don't want to do this in // background rendering mode unless this isn't the first background render // tick and we haven't seen any decoded frames since the last one. - // - // We use the inverse of |render_first_frame_and_stop_| as a proxy for the - // value of |time_progressing_| here since we can't access it from the - // compositor thread. If we're here (in Render()) the sink must have been - // started -- but if it was started only to render the first frame and stop, - // then |time_progressing_| is likely false. If we're still in Render() when - // |render_first_frame_and_stop_| is false, then |time_progressing_| is true. - // If |time_progressing_| is actually true when |render_first_frame_and_stop_| - // is also true, then the ended callback will be harmlessly delayed until - // MaybeStopSinkAfterFirstPaint() runs and the next Render() call comes in. 
- MaybeFireEndedCallback_Locked(!render_first_frame_and_stop_); + MaybeFireEndedCallback_Locked(true); if (buffering_state_ == BUFFERING_HAVE_ENOUGH && !received_end_of_stream_ && !algorithm_->effective_frames_queued() && (!background_rendering || @@ -237,28 +205,15 @@ scoped_refptr VideoRendererImpl::Render( UpdateStats_Locked(); was_background_rendering_ = background_rendering; - // After painting the first frame, if playback hasn't started, we post a - // delayed task to request that the sink be stopped. The task is delayed to - // give videos with autoplay time to start. - // - // OnTimeStateChanged() will clear this flag if time starts before we get here - // and MaybeStopSinkAfterFirstPaint() will ignore this request if time starts - // before the call executes. - if (render_first_frame_and_stop_ && !posted_maybe_stop_after_first_paint_) { - posted_maybe_stop_after_first_paint_ = true; - task_runner_->PostDelayedTask( - FROM_HERE, base::Bind(&VideoRendererImpl::MaybeStopSinkAfterFirstPaint, - weak_factory_.GetWeakPtr()), - base::TimeDelta::FromMilliseconds(250)); - } - // Always post this task, it will acquire new frames if necessary and since it // happens on another thread, even if we don't have room in the queue now, by // the time it runs (may be delayed up to 50ms for complex decodes!) we might. 
- task_runner_->PostTask(FROM_HERE, base::Bind(&VideoRendererImpl::AttemptRead, - weak_factory_.GetWeakPtr())); + task_runner_->PostTask( + FROM_HERE, + base::Bind(&VideoRendererImpl::AttemptReadAndCheckForMetadataChanges, + weak_factory_.GetWeakPtr(), result->format(), + result->natural_size())); - have_renderered_frames_ = true; return result; } @@ -316,16 +271,6 @@ void VideoRendererImpl::OnWaitingForDecryptionKey() { client_->OnWaitingForDecryptionKey(); } -void VideoRendererImpl::OnVideoNaturalSizeChange(const gfx::Size& size) { - DCHECK(task_runner_->BelongsToCurrentThread()); - client_->OnVideoNaturalSizeChange(size); -} - -void VideoRendererImpl::OnVideoOpacityChange(bool opaque) { - DCHECK(task_runner_->BelongsToCurrentThread()); - client_->OnVideoOpacityChange(opaque); -} - void VideoRendererImpl::SetTickClockForTesting( std::unique_ptr tick_clock) { tick_clock_.swap(tick_clock); @@ -390,92 +335,86 @@ void VideoRendererImpl::FrameReady(uint32_t sequence_token, VideoFrameStream::Status status, const scoped_refptr& frame) { DCHECK(task_runner_->BelongsToCurrentThread()); - bool start_sink = false; - { - base::AutoLock auto_lock(lock_); - // Stream has been reset and this VideoFrame was decoded before the reset - // but the async copy finished after. - if (sequence_token != sequence_token_) - return; - - DCHECK_NE(state_, kUninitialized); - DCHECK_NE(state_, kFlushed); - - CHECK(pending_read_); - pending_read_ = false; + base::AutoLock auto_lock(lock_); - if (status == VideoFrameStream::DECODE_ERROR) { - DCHECK(!frame); - task_runner_->PostTask( - FROM_HERE, - base::Bind(&VideoRendererImpl::OnPlaybackError, - weak_factory_.GetWeakPtr(), PIPELINE_ERROR_DECODE)); - return; - } + // Stream has been reset and this VideoFrame was decoded before the reset + // but the async copy finished after. 
+ if (sequence_token != sequence_token_) + return; - // Already-queued VideoFrameStream ReadCB's can fire after various state - // transitions have happened; in that case just drop those frames - // immediately. - if (state_ == kFlushing) - return; + DCHECK_NE(state_, kUninitialized); + DCHECK_NE(state_, kFlushed); - DCHECK_EQ(state_, kPlaying); + CHECK(pending_read_); + pending_read_ = false; - // Can happen when demuxers are preparing for a new Seek(). - if (!frame) { - DCHECK_EQ(status, VideoFrameStream::DEMUXER_READ_ABORTED); - return; - } + if (status == VideoFrameStream::DECODE_ERROR) { + DCHECK(!frame); + task_runner_->PostTask( + FROM_HERE, + base::Bind(&VideoRendererImpl::OnPlaybackError, + weak_factory_.GetWeakPtr(), PIPELINE_ERROR_DECODE)); + return; + } - if (frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)) { - DCHECK(!received_end_of_stream_); - received_end_of_stream_ = true; - - // See if we can fire EOS immediately instead of waiting for Render(). - MaybeFireEndedCallback_Locked(time_progressing_); - } else if ((low_delay_ || !video_frame_stream_->CanReadWithoutStalling()) && - IsBeforeStartTime(frame->timestamp())) { - // Don't accumulate frames that are earlier than the start time if we - // won't have a chance for a better frame, otherwise we could declare - // HAVE_ENOUGH_DATA and start playback prematurely. - AttemptRead_Locked(); - return; - } else { - // If the sink hasn't been started, we still have time to release less - // than ideal frames prior to startup. We don't use IsBeforeStartTime() - // here since it's based on a duration estimate and we can be exact here. - if (!sink_started_ && frame->timestamp() <= start_timestamp_) - algorithm_->Reset(); - - AddReadyFrame_Locked(frame); - } + // Already-queued VideoFrameStream ReadCB's can fire after various state + // transitions have happened; in that case just drop those frames + // immediately. 
+ if (state_ == kFlushing) + return; - // Attempt to purge bad frames in case of underflow or backgrounding. - RemoveFramesForUnderflowOrBackgroundRendering(); + DCHECK_EQ(state_, kPlaying); - // Signal buffering state if we've met our conditions. - if (buffering_state_ == BUFFERING_HAVE_NOTHING && HaveEnoughData_Locked()) { - TransitionToHaveEnough_Locked(); - if (!sink_started_ && !rendered_end_of_stream_) { - start_sink = true; - render_first_frame_and_stop_ = true; - posted_maybe_stop_after_first_paint_ = false; - } - } + // Can happen when demuxers are preparing for a new Seek(). + if (!frame) { + DCHECK_EQ(status, VideoFrameStream::DEMUXER_READ_ABORTED); + return; + } - // Always request more decoded video if we have capacity. This serves two - // purposes: - // 1) Prerolling while paused - // 2) Keeps decoding going if video rendering thread starts falling behind + if (frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)) { + DCHECK(!received_end_of_stream_); + received_end_of_stream_ = true; + + // See if we can fire EOS immediately instead of waiting for Render(). + MaybeFireEndedCallback_Locked(time_progressing_); + } else if ((low_delay_ || !video_frame_stream_->CanReadWithoutStalling()) && + IsBeforeStartTime(frame->timestamp())) { + // Don't accumulate frames that are earlier than the start time if we + // won't have a chance for a better frame, otherwise we could declare + // HAVE_ENOUGH_DATA and start playback prematurely. AttemptRead_Locked(); + return; + } else { + // If the sink hasn't been started, we still have time to release less + // than ideal frames prior to startup. We don't use IsBeforeStartTime() + // here since it's based on a duration estimate and we can be exact here. 
+ if (!sink_started_ && frame->timestamp() <= start_timestamp_) + algorithm_->Reset(); + + AddReadyFrame_Locked(frame); } - // If time is progressing, the sink has already been started; this may be true - // if we have previously underflowed, yet weren't stopped because of audio. - if (start_sink) { - DCHECK(!sink_started_); - StartSink(); + // Attempt to purge bad frames in case of underflow or backgrounding. + RemoveFramesForUnderflowOrBackgroundRendering(); + + // Signal buffering state if we've met our conditions. + if (buffering_state_ == BUFFERING_HAVE_NOTHING && HaveEnoughData_Locked()) { + TransitionToHaveEnough_Locked(); + + // Paint the first frame if necessary. + if (!rendered_end_of_stream_ && !sink_started_) { + DCHECK(algorithm_->frames_queued()); + scoped_refptr frame = algorithm_->first_frame(); + CheckForMetadataChanges(frame->format(), frame->natural_size()); + sink_->PaintSingleFrame(frame); + } } + + // Always request more decoded video if we have capacity. This serves two + // purposes: + // 1) Prerolling while paused + // 2) Keeps decoding going if video rendering thread starts falling behind + AttemptRead_Locked(); } bool VideoRendererImpl::HaveEnoughData_Locked() { @@ -532,11 +471,6 @@ void VideoRendererImpl::AddReadyFrame_Locked( algorithm_->EnqueueFrame(frame); } -void VideoRendererImpl::AttemptRead() { - base::AutoLock auto_lock(lock_); - AttemptRead_Locked(); -} - void VideoRendererImpl::AttemptRead_Locked() { DCHECK(task_runner_->BelongsToCurrentThread()); lock_.AssertAcquired(); @@ -604,16 +538,6 @@ void VideoRendererImpl::UpdateStats_Locked() { } } -void VideoRendererImpl::MaybeStopSinkAfterFirstPaint() { - DCHECK(task_runner_->BelongsToCurrentThread()); - - if (!time_progressing_ && sink_started_) - StopSink(); - - base::AutoLock auto_lock(lock_); - render_first_frame_and_stop_ = false; -} - bool VideoRendererImpl::HaveReachedBufferingCap() { DCHECK(task_runner_->BelongsToCurrentThread()); const size_t kMaxVideoFrames = 
limits::kMaxVideoFrames; @@ -731,4 +655,32 @@ void VideoRendererImpl::RemoveFramesForUnderflowOrBackgroundRendering() { } } +void VideoRendererImpl::CheckForMetadataChanges(VideoPixelFormat pixel_format, + const gfx::Size& natural_size) { + DCHECK(task_runner_->BelongsToCurrentThread()); + + // Notify client of size and opacity changes if this is the first frame + // or if those have changed from the last frame. + if (!have_renderered_frames_ || last_frame_natural_size_ != natural_size) { + last_frame_natural_size_ = natural_size; + client_->OnVideoNaturalSizeChange(last_frame_natural_size_); + } + + const bool is_opaque = IsOpaque(pixel_format); + if (!have_renderered_frames_ || last_frame_opaque_ != is_opaque) { + last_frame_opaque_ = is_opaque; + client_->OnVideoOpacityChange(last_frame_opaque_); + } + + have_renderered_frames_ = true; +} + +void VideoRendererImpl::AttemptReadAndCheckForMetadataChanges( + VideoPixelFormat pixel_format, + const gfx::Size& natural_size) { + base::AutoLock auto_lock(lock_); + CheckForMetadataChanges(pixel_format, natural_size); + AttemptRead_Locked(); +} + } // namespace media diff --git a/media/renderers/video_renderer_impl.h b/media/renderers/video_renderer_impl.h index a21bc2808268d..5eb2c00f06983 100644 --- a/media/renderers/video_renderer_impl.h +++ b/media/renderers/video_renderer_impl.h @@ -95,8 +95,6 @@ class MEDIA_EXPORT VideoRendererImpl void OnStatisticsUpdate(const PipelineStatistics& stats); void OnBufferingStateChange(BufferingState state); void OnWaitingForDecryptionKey(); - void OnVideoNaturalSizeChange(const gfx::Size& size); - void OnVideoOpacityChange(bool opaque); // Callback for |video_frame_stream_| to deliver decoded video frames and // report video decoding status. 
If a frame is available the planes will be @@ -117,7 +115,6 @@ // Helper method that schedules an asynchronous read from the // |video_frame_stream_| as long as there isn't a pending read and we have // capacity. - void AttemptRead(); void AttemptRead_Locked(); // Called when VideoFrameStream::Reset() completes. @@ -133,10 +130,6 @@ // them to 0. void UpdateStats_Locked(); - // Called after we've painted the first frame. If |time_progressing_| is - // false it Stop() on |sink_|. - void MaybeStopSinkAfterFirstPaint(); - // Returns true if there is no more room for additional buffered frames. bool HaveReachedBufferingCap(); @@ -150,8 +143,8 @@ // // When called from the media thread, |time_progressing| should reflect the // value of |time_progressing_|. When called from Render() on the sink - // callback thread, the inverse of |render_first_frame_and_stop_| should be - // used as a proxy for |time_progressing_|. + // callback thread, |time_progressing| must be true since Render() could not + // have been called otherwise. void MaybeFireEndedCallback_Locked(bool time_progressing); // Helper method for converting a single media timestamp to wall clock time. @@ -179,6 +172,16 @@ // avoid any stalling. void RemoveFramesForUnderflowOrBackgroundRendering(); + // Notifies |client_| in the event of frame size or opacity changes. Must be + // called on |task_runner_|. + void CheckForMetadataChanges(VideoPixelFormat pixel_format, + const gfx::Size& natural_size); + + // Calls both AttemptRead_Locked() and CheckForMetadataChanges(). Must be + // called on |task_runner_|. + void AttemptReadAndCheckForMetadataChanges(VideoPixelFormat pixel_format, + const gfx::Size& natural_size); + scoped_refptr task_runner_; // Sink which calls into VideoRendererImpl via Render() for video frames. 
Do @@ -277,18 +280,13 @@ class MEDIA_EXPORT VideoRendererImpl // only be accessed from |task_runner_|. bool time_progressing_; - // Indicates that Render() should only render the first frame and then request - // that the sink be stopped. |posted_maybe_stop_after_first_paint_| is used - // to avoid repeated task posts. - bool render_first_frame_and_stop_; - bool posted_maybe_stop_after_first_paint_; - // Memory usage of |algorithm_| recorded during the last UpdateStats_Locked() // call. int64_t last_video_memory_usage_; - // Indicates if Render() has been called yet. + // Indicates if a frame has been processed by CheckForMetadataChanges(). bool have_renderered_frames_; + // Tracks last frame properties to detect and notify client of any changes. gfx::Size last_frame_natural_size_; bool last_frame_opaque_; diff --git a/media/renderers/video_renderer_impl_unittest.cc b/media/renderers/video_renderer_impl_unittest.cc index ff8d050565c65..737729c02fdb8 100644 --- a/media/renderers/video_renderer_impl_unittest.cc +++ b/media/renderers/video_renderer_impl_unittest.cc @@ -724,13 +724,11 @@ TEST_F(VideoRendererImplTest, UnderflowRecovery_CantReadWithoutStalling) { UnderflowRecoveryTest(UnderflowTestType::CANT_READ_WITHOUT_STALLING); } -// Verifies that the sink is stopped after rendering the first frame if -// playback hasn't started. +// Verifies that the first frame is painted w/o rendering being started. 
TEST_F(VideoRendererImplTest, RenderingStopsAfterFirstFrame) { InitializeWithLowDelay(true); QueueFrames("0"); - EXPECT_CALL(mock_cb_, FrameReceived(HasTimestamp(0))); EXPECT_CALL(mock_cb_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH)); EXPECT_CALL(mock_cb_, OnStatisticsUpdate(_)).Times(AnyNumber()); EXPECT_CALL(mock_cb_, OnVideoNaturalSizeChange(_)).Times(1); @@ -738,11 +736,11 @@ TEST_F(VideoRendererImplTest, RenderingStopsAfterFirstFrame) { EXPECT_CALL(mock_cb_, OnEnded()).Times(0); { - SCOPED_TRACE("Waiting for sink to stop."); + SCOPED_TRACE("Waiting for first frame to be painted."); WaitableMessageLoopEvent event; - null_video_sink_->set_background_render(true); - null_video_sink_->set_stop_cb(event.GetClosure()); + EXPECT_CALL(mock_cb_, FrameReceived(HasTimestamp(0))) + .WillOnce(RunClosure(event.GetClosure())); StartPlayingFrom(0); EXPECT_TRUE(IsReadPending()); @@ -755,12 +753,12 @@ TEST_F(VideoRendererImplTest, RenderingStopsAfterFirstFrame) { } // Verifies that the sink is stopped after rendering the first frame if -// playback ha started. +// playback has started. 
TEST_F(VideoRendererImplTest, RenderingStopsAfterOneFrameWithEOS) { InitializeWithLowDelay(true); QueueFrames("0"); - EXPECT_CALL(mock_cb_, FrameReceived(HasTimestamp(0))); + EXPECT_CALL(mock_cb_, FrameReceived(HasTimestamp(0))).Times(2); EXPECT_CALL(mock_cb_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH)); EXPECT_CALL(mock_cb_, OnStatisticsUpdate(_)).Times(AnyNumber()); EXPECT_CALL(mock_cb_, OnVideoNaturalSizeChange(_)).Times(1); @@ -800,15 +798,11 @@ TEST_F(VideoRendererImplTest, RenderingStartedThenStopped) { EXPECT_CALL(mock_cb_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH)); EXPECT_CALL(mock_cb_, FrameReceived(HasTimestamp(0))) .WillOnce(RunClosure(event.GetClosure())); - EXPECT_CALL(mock_cb_, OnStatisticsUpdate(_)) - .WillRepeatedly(SaveArg<0>(&last_pipeline_statistics)); EXPECT_CALL(mock_cb_, OnVideoNaturalSizeChange(_)).Times(1); EXPECT_CALL(mock_cb_, OnVideoOpacityChange(_)).Times(1); StartPlayingFrom(0); event.RunAndWait(); Mock::VerifyAndClearExpectations(&mock_cb_); - EXPECT_EQ(0u, last_pipeline_statistics.video_frames_dropped); - EXPECT_EQ(460800, last_pipeline_statistics.video_memory_usage); } // Consider the case that rendering is faster than we setup the test event. 
@@ -819,6 +813,8 @@ TEST_F(VideoRendererImplTest, RenderingStartedThenStopped) { .Times(testing::AtMost(1)); EXPECT_CALL(mock_cb_, OnBufferingStateChange(BUFFERING_HAVE_NOTHING)) .Times(testing::AtMost(1)); + EXPECT_CALL(mock_cb_, OnStatisticsUpdate(_)) + .WillRepeatedly(SaveArg<0>(&last_pipeline_statistics)); renderer_->OnTimeStateChanged(true); time_source_.StartTicking(); @@ -836,7 +832,7 @@ TEST_F(VideoRendererImplTest, RenderingStartedThenStopped) { // reported EXPECT_EQ(0u, last_pipeline_statistics.video_frames_dropped); EXPECT_EQ(4u, last_pipeline_statistics.video_frames_decoded); - EXPECT_EQ(460800, last_pipeline_statistics.video_memory_usage); + EXPECT_EQ(115200, last_pipeline_statistics.video_memory_usage); AdvanceTimeInMs(30); WaitForEnded(); diff --git a/media/test/pipeline_integration_test_base.cc b/media/test/pipeline_integration_test_base.cc index a87f1810baac2..3842183676024 100644 --- a/media/test/pipeline_integration_test_base.cc +++ b/media/test/pipeline_integration_test_base.cc @@ -354,8 +354,9 @@ void PipelineIntegrationTestBase::OnVideoFramePaint( int result; if (frame->metadata()->GetInteger(VideoFrameMetadata::COLOR_SPACE, &result)) last_video_frame_color_space_ = static_cast(result); - if (!hashing_enabled_) + if (!hashing_enabled_ || last_frame_ == frame) return; + last_frame_ = frame; VideoFrame::HashFrameForTesting(&md5_context_, frame); } diff --git a/media/test/pipeline_integration_test_base.h b/media/test/pipeline_integration_test_base.h index c97e23abf841e..816492816998e 100644 --- a/media/test/pipeline_integration_test_base.h +++ b/media/test/pipeline_integration_test_base.h @@ -135,6 +135,7 @@ class PipelineIntegrationTestBase : public Pipeline::Client { DummyTickClock dummy_clock_; AudioHardwareConfig hardware_config_; PipelineMetadata metadata_; + scoped_refptr last_frame_; PipelineStatus StartInternal(std::unique_ptr data_source, CdmContext* cdm_context,