From 0fa90887c5bf15aa6e73c2df78cae31feb82fa54 Mon Sep 17 00:00:00 2001 From: Per K Date: Wed, 13 Mar 2024 09:52:41 +0100 Subject: [PATCH] Deprecate VideoFrame::timestamp() and set_timestamp MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Instead, add rtp_timestamp and set_rtp_timestamp. Bug: webrtc:13756 Change-Id: Ic4266394003e0d49e525d71f4d830f5e518299cc Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/342781 Commit-Queue: Per Kjellander Reviewed-by: Magnus Jedvert Reviewed-by: Erik Språng Reviewed-by: Markus Handell Cr-Commit-Position: refs/heads/main@{#41894} --- api/video/video_frame.cc | 6 ++ api/video/video_frame.h | 8 ++ ...oder_software_fallback_wrapper_unittest.cc | 4 +- call/call_perf_tests.cc | 2 +- common_video/video_frame_unittest.cc | 18 ++--- media/engine/simulcast_encoder_adapter.cc | 2 +- .../simulcast_encoder_adapter_unittest.cc | 22 +++--- media/engine/webrtc_video_engine_unittest.cc | 10 +-- modules/video_capture/video_capture_impl.cc | 2 +- .../video_coding/codecs/av1/dav1d_decoder.cc | 2 +- .../codecs/av1/libaom_av1_encoder.cc | 2 +- .../codecs/av1/libaom_av1_encoder_unittest.cc | 2 +- .../codecs/h264/h264_decoder_impl.cc | 2 +- .../codecs/h264/h264_encoder_impl.cc | 2 +- .../test/encoded_video_frame_producer.cc | 2 +- .../codecs/test/video_codec_unittest.cc | 2 +- .../codecs/test/videoprocessor.cc | 6 +- .../codecs/test/videoprocessor_unittest.cc | 16 ++-- .../codecs/vp8/libvpx_vp8_decoder.cc | 2 +- .../codecs/vp8/libvpx_vp8_encoder.cc | 13 ++-- .../codecs/vp8/test/vp8_impl_unittest.cc | 6 +- .../codecs/vp9/libvpx_vp9_decoder.cc | 2 +- .../codecs/vp9/libvpx_vp9_encoder.cc | 4 +- .../codecs/vp9/test/vp9_impl_unittest.cc | 8 +- modules/video_coding/generic_decoder.cc | 8 +- .../video_coding/generic_decoder_unittest.cc | 2 +- .../utility/simulcast_test_fixture_impl.cc | 74 +++++++++---------- rtc_tools/video_encoder/video_encoder.cc | 2 +- rtc_tools/video_replay.cc | 2 +- 
sdk/android/src/jni/video_encoder_wrapper.cc | 2 +- sdk/android/src/jni/video_frame.cc | 2 +- .../native/src/objc_video_decoder_factory.mm | 15 ++-- sdk/objc/native/src/objc_video_frame.mm | 2 +- test/configurable_frame_size_encoder.cc | 2 +- test/fake_decoder.cc | 2 +- test/fake_encoder.cc | 2 +- test/fake_texture_frame.cc | 2 +- test/fake_vp8_decoder.cc | 2 +- test/frame_utils.cc | 2 +- test/mappable_native_buffer.cc | 2 +- ...ideo_quality_analyzer_metric_names_test.cc | 2 +- .../default_video_quality_analyzer_test.cc | 2 +- .../video/quality_analyzing_video_decoder.cc | 8 +- .../video/quality_analyzing_video_encoder.cc | 4 +- .../ivf_video_frame_generator_unittest.cc | 2 +- test/video_codec_tester.cc | 20 ++--- test/video_codec_tester_unittest.cc | 4 +- video/adaptation/overuse_frame_detector.cc | 4 +- .../overuse_frame_detector_unittest.cc | 14 ++-- video/end_to_end_tests/fec_tests.cc | 4 +- .../multi_codec_receive_tests.cc | 4 +- .../end_to_end_tests/retransmission_tests.cc | 4 +- video/frame_encode_metadata_writer.cc | 2 +- .../frame_encode_metadata_writer_unittest.cc | 24 +++--- video/receive_statistics_proxy_unittest.cc | 2 +- video/render/video_render_frames.cc | 5 +- video/video_analyzer.cc | 17 +++-- video/video_receive_stream2.h | 2 +- video/video_receive_stream2_unittest.cc | 8 +- video/video_send_stream_tests.cc | 2 +- video/video_stream_encoder.cc | 4 +- video/video_stream_encoder_unittest.cc | 9 ++- 62 files changed, 216 insertions(+), 199 deletions(-) diff --git a/api/video/video_frame.cc b/api/video/video_frame.cc index fd975dfd87..d23a16cfd0 100644 --- a/api/video/video_frame.cc +++ b/api/video/video_frame.cc @@ -199,6 +199,12 @@ VideoFrame::Builder& VideoFrame::Builder::set_reference_time( return *this; } +VideoFrame::Builder& VideoFrame::Builder::set_rtp_timestamp( + uint32_t rtp_timestamp) { + timestamp_rtp_ = rtp_timestamp; + return *this; +} + VideoFrame::Builder& VideoFrame::Builder::set_timestamp_rtp( uint32_t timestamp_rtp) { 
timestamp_rtp_ = timestamp_rtp; diff --git a/api/video/video_frame.h b/api/video/video_frame.h index 5b77bcca23..656c2ee27e 100644 --- a/api/video/video_frame.h +++ b/api/video/video_frame.h @@ -111,6 +111,8 @@ class RTC_EXPORT VideoFrame { const absl::optional& capture_time_identifier); Builder& set_reference_time( const absl::optional& reference_time); + Builder& set_rtp_timestamp(uint32_t rtp_timestamp); + // TODO(https://bugs.webrtc.org/13756): Deprecate and use set_rtp_timestamp. Builder& set_timestamp_rtp(uint32_t timestamp_rtp); Builder& set_ntp_time_ms(int64_t ntp_time_ms); Builder& set_rotation(VideoRotation rotation); @@ -188,9 +190,15 @@ class RTC_EXPORT VideoFrame { } // Set frame timestamp (90kHz). + void set_rtp_timestamp(uint32_t rtp_timestamp) { + timestamp_rtp_ = rtp_timestamp; + } + // TODO(https://bugs.webrtc.org/13756): Deprecate and use set_rtp_timestamp. void set_timestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; } // Get frame timestamp (90kHz). + uint32_t rtp_timestamp() const { return timestamp_rtp_; } + // TODO(https://bugs.webrtc.org/13756): Deprecate and use rtp_timestamp. uint32_t timestamp() const { return timestamp_rtp_; } // Set capture ntp time in milliseconds. diff --git a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc index b3fadcbecf..6a21ecf29a 100644 --- a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc +++ b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc @@ -376,7 +376,7 @@ TEST_F(VideoEncoderSoftwareFallbackWrapperTest, // Encoding a frame using the fallback should arrive at the new callback. 
std::vector types(1, VideoFrameType::kVideoFrameKey); - frame_->set_timestamp(frame_->timestamp() + 1000); + frame_->set_rtp_timestamp(frame_->rtp_timestamp() + 1000); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Encode(*frame_, &types)); EXPECT_EQ(callback2.callback_count_, 1); @@ -384,7 +384,7 @@ TEST_F(VideoEncoderSoftwareFallbackWrapperTest, InitEncode(); EXPECT_EQ(&callback2, fake_encoder_->encode_complete_callback_); - frame_->set_timestamp(frame_->timestamp() + 2000); + frame_->set_rtp_timestamp(frame_->rtp_timestamp() + 2000); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Encode(*frame_, &types)); EXPECT_EQ(callback2.callback_count_, 2); } diff --git a/call/call_perf_tests.cc b/call/call_perf_tests.cc index 33ea4d18a1..0e8fb428cd 100644 --- a/call/call_perf_tests.cc +++ b/call/call_perf_tests.cc @@ -456,7 +456,7 @@ void CallPerfTest::TestCaptureNtpTime( } FrameCaptureTimeList::iterator iter = - capture_time_list_.find(video_frame.timestamp()); + capture_time_list_.find(video_frame.rtp_timestamp()); EXPECT_TRUE(iter != capture_time_list_.end()); // The real capture time has been wrapped to uint32_t before converted diff --git a/common_video/video_frame_unittest.cc b/common_video/video_frame_unittest.cc index ae8e54e7d3..12f8e0860c 100644 --- a/common_video/video_frame_unittest.cc +++ b/common_video/video_frame_unittest.cc @@ -211,8 +211,8 @@ TEST(TestVideoFrame, WidthHeightValues) { const int valid_value = 10; EXPECT_EQ(valid_value, frame.width()); EXPECT_EQ(valid_value, frame.height()); - frame.set_timestamp(123u); - EXPECT_EQ(123u, frame.timestamp()); + frame.set_rtp_timestamp(123u); + EXPECT_EQ(123u, frame.rtp_timestamp()); frame.set_ntp_time_ms(456); EXPECT_EQ(456, frame.ntp_time_ms()); EXPECT_EQ(789, frame.render_time_ms()); @@ -246,7 +246,7 @@ TEST(TestVideoFrame, ShallowCopy) { .set_rotation(kRotation) .set_timestamp_us(0) .build(); - frame1.set_timestamp(timestamp); + frame1.set_rtp_timestamp(timestamp); 
frame1.set_ntp_time_ms(ntp_time_ms); frame1.set_timestamp_us(timestamp_us); VideoFrame frame2(frame1); @@ -260,17 +260,17 @@ TEST(TestVideoFrame, ShallowCopy) { EXPECT_EQ(yuv1->DataU(), yuv2->DataU()); EXPECT_EQ(yuv1->DataV(), yuv2->DataV()); - EXPECT_EQ(frame2.timestamp(), frame1.timestamp()); + EXPECT_EQ(frame2.rtp_timestamp(), frame1.rtp_timestamp()); EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms()); EXPECT_EQ(frame2.timestamp_us(), frame1.timestamp_us()); EXPECT_EQ(frame2.rotation(), frame1.rotation()); - frame2.set_timestamp(timestamp + 1); + frame2.set_rtp_timestamp(timestamp + 1); frame2.set_ntp_time_ms(ntp_time_ms + 1); frame2.set_timestamp_us(timestamp_us + 1); frame2.set_rotation(kVideoRotation_90); - EXPECT_NE(frame2.timestamp(), frame1.timestamp()); + EXPECT_NE(frame2.rtp_timestamp(), frame1.rtp_timestamp()); EXPECT_NE(frame2.ntp_time_ms(), frame1.ntp_time_ms()); EXPECT_NE(frame2.timestamp_us(), frame1.timestamp_us()); EXPECT_NE(frame2.rotation(), frame1.rotation()); @@ -281,14 +281,14 @@ TEST(TestVideoFrame, TextureInitialValues) { 640, 480, 100, 10, webrtc::kVideoRotation_0); EXPECT_EQ(640, frame.width()); EXPECT_EQ(480, frame.height()); - EXPECT_EQ(100u, frame.timestamp()); + EXPECT_EQ(100u, frame.rtp_timestamp()); EXPECT_EQ(10, frame.render_time_ms()); ASSERT_TRUE(frame.video_frame_buffer() != nullptr); EXPECT_TRUE(frame.video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative); - frame.set_timestamp(200); - EXPECT_EQ(200u, frame.timestamp()); + frame.set_rtp_timestamp(200); + EXPECT_EQ(200u, frame.rtp_timestamp()); frame.set_timestamp_us(20); EXPECT_EQ(20, frame.timestamp_us()); } diff --git a/media/engine/simulcast_encoder_adapter.cc b/media/engine/simulcast_encoder_adapter.cc index 653c5db06f..d274c85f3b 100644 --- a/media/engine/simulcast_encoder_adapter.cc +++ b/media/engine/simulcast_encoder_adapter.cc @@ -534,7 +534,7 @@ int SimulcastEncoderAdapter::Encode( // Convert timestamp from RTP 90kHz clock. 
const Timestamp frame_timestamp = - Timestamp::Micros((1000 * input_image.timestamp()) / 90); + Timestamp::Micros((1000 * input_image.rtp_timestamp()) / 90); // If adapter is passed through and only one sw encoder does simulcast, // frame types for all streams should be passed to the encoder unchanged. diff --git a/media/engine/simulcast_encoder_adapter_unittest.cc b/media/engine/simulcast_encoder_adapter_unittest.cc index 42589c75e6..6d71d84772 100644 --- a/media/engine/simulcast_encoder_adapter_unittest.cc +++ b/media/engine/simulcast_encoder_adapter_unittest.cc @@ -672,7 +672,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, ReusesEncodersInOrder) { rtc::scoped_refptr buffer(I420Buffer::Create(1280, 720)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) .set_rotation(kVideoRotation_180) .build(); @@ -1046,7 +1046,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, /*allow_to_i420=*/false)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) .set_rotation(kVideoRotation_180) .build(); @@ -1083,7 +1083,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, NativeHandleForwardingOnlyIfSupported) { /*allow_to_i420=*/true)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) .set_rotation(kVideoRotation_180) .build(); @@ -1141,7 +1141,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, GeneratesKeyFramesOnRequestedLayers) { .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); VideoFrame first_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(0) + .set_rtp_timestamp(0) .set_timestamp_ms(0) .build(); EXPECT_EQ(0, adapter_->Encode(first_frame, &frame_types)); @@ -1161,7 +1161,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, GeneratesKeyFramesOnRequestedLayers) { 
frame_types[2] = VideoFrameType::kVideoFrameDelta; VideoFrame second_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(10000) + .set_rtp_timestamp(10000) .set_timestamp_ms(100000) .build(); EXPECT_EQ(0, adapter_->Encode(second_frame, &frame_types)); @@ -1181,7 +1181,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, GeneratesKeyFramesOnRequestedLayers) { frame_types[2] = VideoFrameType::kVideoFrameDelta; VideoFrame third_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(20000) + .set_rtp_timestamp(20000) .set_timestamp_ms(200000) .build(); EXPECT_EQ(0, adapter_->Encode(third_frame, &frame_types)); @@ -1205,7 +1205,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, TestFailureReturnCodesFromEncodeCalls) { input_buffer->InitializeData(); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(input_buffer) - .set_timestamp_rtp(0) + .set_rtp_timestamp(0) .set_timestamp_us(0) .set_rotation(kVideoRotation_0) .build(); @@ -1310,7 +1310,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, ActivatesCorrectStreamsInInitEncode) { rtc::scoped_refptr buffer(I420Buffer::Create(1280, 720)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) .set_rotation(kVideoRotation_180) .build(); @@ -1348,7 +1348,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, TrustedRateControl) { rtc::scoped_refptr buffer(I420Buffer::Create(1280, 720)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) .set_rotation(kVideoRotation_180) .build(); @@ -1655,7 +1655,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, SupportsSimulcast) { rtc::scoped_refptr buffer(I420Buffer::Create(1280, 720)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) 
.set_rotation(kVideoRotation_180) .build(); @@ -1706,7 +1706,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, SupportsFallback) { rtc::scoped_refptr buffer(I420Buffer::Create(1280, 720)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) .set_rotation(kVideoRotation_180) .build(); diff --git a/media/engine/webrtc_video_engine_unittest.cc b/media/engine/webrtc_video_engine_unittest.cc index 0e2548ae52..e40b4e3e4a 100644 --- a/media/engine/webrtc_video_engine_unittest.cc +++ b/media/engine/webrtc_video_engine_unittest.cc @@ -4270,7 +4270,7 @@ TEST_F(WebRtcVideoChannelTest, EstimatesNtpStartTimeCorrectly) { webrtc::VideoFrame video_frame = webrtc::VideoFrame::Builder() .set_video_frame_buffer(CreateBlackFrameBuffer(4, 4)) - .set_timestamp_rtp(kInitialTimestamp) + .set_rtp_timestamp(kInitialTimestamp) .set_timestamp_us(0) .set_rotation(webrtc::kVideoRotation_0) .build(); @@ -4284,7 +4284,7 @@ TEST_F(WebRtcVideoChannelTest, EstimatesNtpStartTimeCorrectly) { // triggers a constant-overflow warning, hence we're calculating it explicitly // here. 
time_controller_.AdvanceTime(webrtc::TimeDelta::Millis(kFrameOffsetMs)); - video_frame.set_timestamp(kFrameOffsetMs * 90 - 1); + video_frame.set_rtp_timestamp(kFrameOffsetMs * 90 - 1); video_frame.set_ntp_time_ms(kInitialNtpTimeMs + kFrameOffsetMs); stream->InjectFrame(video_frame); @@ -7542,7 +7542,7 @@ TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) { webrtc::VideoFrame video_frame = webrtc::VideoFrame::Builder() .set_video_frame_buffer(CreateBlackFrameBuffer(4, 4)) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_us(0) .set_rotation(webrtc::kVideoRotation_0) .build(); @@ -7561,7 +7561,7 @@ TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) { webrtc::VideoFrame video_frame2 = webrtc::VideoFrame::Builder() .set_video_frame_buffer(CreateBlackFrameBuffer(4, 4)) - .set_timestamp_rtp(200) + .set_rtp_timestamp(200) .set_timestamp_us(0) .set_rotation(webrtc::kVideoRotation_0) .build(); @@ -7581,7 +7581,7 @@ TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) { webrtc::VideoFrame video_frame3 = webrtc::VideoFrame::Builder() .set_video_frame_buffer(CreateBlackFrameBuffer(4, 4)) - .set_timestamp_rtp(300) + .set_rtp_timestamp(300) .set_timestamp_us(0) .set_rotation(webrtc::kVideoRotation_0) .build(); diff --git a/modules/video_capture/video_capture_impl.cc b/modules/video_capture/video_capture_impl.cc index 428253bf23..5588fae161 100644 --- a/modules/video_capture/video_capture_impl.cc +++ b/modules/video_capture/video_capture_impl.cc @@ -223,7 +223,7 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame, VideoFrame captureFrame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(0) + .set_rtp_timestamp(0) .set_timestamp_ms(rtc::TimeMillis()) .set_rotation(!apply_rotation_ ? 
_rotateFrame : kVideoRotation_0) .build(); diff --git a/modules/video_coding/codecs/av1/dav1d_decoder.cc b/modules/video_coding/codecs/av1/dav1d_decoder.cc index d658e401e8..82c0d92cb6 100644 --- a/modules/video_coding/codecs/av1/dav1d_decoder.cc +++ b/modules/video_coding/codecs/av1/dav1d_decoder.cc @@ -186,7 +186,7 @@ int32_t Dav1dDecoder::Decode(const EncodedImage& encoded_image, VideoFrame decoded_frame = VideoFrame::Builder() .set_video_frame_buffer(wrapped_buffer) - .set_timestamp_rtp(encoded_image.RtpTimestamp()) + .set_rtp_timestamp(encoded_image.RtpTimestamp()) .set_ntp_time_ms(encoded_image.ntp_time_ms_) .set_color_space(encoded_image.ColorSpace()) .build(); diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc index 4e2d9bd544..522ce38073 100644 --- a/modules/video_coding/codecs/av1/libaom_av1_encoder.cc +++ b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc @@ -730,7 +730,7 @@ int32_t LibaomAv1Encoder::Encode( encoded_image._frameType = layer_frame->IsKeyframe() ? 
VideoFrameType::kVideoFrameKey : VideoFrameType::kVideoFrameDelta; - encoded_image.SetRtpTimestamp(frame.timestamp()); + encoded_image.SetRtpTimestamp(frame.rtp_timestamp()); encoded_image.SetCaptureTimeIdentifier(frame.capture_time_identifier()); encoded_image.capture_time_ms_ = frame.render_time_ms(); encoded_image.rotation_ = frame.rotation(); diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc index 5b022592ce..abb6fce0cf 100644 --- a/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc +++ b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc @@ -426,7 +426,7 @@ TEST(LibaomAv1EncoderTest, AdheresToTargetBitrateDespiteUnevenFrameTiming) { VideoFrame frame = VideoFrame::Builder() .set_video_frame_buffer( frame_buffer_generator->NextFrame().buffer) - .set_timestamp_rtp(rtp_timestamp) + .set_rtp_timestamp(rtp_timestamp) .build(); RTC_CHECK_EQ(encoder->Encode(frame, &frame_types), WEBRTC_VIDEO_CODEC_OK); diff --git a/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/modules/video_coding/codecs/h264/h264_decoder_impl.cc index c6446c25ce..127b70931b 100644 --- a/modules/video_coding/codecs/h264/h264_decoder_impl.cc +++ b/modules/video_coding/codecs/h264/h264_decoder_impl.cc @@ -616,7 +616,7 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image, VideoFrame decoded_frame = VideoFrame::Builder() .set_video_frame_buffer(cropped_buffer) - .set_timestamp_rtp(input_image.RtpTimestamp()) + .set_rtp_timestamp(input_image.RtpTimestamp()) .set_color_space(color_space) .build(); diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/modules/video_coding/codecs/h264/h264_encoder_impl.cc index 71e80aaf30..65d60a94e7 100644 --- a/modules/video_coding/codecs/h264/h264_encoder_impl.cc +++ b/modules/video_coding/codecs/h264/h264_encoder_impl.cc @@ -544,7 +544,7 @@ int32_t H264EncoderImpl::Encode( encoded_images_[i]._encodedWidth = 
configurations_[i].width; encoded_images_[i]._encodedHeight = configurations_[i].height; - encoded_images_[i].SetRtpTimestamp(input_frame.timestamp()); + encoded_images_[i].SetRtpTimestamp(input_frame.rtp_timestamp()); encoded_images_[i].SetColorSpace(input_frame.color_space()); encoded_images_[i]._frameType = ConvertToVideoFrameType(info.eFrameType); encoded_images_[i].SetSimulcastIndex(configurations_[i].simulcast_idx); diff --git a/modules/video_coding/codecs/test/encoded_video_frame_producer.cc b/modules/video_coding/codecs/test/encoded_video_frame_producer.cc index be2f2bfcba..10879639e7 100644 --- a/modules/video_coding/codecs/test/encoded_video_frame_producer.cc +++ b/modules/video_coding/codecs/test/encoded_video_frame_producer.cc @@ -61,7 +61,7 @@ EncodedVideoFrameProducer::Encode() { VideoFrame frame = VideoFrame::Builder() .set_video_frame_buffer(frame_buffer_generator->NextFrame().buffer) - .set_timestamp_rtp(rtp_timestamp_) + .set_rtp_timestamp(rtp_timestamp_) .set_capture_time_identifier(capture_time_identifier_) .build(); rtp_timestamp_ += rtp_tick; diff --git a/modules/video_coding/codecs/test/video_codec_unittest.cc b/modules/video_coding/codecs/test/video_codec_unittest.cc index 5ac589aaa5..556a167412 100644 --- a/modules/video_coding/codecs/test/video_codec_unittest.cc +++ b/modules/video_coding/codecs/test/video_codec_unittest.cc @@ -110,7 +110,7 @@ VideoFrame VideoCodecUnitTest::NextInputFrame() { const uint32_t timestamp = last_input_frame_timestamp_ + kVideoPayloadTypeFrequency / codec_settings_.maxFramerate; - input_frame.set_timestamp(timestamp); + input_frame.set_rtp_timestamp(timestamp); input_frame.set_timestamp_us(timestamp * (1000 / 90)); last_input_frame_timestamp_ = timestamp; diff --git a/modules/video_coding/codecs/test/videoprocessor.cc b/modules/video_coding/codecs/test/videoprocessor.cc index 2f159fce83..15a51e0dda 100644 --- a/modules/video_coding/codecs/test/videoprocessor.cc +++ 
b/modules/video_coding/codecs/test/videoprocessor.cc @@ -254,7 +254,7 @@ void VideoProcessor::ProcessFrame() { VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(static_cast(timestamp)) + .set_rtp_timestamp(static_cast(timestamp)) .set_timestamp_ms(static_cast(timestamp / kMsToRtpTimestamp)) .set_rotation(webrtc::kVideoRotation_0) .build(); @@ -352,7 +352,7 @@ int32_t VideoProcessor::VideoProcessorDecodeCompleteCallback::Decoded( .set_timestamp_us(image.timestamp_us()) .set_id(image.id()) .build(); - copy.set_timestamp(image.timestamp()); + copy.set_rtp_timestamp(image.rtp_timestamp()); task_queue_->PostTask([this, copy]() { video_processor_->FrameDecoded(copy, simulcast_svc_idx_); @@ -555,7 +555,7 @@ void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame, const int64_t decode_stop_ns = rtc::TimeNanos(); FrameStatistics* frame_stat = - stats_->GetFrameWithTimestamp(decoded_frame.timestamp(), spatial_idx); + stats_->GetFrameWithTimestamp(decoded_frame.rtp_timestamp(), spatial_idx); const size_t frame_number = frame_stat->frame_number; if (!first_decoded_frame_[spatial_idx]) { diff --git a/modules/video_coding/codecs/test/videoprocessor_unittest.cc b/modules/video_coding/codecs/test/videoprocessor_unittest.cc index 40cb5b6395..28c369f00a 100644 --- a/modules/video_coding/codecs/test/videoprocessor_unittest.cc +++ b/modules/video_coding/codecs/test/videoprocessor_unittest.cc @@ -105,15 +105,15 @@ TEST_F(VideoProcessorTest, ProcessFrames_FixedFramerate) { EXPECT_CALL(frame_reader_mock_, PullFrame(_, _, _)) .WillRepeatedly(Return(I420Buffer::Create(kWidth, kHeight))); - EXPECT_CALL( - encoder_mock_, - Encode(Property(&VideoFrame::timestamp, 1 * 90000 / kFramerateFps), _)) + EXPECT_CALL(encoder_mock_, Encode(Property(&VideoFrame::rtp_timestamp, + 1 * 90000 / kFramerateFps), + _)) .Times(1); q_.SendTask([this] { video_processor_->ProcessFrame(); }); - EXPECT_CALL( - encoder_mock_, - 
Encode(Property(&VideoFrame::timestamp, 2 * 90000 / kFramerateFps), _)) + EXPECT_CALL(encoder_mock_, Encode(Property(&VideoFrame::rtp_timestamp, + 2 * 90000 / kFramerateFps), + _)) .Times(1); q_.SendTask([this] { video_processor_->ProcessFrame(); }); @@ -135,7 +135,7 @@ TEST_F(VideoProcessorTest, ProcessFrames_VariableFramerate) { EXPECT_CALL(frame_reader_mock_, PullFrame(_, _, _)) .WillRepeatedly(Return(I420Buffer::Create(kWidth, kHeight))); EXPECT_CALL(encoder_mock_, - Encode(Property(&VideoFrame::timestamp, kStartTimestamp), _)) + Encode(Property(&VideoFrame::rtp_timestamp, kStartTimestamp), _)) .Times(1); q_.SendTask([this] { video_processor_->ProcessFrame(); }); @@ -149,7 +149,7 @@ TEST_F(VideoProcessorTest, ProcessFrames_VariableFramerate) { [=] { video_processor_->SetRates(kBitrateKbps, kNewFramerateFps); }); EXPECT_CALL(encoder_mock_, - Encode(Property(&VideoFrame::timestamp, + Encode(Property(&VideoFrame::rtp_timestamp, kStartTimestamp + 90000 / kNewFramerateFps), _)) .Times(1); diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc index 3eed384716..9a7a6f1498 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc @@ -302,7 +302,7 @@ int LibvpxVp8Decoder::ReturnFrame( VideoFrame decoded_image = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(timestamp) + .set_rtp_timestamp(timestamp) .set_color_space(explicit_color_space) .build(); decode_complete_callback_->Decoded(decoded_image, absl::nullopt, qp); diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc index 3368b07946..a6befaf33b 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc @@ -1035,11 +1035,12 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame, if (frame.update_rect().IsEmpty() && 
num_steady_state_frames_ >= 3 && !key_frame_requested) { if (variable_framerate_experiment_.enabled && - framerate_controller_.DropFrame(frame.timestamp() / kRtpTicksPerMs) && + framerate_controller_.DropFrame(frame.rtp_timestamp() / + kRtpTicksPerMs) && frame_drop_overrides_.empty()) { return WEBRTC_VIDEO_CODEC_OK; } - framerate_controller_.AddFrame(frame.timestamp() / kRtpTicksPerMs); + framerate_controller_.AddFrame(frame.rtp_timestamp() / kRtpTicksPerMs); } bool send_key_frame = key_frame_requested; @@ -1048,7 +1049,7 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame, Vp8FrameConfig tl_configs[kMaxSimulcastStreams]; for (size_t i = 0; i < encoders_.size(); ++i) { tl_configs[i] = - frame_buffer_controller_->NextFrameConfig(i, frame.timestamp()); + frame_buffer_controller_->NextFrameConfig(i, frame.rtp_timestamp()); send_key_frame |= tl_configs[i].IntraFrame(); drop_frame |= tl_configs[i].drop_frame; RTC_DCHECK(i == 0 || @@ -1251,7 +1252,7 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, encoded_images_[encoder_idx].set_size(encoded_pos); encoded_images_[encoder_idx].SetSimulcastIndex(stream_idx); PopulateCodecSpecific(&codec_specific, *pkt, stream_idx, encoder_idx, - input_image.timestamp()); + input_image.rtp_timestamp()); if (codec_specific.codecSpecific.VP8.temporalIdx != kNoTemporalIdx) { encoded_images_[encoder_idx].SetTemporalIndex( codec_specific.codecSpecific.VP8.temporalIdx); @@ -1259,7 +1260,7 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, break; } } - encoded_images_[encoder_idx].SetRtpTimestamp(input_image.timestamp()); + encoded_images_[encoder_idx].SetRtpTimestamp(input_image.rtp_timestamp()); encoded_images_[encoder_idx].SetCaptureTimeIdentifier( input_image.capture_time_identifier()); encoded_images_[encoder_idx].SetColorSpace(input_image.color_space()); @@ -1297,7 +1298,7 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, if 
(encoded_images_[encoder_idx].size() == 0) { // Dropped frame that will be re-encoded. frame_buffer_controller_->OnFrameDropped(stream_idx, - input_image.timestamp()); + input_image.rtp_timestamp()); } } } diff --git a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc index 6ecba2af7c..1602c0d314 100644 --- a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc +++ b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc @@ -253,7 +253,7 @@ TEST_F(TestVp8Impl, Configure) { TEST_F(TestVp8Impl, OnEncodedImageReportsInfo) { constexpr Timestamp kCaptureTimeIdentifier = Timestamp::Micros(1000); VideoFrame input_frame = NextInputFrame(); - input_frame.set_timestamp(kInitialTimestampRtp); + input_frame.set_rtp_timestamp(kInitialTimestampRtp); input_frame.set_timestamp_us(kInitialTimestampMs * rtc::kNumMicrosecsPerMillisec); input_frame.set_capture_time_identifier(kCaptureTimeIdentifier); @@ -493,7 +493,7 @@ TEST_F(TestVp8Impl, ChecksSimulcastSettings) { #endif TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) { VideoFrame input_frame = NextInputFrame(); - input_frame.set_timestamp(kInitialTimestampRtp); + input_frame.set_rtp_timestamp(kInitialTimestampRtp); input_frame.set_timestamp_us(kInitialTimestampMs * rtc::kNumMicrosecsPerMillisec); EncodedImage encoded_frame; @@ -511,7 +511,7 @@ TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) { ASSERT_TRUE(decoded_frame); // Compute PSNR on all planes (faster than SSIM). 
EXPECT_GT(I420PSNR(&input_frame, decoded_frame.get()), 36); - EXPECT_EQ(kInitialTimestampRtp, decoded_frame->timestamp()); + EXPECT_EQ(kInitialTimestampRtp, decoded_frame->rtp_timestamp()); } TEST_F(TestVp8Impl, EncoderWith2TemporalLayers) { diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc index 8e1bab4ed3..915b148a3e 100644 --- a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc @@ -345,7 +345,7 @@ int LibvpxVp9Decoder::ReturnFrame( auto builder = VideoFrame::Builder() .set_video_frame_buffer(img_wrapped_buffer) - .set_timestamp_rtp(timestamp); + .set_rtp_timestamp(timestamp); if (explicit_color_space) { builder.set_color_space(*explicit_color_space); } else { diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc index 56267be98d..24d862c4c2 100644 --- a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc @@ -1055,7 +1055,7 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, if (codec_.mode == VideoCodecMode::kScreensharing) { const uint32_t frame_timestamp_ms = - 1000 * input_image.timestamp() / kVideoPayloadTypeFrequency; + 1000 * input_image.rtp_timestamp() / kVideoPayloadTypeFrequency; // To ensure that several rate-limiters with different limits don't // interfere, they must be queried in order of increasing limit. 
@@ -1766,7 +1766,7 @@ void LibvpxVp9Encoder::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { UpdateReferenceBuffers(*pkt, pics_since_key_); TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_.size()); - encoded_image_.SetRtpTimestamp(input_image_->timestamp()); + encoded_image_.SetRtpTimestamp(input_image_->rtp_timestamp()); encoded_image_.SetCaptureTimeIdentifier( input_image_->capture_time_identifier()); encoded_image_.SetColorSpace(input_image_->color_space()); diff --git a/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc b/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc index 396e64de0b..b2c0da12ef 100644 --- a/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc +++ b/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc @@ -1940,9 +1940,9 @@ TEST_F(TestVp9ImplFrameDropping, PreEncodeFrameDropping) { VideoFrame input_frame = NextInputFrame(); for (size_t frame_num = 0; frame_num < num_frames_to_encode; ++frame_num) { EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(input_frame, nullptr)); - const size_t timestamp = input_frame.timestamp() + + const size_t timestamp = input_frame.rtp_timestamp() + kVideoPayloadTypeFrequency / input_framerate_fps; - input_frame.set_timestamp(static_cast<uint32_t>(timestamp)); + input_frame.set_rtp_timestamp(static_cast<uint32_t>(timestamp)); } const size_t num_encoded_frames = GetNumEncodedFrames(); @@ -1992,9 +1992,9 @@ TEST_F(TestVp9ImplFrameDropping, DifferentFrameratePerSpatialLayer) { VideoFrame input_frame = NextInputFrame(); for (size_t frame_num = 0; frame_num < num_input_frames; ++frame_num) { EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(input_frame, nullptr)); - const size_t timestamp = input_frame.timestamp() + + const size_t timestamp = input_frame.rtp_timestamp() + kVideoPayloadTypeFrequency / input_framerate_fps; - input_frame.set_timestamp(static_cast<uint32_t>(timestamp)); + input_frame.set_rtp_timestamp(static_cast<uint32_t>(timestamp)); } std::vector<EncodedImage> encoded_frames; diff --git
a/modules/video_coding/generic_decoder.cc b/modules/video_coding/generic_decoder.cc index 00585abbc9..0aafd1ac2c 100644 --- a/modules/video_coding/generic_decoder.cc +++ b/modules/video_coding/generic_decoder.cc @@ -104,7 +104,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, absl::optional<uint8_t> qp) { RTC_DCHECK(_receiveCallback) << "Callback must not be null at this point"; TRACE_EVENT_INSTANT1("webrtc", "VCMDecodedFrameCallback::Decoded", - "timestamp", decodedImage.timestamp()); + "timestamp", decodedImage.rtp_timestamp()); // TODO(holmer): We should improve this so that we can handle multiple // callbacks from one call to Decode(). absl::optional<FrameInfo> frame_info; @@ -113,7 +113,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, { MutexLock lock(&lock_); std::tie(frame_info, dropped_frames) = - FindFrameInfo(decodedImage.timestamp()); + FindFrameInfo(decodedImage.rtp_timestamp()); timestamp_map_size = frame_infos_.size(); } if (dropped_frames > 0) { @@ -123,7 +123,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, if (!frame_info) { RTC_LOG(LS_WARNING) << "Too many frames backed up in the decoder, dropping " "frame with timestamp " - << decodedImage.timestamp(); + << decodedImage.rtp_timestamp(); return; } @@ -203,7 +203,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, timing_frame_info.decode_finish_ms = now.ms(); timing_frame_info.render_time_ms = frame_info->render_time ?
frame_info->render_time->ms() : -1; - timing_frame_info.rtp_timestamp = decodedImage.timestamp(); + timing_frame_info.rtp_timestamp = decodedImage.rtp_timestamp(); timing_frame_info.receive_start_ms = frame_info->timing.receive_start_ms; timing_frame_info.receive_finish_ms = frame_info->timing.receive_finish_ms; RTC_HISTOGRAM_COUNTS_1000( diff --git a/modules/video_coding/generic_decoder_unittest.cc b/modules/video_coding/generic_decoder_unittest.cc index d0f6d53744..1142304653 100644 --- a/modules/video_coding/generic_decoder_unittest.cc +++ b/modules/video_coding/generic_decoder_unittest.cc @@ -118,7 +118,7 @@ TEST_F(GenericDecoderTest, FrameDroppedIfTooManyFramesInFlight) { ASSERT_EQ(10U, frames.size()); // Expect that the first frame was dropped since all decodes released at the // same time and the oldest frame info is the first one dropped. - EXPECT_EQ(frames[0].timestamp(), 90000u); + EXPECT_EQ(frames[0].rtp_timestamp(), 90000u); EXPECT_EQ(1u, user_callback_.frames_dropped()); } diff --git a/modules/video_coding/utility/simulcast_test_fixture_impl.cc b/modules/video_coding/utility/simulcast_test_fixture_impl.cc index 3636366e4e..4f0bb327d0 100644 --- a/modules/video_coding/utility/simulcast_test_fixture_impl.cc +++ b/modules/video_coding/utility/simulcast_test_fixture_impl.cc @@ -314,11 +314,11 @@ void SimulcastTestFixtureImpl::RunActiveStreamsTest( SetRates(kMaxBitrates[0] + kMaxBitrates[1] + kMaxBitrates[2], 30); ExpectStreams(VideoFrameType::kVideoFrameKey, active_streams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, active_streams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -399,32 +399,32 @@ 
void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnAllStreams() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); frame_types[0] = VideoFrameType::kVideoFrameKey; ExpectStreams(VideoFrameType::kVideoFrameKey, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), VideoFrameType::kVideoFrameDelta); frame_types[1] = VideoFrameType::kVideoFrameKey; ExpectStreams(VideoFrameType::kVideoFrameKey, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), VideoFrameType::kVideoFrameDelta); frame_types[2] = VideoFrameType::kVideoFrameKey; ExpectStreams(VideoFrameType::kVideoFrameKey, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), VideoFrameType::kVideoFrameDelta); ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -438,14 +438,14 @@ void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnSpecificStreams() { 
EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); frame_types[0] = VideoFrameType::kVideoFrameKey; ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[0]); ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[1]); ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[2]); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), @@ -454,7 +454,7 @@ void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnSpecificStreams() { ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[0]); ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[1]); ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[2]); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), @@ -463,7 +463,7 @@ void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnSpecificStreams() { ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[0]); ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[1]); ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[2]); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), @@ -473,7 
+473,7 @@ void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnSpecificStreams() { ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[0]); ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[1]); ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[2]); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), @@ -481,13 +481,13 @@ void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnSpecificStreams() { ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[0]); ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[1]); ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[2]); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), VideoFrameType::kVideoFrameDelta); ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -500,7 +500,7 @@ void SimulcastTestFixtureImpl::TestPaddingAllStreams() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -513,7 +513,7 @@ void SimulcastTestFixtureImpl::TestPaddingTwoStreams() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); 
ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -527,7 +527,7 @@ void SimulcastTestFixtureImpl::TestPaddingTwoStreamsOneMaxedOut() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -540,7 +540,7 @@ void SimulcastTestFixtureImpl::TestPaddingOneStream() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, 2); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -554,7 +554,7 @@ void SimulcastTestFixtureImpl::TestPaddingOneStreamTwoMaxedOut() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, 2); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -567,7 +567,7 @@ void SimulcastTestFixtureImpl::TestSendAllStreams() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, 3); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -580,40 +580,40 @@ void SimulcastTestFixtureImpl::TestDisablingStreams() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); 
ExpectStreams(VideoFrameType::kVideoFrameDelta, 3); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); // We should only get two streams and padding for one. SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); ExpectStreams(VideoFrameType::kVideoFrameDelta, 2); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); // We should only get the first stream and padding for two. SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30); ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); // We don't have enough bitrate for the thumbnail stream, but we should get // it anyway with current configuration. SetRates(kTargetBitrates[0] - 1, 30); ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); // We should only get two streams and padding for one. SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); // We get a key frame because a new stream is being enabled. ExpectStreams(VideoFrameType::kVideoFrameKey, 2); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); // We should get all three streams. 
SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30); // We get a key frame because a new stream is being enabled. ExpectStreams(VideoFrameType::kVideoFrameKey, 3); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -747,7 +747,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #1. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(2, 2, 2, expected_temporal_idx); SetExpectedValues3(true, true, true, expected_layer_sync); @@ -755,7 +755,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #2. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(1, 1, 1, expected_temporal_idx); SetExpectedValues3(true, true, true, expected_layer_sync); @@ -763,7 +763,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #3. 
- input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(2, 2, 2, expected_temporal_idx); SetExpectedValues3(false, false, false, expected_layer_sync); @@ -771,7 +771,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #4. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(0, 0, 0, expected_temporal_idx); SetExpectedValues3(false, false, false, expected_layer_sync); @@ -779,7 +779,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #5. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(2, 2, 2, expected_temporal_idx); SetExpectedValues3(is_h264, is_h264, is_h264, expected_layer_sync); @@ -817,7 +817,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #1. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(2, 1, 255, expected_temporal_idx); SetExpectedValues3(true, true, false, expected_layer_sync); @@ -825,7 +825,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #2. 
- input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(1, 0, 255, expected_temporal_idx); SetExpectedValues3(true, false, false, expected_layer_sync); @@ -833,7 +833,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #3. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(2, 1, 255, expected_temporal_idx); SetExpectedValues3(false, false, false, expected_layer_sync); @@ -841,7 +841,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #4. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(0, 0, 255, expected_temporal_idx); SetExpectedValues3(false, false, false, expected_layer_sync); @@ -849,7 +849,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #5. 
- input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(2, 1, 255, expected_temporal_idx); SetExpectedValues3(false, true, false, expected_layer_sync); @@ -891,7 +891,7 @@ void SimulcastTestFixtureImpl::TestStrideEncodeDecode() { plane_offset[kUPlane] += 1; plane_offset[kVPlane] += 1; CreateImage(input_buffer_, plane_offset); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); EncodedImage encoded_frame; diff --git a/rtc_tools/video_encoder/video_encoder.cc b/rtc_tools/video_encoder/video_encoder.cc index 9ea8ee841c..5148752959 100644 --- a/rtc_tools/video_encoder/video_encoder.cc +++ b/rtc_tools/video_encoder/video_encoder.cc @@ -544,7 +544,7 @@ int main(int argc, char* argv[]) { webrtc::VideoFrame frame = webrtc::VideoFrame::Builder() .set_video_frame_buffer(frame_buffer_generator->NextFrame().buffer) - .set_timestamp_rtp(rtp_timestamp) + .set_rtp_timestamp(rtp_timestamp) .build(); ret = video_encoder->Encode(frame, &frame_types); RTC_CHECK_EQ(ret, WEBRTC_VIDEO_CODEC_OK); diff --git a/rtc_tools/video_replay.cc b/rtc_tools/video_replay.cc index 243919ca94..1a1d4fdb37 100644 --- a/rtc_tools/video_replay.cc +++ b/rtc_tools/video_replay.cc @@ -218,7 +218,7 @@ class FileRenderPassthrough : public rtc::VideoSinkInterface<VideoFrame> { return; std::stringstream filename; - filename << basename_ << count_++ << "_" << video_frame.timestamp() + filename << basename_ << count_++ << "_" << video_frame.rtp_timestamp() << ".jpg"; test::JpegFrameWriter frame_writer(filename.str()); diff --git a/sdk/android/src/jni/video_encoder_wrapper.cc b/sdk/android/src/jni/video_encoder_wrapper.cc index bc5650a774..d0f205f14d 100644 --- a/sdk/android/src/jni/video_encoder_wrapper.cc +++
b/sdk/android/src/jni/video_encoder_wrapper.cc @@ -164,7 +164,7 @@ int32_t VideoEncoderWrapper::Encode( FrameExtraInfo info; info.capture_time_ns = frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec; - info.timestamp_rtp = frame.timestamp(); + info.timestamp_rtp = frame.rtp_timestamp(); { MutexLock lock(&frame_extra_infos_lock_); frame_extra_infos_.push_back(info); diff --git a/sdk/android/src/jni/video_frame.cc b/sdk/android/src/jni/video_frame.cc index 121b34fa94..58f247f4f1 100644 --- a/sdk/android/src/jni/video_frame.cc +++ b/sdk/android/src/jni/video_frame.cc @@ -282,7 +282,7 @@ VideoFrame JavaToNativeFrame(JNIEnv* jni, JavaToNativeFrameBuffer(jni, j_video_frame_buffer); return VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(timestamp_rtp) + .set_rtp_timestamp(timestamp_rtp) .set_timestamp_ms(timestamp_ns / rtc::kNumNanosecsPerMillisec) .set_rotation(static_cast<VideoRotation>(rotation)) .build(); diff --git a/sdk/objc/native/src/objc_video_decoder_factory.mm b/sdk/objc/native/src/objc_video_decoder_factory.mm index bc50892401..f9ad401613 100644 --- a/sdk/objc/native/src/objc_video_decoder_factory.mm +++ b/sdk/objc/native/src/objc_video_decoder_factory.mm @@ -55,15 +55,12 @@ int32_t Decode(const EncodedImage &input_image, int64_t render_time_ms = -1) ove int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override { [decoder_ setCallback:^(RTC_OBJC_TYPE(RTCVideoFrame) * frame) { const auto buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer); - VideoFrame videoFrame = VideoFrame::Builder() .set_video_frame_buffer(buffer) .set_timestamp_rtp((uint32_t)(frame.timeStampNs / rtc::kNumNanosecsPerMicrosec)) .set_timestamp_ms(0) .set_rotation((VideoRotation)frame.rotation) .build(); videoFrame.set_timestamp(frame.timeStamp); - + VideoFrame videoFrame = VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_rtp_timestamp(frame.timeStamp) + .set_timestamp_ms(0) + .set_rotation((VideoRotation)frame.rotation) + .build();
callback->Decoded(videoFrame); }]; diff --git a/sdk/objc/native/src/objc_video_frame.mm b/sdk/objc/native/src/objc_video_frame.mm index 2e8ce6153e..ff07dc8552 100644 --- a/sdk/objc/native/src/objc_video_frame.mm +++ b/sdk/objc/native/src/objc_video_frame.mm @@ -20,7 +20,7 @@ initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer()) rotation:RTCVideoRotation(frame.rotation()) timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec]; - videoFrame.timeStamp = frame.timestamp(); + videoFrame.timeStamp = frame.rtp_timestamp(); return videoFrame; } diff --git a/test/configurable_frame_size_encoder.cc b/test/configurable_frame_size_encoder.cc index 2d5f097398..e18a87cd7e 100644 --- a/test/configurable_frame_size_encoder.cc +++ b/test/configurable_frame_size_encoder.cc @@ -53,7 +53,7 @@ int32_t ConfigurableFrameSizeEncoder::Encode( encodedImage._encodedHeight = inputImage.height(); encodedImage._encodedWidth = inputImage.width(); encodedImage._frameType = VideoFrameType::kVideoFrameKey; - encodedImage.SetRtpTimestamp(inputImage.timestamp()); + encodedImage.SetRtpTimestamp(inputImage.rtp_timestamp()); encodedImage.capture_time_ms_ = inputImage.render_time_ms(); CodecSpecificInfo specific{}; specific.codecType = codec_type_; diff --git a/test/fake_decoder.cc b/test/fake_decoder.cc index 12bff8d36c..fc38531d29 100644 --- a/test/fake_decoder.cc +++ b/test/fake_decoder.cc @@ -54,7 +54,7 @@ int32_t FakeDecoder::Decode(const EncodedImage& input, .set_rotation(webrtc::kVideoRotation_0) .set_timestamp_ms(render_time_ms) .build(); - frame.set_timestamp(input.RtpTimestamp()); + frame.set_rtp_timestamp(input.RtpTimestamp()); frame.set_ntp_time_ms(input.ntp_time_ms_); if (decode_delay_ms_ == 0 || !task_queue_) { diff --git a/test/fake_encoder.cc b/test/fake_encoder.cc index 009af7b843..195ff44026 100644 --- a/test/fake_encoder.cc +++ b/test/fake_encoder.cc @@ -141,7 +141,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, EncodedImage encoded; 
encoded.SetEncodedData(buffer); - encoded.SetRtpTimestamp(input_image.timestamp()); + encoded.SetRtpTimestamp(input_image.rtp_timestamp()); encoded._frameType = frame_info.keyframe ? VideoFrameType::kVideoFrameKey : VideoFrameType::kVideoFrameDelta; encoded._encodedWidth = simulcast_streams[i].width; diff --git a/test/fake_texture_frame.cc b/test/fake_texture_frame.cc index 9c17e4c1c0..c2308960d6 100644 --- a/test/fake_texture_frame.cc +++ b/test/fake_texture_frame.cc @@ -23,7 +23,7 @@ VideoFrame FakeNativeBuffer::CreateFrame(int width, return VideoFrame::Builder() .set_video_frame_buffer( rtc::make_ref_counted<FakeNativeBuffer>(width, height)) - .set_timestamp_rtp(timestamp) + .set_rtp_timestamp(timestamp) .set_timestamp_ms(render_time_ms) .set_rotation(rotation) .build(); diff --git a/test/fake_vp8_decoder.cc b/test/fake_vp8_decoder.cc index 4ed2523c82..8f29b74050 100644 --- a/test/fake_vp8_decoder.cc +++ b/test/fake_vp8_decoder.cc @@ -57,7 +57,7 @@ int32_t FakeVp8Decoder::Decode(const EncodedImage& input, .set_rotation(webrtc::kVideoRotation_0) .set_timestamp_ms(render_time_ms) .build(); - frame.set_timestamp(input.RtpTimestamp()); + frame.set_rtp_timestamp(input.RtpTimestamp()); frame.set_ntp_time_ms(input.ntp_time_ms_); callback_->Decoded(frame, /*decode_time_ms=*/absl::nullopt, diff --git a/test/frame_utils.cc b/test/frame_utils.cc index b280de1ad1..e0765c4dd7 100644 --- a/test/frame_utils.cc +++ b/test/frame_utils.cc @@ -36,7 +36,7 @@ bool EqualPlane(const uint8_t* data1, } bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2) { - if (f1.timestamp() != f2.timestamp() || + if (f1.rtp_timestamp() != f2.rtp_timestamp() || f1.ntp_time_ms() != f2.ntp_time_ms() || f1.render_time_ms() != f2.render_time_ms()) { return false; diff --git a/test/mappable_native_buffer.cc b/test/mappable_native_buffer.cc index 1b171e604b..67db8eb247 100644 --- a/test/mappable_native_buffer.cc +++ b/test/mappable_native_buffer.cc @@ -49,7 +49,7 @@ VideoFrame
CreateMappableNativeFrame(int64_t ntp_time_ms, VideoFrame::Builder() .set_video_frame_buffer(rtc::make_ref_counted<MappableNativeBuffer>( mappable_type, width, height)) - .set_timestamp_rtp(99) + .set_rtp_timestamp(99) .set_timestamp_ms(99) .set_rotation(kVideoRotation_0) .build(); diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc index 072c2ef202..d1fcc3ec96 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc @@ -72,7 +72,7 @@ EncodedImage FakeEncode(const VideoFrame& frame) { packet_infos.push_back(RtpPacketInfo( /*ssrc=*/1, /*csrcs=*/{}, - /*rtp_timestamp=*/frame.timestamp(), + /*rtp_timestamp=*/frame.rtp_timestamp(), /*receive_time=*/Timestamp::Micros(frame.timestamp_us() + 10000))); image.SetPacketInfos(RtpPacketInfos(packet_infos)); return image; diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc index 7b52bad0a8..97c0496824 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc @@ -81,7 +81,7 @@ EncodedImage FakeEncode(const VideoFrame& frame) { packet_infos.push_back(RtpPacketInfo( /*ssrc=*/1, /*csrcs=*/{}, - /*rtp_timestamp=*/frame.timestamp(), + /*rtp_timestamp=*/frame.rtp_timestamp(), /*receive_time=*/Timestamp::Micros(frame.timestamp_us() + 10000))); image.SetPacketInfos(RtpPacketInfos(packet_infos)); return image; diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc index 3cd179370f..2a5e8dc730 100644 --- a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc +++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc @@ -192,7 +192,7 @@
QualityAnalyzingVideoDecoder::DecoderCallback::IrrelevantSimulcastStreamDecoded( webrtc::VideoFrame dummy_frame = webrtc::VideoFrame::Builder() .set_video_frame_buffer(GetDummyFrameBuffer()) - .set_timestamp_rtp(timestamp_ms) + .set_rtp_timestamp(timestamp_ms) .set_id(frame_id) .build(); MutexLock lock(&callback_mutex_); @@ -218,19 +218,19 @@ void QualityAnalyzingVideoDecoder::OnFrameDecoded( std::string codec_name; { MutexLock lock(&mutex_); - auto it = timestamp_to_frame_id_.find(frame->timestamp()); + auto it = timestamp_to_frame_id_.find(frame->rtp_timestamp()); if (it == timestamp_to_frame_id_.end()) { // Ensure, that we have info about this frame. It can happen that for some // reasons decoder response, that it failed to decode, when we were // posting frame to it, but then call the callback for this frame. RTC_LOG(LS_ERROR) << "QualityAnalyzingVideoDecoder::OnFrameDecoded: No " "frame id for frame for frame->timestamp()=" - << frame->timestamp(); + << frame->rtp_timestamp(); return; } frame_id = it->second; timestamp_to_frame_id_.erase(it); - decoding_images_.erase(frame->timestamp()); + decoding_images_.erase(frame->rtp_timestamp()); codec_name = codec_name_; } // Set frame id to the value, that was extracted from corresponding encoded diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc index ef72dcf64d..b5dca40a33 100644 --- a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc +++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc @@ -142,7 +142,7 @@ int32_t QualityAnalyzingVideoEncoder::Encode( { MutexLock lock(&mutex_); // Store id to be able to retrieve it in analyzing callback. - timestamp_to_frame_id_list_.push_back({frame.timestamp(), frame.id()}); + timestamp_to_frame_id_list_.push_back({frame.rtp_timestamp(), frame.id()}); // If this list is growing, it means that we are not receiving new encoded // images from encoder. 
So it should be a bug in setup on in the encoder. RTC_DCHECK_LT(timestamp_to_frame_id_list_.size(), kMaxFrameInPipelineCount); @@ -159,7 +159,7 @@ int32_t QualityAnalyzingVideoEncoder::Encode( auto it = timestamp_to_frame_id_list_.end(); while (it != timestamp_to_frame_id_list_.begin()) { --it; - if (it->first == frame.timestamp()) { + if (it->first == frame.rtp_timestamp()) { timestamp_to_frame_id_list_.erase(it); break; } diff --git a/test/testsupport/ivf_video_frame_generator_unittest.cc b/test/testsupport/ivf_video_frame_generator_unittest.cc index d6227b9986..cac69b4ddf 100644 --- a/test/testsupport/ivf_video_frame_generator_unittest.cc +++ b/test/testsupport/ivf_video_frame_generator_unittest.cc @@ -146,7 +146,7 @@ class IvfVideoFrameGeneratorTest : public ::testing::Test { const uint32_t timestamp = last_frame_timestamp + kVideoPayloadTypeFrequency / codec_settings.maxFramerate; - frame.set_timestamp(timestamp); + frame.set_rtp_timestamp(timestamp); last_frame_timestamp = timestamp; diff --git a/test/video_codec_tester.cc b/test/video_codec_tester.cc index 05c94ec0ce..457903097c 100644 --- a/test/video_codec_tester.cc +++ b/test/video_codec_tester.cc @@ -108,7 +108,7 @@ class VideoSource { frame_num_[timestamp_rtp] = frame_num; return VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(timestamp_rtp) + .set_rtp_timestamp(timestamp_rtp) .set_timestamp_us((timestamp_rtp / k90kHz).us()) .build(); } @@ -125,7 +125,7 @@ class VideoSource { frame_reader_->ReadFrame(frame_num_.at(timestamp_rtp), resolution); return VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(timestamp_rtp) + .set_rtp_timestamp(timestamp_rtp) .build(); } @@ -331,7 +331,7 @@ class VideoCodecAnalyzer : public VideoCodecTester::VideoCodecStats { void StartEncode(const VideoFrame& video_frame, const EncodingSettings& encoding_settings) { int64_t encode_start_us = rtc::TimeMicros(); - task_queue_.PostTask([this, timestamp_rtp = 
video_frame.timestamp(), + task_queue_.PostTask([this, timestamp_rtp = video_frame.rtp_timestamp(), encoding_settings, encode_start_us]() { RTC_CHECK(frames_.find(timestamp_rtp) == frames_.end()) << "Duplicate frame. Frame with timestamp " << timestamp_rtp @@ -418,7 +418,7 @@ class VideoCodecAnalyzer : public VideoCodecTester::VideoCodecStats { void FinishDecode(const VideoFrame& decoded_frame, int spatial_idx) { int64_t decode_finished_us = rtc::TimeMicros(); - task_queue_.PostTask([this, timestamp_rtp = decoded_frame.timestamp(), + task_queue_.PostTask([this, timestamp_rtp = decoded_frame.rtp_timestamp(), spatial_idx, width = decoded_frame.width(), height = decoded_frame.height(), decode_finished_us]() { @@ -439,7 +439,7 @@ class VideoCodecAnalyzer : public VideoCodecTester::VideoCodecStats { decoded_frame.video_frame_buffer()->ToI420(); task_queue_.PostTask([this, decoded_buffer, - timestamp_rtp = decoded_frame.timestamp(), + timestamp_rtp = decoded_frame.rtp_timestamp(), spatial_idx]() { VideoFrame ref_frame = video_source_->ReadFrame( timestamp_rtp, {.width = decoded_buffer->width(), @@ -926,10 +926,11 @@ class Encoder : public EncodedImageCallback { EncodeCallback callback) { { MutexLock lock(&mutex_); - callbacks_[input_frame.timestamp()] = std::move(callback); + callbacks_[input_frame.rtp_timestamp()] = std::move(callback); } - Timestamp pts = Timestamp::Micros((input_frame.timestamp() / k90kHz).us()); + Timestamp pts = + Timestamp::Micros((input_frame.rtp_timestamp() / k90kHz).us()); task_queue_.PostScheduledTask( [this, input_frame, encoding_settings] { @@ -943,8 +944,9 @@ class Encoder : public EncodedImageCallback { int error = encoder_->Encode(input_frame, /*frame_types=*/nullptr); if (error != 0) { - RTC_LOG(LS_WARNING) << "Encode failed with error code " << error - << " RTP timestamp " << input_frame.timestamp(); + RTC_LOG(LS_WARNING) + << "Encode failed with error code " << error + << " RTP timestamp " << input_frame.rtp_timestamp(); } }, 
pacer_.Schedule(pts)); diff --git a/test/video_codec_tester_unittest.cc b/test/video_codec_tester_unittest.cc index a5088ab6f5..6405080a63 100644 --- a/test/video_codec_tester_unittest.cc +++ b/test/video_codec_tester_unittest.cc @@ -125,7 +125,7 @@ class TestVideoEncoder : public MockVideoEncoder { encoded_frame.SetFrameType(frame.keyframe ? VideoFrameType::kVideoFrameKey : VideoFrameType::kVideoFrameDelta); - encoded_frame.SetRtpTimestamp(input_frame.timestamp()); + encoded_frame.SetRtpTimestamp(input_frame.rtp_timestamp()); encoded_frame.SetSpatialIndex(frame.layer_id.spatial_idx); encoded_frame.SetTemporalIndex(frame.layer_id.temporal_idx); encoded_frame.SetEncodedData( @@ -161,7 +161,7 @@ class TestVideoDecoder : public MockVideoDecoder { VideoFrame decoded_frame = VideoFrame::Builder() .set_video_frame_buffer(frame_buffer) - .set_timestamp_rtp(encoded_frame.RtpTimestamp()) + .set_rtp_timestamp(encoded_frame.RtpTimestamp()) .build(); callback_->Decoded(decoded_frame); frame_sizes_.push_back(DataSize::Bytes(encoded_frame.size())); diff --git a/video/adaptation/overuse_frame_detector.cc b/video/adaptation/overuse_frame_detector.cc index 56fe71af41..2edbbe2340 100644 --- a/video/adaptation/overuse_frame_detector.cc +++ b/video/adaptation/overuse_frame_detector.cc @@ -105,8 +105,8 @@ class SendProcessingUsage1 : public OveruseFrameDetector::ProcessingUsage { if (last_capture_time_us != -1) AddCaptureSample(1e-3 * (time_when_first_seen_us - last_capture_time_us)); - frame_timing_.push_back(FrameTiming(frame.timestamp_us(), frame.timestamp(), - time_when_first_seen_us)); + frame_timing_.push_back(FrameTiming( + frame.timestamp_us(), frame.rtp_timestamp(), time_when_first_seen_us)); } absl::optional FrameSent( diff --git a/video/adaptation/overuse_frame_detector_unittest.cc b/video/adaptation/overuse_frame_detector_unittest.cc index 85a84fe23a..7ae9735b3f 100644 --- a/video/adaptation/overuse_frame_detector_unittest.cc +++ 
b/video/adaptation/overuse_frame_detector_unittest.cc @@ -105,7 +105,7 @@ class OveruseFrameDetectorTest : public ::testing::Test, .build(); uint32_t timestamp = 0; while (num_frames-- > 0) { - frame.set_timestamp(timestamp); + frame.set_rtp_timestamp(timestamp); int64_t capture_time_us = rtc::TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); clock_.AdvanceTime(TimeDelta::Micros(delay_us)); @@ -131,7 +131,7 @@ class OveruseFrameDetectorTest : public ::testing::Test, .build(); uint32_t timestamp = 0; while (num_frames-- > 0) { - frame.set_timestamp(timestamp); + frame.set_rtp_timestamp(timestamp); int64_t capture_time_us = rtc::TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); int max_delay_us = 0; @@ -166,7 +166,7 @@ class OveruseFrameDetectorTest : public ::testing::Test, .build(); uint32_t timestamp = 0; while (num_frames-- > 0) { - frame.set_timestamp(timestamp); + frame.set_rtp_timestamp(timestamp); int interval_us = random.Rand(min_interval_us, max_interval_us); int64_t capture_time_us = rtc::TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); @@ -381,7 +381,7 @@ TEST_F(OveruseFrameDetectorTest, MeasuresMultipleConcurrentSamples) { .build(); for (size_t i = 0; i < 1000; ++i) { // Unique timestamps. - frame.set_timestamp(static_cast(i)); + frame.set_rtp_timestamp(static_cast(i)); int64_t capture_time_us = rtc::TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); clock_.AdvanceTime(TimeDelta::Micros(kIntervalUs)); @@ -408,7 +408,7 @@ TEST_F(OveruseFrameDetectorTest, UpdatesExistingSamples) { .build(); uint32_t timestamp = 0; for (size_t i = 0; i < 1000; ++i) { - frame.set_timestamp(timestamp); + frame.set_rtp_timestamp(timestamp); int64_t capture_time_us = rtc::TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); // Encode and send first parts almost instantly. 
@@ -863,7 +863,7 @@ TEST_F(OveruseFrameDetectorTest2, MeasuresMultipleConcurrentSamples) { .build(); for (size_t i = 0; i < 1000; ++i) { // Unique timestamps. - frame.set_timestamp(static_cast(i)); + frame.set_rtp_timestamp(static_cast(i)); int64_t capture_time_us = rtc::TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); clock_.AdvanceTime(TimeDelta::Micros(kIntervalUs)); @@ -890,7 +890,7 @@ TEST_F(OveruseFrameDetectorTest2, UpdatesExistingSamples) { .build(); uint32_t timestamp = 0; for (size_t i = 0; i < 1000; ++i) { - frame.set_timestamp(timestamp); + frame.set_rtp_timestamp(timestamp); int64_t capture_time_us = rtc::TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); // Encode and send first parts almost instantly. diff --git a/video/end_to_end_tests/fec_tests.cc b/video/end_to_end_tests/fec_tests.cc index 11d11dcc0d..9def152ac1 100644 --- a/video/end_to_end_tests/fec_tests.cc +++ b/video/end_to_end_tests/fec_tests.cc @@ -110,7 +110,7 @@ TEST_F(FecEndToEndTest, ReceivesUlpfec) { MutexLock lock(&mutex_); // Rendering frame with timestamp of packet that was dropped -> FEC // protection worked. - auto it = dropped_timestamps_.find(video_frame.timestamp()); + auto it = dropped_timestamps_.find(video_frame.rtp_timestamp()); if (it != dropped_timestamps_.end()) { observation_complete_.Set(); } @@ -289,7 +289,7 @@ class FlexfecRenderObserver : public test::EndToEndTest, MutexLock lock(&mutex_); // Rendering frame with timestamp of packet that was dropped -> FEC // protection worked. 
- auto it = dropped_timestamps_.find(video_frame.timestamp()); + auto it = dropped_timestamps_.find(video_frame.rtp_timestamp()); if (it != dropped_timestamps_.end()) { if (!expect_flexfec_rtcp_ || received_flexfec_rtcp_) { observation_complete_.Set(); diff --git a/video/end_to_end_tests/multi_codec_receive_tests.cc b/video/end_to_end_tests/multi_codec_receive_tests.cc index 4027b607ae..e817ce2967 100644 --- a/video/end_to_end_tests/multi_codec_receive_tests.cc +++ b/video/end_to_end_tests/multi_codec_receive_tests.cc @@ -107,11 +107,11 @@ class FrameObserver : public test::RtpRtcpObserver, // Verifies that all sent frames are decoded and rendered. void OnFrame(const VideoFrame& rendered_frame) override { MutexLock lock(&mutex_); - EXPECT_THAT(sent_timestamps_, Contains(rendered_frame.timestamp())); + EXPECT_THAT(sent_timestamps_, Contains(rendered_frame.rtp_timestamp())); // Remove old timestamps too, only the newest decoded frame is rendered. num_rendered_frames_ += - RemoveOlderOrEqual(rendered_frame.timestamp(), &sent_timestamps_); + RemoveOlderOrEqual(rendered_frame.rtp_timestamp(), &sent_timestamps_); if (num_rendered_frames_ >= kFramesToObserve) { EXPECT_TRUE(sent_timestamps_.empty()) << "All sent frames not decoded."; diff --git a/video/end_to_end_tests/retransmission_tests.cc b/video/end_to_end_tests/retransmission_tests.cc index a0e404ff84..e15ef16ad0 100644 --- a/video/end_to_end_tests/retransmission_tests.cc +++ b/video/end_to_end_tests/retransmission_tests.cc @@ -316,7 +316,7 @@ void RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) { void OnFrame(const VideoFrame& video_frame) override { MutexLock lock(&mutex_); if (received_pli_ && - video_frame.timestamp() > highest_dropped_timestamp_) { + video_frame.rtp_timestamp() > highest_dropped_timestamp_) { observation_complete_.Set(); } if (!received_pli_) @@ -412,7 +412,7 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx, EXPECT_EQ(kVideoRotation_90, 
frame.rotation()); { MutexLock lock(&mutex_); - if (frame.timestamp() == retransmitted_timestamp_) + if (frame.rtp_timestamp() == retransmitted_timestamp_) observation_complete_.Set(); } orig_renderer_->OnFrame(frame); diff --git a/video/frame_encode_metadata_writer.cc b/video/frame_encode_metadata_writer.cc index e921a93457..1b9dc04716 100644 --- a/video/frame_encode_metadata_writer.cc +++ b/video/frame_encode_metadata_writer.cc @@ -99,7 +99,7 @@ void FrameEncodeMetadataWriter::OnEncodeStarted(const VideoFrame& frame) { timing_frames_info_.resize(num_spatial_layers_); FrameMetadata metadata; - metadata.rtp_timestamp = frame.timestamp(); + metadata.rtp_timestamp = frame.rtp_timestamp(); metadata.encode_start_time_ms = rtc::TimeMillis(); metadata.ntp_time_ms = frame.ntp_time_ms(); metadata.timestamp_us = frame.timestamp_us(); diff --git a/video/frame_encode_metadata_writer_unittest.cc b/video/frame_encode_metadata_writer_unittest.cc index 5106e0e16d..c645a85fe2 100644 --- a/video/frame_encode_metadata_writer_unittest.cc +++ b/video/frame_encode_metadata_writer_unittest.cc @@ -92,7 +92,7 @@ std::vector> GetTimingFrames( for (int i = 0; i < num_frames; ++i) { current_timestamp += 1; VideoFrame frame = VideoFrame::Builder() - .set_timestamp_rtp(current_timestamp * 90) + .set_rtp_timestamp(current_timestamp * 90) .set_timestamp_ms(current_timestamp) .set_video_frame_buffer(kFrameBuffer) .build(); @@ -213,7 +213,7 @@ TEST(FrameEncodeMetadataWriterTest, NoTimingFrameIfNoEncodeStartTime) { // Verify a single frame works with encode start time set. 
VideoFrame frame = VideoFrame::Builder() .set_timestamp_ms(timestamp) - .set_timestamp_rtp(timestamp * 90) + .set_rtp_timestamp(timestamp * 90) .set_video_frame_buffer(kFrameBuffer) .build(); encode_timer.OnEncodeStarted(frame); @@ -244,14 +244,14 @@ TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) { EncodedImage image; VideoFrame frame = VideoFrame::Builder() - .set_timestamp_rtp(kTimestampMs1 * 90) + .set_rtp_timestamp(kTimestampMs1 * 90) .set_timestamp_ms(kTimestampMs1) .set_video_frame_buffer(kFrameBuffer) .build(); image.capture_time_ms_ = kTimestampMs1; image.SetRtpTimestamp(static_cast(image.capture_time_ms_ * 90)); - frame.set_timestamp(image.capture_time_ms_ * 90); + frame.set_rtp_timestamp(image.capture_time_ms_ * 90); frame.set_timestamp_us(image.capture_time_ms_ * 1000); encode_timer.OnEncodeStarted(frame); @@ -261,7 +261,7 @@ TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) { image.capture_time_ms_ = kTimestampMs2; image.SetRtpTimestamp(static_cast(image.capture_time_ms_ * 90)); image.timing_ = EncodedImage::Timing(); - frame.set_timestamp(image.capture_time_ms_ * 90); + frame.set_rtp_timestamp(image.capture_time_ms_ * 90); frame.set_timestamp_us(image.capture_time_ms_ * 1000); encode_timer.OnEncodeStarted(frame); // No OnEncodedImageCall for timestamp2. 
Yet, at this moment it's not known @@ -271,7 +271,7 @@ TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) { image.capture_time_ms_ = kTimestampMs3; image.SetRtpTimestamp(static_cast(image.capture_time_ms_ * 90)); image.timing_ = EncodedImage::Timing(); - frame.set_timestamp(image.capture_time_ms_ * 90); + frame.set_rtp_timestamp(image.capture_time_ms_ * 90); frame.set_timestamp_us(image.capture_time_ms_ * 1000); encode_timer.OnEncodeStarted(frame); encode_timer.FillTimingInfo(0, &image); @@ -280,7 +280,7 @@ TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) { image.capture_time_ms_ = kTimestampMs4; image.SetRtpTimestamp(static_cast(image.capture_time_ms_ * 90)); image.timing_ = EncodedImage::Timing(); - frame.set_timestamp(image.capture_time_ms_ * 90); + frame.set_rtp_timestamp(image.capture_time_ms_ * 90); frame.set_timestamp_us(image.capture_time_ms_ * 1000); encode_timer.OnEncodeStarted(frame); encode_timer.FillTimingInfo(0, &image); @@ -303,7 +303,7 @@ TEST(FrameEncodeMetadataWriterTest, RestoresCaptureTimestamps) { image.SetRtpTimestamp(static_cast(image.capture_time_ms_ * 90)); VideoFrame frame = VideoFrame::Builder() .set_timestamp_ms(image.capture_time_ms_) - .set_timestamp_rtp(image.capture_time_ms_ * 90) + .set_rtp_timestamp(image.capture_time_ms_ * 90) .set_video_frame_buffer(kFrameBuffer) .build(); encode_timer.OnEncodeStarted(frame); @@ -327,7 +327,7 @@ TEST(FrameEncodeMetadataWriterTest, CopiesRotation) { image.SetRtpTimestamp(static_cast(kTimestampMs * 90)); VideoFrame frame = VideoFrame::Builder() .set_timestamp_ms(kTimestampMs) - .set_timestamp_rtp(kTimestampMs * 90) + .set_rtp_timestamp(kTimestampMs * 90) .set_rotation(kVideoRotation_180) .set_video_frame_buffer(kFrameBuffer) .build(); @@ -353,7 +353,7 @@ TEST(FrameEncodeMetadataWriterTest, SetsContentType) { image.SetRtpTimestamp(static_cast(kTimestampMs * 90)); VideoFrame frame = VideoFrame::Builder() .set_timestamp_ms(kTimestampMs) - .set_timestamp_rtp(kTimestampMs * 
90) + .set_rtp_timestamp(kTimestampMs * 90) .set_rotation(kVideoRotation_180) .set_video_frame_buffer(kFrameBuffer) .build(); @@ -379,7 +379,7 @@ TEST(FrameEncodeMetadataWriterTest, CopiesColorSpace) { image.SetRtpTimestamp(static_cast(kTimestampMs * 90)); VideoFrame frame = VideoFrame::Builder() .set_timestamp_ms(kTimestampMs) - .set_timestamp_rtp(kTimestampMs * 90) + .set_rtp_timestamp(kTimestampMs * 90) .set_color_space(color_space) .set_video_frame_buffer(kFrameBuffer) .build(); @@ -405,7 +405,7 @@ TEST(FrameEncodeMetadataWriterTest, CopiesPacketInfos) { image.SetRtpTimestamp(static_cast(kTimestampMs * 90)); VideoFrame frame = VideoFrame::Builder() .set_timestamp_ms(kTimestampMs) - .set_timestamp_rtp(kTimestampMs * 90) + .set_rtp_timestamp(kTimestampMs * 90) .set_packet_infos(packet_infos) .set_video_frame_buffer(kFrameBuffer) .build(); diff --git a/video/receive_statistics_proxy_unittest.cc b/video/receive_statistics_proxy_unittest.cc index a30a7e4490..276c113d58 100644 --- a/video/receive_statistics_proxy_unittest.cc +++ b/video/receive_statistics_proxy_unittest.cc @@ -83,7 +83,7 @@ class ReceiveStatisticsProxyTest : public ::testing::Test { VideoFrame frame = VideoFrame::Builder() .set_video_frame_buffer(I420Buffer::Create(width, height)) - .set_timestamp_rtp(0) + .set_rtp_timestamp(0) .set_timestamp_ms(render_time_ms) .set_rotation(kVideoRotation_0) .build(); diff --git a/video/render/video_render_frames.cc b/video/render/video_render_frames.cc index ea1362abbb..4b2e7033eb 100644 --- a/video/render/video_render_frames.cc +++ b/video/render/video_render_frames.cc @@ -55,14 +55,15 @@ int32_t VideoRenderFrames::AddFrame(VideoFrame&& new_frame) { // really slow system never renders any frames. 
if (!incoming_frames_.empty() && new_frame.render_time_ms() + kOldRenderTimestampMS < time_now) { - RTC_LOG(LS_WARNING) << "Too old frame, timestamp=" << new_frame.timestamp(); + RTC_LOG(LS_WARNING) << "Too old frame, timestamp=" + << new_frame.rtp_timestamp(); ++frames_dropped_; return -1; } if (new_frame.render_time_ms() > time_now + kFutureRenderTimestampMS) { RTC_LOG(LS_WARNING) << "Frame too long into the future, timestamp=" - << new_frame.timestamp(); + << new_frame.rtp_timestamp(); ++frames_dropped_; return -1; } diff --git a/video/video_analyzer.cc b/video/video_analyzer.cc index 9f17e3e015..02b8c99e39 100644 --- a/video/video_analyzer.cc +++ b/video/video_analyzer.cc @@ -256,12 +256,12 @@ void VideoAnalyzer::DeliverRtpPacket( void VideoAnalyzer::PreEncodeOnFrame(const VideoFrame& video_frame) { MutexLock lock(&lock_); if (!first_encoded_timestamp_) { - while (frames_.front().timestamp() != video_frame.timestamp()) { + while (frames_.front().rtp_timestamp() != video_frame.rtp_timestamp()) { ++dropped_frames_before_first_encode_; frames_.pop_front(); RTC_CHECK(!frames_.empty()); } - first_encoded_timestamp_ = video_frame.timestamp(); + first_encoded_timestamp_ = video_frame.rtp_timestamp(); } } @@ -317,9 +317,10 @@ void VideoAnalyzer::OnFrame(const VideoFrame& video_frame) { StartExcludingCpuThreadTime(); int64_t send_timestamp = - wrap_handler_.Unwrap(video_frame.timestamp() - rtp_timestamp_delta_); + wrap_handler_.Unwrap(video_frame.rtp_timestamp() - rtp_timestamp_delta_); - while (wrap_handler_.Unwrap(frames_.front().timestamp()) < send_timestamp) { + while (wrap_handler_.Unwrap(frames_.front().rtp_timestamp()) < + send_timestamp) { if (!last_rendered_frame_) { // No previous frame rendered, this one was dropped after sending but // before rendering. 
@@ -335,7 +336,7 @@ void VideoAnalyzer::OnFrame(const VideoFrame& video_frame) { VideoFrame reference_frame = frames_.front(); frames_.pop_front(); int64_t reference_timestamp = - wrap_handler_.Unwrap(reference_frame.timestamp()); + wrap_handler_.Unwrap(reference_frame.rtp_timestamp()); if (send_timestamp == reference_timestamp - 1) { // TODO(ivica): Make this work for > 2 streams. // Look at RTPSender::BuildRTPHeader. @@ -906,7 +907,7 @@ void VideoAnalyzer::AddFrameComparison(const VideoFrame& reference, const VideoFrame& render, bool dropped, int64_t render_time_ms) { - int64_t reference_timestamp = wrap_handler_.Unwrap(reference.timestamp()); + int64_t reference_timestamp = wrap_handler_.Unwrap(reference.rtp_timestamp()); int64_t send_time_ms = send_times_[reference_timestamp]; send_times_.erase(reference_timestamp); int64_t recv_time_ms = recv_times_[reference_timestamp]; @@ -1011,10 +1012,10 @@ void VideoAnalyzer::CapturedFrameForwarder::OnFrame( VideoFrame copy = video_frame; // Frames from the capturer does not have a rtp timestamp. // Create one so it can be used for comparison. - RTC_DCHECK_EQ(0, video_frame.timestamp()); + RTC_DCHECK_EQ(0, video_frame.rtp_timestamp()); if (video_frame.ntp_time_ms() == 0) copy.set_ntp_time_ms(clock_->CurrentNtpInMilliseconds()); - copy.set_timestamp(copy.ntp_time_ms() * 90); + copy.set_rtp_timestamp(copy.ntp_time_ms() * 90); analyzer_->AddCapturedFrameForComparison(copy); MutexLock lock(&lock_); ++captured_frames_; diff --git a/video/video_receive_stream2.h b/video/video_receive_stream2.h index cfdea630b0..05e719c39c 100644 --- a/video/video_receive_stream2.h +++ b/video/video_receive_stream2.h @@ -61,7 +61,7 @@ class CallStats; // multiple calls to clock->Now(). 
struct VideoFrameMetaData { VideoFrameMetaData(const webrtc::VideoFrame& frame, Timestamp now) - : rtp_timestamp(frame.timestamp()), + : rtp_timestamp(frame.rtp_timestamp()), timestamp_us(frame.timestamp_us()), ntp_time_ms(frame.ntp_time_ms()), width(frame.width()), diff --git a/video/video_receive_stream2_unittest.cc b/video/video_receive_stream2_unittest.cc index 5b1b664453..10b9d88324 100644 --- a/video/video_receive_stream2_unittest.cc +++ b/video/video_receive_stream2_unittest.cc @@ -126,10 +126,10 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface { void OnFrame(const VideoFrame& frame) override { RTC_LOG(LS_VERBOSE) << "Received frame with timestamp=" - << frame.timestamp(); + << frame.rtp_timestamp(); if (!last_frame_.empty()) { RTC_LOG(LS_INFO) << "Already had frame queue with timestamp=" - << last_frame_.back().timestamp(); + << last_frame_.back().rtp_timestamp(); } last_frame_.push_back(frame); } @@ -164,9 +164,9 @@ MATCHER_P2(MatchResolution, w, h, "") { } MATCHER_P(RtpTimestamp, timestamp, "") { - if (arg.timestamp() != timestamp) { + if (arg.rtp_timestamp() != timestamp) { *result_listener->stream() - << "rtp timestamp was " << arg.timestamp() << " != " << timestamp; + << "rtp timestamp was " << arg.rtp_timestamp() << " != " << timestamp; return false; } return true; diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc index e040815fee..c46529d8da 100644 --- a/video/video_send_stream_tests.cc +++ b/video/video_send_stream_tests.cc @@ -2915,7 +2915,7 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { auto buffer = EncodedImageBuffer::Create(16); memset(buffer->data(), 0, 16); encoded.SetEncodedData(buffer); - encoded.SetRtpTimestamp(input_image.timestamp()); + encoded.SetRtpTimestamp(input_image.rtp_timestamp()); encoded.capture_time_ms_ = input_image.render_time_ms(); for (size_t i = 0; i < kNumStreams; ++i) { diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc index 
c28d086fa4..daa2976807 100644 --- a/video/video_stream_encoder.cc +++ b/video/video_stream_encoder.cc @@ -1520,7 +1520,7 @@ void VideoStreamEncoder::OnFrame(Timestamp post_time, // Convert NTP time, in ms, to RTP timestamp. const int kMsToRtpTimestamp = 90; - incoming_frame.set_timestamp( + incoming_frame.set_rtp_timestamp( kMsToRtpTimestamp * static_cast(incoming_frame.ntp_time_ms())); // Identifier should remain the same for newly produced incoming frame and the @@ -2015,7 +2015,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, << out_frame.width() << "x" << out_frame.height(); TRACE_EVENT1("webrtc", "VCMGenericEncoder::Encode", "timestamp", - out_frame.timestamp()); + out_frame.rtp_timestamp()); frame_encode_metadata_writer_.OnEncodeStarted(out_frame); diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc index ae0358b121..4b4f6c1af6 100644 --- a/video/video_stream_encoder_unittest.cc +++ b/video/video_stream_encoder_unittest.cc @@ -1231,17 +1231,18 @@ class VideoStreamEncoderTest : public ::testing::Test { { MutexLock lock(&local_mutex_); if (expect_null_frame_) { - EXPECT_EQ(input_image.timestamp(), 0u); + EXPECT_EQ(input_image.rtp_timestamp(), 0u); EXPECT_EQ(input_image.width(), 1); last_frame_types_ = *frame_types; expect_null_frame_ = false; } else { - EXPECT_GT(input_image.timestamp(), timestamp_); + EXPECT_GT(input_image.rtp_timestamp(), timestamp_); EXPECT_GT(input_image.ntp_time_ms(), ntp_time_ms_); - EXPECT_EQ(input_image.timestamp(), input_image.ntp_time_ms() * 90); + EXPECT_EQ(input_image.rtp_timestamp(), + input_image.ntp_time_ms() * 90); } - timestamp_ = input_image.timestamp(); + timestamp_ = input_image.rtp_timestamp(); ntp_time_ms_ = input_image.ntp_time_ms(); last_input_width_ = input_image.width(); last_input_height_ = input_image.height();