Revert "NV12 support for VP8 simulcast"
This reverts commit 76d3e7a.

Reason for revert: Causes multiple Chromium WPT tests to crash, preventing rolls.

Sample failed run:
https://ci.chromium.org/p/chromium/builders/try/win10_chromium_x64_rel_ng/685757?

Sample stack trace:
#0 0x7ff8623fbde9 base::debug::CollectStackTrace()
#1 0x7ff862311ca3 base::debug::StackTrace::StackTrace()
#2 0x7ff8623fb93b base::debug::(anonymous namespace)::StackDumpSignalHandler()
#3 0x7ff857a70140 (/lib/x86_64-linux-gnu/libpthread-2.31.so+0x1413f)
#4 0x7ff85778edb1 gsignal
#5 0x7ff857778537 abort
#6 0x7ff855d5eee2 rtc::webrtc_checks_impl::FatalLog()
#7 0x7ff855f14e62 webrtc::LibvpxVp8Encoder::PrepareRawImagesForEncoding()
#8 0x7ff855f14412 webrtc::LibvpxVp8Encoder::Encode()
#9 0x7ff855bae765 webrtc::SimulcastEncoderAdapter::Encode()
#10 0x7ff85607d598 webrtc::VideoStreamEncoder::EncodeVideoFrame()
#11 0x7ff85607c60d webrtc::VideoStreamEncoder::MaybeEncodeVideoFrame()
#12 0x7ff8560816f5 webrtc::webrtc_new_closure_impl::ClosureTask<>::Run()
#13 0x7ff855b352b5 (anonymous namespace)::WebrtcTaskQueue::RunTask()
#14 0x7ff855b3531e base::internal::Invoker<>::RunOnce()
#15 0x7ff86239785b base::TaskAnnotator::RunTask()
#16 0x7ff8623c8557 base::internal::TaskTracker::RunSkipOnShutdown()
#17 0x7ff8623c7d92 base::internal::TaskTracker::RunTask()
#18 0x7ff862415a06 base::internal::TaskTrackerPosix::RunTask()
#19 0x7ff8623c75e6 base::internal::TaskTracker::RunAndPopNextTask()
#20 0x7ff8623d3a8d base::internal::WorkerThread::RunWorker()
#21 0x7ff8623d368d base::internal::WorkerThread::RunPooledWorker()
#22 0x7ff862416509 base::(anonymous namespace)::ThreadFunc()
#23 0x7ff857a64ea7 start_thread
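For context on the crash: the trace bottoms out in rtc::webrtc_checks_impl::FatalLog(), reached from LibvpxVp8Encoder::PrepareRawImagesForEncoding(). Below is a self-contained C++ sketch of one way the reverted dispatch (first diff below) can reach a fatal branch. The BufferType enum, the ToI420() stand-in, and the kI420A scenario are illustrative assumptions, not a confirmed diagnosis of the WPT failures:

// Sketch only: models the reverted dispatch, not actual WebRTC source.
// PrepareRawImagesForEncoding() handled only kI420 and kNV12; Encode()
// funneled every other buffer type through ToI420(). If a buffer's ToI420()
// result still reports a different type() (hypothetically an alpha-carrying
// I420A buffer that converts to itself), control falls into the fatal
// default branch and aborts, matching the FatalLog frame above.
#include <cstdlib>
#include <iostream>

enum class BufferType { kI420, kI420A, kNV12 };

BufferType ToI420(BufferType t) {
  // Assumption for illustration: an alpha buffer keeps its own type.
  return t == BufferType::kI420A ? BufferType::kI420A : BufferType::kI420;
}

void PrepareRawImagesForEncoding(BufferType t) {
  switch (t) {
    case BufferType::kI420:
      std::cout << "wrap I420 planes\n";
      break;
    case BufferType::kNV12:
      std::cout << "wrap NV12 planes\n";
      break;
    default:
      std::cerr << "FATAL: unhandled pixel format\n";  // ~RTC_NOTREACHED()
      std::abort();
  }
}

int main() {
  BufferType input = BufferType::kI420A;
  if (input != BufferType::kI420 && input != BufferType::kNV12)
    input = ToI420(input);
  PrepareRawImagesForEncoding(input);  // Aborts in this sketch.
}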

Original change's description:
> NV12 support for VP8 simulcast
>
> Tested using video_loopback with generated NV12 frames.
>
> Bug: webrtc:11635, webrtc:11975
> Change-Id: I14b2d663c55a83d80e48e226fcf706cb18903193
> Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/186722
> Commit-Queue: Evan Shrubsole <eshr@google.com>
> Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#32325}

TBR=ilnik@webrtc.org,eshr@google.com

# Not skipping CQ checks because original CL landed > 1 day ago.

Bug: webrtc:11635
Bug: webrtc:11975
Change-Id: I61c8aed1270bc9c2f7f0577fa5ca717a325f548a
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/187484
Reviewed-by: Guido Urdaneta <guidou@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Commit-Queue: Guido Urdaneta <guidou@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#32369}
Guido Urdaneta authored and Commit Bot committed Oct 9, 2020
1 parent 6f04b65 commit 7ef8093
Showing 3 changed files with 42 additions and 158 deletions.
156 changes: 42 additions & 114 deletions modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
@@ -497,10 +497,6 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
 
-  // Use the previous pixel format to avoid extra image allocations.
-  vpx_img_fmt_t pixel_format =
-      raw_images_.empty() ? VPX_IMG_FMT_I420 : raw_images_[0].fmt;
-
   int retVal = Release();
   if (retVal < 0) {
     return retVal;
@@ -654,8 +650,8 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
   // Creating a wrapper to the image - setting image data to NULL.
   // Actual pointer will be set in encode. Setting align to 1, as it
   // is meaningless (no memory allocation is done here).
-  libvpx_->img_wrap(&raw_images_[0], pixel_format, inst->width, inst->height, 1,
-                    NULL);
+  libvpx_->img_wrap(&raw_images_[0], VPX_IMG_FMT_I420, inst->width,
+                    inst->height, 1, NULL);
 
   // Note the order we use is different from webm, we have lowest resolution
   // at position 0 and they have highest resolution at position 0.
@@ -703,9 +699,10 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
     // Setting alignment to 32 - as that ensures at least 16 for all
     // planes (32 for Y, 16 for U,V). Libvpx sets the requested stride for
     // the y plane, but only half of it to the u and v planes.
-    libvpx_->img_alloc(
-        &raw_images_[i], pixel_format, inst->simulcastStream[stream_idx].width,
-        inst->simulcastStream[stream_idx].height, kVp832ByteAlign);
+    libvpx_->img_alloc(&raw_images_[i], VPX_IMG_FMT_I420,
+                       inst->simulcastStream[stream_idx].width,
+                       inst->simulcastStream[stream_idx].height,
+                       kVp832ByteAlign);
     SetStreamState(stream_bitrates[stream_idx] > 0, stream_idx);
     vpx_configs_[i].rc_target_bitrate = stream_bitrates[stream_idx];
     if (stream_bitrates[stream_idx] > 0) {
@@ -1017,12 +1014,26 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame,
     flags[i] = send_key_frame ? VPX_EFLAG_FORCE_KF : EncodeFlags(tl_configs[i]);
   }
 
-  rtc::scoped_refptr<VideoFrameBuffer> input_image = frame.video_frame_buffer();
-  if (input_image->type() != VideoFrameBuffer::Type::kI420 &&
-      input_image->type() != VideoFrameBuffer::Type::kNV12) {
-    input_image = input_image->ToI420();
-  }
-  PrepareRawImagesForEncoding(input_image);
+  rtc::scoped_refptr<I420BufferInterface> input_image =
+      frame.video_frame_buffer()->ToI420();
+  // Since we are extracting raw pointers from |input_image| to
+  // |raw_images_[0]|, the resolution of these frames must match.
+  RTC_DCHECK_EQ(input_image->width(), raw_images_[0].d_w);
+  RTC_DCHECK_EQ(input_image->height(), raw_images_[0].d_h);
+
+  // Image in vpx_image_t format.
+  // Input image is const. VP8's raw image is not defined as const.
+  raw_images_[0].planes[VPX_PLANE_Y] =
+      const_cast<uint8_t*>(input_image->DataY());
+  raw_images_[0].planes[VPX_PLANE_U] =
+      const_cast<uint8_t*>(input_image->DataU());
+  raw_images_[0].planes[VPX_PLANE_V] =
+      const_cast<uint8_t*>(input_image->DataV());
+
+  raw_images_[0].stride[VPX_PLANE_Y] = input_image->StrideY();
+  raw_images_[0].stride[VPX_PLANE_U] = input_image->StrideU();
+  raw_images_[0].stride[VPX_PLANE_V] = input_image->StrideV();
 
   struct CleanUpOnExit {
     explicit CleanUpOnExit(vpx_image_t& raw_image) : raw_image_(raw_image) {}
     ~CleanUpOnExit() {
@@ -1033,6 +1044,22 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame,
     vpx_image_t& raw_image_;
   } clean_up_on_exit(raw_images_[0]);
 
+  for (size_t i = 1; i < encoders_.size(); ++i) {
+    // Scale the image down a number of times by downsampling factor
+    libyuv::I420Scale(
+        raw_images_[i - 1].planes[VPX_PLANE_Y],
+        raw_images_[i - 1].stride[VPX_PLANE_Y],
+        raw_images_[i - 1].planes[VPX_PLANE_U],
+        raw_images_[i - 1].stride[VPX_PLANE_U],
+        raw_images_[i - 1].planes[VPX_PLANE_V],
+        raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w,
+        raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y],
+        raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U],
+        raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].planes[VPX_PLANE_V],
+        raw_images_[i].stride[VPX_PLANE_V], raw_images_[i].d_w,
+        raw_images_[i].d_h, libyuv::kFilterBilinear);
+  }
+
   if (send_key_frame) {
     // Adapt the size of the key frame when in screenshare with 1 temporal
     // layer.
@@ -1284,105 +1311,6 @@ int LibvpxVp8Encoder::RegisterEncodeCompleteCallback(
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-void LibvpxVp8Encoder::PrepareRawImagesForEncoding(
-    const rtc::scoped_refptr<VideoFrameBuffer>& frame) {
-  // Since we are extracting raw pointers from |input_image| to
-  // |raw_images_[0]|, the resolution of these frames must match.
-  RTC_DCHECK_EQ(frame->width(), raw_images_[0].d_w);
-  RTC_DCHECK_EQ(frame->height(), raw_images_[0].d_h);
-  switch (frame->type()) {
-    case VideoFrameBuffer::Type::kI420:
-      return PrepareI420Image(frame->GetI420());
-    case VideoFrameBuffer::Type::kNV12:
-      return PrepareNV12Image(frame->GetNV12());
-    default:
-      RTC_NOTREACHED();
-  }
-}
-
-void LibvpxVp8Encoder::MaybeUpdatePixelFormat(vpx_img_fmt fmt) {
-  RTC_DCHECK(!raw_images_.empty());
-  if (raw_images_[0].fmt == fmt) {
-    RTC_DCHECK(std::all_of(
-        std::next(raw_images_.begin()), raw_images_.end(),
-        [fmt](const vpx_image_t& raw_img) { return raw_img.fmt == fmt; }))
-        << "Not all raw images had the right format!";
-    return;
-  }
-  RTC_LOG(INFO) << "Updating vp8 encoder pixel format to "
-                << (fmt == VPX_IMG_FMT_NV12 ? "NV12" : "I420");
-  for (size_t i = 0; i < raw_images_.size(); ++i) {
-    vpx_image_t& img = raw_images_[i];
-    auto d_w = img.d_w;
-    auto d_h = img.d_h;
-    libvpx_->img_free(&img);
-    // First image is wrapping the input frame, the rest are allocated.
-    if (i == 0) {
-      libvpx_->img_wrap(&img, fmt, d_w, d_h, 1, NULL);
-    } else {
-      libvpx_->img_alloc(&img, fmt, d_w, d_h, kVp832ByteAlign);
-    }
-  }
-}
-
-void LibvpxVp8Encoder::PrepareI420Image(const I420BufferInterface* frame) {
-  RTC_DCHECK(!raw_images_.empty());
-  MaybeUpdatePixelFormat(VPX_IMG_FMT_I420);
-  // Image in vpx_image_t format.
-  // Input image is const. VP8's raw image is not defined as const.
-  raw_images_[0].planes[VPX_PLANE_Y] = const_cast<uint8_t*>(frame->DataY());
-  raw_images_[0].planes[VPX_PLANE_U] = const_cast<uint8_t*>(frame->DataU());
-  raw_images_[0].planes[VPX_PLANE_V] = const_cast<uint8_t*>(frame->DataV());
-
-  raw_images_[0].stride[VPX_PLANE_Y] = frame->StrideY();
-  raw_images_[0].stride[VPX_PLANE_U] = frame->StrideU();
-  raw_images_[0].stride[VPX_PLANE_V] = frame->StrideV();
-
-  for (size_t i = 1; i < encoders_.size(); ++i) {
-    // Scale the image down a number of times by downsampling factor
-    libyuv::I420Scale(
-        raw_images_[i - 1].planes[VPX_PLANE_Y],
-        raw_images_[i - 1].stride[VPX_PLANE_Y],
-        raw_images_[i - 1].planes[VPX_PLANE_U],
-        raw_images_[i - 1].stride[VPX_PLANE_U],
-        raw_images_[i - 1].planes[VPX_PLANE_V],
-        raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w,
-        raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y],
-        raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U],
-        raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].planes[VPX_PLANE_V],
-        raw_images_[i].stride[VPX_PLANE_V], raw_images_[i].d_w,
-        raw_images_[i].d_h, libyuv::kFilterBilinear);
-  }
-}
-
-void LibvpxVp8Encoder::PrepareNV12Image(const NV12BufferInterface* frame) {
-  RTC_DCHECK(!raw_images_.empty());
-  MaybeUpdatePixelFormat(VPX_IMG_FMT_NV12);
-  // Image in vpx_image_t format.
-  // Input image is const. VP8's raw image is not defined as const.
-  raw_images_[0].planes[VPX_PLANE_Y] = const_cast<uint8_t*>(frame->DataY());
-  raw_images_[0].planes[VPX_PLANE_U] = const_cast<uint8_t*>(frame->DataUV());
-  raw_images_[0].planes[VPX_PLANE_V] = raw_images_[0].planes[VPX_PLANE_U] + 1;
-  raw_images_[0].stride[VPX_PLANE_Y] = frame->StrideY();
-  raw_images_[0].stride[VPX_PLANE_U] = frame->StrideUV();
-  raw_images_[0].stride[VPX_PLANE_V] = frame->StrideUV();
-
-  for (size_t i = 1; i < encoders_.size(); ++i) {
-    // Scale the image down a number of times by downsampling factor
-    libyuv::NV12Scale(
-        raw_images_[i - 1].planes[VPX_PLANE_Y],
-        raw_images_[i - 1].stride[VPX_PLANE_Y],
-        raw_images_[i - 1].planes[VPX_PLANE_U],
-        raw_images_[i - 1].stride[VPX_PLANE_U], raw_images_[i - 1].d_w,
-        raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y],
-        raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U],
-        raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].d_w,
-        raw_images_[i].d_h, libyuv::kFilterBilinear);
-    raw_images_[i].planes[VPX_PLANE_V] = raw_images_[i].planes[VPX_PLANE_U] + 1;
-    raw_images_[i].stride[VPX_PLANE_V] = raw_images_[i].stride[VPX_PLANE_U] + 1;
-  }
-}
-
 // static
 LibvpxVp8Encoder::VariableFramerateExperiment
 LibvpxVp8Encoder::ParseVariableFramerateConfig(std::string group_name) {
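A note on the NV12 path removed above: PrepareNV12Image() aliases the V plane as the U pointer plus one byte, because NV12 stores chroma interleaved as UVUV..., and both fake planes then share the interleaved stride (StrideUV()). The removed scaling loop instead adds one to the V stride as well, which looks like an off-by-one of the same trick, though it is unrelated to the crash that motivated this revert. A minimal self-contained sketch of the aliasing, with made-up sample values:

// Minimal sketch (illustrative, not WebRTC source) of NV12 plane aliasing:
// a "V plane" is just the U pointer advanced one byte, and both planes use
// the same interleaved stride, stepping two bytes per chroma sample.
#include <cstdint>
#include <iostream>
#include <vector>

int main() {
  const int chroma_w = 4, chroma_h = 2;
  const int stride_uv = 2 * chroma_w;  // Interleaved: 2 bytes per U/V pair.
  std::vector<uint8_t> uv = {10, 20, 11, 21, 12, 22, 13, 23,
                             14, 24, 15, 25, 16, 26, 17, 27};
  const uint8_t* plane_u = uv.data();      // planes[VPX_PLANE_U]
  const uint8_t* plane_v = uv.data() + 1;  // planes[VPX_PLANE_V] = U + 1
  for (int row = 0; row < chroma_h; ++row) {
    for (int col = 0; col < chroma_w; ++col) {
      const int idx = row * stride_uv + 2 * col;
      std::cout << "U=" << int(plane_u[idx]) << " V=" << int(plane_v[idx])
                << (col + 1 == chroma_w ? "\n" : "  ");
    }
  }
}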
6 changes: 0 additions & 6 deletions modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h
@@ -93,12 +93,6 @@ class LibvpxVp8Encoder : public VideoEncoder {
 
   bool UpdateVpxConfiguration(size_t stream_index);
 
-  void PrepareRawImagesForEncoding(
-      const rtc::scoped_refptr<VideoFrameBuffer>& frame);
-  void MaybeUpdatePixelFormat(vpx_img_fmt fmt);
-  void PrepareI420Image(const I420BufferInterface* frame);
-  void PrepareNV12Image(const NV12BufferInterface* frame);
-
   const std::unique_ptr<LibvpxInterface> libvpx_;
 
   const CpuSpeedExperiment experimental_cpu_speed_config_arm_;
38 changes: 0 additions & 38 deletions modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -266,44 +266,6 @@ TEST_F(TestVp8Impl, EncodeFrameAndRelease) {
             encoder_->Encode(NextInputFrame(), nullptr));
 }
 
-TEST_F(TestVp8Impl, EncodeNv12FrameSimulcast) {
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            encoder_->InitEncode(&codec_settings_, kSettings));
-
-  EncodedImage encoded_frame;
-  CodecSpecificInfo codec_specific_info;
-  input_frame_generator_ = test::CreateSquareFrameGenerator(
-      kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kNV12,
-      absl::nullopt);
-  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);
-
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
-            encoder_->Encode(NextInputFrame(), nullptr));
-}
-
-TEST_F(TestVp8Impl, EncodeI420FrameAfterNv12Frame) {
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            encoder_->InitEncode(&codec_settings_, kSettings));
-
-  EncodedImage encoded_frame;
-  CodecSpecificInfo codec_specific_info;
-  input_frame_generator_ = test::CreateSquareFrameGenerator(
-      kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kNV12,
-      absl::nullopt);
-  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);
-  input_frame_generator_ = test::CreateSquareFrameGenerator(
-      kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kI420,
-      absl::nullopt);
-  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);
-
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
-            encoder_->Encode(NextInputFrame(), nullptr));
-}
-
 TEST_F(TestVp8Impl, InitDecode) {
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release());
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
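The two tests deleted above were the only unit coverage of NV12 input through the simulcast encoder. What the revert leaves in place is the I420-only cascade restored in libvpx_vp8_encoder.cc: each simulcast layer is scaled with libyuv::I420Scale() from the previous, already-downscaled layer rather than from the source frame, trading a little compounded filtering error for less scaling work. Below is a self-contained sketch of that cascade; the layer sizes and tightly packed strides are assumptions for illustration:

// Sketch of the cascaded downscale performed by the restored Encode() loop.
// Assumes libyuv is available (as it is in the WebRTC tree).
#include <cstddef>
#include <cstdint>
#include <vector>

#include "libyuv/scale.h"

// One tightly packed I420 buffer: Y is w*h, U and V are (w/2)*(h/2).
struct PlanarI420 {
  int w;
  int h;
  std::vector<uint8_t> y, u, v;
  PlanarI420(int w_, int h_)
      : w(w_), h(h_), y(w_ * h_), u(w_ * h_ / 4), v(w_ * h_ / 4) {}
};

int main() {
  // Three hypothetical simulcast layers, each half the previous resolution.
  std::vector<PlanarI420> layers;
  layers.emplace_back(640, 360);
  layers.emplace_back(320, 180);
  layers.emplace_back(160, 90);
  // Mirror of the restored loop: layer i is scaled from layer i - 1.
  for (std::size_t i = 1; i < layers.size(); ++i) {
    const PlanarI420& src = layers[i - 1];
    PlanarI420& dst = layers[i];
    libyuv::I420Scale(src.y.data(), src.w, src.u.data(), src.w / 2,
                      src.v.data(), src.w / 2, src.w, src.h, dst.y.data(),
                      dst.w, dst.u.data(), dst.w / 2, dst.v.data(), dst.w / 2,
                      dst.w, dst.h, libyuv::kFilterBilinear);
  }
  return 0;
}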
