diff options
author | emircan <emircan@chromium.org> | 2015-09-01 12:45:33 -0700 |
---|---|---|
committer | Commit bot <commit-bot@chromium.org> | 2015-09-01 19:46:15 +0000 |
commit | 65af59bb1cc5578cd9fb4e4fe7fba0a834f34f35 (patch) | |
tree | dc0918bac922b87e14371700fc3d279c0fa508dc | |
parent | 4252ddfd1fe465add350d2c6cd2d2a15df241d35 (diff) | |
download | chromium_src-65af59bb1cc5578cd9fb4e4fe7fba0a834f34f35.zip chromium_src-65af59bb1cc5578cd9fb4e4fe7fba0a834f34f35.tar.gz chromium_src-65af59bb1cc5578cd9fb4e4fe7fba0a834f34f35.tar.bz2 |
Merge media::VideoCapturePixelFormat with media::VideoPixelFormat
This CL merges these 2 enums that overlap and represent pixel formats.
- Moved types that do not exist to media::VideoPixelFormat
- Updated histograms.xml for UMA values
- Updated media_types.mojom for matching mojo enums
- Added kWinSupportedCaptureFormats to capability_list_win.cc such that we are no longer dependent on the order of media::VideoCapturePixelFormat for choosing the preferred type.
BUG=521068, 504160
CQ_INCLUDE_TRYBOTS=tryserver.blink:linux_blink_rel
Review URL: https://codereview.chromium.org/1301133002
Cr-Commit-Position: refs/heads/master@{#346711}
71 files changed, 499 insertions, 480 deletions
diff --git a/cc/resources/video_resource_updater.cc b/cc/resources/video_resource_updater.cc index 5911d34..dadc62a 100644 --- a/cc/resources/video_resource_updater.cc +++ b/cc/resources/video_resource_updater.cc @@ -406,11 +406,16 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForHardwarePlanes( case media::PIXEL_FORMAT_I420: external_resources.type = VideoFrameExternalResources::YUV_RESOURCE; break; - case media::PIXEL_FORMAT_NV12: case media::PIXEL_FORMAT_YV12: case media::PIXEL_FORMAT_YV16: case media::PIXEL_FORMAT_YV24: case media::PIXEL_FORMAT_YV12A: + case media::PIXEL_FORMAT_NV12: + case media::PIXEL_FORMAT_NV21: + case media::PIXEL_FORMAT_YUY2: + case media::PIXEL_FORMAT_RGB24: + case media::PIXEL_FORMAT_RGB32: + case media::PIXEL_FORMAT_MJPEG: case media::PIXEL_FORMAT_UNKNOWN: DLOG(ERROR) << "Unsupported Texture format" << media::VideoPixelFormatToString(video_frame->format()); diff --git a/chrome/renderer/extensions/cast_streaming_native_handler.cc b/chrome/renderer/extensions/cast_streaming_native_handler.cc index 44b2dde..160b6fb 100644 --- a/chrome/renderer/extensions/cast_streaming_native_handler.cc +++ b/chrome/renderer/extensions/cast_streaming_native_handler.cc @@ -783,9 +783,8 @@ void CastStreamingNativeHandler::StartCastRtpReceiver( return; } - media::VideoCaptureFormat capture_format( - gfx::Size(max_width, max_height), fps, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + media::VideoCaptureFormat capture_format(gfx::Size(max_width, max_height), + fps, media::PIXEL_FORMAT_I420); video_config.target_frame_rate = fps; audio_config.target_frame_rate = 100; diff --git a/content/browser/media/capture/aura_window_capture_machine.cc b/content/browser/media/capture/aura_window_capture_machine.cc index 69edbd3..1da5506 100644 --- a/content/browser/media/capture/aura_window_capture_machine.cc +++ b/content/browser/media/capture/aura_window_capture_machine.cc @@ -308,7 +308,7 @@ bool AuraWindowCaptureMachine::ProcessCopyOutputResponse( if 
(capture_params_.requested_format.pixel_storage == media::PIXEL_STORAGE_TEXTURE) { - DCHECK_EQ(media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB, + DCHECK_EQ(media::PIXEL_FORMAT_ARGB, capture_params_.requested_format.pixel_format); DCHECK(!video_frame.get()); cc::TextureMailbox texture_mailbox; diff --git a/content/browser/media/capture/desktop_capture_device.cc b/content/browser/media/capture/desktop_capture_device.cc index b8962af..cb8579b 100644 --- a/content/browser/media/capture/desktop_capture_device.cc +++ b/content/browser/media/capture/desktop_capture_device.cc @@ -314,7 +314,7 @@ void DesktopCaptureDevice::Core::OnCaptureCompleted( output_data, output_bytes, media::VideoCaptureFormat( gfx::Size(output_size.width(), output_size.height()), - requested_frame_rate_, media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB), + requested_frame_rate_, media::PIXEL_FORMAT_ARGB), 0, base::TimeTicks::Now()); } diff --git a/content/browser/media/capture/desktop_capture_device_aura_unittest.cc b/content/browser/media/capture/desktop_capture_device_aura_unittest.cc index bce2fb5..790334e 100644 --- a/content/browser/media/capture/desktop_capture_device_aura_unittest.cc +++ b/content/browser/media/capture/desktop_capture_device_aura_unittest.cc @@ -59,11 +59,11 @@ class MockDeviceClient : public media::VideoCaptureDevice::Client { // Trampoline methods to workaround GMOCK problems with scoped_ptr<>. 
scoped_ptr<Buffer> ReserveOutputBuffer( const gfx::Size& dimensions, - media::VideoCapturePixelFormat format, + media::VideoPixelFormat format, media::VideoPixelStorage storage) override { - EXPECT_TRUE((format == media::VIDEO_CAPTURE_PIXEL_FORMAT_I420 && + EXPECT_TRUE((format == media::PIXEL_FORMAT_I420 && storage == media::PIXEL_STORAGE_CPU) || - (format == media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB && + (format == media::PIXEL_FORMAT_ARGB && storage == media::PIXEL_STORAGE_TEXTURE)); DoReserveOutputBuffer(); return scoped_ptr<Buffer>(); @@ -147,8 +147,7 @@ TEST_F(DesktopCaptureDeviceAuraTest, StartAndStop) { media::VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(640, 480); capture_params.requested_format.frame_rate = kFrameRate; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; capture_device->AllocateAndStart(capture_params, client.Pass()); capture_device->StopAndDeAllocate(); } diff --git a/content/browser/media/capture/desktop_capture_device_unittest.cc b/content/browser/media/capture/desktop_capture_device_unittest.cc index 0afc484..afaac44 100644 --- a/content/browser/media/capture/desktop_capture_device_unittest.cc +++ b/content/browser/media/capture/desktop_capture_device_unittest.cc @@ -80,9 +80,9 @@ class MockDeviceClient : public media::VideoCaptureDevice::Client { // Trampoline methods to workaround GMOCK problems with scoped_ptr<>. 
scoped_ptr<Buffer> ReserveOutputBuffer( const gfx::Size& dimensions, - media::VideoCapturePixelFormat format, + media::VideoPixelFormat format, media::VideoPixelStorage storage) override { - EXPECT_TRUE(format == media::VIDEO_CAPTURE_PIXEL_FORMAT_I420 && + EXPECT_TRUE(format == media::PIXEL_FORMAT_I420 && storage == media::PIXEL_STORAGE_CPU); DoReserveOutputBuffer(); return scoped_ptr<Buffer>(); @@ -234,7 +234,7 @@ class FormatChecker { EXPECT_EQ(size_for_odd_frames_, format.frame_size); ++frame_count_; EXPECT_EQ(kFrameRate, format.frame_rate); - EXPECT_EQ(media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB, format.pixel_format); + EXPECT_EQ(media::PIXEL_FORMAT_ARGB, format.pixel_format); } private: @@ -292,8 +292,7 @@ TEST_F(DesktopCaptureDeviceTest, MAYBE_Capture) { media::VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(640, 480); capture_params.requested_format.frame_rate = kFrameRate; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; capture_device_->AllocateAndStart(capture_params, client.Pass()); EXPECT_TRUE(done_event.TimedWait(TestTimeouts::action_max_timeout())); capture_device_->StopAndDeAllocate(); @@ -301,7 +300,7 @@ TEST_F(DesktopCaptureDeviceTest, MAYBE_Capture) { EXPECT_GT(format.frame_size.width(), 0); EXPECT_GT(format.frame_size.height(), 0); EXPECT_EQ(kFrameRate, format.frame_rate); - EXPECT_EQ(media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB, format.pixel_format); + EXPECT_EQ(media::PIXEL_FORMAT_ARGB, format.pixel_format); EXPECT_EQ(format.frame_size.GetArea() * 4, frame_size); } @@ -328,8 +327,7 @@ TEST_F(DesktopCaptureDeviceTest, ScreenResolutionChangeConstantResolution) { capture_params.requested_format.frame_size.SetSize(kTestFrameWidth1, kTestFrameHeight1); capture_params.requested_format.frame_rate = kFrameRate; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + 
capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; capture_params.resolution_change_policy = media::RESOLUTION_POLICY_FIXED_RESOLUTION; @@ -373,8 +371,7 @@ TEST_F(DesktopCaptureDeviceTest, ScreenResolutionChangeFixedAspectRatio) { std::max(kTestFrameHeight1, kTestFrameHeight2)); capture_params.requested_format.frame_size = high_def_16_by_9; capture_params.requested_format.frame_rate = kFrameRate; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; capture_params.resolution_change_policy = media::RESOLUTION_POLICY_FIXED_ASPECT_RATIO; @@ -419,8 +416,7 @@ TEST_F(DesktopCaptureDeviceTest, ScreenResolutionChangeVariableResolution) { std::max(kTestFrameHeight1, kTestFrameHeight2)); capture_params.requested_format.frame_size = high_def_16_by_9; capture_params.requested_format.frame_rate = kFrameRate; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; capture_params.resolution_change_policy = media::RESOLUTION_POLICY_ANY_WITHIN_LIMIT; @@ -464,7 +460,7 @@ TEST_F(DesktopCaptureDeviceTest, UnpackedFrame) { kTestFrameHeight1); capture_params.requested_format.frame_rate = kFrameRate; capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + media::PIXEL_FORMAT_I420; capture_device_->AllocateAndStart(capture_params, client.Pass()); @@ -506,8 +502,7 @@ TEST_F(DesktopCaptureDeviceTest, InvertedFrame) { capture_params.requested_format.frame_size.SetSize(kTestFrameWidth1, kTestFrameHeight1); capture_params.requested_format.frame_rate = kFrameRate; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; capture_device_->AllocateAndStart(capture_params, client.Pass()); diff --git 
a/content/browser/media/capture/web_contents_video_capture_device_unittest.cc b/content/browser/media/capture/web_contents_video_capture_device_unittest.cc index 6a49212..d08e1df 100644 --- a/content/browser/media/capture/web_contents_video_capture_device_unittest.cc +++ b/content/browser/media/capture/web_contents_video_capture_device_unittest.cc @@ -360,9 +360,9 @@ class StubClient : public media::VideoCaptureDevice::Client { scoped_ptr<media::VideoCaptureDevice::Client::Buffer> ReserveOutputBuffer( const gfx::Size& dimensions, - media::VideoCapturePixelFormat format, + media::VideoPixelFormat format, media::VideoPixelStorage storage) override { - CHECK_EQ(format, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + CHECK_EQ(format, media::PIXEL_FORMAT_I420); int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; // Ignored. const int buffer_id = buffer_pool_->ReserveForProducer( format, storage, dimensions, &buffer_id_to_drop); @@ -730,8 +730,7 @@ TEST_F(MAYBE_WebContentsVideoCaptureDeviceTest, media::VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight); capture_params.requested_format.frame_rate = kTestFramesPerSecond; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; device()->AllocateAndStart(capture_params, client_observer()->PassClient()); ASSERT_NO_FATAL_FAILURE(client_observer()->WaitForError()); device()->StopAndDeAllocate(); @@ -749,8 +748,7 @@ TEST_F(MAYBE_WebContentsVideoCaptureDeviceTest, WebContentsDestroyed) { media::VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight); capture_params.requested_format.frame_rate = kTestFramesPerSecond; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; 
device()->AllocateAndStart(capture_params, client_observer()->PassClient()); // Do one capture to prove source()->SetSolidColor(SK_ColorRED); @@ -777,8 +775,7 @@ TEST_F(MAYBE_WebContentsVideoCaptureDeviceTest, media::VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight); capture_params.requested_format.frame_rate = kTestFramesPerSecond; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; device()->AllocateAndStart(capture_params, client_observer()->PassClient()); // Make a point of not running the UI messageloop here. @@ -799,8 +796,7 @@ TEST_F(MAYBE_WebContentsVideoCaptureDeviceTest, StopWithRendererWorkToDo) { media::VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight); capture_params.requested_format.frame_rate = kTestFramesPerSecond; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; device()->AllocateAndStart(capture_params, client_observer()->PassClient()); base::RunLoop().RunUntilIdle(); @@ -819,8 +815,7 @@ TEST_F(MAYBE_WebContentsVideoCaptureDeviceTest, DeviceRestart) { media::VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight); capture_params.requested_format.frame_rate = kTestFramesPerSecond; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; device()->AllocateAndStart(capture_params, client_observer()->PassClient()); base::RunLoop().RunUntilIdle(); source()->SetSolidColor(SK_ColorRED); @@ -858,8 +853,7 @@ TEST_F(MAYBE_WebContentsVideoCaptureDeviceTest, GoesThroughAllTheMotions) { media::VideoCaptureParams capture_params; 
capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight); capture_params.requested_format.frame_rate = kTestFramesPerSecond; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; device()->AllocateAndStart(capture_params, client_observer()->PassClient()); for (int i = 0; i < 6; i++) { @@ -909,8 +903,7 @@ TEST_F(MAYBE_WebContentsVideoCaptureDeviceTest, BadFramesGoodFrames) { media::VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight); capture_params.requested_format.frame_rate = kTestFramesPerSecond; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; // 1x1 is too small to process; we intend for this to result in an error. source()->SetCopyResultSize(1, 1); source()->SetSolidColor(SK_ColorRED); @@ -942,8 +935,7 @@ TEST_F(MAYBE_WebContentsVideoCaptureDeviceTest, media::VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight); capture_params.requested_format.frame_rate = kTestFramesPerSecond; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; capture_params.resolution_change_policy = media::RESOLUTION_POLICY_FIXED_ASPECT_RATIO; @@ -1000,8 +992,7 @@ TEST_F(MAYBE_WebContentsVideoCaptureDeviceTest, media::VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight); capture_params.requested_format.frame_rate = kTestFramesPerSecond; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; capture_params.resolution_change_policy = 
media::RESOLUTION_POLICY_ANY_WITHIN_LIMIT; @@ -1082,8 +1073,7 @@ TEST_F(MAYBE_WebContentsVideoCaptureDeviceTest, media::VideoCaptureParams capture_params; capture_params.requested_format.frame_size = oddball_size; capture_params.requested_format.frame_rate = kTestFramesPerSecond; - capture_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; capture_params.resolution_change_policy = policy; StubClientObserver unused_observer; device()->AllocateAndStart(capture_params, unused_observer.PassClient()); diff --git a/content/browser/media/media_internals_unittest.cc b/content/browser/media/media_internals_unittest.cc index c255c18..a02acca 100644 --- a/content/browser/media/media_internals_unittest.cc +++ b/content/browser/media/media_internals_unittest.cc @@ -151,15 +151,15 @@ TEST_F(MediaInternalsVideoCaptureDeviceTest, // be updated at the same time as the media internals JS files. const float kFrameRate = 30.0f; const gfx::Size kFrameSize(1280, 720); - const media::VideoCapturePixelFormat kPixelFormat = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + const media::VideoPixelFormat kPixelFormat = + media::PIXEL_FORMAT_I420; const media::VideoPixelStorage kPixelStorage = media::PIXEL_STORAGE_CPU; const media::VideoCaptureFormat capture_format(kFrameSize, kFrameRate, kPixelFormat, kPixelStorage); const std::string expected_string = base::StringPrintf( "(%s)@%.3ffps, pixel format: %s storage: %s.", kFrameSize.ToString().c_str(), kFrameRate, - media::VideoCaptureFormat::PixelFormatToString(kPixelFormat).c_str(), + media::VideoPixelFormatToString(kPixelFormat).c_str(), media::VideoCaptureFormat::PixelStorageToString(kPixelStorage).c_str()); EXPECT_EQ(expected_string, media::VideoCaptureFormat::ToString(capture_format)); @@ -170,8 +170,8 @@ TEST_F(MediaInternalsVideoCaptureDeviceTest, const int kWidth = 1280; const int kHeight = 720; const float kFrameRate = 30.0f; - const 
media::VideoCapturePixelFormat kPixelFormat = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + const media::VideoPixelFormat kPixelFormat = + media::PIXEL_FORMAT_I420; const media::VideoCaptureFormat format_hd({kWidth, kHeight}, kFrameRate, kPixelFormat); media::VideoCaptureFormats formats{}; diff --git a/content/browser/renderer_host/media/video_capture_buffer_pool.cc b/content/browser/renderer_host/media/video_capture_buffer_pool.cc index a157b4e..f3a2c48 100644 --- a/content/browser/renderer_host/media/video_capture_buffer_pool.cc +++ b/content/browser/renderer_host/media/video_capture_buffer_pool.cc @@ -111,7 +111,7 @@ class GpuMemoryBufferBufferHandle final class VideoCaptureBufferPool::SharedMemTracker final : public Tracker { public: SharedMemTracker(); - bool Init(media::VideoCapturePixelFormat format, + bool Init(media::VideoPixelFormat format, media::VideoPixelStorage storage_type, const gfx::Size& dimensions) override; @@ -141,7 +141,7 @@ class VideoCaptureBufferPool::SharedMemTracker final : public Tracker { class VideoCaptureBufferPool::GpuMemoryBufferTracker final : public Tracker { public: GpuMemoryBufferTracker(); - bool Init(media::VideoCapturePixelFormat format, + bool Init(media::VideoPixelFormat format, media::VideoPixelStorage storage_type, const gfx::Size& dimensions) override; ~GpuMemoryBufferTracker() override; @@ -169,7 +169,7 @@ class VideoCaptureBufferPool::GpuMemoryBufferTracker final : public Tracker { VideoCaptureBufferPool::SharedMemTracker::SharedMemTracker() : Tracker() {} bool VideoCaptureBufferPool::SharedMemTracker::Init( - media::VideoCapturePixelFormat format, + media::VideoPixelFormat format, media::VideoPixelStorage storage_type, const gfx::Size& dimensions) { DVLOG(2) << "allocating ShMem of " << dimensions.ToString(); @@ -197,7 +197,7 @@ VideoCaptureBufferPool::GpuMemoryBufferTracker::~GpuMemoryBufferTracker() { } bool VideoCaptureBufferPool::GpuMemoryBufferTracker::Init( - media::VideoCapturePixelFormat format, + 
media::VideoPixelFormat format, media::VideoPixelStorage storage_type, const gfx::Size& dimensions) { DVLOG(2) << "allocating GMB for " << dimensions.ToString(); @@ -206,7 +206,7 @@ bool VideoCaptureBufferPool::GpuMemoryBufferTracker::Init( DCHECK(BrowserGpuMemoryBufferManager::current()); // This class is only expected to be called with I420 buffer requests at this // point. - DCHECK_EQ(format, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + DCHECK_EQ(format, media::PIXEL_FORMAT_I420); set_pixel_format(format); set_storage_type(storage_type); set_pixel_count(dimensions.GetArea()); @@ -340,7 +340,7 @@ VideoCaptureBufferPool::GetBufferHandle(int buffer_id) { } int VideoCaptureBufferPool::ReserveForProducer( - media::VideoCapturePixelFormat format, + media::VideoPixelFormat format, media::VideoPixelStorage storage, const gfx::Size& dimensions, int* buffer_id_to_drop) { @@ -404,7 +404,7 @@ double VideoCaptureBufferPool::GetBufferPoolUtilization() const { } int VideoCaptureBufferPool::ReserveForProducerInternal( - media::VideoCapturePixelFormat pixel_format, + media::VideoPixelFormat pixel_format, media::VideoPixelStorage storage_type, const gfx::Size& dimensions, int* buffer_id_to_drop) { diff --git a/content/browser/renderer_host/media/video_capture_buffer_pool.h b/content/browser/renderer_host/media/video_capture_buffer_pool.h index 44d55f3c..defd2af6 100644 --- a/content/browser/renderer_host/media/video_capture_buffer_pool.h +++ b/content/browser/renderer_host/media/video_capture_buffer_pool.h @@ -88,7 +88,7 @@ class CONTENT_EXPORT VideoCaptureBufferPool // On occasion, this call will decide to free an old buffer to make room for a // new allocation at a larger size. If so, the ID of the destroyed buffer is // returned via |buffer_id_to_drop|. 
- int ReserveForProducer(media::VideoCapturePixelFormat format, + int ReserveForProducer(media::VideoPixelFormat format, media::VideoPixelStorage storage, const gfx::Size& dimensions, int* buffer_id_to_drop); @@ -123,17 +123,17 @@ class CONTENT_EXPORT VideoCaptureBufferPool Tracker() : pixel_count_(0), held_by_producer_(false), consumer_hold_count_(0) {} - virtual bool Init(media::VideoCapturePixelFormat format, + virtual bool Init(media::VideoPixelFormat format, media::VideoPixelStorage storage_type, const gfx::Size& dimensions) = 0; virtual ~Tracker(); size_t pixel_count() const { return pixel_count_; } void set_pixel_count(size_t count) { pixel_count_ = count; } - media::VideoCapturePixelFormat pixel_format() const { + media::VideoPixelFormat pixel_format() const { return pixel_format_; } - void set_pixel_format(media::VideoCapturePixelFormat format) { + void set_pixel_format(media::VideoPixelFormat format) { pixel_format_ = format; } media::VideoPixelStorage storage_type() const { return storage_type_; } @@ -157,7 +157,7 @@ class CONTENT_EXPORT VideoCaptureBufferPool private: size_t pixel_count_; - media::VideoCapturePixelFormat pixel_format_; + media::VideoPixelFormat pixel_format_; media::VideoPixelStorage storage_type_; // Indicates whether this Tracker is currently referenced by the producer. 
bool held_by_producer_; @@ -168,7 +168,7 @@ class CONTENT_EXPORT VideoCaptureBufferPool friend class base::RefCountedThreadSafe<VideoCaptureBufferPool>; virtual ~VideoCaptureBufferPool(); - int ReserveForProducerInternal(media::VideoCapturePixelFormat format, + int ReserveForProducerInternal(media::VideoPixelFormat format, media::VideoPixelStorage storage, const gfx::Size& dimensions, int* tracker_id_to_drop); diff --git a/content/browser/renderer_host/media/video_capture_buffer_pool_unittest.cc b/content/browser/renderer_host/media/video_capture_buffer_pool_unittest.cc index 4f1b29d..90b2d49 100644 --- a/content/browser/renderer_host/media/video_capture_buffer_pool_unittest.cc +++ b/content/browser/renderer_host/media/video_capture_buffer_pool_unittest.cc @@ -23,16 +23,16 @@ namespace content { struct PixelFormatAndStorage { - media::VideoCapturePixelFormat pixel_format; + media::VideoPixelFormat pixel_format; media::VideoPixelStorage pixel_storage; }; static const PixelFormatAndStorage kCapturePixelFormatAndStorages[] = { - {media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU}, - {media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB, media::PIXEL_STORAGE_CPU}, - {media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB, media::PIXEL_STORAGE_TEXTURE}, + {media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU}, + {media::PIXEL_FORMAT_ARGB, media::PIXEL_STORAGE_CPU}, + {media::PIXEL_FORMAT_ARGB, media::PIXEL_STORAGE_TEXTURE}, #if !defined(OS_ANDROID) - {media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, + {media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_GPUMEMORYBUFFER}, #endif }; @@ -161,9 +161,8 @@ class VideoCaptureBufferPoolTest int buffer_id_to_drop = ~expected_dropped_id_; DVLOG(1) << media::VideoCaptureFormat::PixelStorageToString( format_and_storage.pixel_storage) << " " - << media::VideoCaptureFormat::PixelFormatToString( - format_and_storage.pixel_format) << " " - << dimensions.ToString(); + << media::VideoPixelFormatToString(format_and_storage.pixel_format) + << " " << 
dimensions.ToString(); const int buffer_id = pool_->ReserveForProducer( format_and_storage.pixel_format, format_and_storage.pixel_storage, dimensions, &buffer_id_to_drop); diff --git a/content/browser/renderer_host/media/video_capture_controller_unittest.cc b/content/browser/renderer_host/media/video_capture_controller_unittest.cc index a999db1..e376b90 100644 --- a/content/browser/renderer_host/media/video_capture_controller_unittest.cc +++ b/content/browser/renderer_host/media/video_capture_controller_unittest.cc @@ -166,7 +166,7 @@ class VideoCaptureControllerTest : public testing::Test { TEST_F(VideoCaptureControllerTest, AddAndRemoveClients) { media::VideoCaptureParams session_100; session_100.requested_format = media::VideoCaptureFormat( - gfx::Size(320, 240), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420); media::VideoCaptureParams session_200 = session_100; media::VideoCaptureParams session_300 = session_100; @@ -278,7 +278,7 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { media::VideoCaptureParams session_100; session_100.requested_format = media::VideoCaptureFormat( - gfx::Size(320, 240), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420); media::VideoCaptureParams session_200 = session_100; @@ -291,7 +291,7 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { // The device format needn't match the VideoCaptureParams (the camera can do // what it wants). Pick something random. 
media::VideoCaptureFormat device_format( - gfx::Size(10, 10), 25, media::VIDEO_CAPTURE_PIXEL_FORMAT_RGB24); + gfx::Size(10, 10), 25, media::PIXEL_FORMAT_RGB24); const VideoCaptureControllerID client_a_route_1(0xa1a1a1a1); const VideoCaptureControllerID client_a_route_2(0xa2a2a2a2); @@ -322,7 +322,7 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { ASSERT_EQ(0.0, device_->GetBufferPoolUtilization()); scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer( device_->ReserveOutputBuffer(capture_resolution, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, + media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU)); ASSERT_TRUE(buffer.get()); ASSERT_EQ(1.0 / kPoolSize, device_->GetBufferPoolUtilization()); @@ -373,7 +373,7 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { // delay. This shouldn't affect anything. scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer2 = device_->ReserveOutputBuffer(capture_resolution, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, + media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); ASSERT_TRUE(buffer2.get()); memset(buffer2->data(), buffer_no++, buffer2->mapped_size()); @@ -419,7 +419,7 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { for (int i = 0; i < kPoolSize; i++) { scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer = device_->ReserveOutputBuffer(capture_resolution, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, + media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); ASSERT_TRUE(buffer.get()); memset(buffer->data(), buffer_no++, buffer->mapped_size()); @@ -431,7 +431,7 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { // ReserveOutputBuffer ought to fail now, because the pool is depleted. ASSERT_FALSE( device_->ReserveOutputBuffer(capture_resolution, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, + media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU).get()); // The new client needs to be told of 3 buffers; the old clients only 2. 
@@ -467,7 +467,7 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { // Queue up another buffer. scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer3 = device_->ReserveOutputBuffer(capture_resolution, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, + media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); ASSERT_TRUE(buffer3.get()); memset(buffer3->data(), buffer_no++, buffer3->mapped_size()); @@ -478,7 +478,7 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer4 = device_->ReserveOutputBuffer(capture_resolution, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, + media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); { // Kill A2 via session close (posts a task to disconnect, but A2 must not @@ -517,7 +517,7 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { for (int i = 0; i < shm_buffers; ++i) { scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer = device_->ReserveOutputBuffer(capture_resolution, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, + media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); ASSERT_TRUE(buffer.get()); video_frame = @@ -530,7 +530,7 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { for (int i = 0; i < mailbox_buffers; ++i) { scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer = device_->ReserveOutputBuffer(capture_resolution, - media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB, + media::PIXEL_FORMAT_ARGB, media::PIXEL_STORAGE_TEXTURE); ASSERT_TRUE(buffer.get()); #if !defined(OS_ANDROID) @@ -549,11 +549,11 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { // the pool is depleted. 
ASSERT_FALSE( device_->ReserveOutputBuffer(capture_resolution, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, + media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU).get()); ASSERT_FALSE( device_->ReserveOutputBuffer(capture_resolution, - media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB, + media::PIXEL_FORMAT_ARGB, media::PIXEL_STORAGE_TEXTURE).get()); EXPECT_CALL(*client_b_, DoI420BufferReady(client_b_route_2, capture_resolution)) @@ -584,7 +584,7 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { TEST_F(VideoCaptureControllerTest, ErrorBeforeDeviceCreation) { media::VideoCaptureParams session_100; session_100.requested_format = media::VideoCaptureFormat( - gfx::Size(320, 240), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420); media::VideoCaptureParams session_200 = session_100; @@ -610,7 +610,7 @@ TEST_F(VideoCaptureControllerTest, ErrorBeforeDeviceCreation) { scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer( device_->ReserveOutputBuffer(capture_resolution, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, + media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU)); ASSERT_TRUE(buffer.get()); scoped_refptr<media::VideoFrame> video_frame = @@ -626,7 +626,7 @@ TEST_F(VideoCaptureControllerTest, ErrorBeforeDeviceCreation) { TEST_F(VideoCaptureControllerTest, ErrorAfterDeviceCreation) { media::VideoCaptureParams session_100; session_100.requested_format = media::VideoCaptureFormat( - gfx::Size(320, 240), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420); media::VideoCaptureParams session_200 = session_100; @@ -636,7 +636,7 @@ TEST_F(VideoCaptureControllerTest, ErrorAfterDeviceCreation) { controller_->AddClient( route_id, client_a_.get(), base::kNullProcessHandle, 100, session_100); media::VideoCaptureFormat device_format( - gfx::Size(10, 10), 25, media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB); + gfx::Size(10, 10), 25, media::PIXEL_FORMAT_ARGB); // Start the device. 
Then, before the first buffer, signal an error and // deliver the buffer. The error should be propagated to clients; the buffer @@ -646,7 +646,7 @@ TEST_F(VideoCaptureControllerTest, ErrorAfterDeviceCreation) { const gfx::Size dims(320, 240); scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer( - device_->ReserveOutputBuffer(dims, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, + device_->ReserveOutputBuffer(dims, media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU)); ASSERT_TRUE(buffer.get()); diff --git a/content/browser/renderer_host/media/video_capture_device_client.cc b/content/browser/renderer_host/media/video_capture_device_client.cc index 2adf8d1..8e8d029 100644 --- a/content/browser/renderer_host/media/video_capture_device_client.cc +++ b/content/browser/renderer_host/media/video_capture_device_client.cc @@ -210,7 +210,7 @@ VideoCaptureDeviceClient::VideoCaptureDeviceClient( use_gpu_memory_buffers_(base::CommandLine::ForCurrentProcess()->HasSwitch( switches::kUseGpuMemoryBuffersForCapture)), capture_task_runner_(capture_task_runner), - last_captured_pixel_format_(media::VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN) { + last_captured_pixel_format_(media::PIXEL_FORMAT_UNKNOWN) { DCHECK_CURRENTLY_ON(BrowserThread::IO); } @@ -231,16 +231,15 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData( if (last_captured_pixel_format_ != frame_format.pixel_format) { OnLog("Pixel format: " + - VideoCaptureFormat::PixelFormatToString(frame_format.pixel_format)); + media::VideoPixelFormatToString(frame_format.pixel_format)); last_captured_pixel_format_ = frame_format.pixel_format; - if (frame_format.pixel_format == media::VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG && + if (frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG && !external_jpeg_decoder_initialized_) { external_jpeg_decoder_initialized_ = true; - external_jpeg_decoder_.reset(new VideoCaptureGpuJpegDecoder( - base::Bind( - &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, - controller_))); + 
external_jpeg_decoder_.reset(new VideoCaptureGpuJpegDecoder(base::Bind( + &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, + controller_))); external_jpeg_decoder_->Initialize(); } } @@ -291,33 +290,33 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData( bool flip = false; switch (frame_format.pixel_format) { - case media::VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN: // Color format not set. + case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. break; - case media::VIDEO_CAPTURE_PIXEL_FORMAT_I420: + case media::PIXEL_FORMAT_I420: DCHECK(!chopped_width && !chopped_height); origin_colorspace = libyuv::FOURCC_I420; break; - case media::VIDEO_CAPTURE_PIXEL_FORMAT_YV12: + case media::PIXEL_FORMAT_YV12: DCHECK(!chopped_width && !chopped_height); origin_colorspace = libyuv::FOURCC_YV12; break; - case media::VIDEO_CAPTURE_PIXEL_FORMAT_NV12: + case media::PIXEL_FORMAT_NV12: DCHECK(!chopped_width && !chopped_height); origin_colorspace = libyuv::FOURCC_NV12; break; - case media::VIDEO_CAPTURE_PIXEL_FORMAT_NV21: + case media::PIXEL_FORMAT_NV21: DCHECK(!chopped_width && !chopped_height); origin_colorspace = libyuv::FOURCC_NV21; break; - case media::VIDEO_CAPTURE_PIXEL_FORMAT_YUY2: + case media::PIXEL_FORMAT_YUY2: DCHECK(!chopped_width && !chopped_height); origin_colorspace = libyuv::FOURCC_YUY2; break; - case media::VIDEO_CAPTURE_PIXEL_FORMAT_UYVY: + case media::PIXEL_FORMAT_UYVY: DCHECK(!chopped_width && !chopped_height); origin_colorspace = libyuv::FOURCC_UYVY; break; - case media::VIDEO_CAPTURE_PIXEL_FORMAT_RGB24: + case media::PIXEL_FORMAT_RGB24: // Linux RGB24 defines red at lowest byte address, // see http://linuxtv.org/downloads/v4l-dvb-apis/packed-rgb.html. 
// Windows RGB24 defines blue at lowest byte, @@ -337,16 +336,16 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData( flip = true; #endif break; - case media::VIDEO_CAPTURE_PIXEL_FORMAT_RGB32: -// Fallback to VIDEO_CAPTURE_PIXEL_FORMAT_ARGB setting |flip| in Windows + case media::PIXEL_FORMAT_RGB32: +// Fallback to PIXEL_FORMAT_ARGB setting |flip| in Windows // platforms. #if defined(OS_WIN) flip = true; #endif - case media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB: + case media::PIXEL_FORMAT_ARGB: origin_colorspace = libyuv::FOURCC_ARGB; break; - case media::VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG: + case media::PIXEL_FORMAT_MJPEG: origin_colorspace = libyuv::FOURCC_MJPG; break; default: @@ -363,7 +362,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData( if (status == VideoCaptureGpuJpegDecoder::FAILED) { external_jpeg_decoder_.reset(); } else if (status == VideoCaptureGpuJpegDecoder::INIT_PASSED && - frame_format.pixel_format == media::VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG && + frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG && rotation == 0 && !flip) { external_jpeg_decoder_->DecodeCapturedData(data, length, frame_format, timestamp, buffer.Pass()); @@ -388,14 +387,13 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData( rotation_mode, origin_colorspace) != 0) { DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from " - << VideoCaptureFormat::PixelFormatToString( - frame_format.pixel_format); + << media::VideoPixelFormatToString(frame_format.pixel_format); return; } const VideoCaptureFormat output_format = VideoCaptureFormat( dimensions, frame_format.frame_rate, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, output_pixel_storage); + media::PIXEL_FORMAT_I420, output_pixel_storage); OnIncomingCapturedBuffer(buffer.Pass(), output_format, timestamp); } @@ -411,7 +409,7 @@ VideoCaptureDeviceClient::OnIncomingCapturedYuvData( int clockwise_rotation, const base::TimeTicks& timestamp) { TRACE_EVENT0("video", 
"VideoCaptureDeviceClient::OnIncomingCapturedYuvData"); - DCHECK_EQ(media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, frame_format.pixel_format); + DCHECK_EQ(media::PIXEL_FORMAT_I420, frame_format.pixel_format); DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage); DCHECK_EQ(0, clockwise_rotation) << "Rotation not supported"; @@ -453,16 +451,16 @@ VideoCaptureDeviceClient::OnIncomingCapturedYuvData( scoped_ptr<media::VideoCaptureDevice::Client::Buffer> VideoCaptureDeviceClient::ReserveOutputBuffer( const gfx::Size& frame_size, - media::VideoCapturePixelFormat pixel_format, + media::VideoPixelFormat pixel_format, media::VideoPixelStorage pixel_storage) { - DCHECK(pixel_format == media::VIDEO_CAPTURE_PIXEL_FORMAT_I420 || - pixel_format == media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB); + DCHECK(pixel_format == media::PIXEL_FORMAT_I420 || + pixel_format == media::PIXEL_FORMAT_ARGB); DCHECK_GT(frame_size.width(), 0); DCHECK_GT(frame_size.height(), 0); if (pixel_storage == media::PIXEL_STORAGE_GPUMEMORYBUFFER && !texture_wrap_helper_) { - DCHECK(pixel_format == media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + DCHECK(pixel_format == media::PIXEL_FORMAT_I420); texture_wrap_helper_ = new TextureWrapHelper(controller_, capture_task_runner_); } @@ -502,9 +500,9 @@ void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( timestamp)); } else { #ifndef NDEBUG - media::VideoCapturePixelFormat pixel_format = frame_format.pixel_format; - DCHECK(pixel_format == media::VIDEO_CAPTURE_PIXEL_FORMAT_I420 || - pixel_format == media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB); + media::VideoPixelFormat pixel_format = frame_format.pixel_format; + DCHECK(pixel_format == media::PIXEL_FORMAT_I420 || + pixel_format == media::PIXEL_FORMAT_ARGB); #endif scoped_refptr<VideoFrame> video_frame = VideoFrame::WrapExternalData( @@ -576,7 +574,7 @@ VideoCaptureDeviceClient::ReserveI420OutputBuffer( const media::VideoPixelFormat format = media::PIXEL_FORMAT_I420; scoped_ptr<Buffer> buffer(ReserveOutputBuffer( - dimensions, 
media::VIDEO_CAPTURE_PIXEL_FORMAT_I420, storage)); + dimensions, media::PIXEL_FORMAT_I420, storage)); if (!buffer) return scoped_ptr<Buffer>(); @@ -616,7 +614,7 @@ VideoCaptureDeviceClient::TextureWrapHelper::OnIncomingCapturedGpuMemoryBuffer( const media::VideoCaptureFormat& frame_format, const base::TimeTicks& timestamp) { DCHECK(capture_task_runner_->BelongsToCurrentThread()); - DCHECK(media::VIDEO_CAPTURE_PIXEL_FORMAT_I420 == frame_format.pixel_format); + DCHECK(media::PIXEL_FORMAT_I420 == frame_format.pixel_format); DCHECK_EQ(media::PIXEL_STORAGE_GPUMEMORYBUFFER, frame_format.pixel_storage); if (!gl_helper_) { // |gl_helper_| might not exist due to asynchronous initialization not diff --git a/content/browser/renderer_host/media/video_capture_device_client.h b/content/browser/renderer_host/media/video_capture_device_client.h index 7f43683..80838ee 100644 --- a/content/browser/renderer_host/media/video_capture_device_client.h +++ b/content/browser/renderer_host/media/video_capture_device_client.h @@ -57,7 +57,7 @@ class CONTENT_EXPORT VideoCaptureDeviceClient const base::TimeTicks& timestamp) override; scoped_ptr<Buffer> ReserveOutputBuffer( const gfx::Size& dimensions, - media::VideoCapturePixelFormat format, + media::VideoPixelFormat format, media::VideoPixelStorage storage) override; void OnIncomingCapturedBuffer(scoped_ptr<Buffer> buffer, const media::VideoCaptureFormat& frame_format, @@ -112,7 +112,7 @@ class CONTENT_EXPORT VideoCaptureDeviceClient // lives. 
const scoped_refptr<base::SingleThreadTaskRunner> capture_task_runner_; - media::VideoCapturePixelFormat last_captured_pixel_format_; + media::VideoPixelFormat last_captured_pixel_format_; DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceClient); }; diff --git a/content/browser/renderer_host/media/video_capture_device_client_unittest.cc b/content/browser/renderer_host/media/video_capture_device_client_unittest.cc index c02ede6..63e9bea 100644 --- a/content/browser/renderer_host/media/video_capture_device_client_unittest.cc +++ b/content/browser/renderer_host/media/video_capture_device_client_unittest.cc @@ -74,8 +74,8 @@ TEST_F(VideoCaptureDeviceClientTest, Minimal) { unsigned char data[kScratchpadSizeInBytes] = {}; const media::VideoCaptureFormat kFrameFormat( gfx::Size(10, 10), 30.0f /*frame_rate*/, - media::VideoCapturePixelFormat::VIDEO_CAPTURE_PIXEL_FORMAT_I420, - media::VideoPixelStorage::PIXEL_STORAGE_CPU); + media::PIXEL_FORMAT_I420, + media::PIXEL_STORAGE_CPU); DCHECK(device_client_.get()); EXPECT_CALL(*controller_, MockDoIncomingCapturedVideoFrameOnIOThread(_)) .Times(1); @@ -94,7 +94,7 @@ TEST_F(VideoCaptureDeviceClientTest, FailsSilentlyGivenInvalidFrameFormat) { const media::VideoCaptureFormat kFrameFormat( gfx::Size(media::limits::kMaxDimension + 1, media::limits::kMaxDimension), media::limits::kMaxFramesPerSecond + 1, - media::VideoCapturePixelFormat::VIDEO_CAPTURE_PIXEL_FORMAT_I420, + media::VideoPixelFormat::PIXEL_FORMAT_I420, media::VideoPixelStorage::PIXEL_STORAGE_CPU); DCHECK(device_client_.get()); // Expect the the call to fail silently inside the VideoCaptureDeviceClient. 
@@ -113,8 +113,8 @@ TEST_F(VideoCaptureDeviceClientTest, DropsFrameIfNoBuffer) { unsigned char data[kScratchpadSizeInBytes] = {}; const media::VideoCaptureFormat kFrameFormat( gfx::Size(10, 10), 30.0f /*frame_rate*/, - media::VideoCapturePixelFormat::VIDEO_CAPTURE_PIXEL_FORMAT_I420, - media::VideoPixelStorage::PIXEL_STORAGE_CPU); + media::PIXEL_FORMAT_I420, + media::PIXEL_STORAGE_CPU); // We expect the second frame to be silently dropped, so these should // only be called once despite the two frames. EXPECT_CALL(*controller_, MockDoIncomingCapturedVideoFrameOnIOThread(_)) @@ -142,21 +142,26 @@ TEST_F(VideoCaptureDeviceClientTest, DataCaptureInEachVideoFormatInSequence) { ASSERT_GE(kScratchpadSizeInBytes, capture_resolution.GetArea() * 4u) << "Scratchpad is too small to hold the largest pixel format (ARGB)."; - for (int format = 0; format < media::VIDEO_CAPTURE_PIXEL_FORMAT_MAX; + for (int format = 0; format < media::PIXEL_FORMAT_MAX; ++format) { - // Conversion from MJPEG to I420 seems to be unsupported. - if (format == media::VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN || - format == media::VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG) { + // Conversion from some formats are unsupported. 
+ if (format == media::PIXEL_FORMAT_UNKNOWN || + format == media::PIXEL_FORMAT_YV16 || + format == media::PIXEL_FORMAT_YV12A || + format == media::PIXEL_FORMAT_YV24 || + format == media::PIXEL_FORMAT_ARGB || + format == media::PIXEL_FORMAT_XRGB || + format == media::PIXEL_FORMAT_MJPEG) { continue; } #if !defined(OS_LINUX) && !defined(OS_WIN) - if (format == media::VIDEO_CAPTURE_PIXEL_FORMAT_RGB24){ + if (format == media::PIXEL_FORMAT_RGB24){ continue; } #endif media::VideoCaptureParams params; params.requested_format = media::VideoCaptureFormat( - capture_resolution, 30.0f, media::VideoCapturePixelFormat(format)); + capture_resolution, 30.0f, media::VideoPixelFormat(format)); EXPECT_CALL(*controller_, MockDoIncomingCapturedVideoFrameOnIOThread(_)) .Times(1); device_client_->OnIncomingCapturedData( @@ -197,7 +202,7 @@ TEST_F(VideoCaptureDeviceClientTest, CheckRotationsAndCrops) { << "Scratchpad is too small to hold the largest pixel format (ARGB)."; params.requested_format = media::VideoCaptureFormat(size_and_rotation.input_resolution, 30.0f, - media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB); + media::PIXEL_FORMAT_ARGB); gfx::Size coded_size; EXPECT_CALL(*controller_, MockDoIncomingCapturedVideoFrameOnIOThread(_)) .Times(1) diff --git a/content/browser/renderer_host/media/video_capture_host_unittest.cc b/content/browser/renderer_host/media/video_capture_host_unittest.cc index 5256d29..70c6112 100644 --- a/content/browser/renderer_host/media/video_capture_host_unittest.cc +++ b/content/browser/renderer_host/media/video_capture_host_unittest.cc @@ -383,7 +383,7 @@ class VideoCaptureHostTest : public testing::Test { media::VideoCaptureParams params; params.requested_format = media::VideoCaptureFormat( - gfx::Size(352, 288), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + gfx::Size(352, 288), 30, media::PIXEL_FORMAT_I420); host_->OnStartCapture(kDeviceId, opened_session_id_, params); run_loop.Run(); } @@ -397,7 +397,7 @@ class VideoCaptureHostTest : public testing::Test { 
OnStateChanged(kDeviceId, VIDEO_CAPTURE_STATE_STOPPED)); media::VideoCaptureParams params; params.requested_format = media::VideoCaptureFormat( - gfx::Size(352, 288), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + gfx::Size(352, 288), 30, media::PIXEL_FORMAT_I420); host_->OnStartCapture(kDeviceId, opened_session_id_, params); host_->OnStopCapture(kDeviceId); run_loop.RunUntilIdle(); diff --git a/content/browser/renderer_host/media/video_capture_manager.cc b/content/browser/renderer_host/media/video_capture_manager.cc index 7b5c5ab..cd745d8 100644 --- a/content/browser/renderer_host/media/video_capture_manager.cc +++ b/content/browser/renderer_host/media/video_capture_manager.cc @@ -71,7 +71,7 @@ void ConsolidateCaptureFormats(media::VideoCaptureFormats* formats) { // anyhow: the actual pixel format is decided at the device level. for (media::VideoCaptureFormats::iterator it = formats->begin(); it != formats->end(); ++it) { - it->pixel_format = media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + it->pixel_format = media::PIXEL_FORMAT_I420; } } diff --git a/content/browser/renderer_host/media/video_capture_manager_unittest.cc b/content/browser/renderer_host/media/video_capture_manager_unittest.cc index d4ca386..484abe4 100644 --- a/content/browser/renderer_host/media/video_capture_manager_unittest.cc +++ b/content/browser/renderer_host/media/video_capture_manager_unittest.cc @@ -105,7 +105,7 @@ class VideoCaptureManagerTest : public testing::Test { VideoCaptureControllerID StartClient(int session_id, bool expect_success) { media::VideoCaptureParams params; params.requested_format = media::VideoCaptureFormat( - gfx::Size(320, 240), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420); VideoCaptureControllerID client_id(next_client_id_++); base::RunLoop run_loop; diff --git a/content/common/media/video_capture_messages.h b/content/common/media/video_capture_messages.h index bd78d06..38cee11 100644 --- 
a/content/common/media/video_capture_messages.h +++ b/content/common/media/video_capture_messages.h @@ -23,8 +23,6 @@ IPC_ENUM_TRAITS_MAX_VALUE(media::ResolutionChangePolicy, IPC_ENUM_TRAITS_MAX_VALUE(media::VideoPixelFormat, media::PIXEL_FORMAT_MAX) IPC_ENUM_TRAITS_MAX_VALUE(media::VideoFrame::StorageType, media::VideoFrame::STORAGE_LAST) -IPC_ENUM_TRAITS_MAX_VALUE(media::VideoCapturePixelFormat, - media::VIDEO_CAPTURE_PIXEL_FORMAT_MAX) IPC_ENUM_TRAITS_MAX_VALUE(media::VideoPixelStorage, media::PIXEL_STORAGE_MAX) IPC_ENUM_TRAITS_MAX_VALUE(media::PowerLineFrequency, media::PowerLineFrequency::FREQUENCY_MAX) diff --git a/content/renderer/media/media_stream_video_capturer_source.cc b/content/renderer/media/media_stream_video_capturer_source.cc index f56858a1..cb3829e 100644 --- a/content/renderer/media/media_stream_video_capturer_source.cc +++ b/content/renderer/media/media_stream_video_capturer_source.cc @@ -236,7 +236,7 @@ void VideoCapturerDelegate::GetCurrentSupportedFormats( gfx::Size(width, height), static_cast<float>( std::min(kMaxScreenCastFrameRate, max_requested_frame_rate)), - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420))); + media::PIXEL_FORMAT_I420))); return; } @@ -362,7 +362,7 @@ void VideoCapturerDelegate::OnDeviceSupportedFormatsEnumerated( for (const auto frame_rate : kVideoFrameRates) { default_formats.push_back(media::VideoCaptureFormat( gfx::Size(resolution.width, resolution.height), frame_rate, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420)); + media::PIXEL_FORMAT_I420)); } } base::ResetAndReturn(&source_formats_callback_).Run(default_formats); diff --git a/content/renderer/media/media_stream_video_capturer_source_unittest.cc b/content/renderer/media/media_stream_video_capturer_source_unittest.cc index b7eb656..f137d28 100644 --- a/content/renderer/media/media_stream_video_capturer_source_unittest.cc +++ b/content/renderer/media/media_stream_video_capturer_source_unittest.cc @@ -116,8 +116,7 @@ TEST_F(MediaStreamVideoCapturerSourceTest, 
TabCaptureFixedResolutionByDefault) { MediaStreamVideoSource::kDefaultHeight); expected_params.requested_format.frame_rate = MediaStreamVideoSource::kDefaultFrameRate; - expected_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + expected_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; expected_params.resolution_change_policy = media::RESOLUTION_POLICY_FIXED_RESOLUTION; @@ -141,8 +140,7 @@ TEST_F(MediaStreamVideoCapturerSourceTest, MediaStreamVideoSource::kDefaultHeight); expected_params.requested_format.frame_rate = MediaStreamVideoSource::kDefaultFrameRate; - expected_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + expected_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; expected_params.resolution_change_policy = media::RESOLUTION_POLICY_ANY_WITHIN_LIMIT; @@ -169,8 +167,7 @@ TEST_F(MediaStreamVideoCapturerSourceTest, media::VideoCaptureParams expected_params; expected_params.requested_format.frame_size.SetSize(1920, 1080); expected_params.requested_format.frame_rate = 60.0; - expected_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + expected_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; expected_params.resolution_change_policy = media::RESOLUTION_POLICY_FIXED_ASPECT_RATIO; @@ -203,8 +200,7 @@ TEST_F(MediaStreamVideoCapturerSourceTest, media::VideoCaptureParams expected_params; expected_params.requested_format.frame_size.SetSize(1920, 1080); expected_params.requested_format.frame_rate = 60.0; - expected_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + expected_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; expected_params.resolution_change_policy = media::RESOLUTION_POLICY_ANY_WITHIN_LIMIT; @@ -230,8 +226,7 @@ TEST_F(MediaStreamVideoCapturerSourceTest, MediaStreamVideoSource::kDefaultHeight); expected_params.requested_format.frame_rate = 
MediaStreamVideoSource::kDefaultFrameRate; - expected_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + expected_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; expected_params.resolution_change_policy = media::RESOLUTION_POLICY_FIXED_RESOLUTION; if (frequency == 50) { @@ -269,8 +264,7 @@ TEST_F(MediaStreamVideoCapturerSourceTest, MediaStreamVideoSource::kDefaultHeight); expected_params.requested_format.frame_rate = MediaStreamVideoSource::kDefaultFrameRate; - expected_params.requested_format.pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + expected_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420; expected_params.resolution_change_policy = media::RESOLUTION_POLICY_FIXED_RESOLUTION; // Invalid frequencies should result in default setting. diff --git a/content/renderer/media/media_stream_video_source_unittest.cc b/content/renderer/media/media_stream_video_source_unittest.cc index e7f59fc..b2afab4 100644 --- a/content/renderer/media/media_stream_video_source_unittest.cc +++ b/content/renderer/media/media_stream_video_source_unittest.cc @@ -41,13 +41,13 @@ class MediaStreamVideoSourceTest : public ::testing::Test { mock_source_(new MockMediaStreamVideoSource(true)) { media::VideoCaptureFormats formats; formats.push_back(media::VideoCaptureFormat( - gfx::Size(1280, 720), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420)); + gfx::Size(1280, 720), 30, media::PIXEL_FORMAT_I420)); formats.push_back(media::VideoCaptureFormat( - gfx::Size(640, 480), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420)); + gfx::Size(640, 480), 30, media::PIXEL_FORMAT_I420)); formats.push_back(media::VideoCaptureFormat( - gfx::Size(352, 288), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420)); + gfx::Size(352, 288), 30, media::PIXEL_FORMAT_I420)); formats.push_back(media::VideoCaptureFormat( - gfx::Size(320, 240), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420)); + gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420)); 
mock_source_->SetSupportedFormats(formats); webkit_source_.initialize(base::UTF8ToUTF16("dummy_source_id"), blink::WebMediaStreamSource::TypeVideo, @@ -440,7 +440,7 @@ TEST_F(MediaStreamVideoSourceTest, DefaultCapability) { gfx::Size(MediaStreamVideoSource::kDefaultWidth, MediaStreamVideoSource::kDefaultHeight), MediaStreamVideoSource::kDefaultFrameRate, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420)); + media::PIXEL_FORMAT_I420)); mock_source()->SetSupportedFormats(formats); blink::WebMediaConstraints constraints; @@ -478,7 +478,7 @@ TEST_F(MediaStreamVideoSourceTest, InvalidOptionalConstraint) { TEST_F(MediaStreamVideoSourceTest, ScreencastResolutionWithConstraint) { media::VideoCaptureFormats formats; formats.push_back(media::VideoCaptureFormat( - gfx::Size(480, 270), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420)); + gfx::Size(480, 270), 30, media::PIXEL_FORMAT_I420)); mock_source()->SetSupportedFormats(formats); MockMediaConstraintFactory factory; factory.AddMandatory(MediaStreamVideoSource::kMaxWidth, 480); @@ -718,9 +718,9 @@ TEST_F(MediaStreamVideoSourceTest, IsConstraintSupported) { TEST_F(MediaStreamVideoSourceTest, Use0FpsSupportedFormat) { media::VideoCaptureFormats formats; formats.push_back(media::VideoCaptureFormat( - gfx::Size(640, 480), 0.0f, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420)); + gfx::Size(640, 480), 0.0f, media::PIXEL_FORMAT_I420)); formats.push_back(media::VideoCaptureFormat( - gfx::Size(320, 240), 0.0f, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420)); + gfx::Size(320, 240), 0.0f, media::PIXEL_FORMAT_I420)); mock_source()->SetSupportedFormats(formats); blink::WebMediaConstraints constraints; @@ -751,7 +751,7 @@ TEST_F(MediaStreamVideoSourceTest, MutedSource) { // PostDelayedTask that is dependent on the source frame rate. 
media::VideoCaptureFormats formats; formats.push_back(media::VideoCaptureFormat( - gfx::Size(640, 480), 2000, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420)); + gfx::Size(640, 480), 2000, media::PIXEL_FORMAT_I420)); SetSourceSupportedFormats(formats); MockMediaConstraintFactory factory; diff --git a/content/renderer/media/mock_media_stream_video_source.cc b/content/renderer/media/mock_media_stream_video_source.cc index e6c0a7f..f80e2c5 100644 --- a/content/renderer/media/mock_media_stream_video_source.cc +++ b/content/renderer/media/mock_media_stream_video_source.cc @@ -21,7 +21,7 @@ MockMediaStreamVideoSource::MockMediaStreamVideoSource( gfx::Size(MediaStreamVideoSource::kDefaultWidth, MediaStreamVideoSource::kDefaultHeight), MediaStreamVideoSource::kDefaultFrameRate, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420)); + media::PIXEL_FORMAT_I420)); } MockMediaStreamVideoSource::~MockMediaStreamVideoSource() {} diff --git a/content/renderer/media/video_capture_impl_manager_unittest.cc b/content/renderer/media/video_capture_impl_manager_unittest.cc index ed24f01..2193d93 100644 --- a/content/renderer/media/video_capture_impl_manager_unittest.cc +++ b/content/renderer/media/video_capture_impl_manager_unittest.cc @@ -72,7 +72,7 @@ class VideoCaptureImplManagerTest : public ::testing::Test { : manager_(new MockVideoCaptureImplManager( BindToCurrentLoop(cleanup_run_loop_.QuitClosure()))) { params_.requested_format = media::VideoCaptureFormat( - gfx::Size(176, 144), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + gfx::Size(176, 144), 30, media::PIXEL_FORMAT_I420); child_process_.reset(new ChildProcess()); } diff --git a/content/renderer/media/video_capture_impl_unittest.cc b/content/renderer/media/video_capture_impl_unittest.cc index 3860080..fbe8f5c 100644 --- a/content/renderer/media/video_capture_impl_unittest.cc +++ b/content/renderer/media/video_capture_impl_unittest.cc @@ -112,10 +112,10 @@ class VideoCaptureImplTest : public ::testing::Test { VideoCaptureImplTest() { 
params_small_.requested_format = media::VideoCaptureFormat( - gfx::Size(176, 144), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + gfx::Size(176, 144), 30, media::PIXEL_FORMAT_I420); params_large_.requested_format = media::VideoCaptureFormat( - gfx::Size(320, 240), 30, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420); child_process_.reset(new ChildProcess()); diff --git a/content/renderer/pepper/pepper_media_stream_video_track_host.cc b/content/renderer/pepper/pepper_media_stream_video_track_host.cc index 3193d04..af1790e 100644 --- a/content/renderer/pepper/pepper_media_stream_video_track_host.cc +++ b/content/renderer/pepper/pepper_media_stream_video_track_host.cc @@ -41,15 +41,15 @@ const char kPepperVideoSourceName[] = "PepperVideoSourceName"; // Default config for output mode. const int kDefaultOutputFrameRate = 30; -media::VideoCapturePixelFormat ToPixelFormat(PP_VideoFrame_Format format) { +media::VideoPixelFormat ToPixelFormat(PP_VideoFrame_Format format) { switch (format) { case PP_VIDEOFRAME_FORMAT_YV12: - return media::VIDEO_CAPTURE_PIXEL_FORMAT_YV12; + return media::PIXEL_FORMAT_YV12; case PP_VIDEOFRAME_FORMAT_I420: - return media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + return media::PIXEL_FORMAT_I420; default: DVLOG(1) << "Unsupported pixel format " << format; - return media::VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN; + return media::PIXEL_FORMAT_UNKNOWN; } } diff --git a/content/renderer/pepper/pepper_video_capture_host.cc b/content/renderer/pepper/pepper_video_capture_host.cc index 6e88fac..22b0389 100644 --- a/content/renderer/pepper/pepper_video_capture_host.cc +++ b/content/renderer/pepper/pepper_video_capture_host.cc @@ -351,7 +351,7 @@ void PepperVideoCaptureHost::SetRequestedInfo( video_capture_params_.requested_format = media::VideoCaptureFormat( gfx::Size(device_info.width, device_info.height), frames_per_second, - media::VIDEO_CAPTURE_PIXEL_FORMAT_I420); + media::PIXEL_FORMAT_I420); } void 
PepperVideoCaptureHost::DetachPlatformVideoCapture() { diff --git a/media/base/video_capture_types.cc b/media/base/video_capture_types.cc index f56935a0..0bc6ae6 100644 --- a/media/base/video_capture_types.cc +++ b/media/base/video_capture_types.cc @@ -7,18 +7,33 @@ #include "base/logging.h" #include "base/strings/stringprintf.h" #include "media/base/limits.h" +#include "media/base/video_frame.h" namespace media { +// This list is ordered by precedence of use. +static VideoPixelFormat const kSupportedCapturePixelFormats[] = { + PIXEL_FORMAT_I420, + PIXEL_FORMAT_YV12, + PIXEL_FORMAT_NV12, + PIXEL_FORMAT_NV21, + PIXEL_FORMAT_UYVY, + PIXEL_FORMAT_YUY2, + PIXEL_FORMAT_RGB24, + PIXEL_FORMAT_RGB32, + PIXEL_FORMAT_ARGB, + PIXEL_FORMAT_MJPEG, +}; + VideoCaptureFormat::VideoCaptureFormat() : frame_rate(0.0f), - pixel_format(VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN), + pixel_format(PIXEL_FORMAT_UNKNOWN), pixel_storage(PIXEL_STORAGE_CPU) { } VideoCaptureFormat::VideoCaptureFormat(const gfx::Size& frame_size, float frame_rate, - VideoCapturePixelFormat pixel_format) + VideoPixelFormat pixel_format) : frame_size(frame_size), frame_rate(frame_rate), pixel_format(pixel_format), @@ -27,7 +42,7 @@ VideoCaptureFormat::VideoCaptureFormat(const gfx::Size& frame_size, VideoCaptureFormat::VideoCaptureFormat(const gfx::Size& frame_size, float frame_rate, - VideoCapturePixelFormat pixel_format, + VideoPixelFormat pixel_format, VideoPixelStorage pixel_storage) : frame_size(frame_size), frame_rate(frame_rate), @@ -43,77 +58,20 @@ bool VideoCaptureFormat::IsValid() const { (frame_rate >= 0.0f) && (frame_rate < media::limits::kMaxFramesPerSecond) && (pixel_storage != PIXEL_STORAGE_TEXTURE || - pixel_format == VIDEO_CAPTURE_PIXEL_FORMAT_ARGB); + pixel_format == PIXEL_FORMAT_ARGB); } size_t VideoCaptureFormat::ImageAllocationSize() const { - size_t result_frame_size = frame_size.GetArea(); - switch (pixel_format) { - case VIDEO_CAPTURE_PIXEL_FORMAT_I420: - case VIDEO_CAPTURE_PIXEL_FORMAT_YV12: - case 
VIDEO_CAPTURE_PIXEL_FORMAT_NV12: - case VIDEO_CAPTURE_PIXEL_FORMAT_NV21: - result_frame_size = result_frame_size * 3 / 2; - break; - case VIDEO_CAPTURE_PIXEL_FORMAT_UYVY: - case VIDEO_CAPTURE_PIXEL_FORMAT_YUY2: - result_frame_size *= 2; - break; - case VIDEO_CAPTURE_PIXEL_FORMAT_RGB24: - result_frame_size *= 3; - break; - case VIDEO_CAPTURE_PIXEL_FORMAT_RGB32: - case VIDEO_CAPTURE_PIXEL_FORMAT_ARGB: - result_frame_size *= 4; - break; - case VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG: - result_frame_size = 0; - break; - default: // Sizes for the rest of the formats are unknown. - NOTREACHED() << "Unknown pixel format provided."; - break; - } - return result_frame_size; + return VideoFrame::AllocationSize(pixel_format, frame_size); } //static std::string VideoCaptureFormat::ToString(const VideoCaptureFormat& format) { - return base::StringPrintf("(%s)@%.3ffps, pixel format: %s storage: %s.", - format.frame_size.ToString().c_str(), - format.frame_rate, - PixelFormatToString(format.pixel_format).c_str(), - PixelStorageToString(format.pixel_storage).c_str()); -} - -// static -std::string VideoCaptureFormat::PixelFormatToString( - VideoCapturePixelFormat format) { - switch (format) { - case VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN: - return "UNKNOWN"; - case VIDEO_CAPTURE_PIXEL_FORMAT_I420: - return "I420"; - case VIDEO_CAPTURE_PIXEL_FORMAT_YUY2: - return "YUY2"; - case VIDEO_CAPTURE_PIXEL_FORMAT_UYVY: - return "UYVY"; - case VIDEO_CAPTURE_PIXEL_FORMAT_RGB24: - return "RGB24"; - case VIDEO_CAPTURE_PIXEL_FORMAT_RGB32: - return "RGB32"; - case VIDEO_CAPTURE_PIXEL_FORMAT_ARGB: - return "ARGB"; - case VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG: - return "MJPEG"; - case VIDEO_CAPTURE_PIXEL_FORMAT_NV12: - return "NV12"; - case VIDEO_CAPTURE_PIXEL_FORMAT_NV21: - return "NV21"; - case VIDEO_CAPTURE_PIXEL_FORMAT_YV12: - return "YV12"; - } - NOTREACHED() << "Invalid VideoCapturePixelFormat provided: " << format; - return std::string(); + return base::StringPrintf( + "(%s)@%.3ffps, pixel format: %s storage: 
%s.", + format.frame_size.ToString().c_str(), format.frame_rate, + VideoPixelFormatToString(format.pixel_format).c_str(), + PixelStorageToString(format.pixel_storage).c_str()); } // static @@ -132,6 +90,21 @@ std::string VideoCaptureFormat::PixelStorageToString( return std::string(); } +// static +bool VideoCaptureFormat::ComparePixelFormatPreference( + const VideoPixelFormat& lhs, + const VideoPixelFormat& rhs) { + const auto& format_lhs = std::find( + kSupportedCapturePixelFormats, + kSupportedCapturePixelFormats + arraysize(kSupportedCapturePixelFormats), + lhs); + const auto& format_rhs = std::find( + kSupportedCapturePixelFormats, + kSupportedCapturePixelFormats + arraysize(kSupportedCapturePixelFormats), + rhs); + return format_lhs < format_rhs; +} + VideoCaptureParams::VideoCaptureParams() : resolution_change_policy(RESOLUTION_POLICY_FIXED_RESOLUTION), power_line_frequency(PowerLineFrequency::FREQUENCY_DEFAULT) {} diff --git a/media/base/video_capture_types.h b/media/base/video_capture_types.h index 1a02570..4790ea5 100644 --- a/media/base/video_capture_types.h +++ b/media/base/video_capture_types.h @@ -9,6 +9,7 @@ #include "build/build_config.h" #include "media/base/media_export.h" +#include "media/base/video_types.h" #include "ui/gfx/geometry/size.h" namespace media { @@ -17,26 +18,6 @@ namespace media { // shared with device manager. typedef int VideoCaptureSessionId; -// TODO(dshwang): replace it with media::VideoPixelFormat. crbug.com/489744 -// Color formats from camera. This list is sorted in order of preference. -// TODO(emircan): http://crbug.com/521068 Consider if this list can be merged -// with media::Format. -// TODO(mcasas): http://crbug.com/504160 Consider making this an enum class. 
-enum VideoCapturePixelFormat { - VIDEO_CAPTURE_PIXEL_FORMAT_I420, - VIDEO_CAPTURE_PIXEL_FORMAT_YV12, - VIDEO_CAPTURE_PIXEL_FORMAT_NV12, - VIDEO_CAPTURE_PIXEL_FORMAT_NV21, - VIDEO_CAPTURE_PIXEL_FORMAT_UYVY, - VIDEO_CAPTURE_PIXEL_FORMAT_YUY2, - VIDEO_CAPTURE_PIXEL_FORMAT_RGB24, - VIDEO_CAPTURE_PIXEL_FORMAT_RGB32, - VIDEO_CAPTURE_PIXEL_FORMAT_ARGB, - VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG, - VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN, // Color format not set. - VIDEO_CAPTURE_PIXEL_FORMAT_MAX = VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN, -}; - // Storage type for the pixels. In principle, all combinations of Storage and // Format are possible, though some are very typical, such as texture + ARGB, // and others are only available if the platform allows it e.g. GpuMemoryBuffer. @@ -101,16 +82,20 @@ struct MEDIA_EXPORT VideoCaptureFormat { VideoCaptureFormat(); VideoCaptureFormat(const gfx::Size& frame_size, float frame_rate, - VideoCapturePixelFormat pixel_format); + VideoPixelFormat pixel_format); VideoCaptureFormat(const gfx::Size& frame_size, float frame_rate, - VideoCapturePixelFormat pixel_format, + VideoPixelFormat pixel_format, VideoPixelStorage pixel_storage); static std::string ToString(const VideoCaptureFormat& format); - static std::string PixelFormatToString(VideoCapturePixelFormat format); static std::string PixelStorageToString(VideoPixelStorage storage); + // Compares the priority of the pixel formats. Returns true if |lhs| is the + // preferred pixel format in comparison with |rhs|. Returns false otherwise. + static bool ComparePixelFormatPreference(const VideoPixelFormat& lhs, + const VideoPixelFormat& rhs); + // Returns the required buffer size to hold an image of a given // VideoCaptureFormat with no padding and tightly packed. 
size_t ImageAllocationSize() const; @@ -127,7 +112,7 @@ struct MEDIA_EXPORT VideoCaptureFormat { gfx::Size frame_size; float frame_rate; - VideoCapturePixelFormat pixel_format; + VideoPixelFormat pixel_format; VideoPixelStorage pixel_storage; }; diff --git a/media/base/video_frame.cc b/media/base/video_frame.cc index 662d6aa..2009664 100644 --- a/media/base/video_frame.cc +++ b/media/base/video_frame.cc @@ -87,12 +87,17 @@ static gfx::Size SampleSize(VideoPixelFormat format, size_t plane) { case PIXEL_FORMAT_I420: case PIXEL_FORMAT_YV12A: case PIXEL_FORMAT_NV12: + case PIXEL_FORMAT_NV21: return gfx::Size(2, 2); case PIXEL_FORMAT_UNKNOWN: + case PIXEL_FORMAT_UYVY: + case PIXEL_FORMAT_YUY2: case PIXEL_FORMAT_ARGB: case PIXEL_FORMAT_XRGB: - case PIXEL_FORMAT_UYVY: + case PIXEL_FORMAT_RGB24: + case PIXEL_FORMAT_RGB32: + case PIXEL_FORMAT_MJPEG: break; } } @@ -119,10 +124,15 @@ static int BytesPerElement(VideoPixelFormat format, size_t plane) { switch (format) { case PIXEL_FORMAT_ARGB: case PIXEL_FORMAT_XRGB: + case PIXEL_FORMAT_RGB32: return 4; + case PIXEL_FORMAT_RGB24: + return 3; case PIXEL_FORMAT_UYVY: + case PIXEL_FORMAT_YUY2: return 2; - case PIXEL_FORMAT_NV12: { + case PIXEL_FORMAT_NV12: + case PIXEL_FORMAT_NV21: { static const int bytes_per_element[] = {1, 2}; DCHECK_LT(plane, arraysize(bytes_per_element)); return bytes_per_element[plane]; @@ -133,6 +143,8 @@ static int BytesPerElement(VideoPixelFormat format, size_t plane) { case PIXEL_FORMAT_YV12A: case PIXEL_FORMAT_YV24: return 1; + case PIXEL_FORMAT_MJPEG: + return 0; case PIXEL_FORMAT_UNKNOWN: break; } @@ -164,7 +176,7 @@ bool VideoFrame::IsValidConfig(VideoPixelFormat format, return true; // Make sure new formats are properly accounted for in the method. 
- static_assert(PIXEL_FORMAT_MAX == 9, + static_assert(PIXEL_FORMAT_MAX == 14, "Added pixel format, please review IsValidConfig()"); if (format == PIXEL_FORMAT_UNKNOWN) { @@ -519,15 +531,20 @@ scoped_refptr<VideoFrame> VideoFrame::CreateHoleFrame( // static size_t VideoFrame::NumPlanes(VideoPixelFormat format) { switch (format) { + case PIXEL_FORMAT_UYVY: + case PIXEL_FORMAT_YUY2: case PIXEL_FORMAT_ARGB: case PIXEL_FORMAT_XRGB: - case PIXEL_FORMAT_UYVY: + case PIXEL_FORMAT_RGB24: + case PIXEL_FORMAT_RGB32: + case PIXEL_FORMAT_MJPEG: return 1; case PIXEL_FORMAT_NV12: + case PIXEL_FORMAT_NV21: return 2; + case PIXEL_FORMAT_I420: case PIXEL_FORMAT_YV12: case PIXEL_FORMAT_YV16: - case PIXEL_FORMAT_I420: case PIXEL_FORMAT_YV24: return 3; case PIXEL_FORMAT_YV12A: diff --git a/media/base/video_types.cc b/media/base/video_types.cc index e57b63b..627fc8e 100644 --- a/media/base/video_types.cc +++ b/media/base/video_types.cc @@ -12,24 +12,34 @@ std::string VideoPixelFormatToString(VideoPixelFormat format) { switch (format) { case PIXEL_FORMAT_UNKNOWN: return "PIXEL_FORMAT_UNKNOWN"; + case PIXEL_FORMAT_I420: + return "PIXEL_FORMAT_I420"; case PIXEL_FORMAT_YV12: return "PIXEL_FORMAT_YV12"; case PIXEL_FORMAT_YV16: return "PIXEL_FORMAT_YV16"; - case PIXEL_FORMAT_I420: - return "PIXEL_FORMAT_I420"; case PIXEL_FORMAT_YV12A: return "PIXEL_FORMAT_YV12A"; case PIXEL_FORMAT_YV24: return "PIXEL_FORMAT_YV24"; - case PIXEL_FORMAT_ARGB: - return "PIXEL_FORMAT_ARGB"; - case PIXEL_FORMAT_XRGB: - return "PIXEL_FORMAT_XRGB"; case PIXEL_FORMAT_NV12: return "PIXEL_FORMAT_NV12"; + case PIXEL_FORMAT_NV21: + return "PIXEL_FORMAT_NV21"; case PIXEL_FORMAT_UYVY: return "PIXEL_FORMAT_UYVY"; + case PIXEL_FORMAT_YUY2: + return "PIXEL_FORMAT_YUY2"; + case PIXEL_FORMAT_ARGB: + return "PIXEL_FORMAT_ARGB"; + case PIXEL_FORMAT_XRGB: + return "PIXEL_FORMAT_XRGB"; + case PIXEL_FORMAT_RGB24: + return "PIXEL_FORMAT_RGB24"; + case PIXEL_FORMAT_RGB32: + return "PIXEL_FORMAT_RGB32"; + case PIXEL_FORMAT_MJPEG: + 
return "PIXEL_FORMAT_MJPEG"; } NOTREACHED() << "Invalid VideoPixelFormat provided: " << format; return ""; @@ -43,12 +53,17 @@ bool IsYuvPlanar(VideoPixelFormat format) { case PIXEL_FORMAT_YV12A: case PIXEL_FORMAT_YV24: case PIXEL_FORMAT_NV12: + case PIXEL_FORMAT_NV21: return true; case PIXEL_FORMAT_UNKNOWN: + case PIXEL_FORMAT_UYVY: + case PIXEL_FORMAT_YUY2: case PIXEL_FORMAT_ARGB: case PIXEL_FORMAT_XRGB: - case PIXEL_FORMAT_UYVY: + case PIXEL_FORMAT_RGB24: + case PIXEL_FORMAT_RGB32: + case PIXEL_FORMAT_MJPEG: return false; } return false; diff --git a/media/base/video_types.h b/media/base/video_types.h index 5e122ed..cecd6f5 100644 --- a/media/base/video_types.h +++ b/media/base/video_types.h @@ -15,22 +15,31 @@ namespace media { // Pixel formats roughly based on FOURCC labels, see: // http://www.fourcc.org/rgb.php and http://www.fourcc.org/yuv.php // Logged to UMA, so never reuse values. Leave gaps if necessary. +// Ordered as planar, semi-planar, YUV-packed, and RGB formats. enum VideoPixelFormat { PIXEL_FORMAT_UNKNOWN = 0, // Unknown or unspecified format value. - PIXEL_FORMAT_YV12 = 1, // 12bpp YVU planar 1x1 Y, 2x2 VU samples. PIXEL_FORMAT_I420 = - 2, // 12bpp YUV planar 1x1 Y, 2x2 UV samples, a.k.a. YU12. + 1, // 12bpp YUV planar 1x1 Y, 2x2 UV samples, a.k.a. YU12. + PIXEL_FORMAT_YV12 = 2, // 12bpp YVU planar 1x1 Y, 2x2 VU samples. PIXEL_FORMAT_YV16 = 3, // 16bpp YVU planar 1x1 Y, 2x1 VU samples. PIXEL_FORMAT_YV12A = 4, // 20bpp YUVA planar 1x1 Y, 2x2 VU, 1x1 A samples. PIXEL_FORMAT_YV24 = 5, // 24bpp YUV planar, no subsampling. PIXEL_FORMAT_NV12 = 6, // 12bpp with Y plane followed by a 2x2 interleaved UV plane. - PIXEL_FORMAT_ARGB = 7, // 32bpp ARGB, 1 plane. - PIXEL_FORMAT_XRGB = 8, // 24bpp XRGB, 1 plane. - PIXEL_FORMAT_UYVY = 9, // 16bpp UYVY 4:2:2, 1 plane. + PIXEL_FORMAT_NV21 = + 7, // 12bpp with Y plane followed by a 2x2 interleaved VU plane. + PIXEL_FORMAT_UYVY = + 8, // 16bpp interleaved 2x1 U, 1x1 Y, 2x1 V, 1x1 Y samples. 
+ PIXEL_FORMAT_YUY2 = + 9, // 16bpp interleaved 1x1 Y, 2x1 U, 1x1 Y, 2x1 V samples. + PIXEL_FORMAT_ARGB = 10, // 32bpp ARGB, 1 plane. + PIXEL_FORMAT_XRGB = 11, // 24bpp XRGB, 1 plane. + PIXEL_FORMAT_RGB24 = 12, // 24bpp BGR, 1 plane. + PIXEL_FORMAT_RGB32 = 13, // 32bpp BGRA, 1 plane. + PIXEL_FORMAT_MJPEG = 14, // MJPEG compressed. // Please update UMA histogram enumeration when adding new formats here. PIXEL_FORMAT_MAX = - PIXEL_FORMAT_UYVY, // Must always be equal to largest entry logged. + PIXEL_FORMAT_MJPEG, // Must always be equal to largest entry logged. }; // Color space or color range used for the pixels. diff --git a/media/blink/skcanvas_video_renderer.cc b/media/blink/skcanvas_video_renderer.cc index 23f48b1..90297f7 100644 --- a/media/blink/skcanvas_video_renderer.cc +++ b/media/blink/skcanvas_video_renderer.cc @@ -412,33 +412,19 @@ void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels( NOTREACHED() << "Non YUV formats are not supported"; return; } - DCHECK_EQ(video_frame->stride(VideoFrame::kUPlane), video_frame->stride(VideoFrame::kVPlane)); - const int y_shift = - (video_frame->format() == media::PIXEL_FORMAT_YV16) ? 0 : 1; - // Use the "left" and "top" of the destination rect to locate the offset - // in Y, U and V planes. - const size_t y_offset = (video_frame->stride(VideoFrame::kYPlane) * - video_frame->visible_rect().y()) + - video_frame->visible_rect().x(); - // For format YV12, there is one U, V value per 2x2 block. - // For format YV16, there is one U, V value per 2x1 block. 
- const size_t uv_offset = (video_frame->stride(VideoFrame::kUPlane) * - (video_frame->visible_rect().y() >> y_shift)) + - (video_frame->visible_rect().x() >> 1); - switch (video_frame->format()) { case PIXEL_FORMAT_YV12: case PIXEL_FORMAT_I420: if (CheckColorSpace(video_frame, COLOR_SPACE_JPEG)) { libyuv::J420ToARGB( - video_frame->data(VideoFrame::kYPlane) + y_offset, + video_frame->visible_data(VideoFrame::kYPlane), video_frame->stride(VideoFrame::kYPlane), - video_frame->data(VideoFrame::kUPlane) + uv_offset, + video_frame->visible_data(VideoFrame::kUPlane), video_frame->stride(VideoFrame::kUPlane), - video_frame->data(VideoFrame::kVPlane) + uv_offset, + video_frame->visible_data(VideoFrame::kVPlane), video_frame->stride(VideoFrame::kVPlane), static_cast<uint8*>(rgb_pixels), row_bytes, @@ -454,9 +440,9 @@ void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels( video_frame->visible_rect().height()); #endif } else if (CheckColorSpace(video_frame, COLOR_SPACE_HD_REC709)) { - ConvertYUVToRGB32(video_frame->data(VideoFrame::kYPlane) + y_offset, - video_frame->data(VideoFrame::kUPlane) + uv_offset, - video_frame->data(VideoFrame::kVPlane) + uv_offset, + ConvertYUVToRGB32(video_frame->visible_data(VideoFrame::kYPlane), + video_frame->visible_data(VideoFrame::kUPlane), + video_frame->visible_data(VideoFrame::kVPlane), static_cast<uint8*>(rgb_pixels), video_frame->visible_rect().width(), video_frame->visible_rect().height(), @@ -465,11 +451,11 @@ void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels( YV12HD); } else { LIBYUV_I420_TO_ARGB( - video_frame->data(VideoFrame::kYPlane) + y_offset, + video_frame->visible_data(VideoFrame::kYPlane), video_frame->stride(VideoFrame::kYPlane), - video_frame->data(VideoFrame::kUPlane) + uv_offset, + video_frame->visible_data(VideoFrame::kUPlane), video_frame->stride(VideoFrame::kUPlane), - video_frame->data(VideoFrame::kVPlane) + uv_offset, + video_frame->visible_data(VideoFrame::kVPlane), 
video_frame->stride(VideoFrame::kVPlane), static_cast<uint8*>(rgb_pixels), row_bytes, @@ -479,11 +465,11 @@ void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels( break; case PIXEL_FORMAT_YV16: LIBYUV_I422_TO_ARGB( - video_frame->data(VideoFrame::kYPlane) + y_offset, + video_frame->visible_data(VideoFrame::kYPlane), video_frame->stride(VideoFrame::kYPlane), - video_frame->data(VideoFrame::kUPlane) + uv_offset, + video_frame->visible_data(VideoFrame::kUPlane), video_frame->stride(VideoFrame::kUPlane), - video_frame->data(VideoFrame::kVPlane) + uv_offset, + video_frame->visible_data(VideoFrame::kVPlane), video_frame->stride(VideoFrame::kVPlane), static_cast<uint8*>(rgb_pixels), row_bytes, @@ -493,13 +479,13 @@ void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels( case PIXEL_FORMAT_YV12A: LIBYUV_I420ALPHA_TO_ARGB( - video_frame->data(VideoFrame::kYPlane) + y_offset, + video_frame->visible_data(VideoFrame::kYPlane), video_frame->stride(VideoFrame::kYPlane), - video_frame->data(VideoFrame::kUPlane) + uv_offset, + video_frame->visible_data(VideoFrame::kUPlane), video_frame->stride(VideoFrame::kUPlane), - video_frame->data(VideoFrame::kVPlane) + uv_offset, + video_frame->visible_data(VideoFrame::kVPlane), video_frame->stride(VideoFrame::kVPlane), - video_frame->data(VideoFrame::kAPlane) + y_offset, + video_frame->visible_data(VideoFrame::kAPlane), video_frame->stride(VideoFrame::kAPlane), static_cast<uint8*>(rgb_pixels), row_bytes, @@ -509,11 +495,11 @@ void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels( case PIXEL_FORMAT_YV24: libyuv::I444ToARGB( - video_frame->data(VideoFrame::kYPlane) + y_offset, + video_frame->visible_data(VideoFrame::kYPlane), video_frame->stride(VideoFrame::kYPlane), - video_frame->data(VideoFrame::kUPlane) + uv_offset, + video_frame->visible_data(VideoFrame::kUPlane), video_frame->stride(VideoFrame::kUPlane), - video_frame->data(VideoFrame::kVPlane) + uv_offset, + video_frame->visible_data(VideoFrame::kVPlane), 
video_frame->stride(VideoFrame::kVPlane), static_cast<uint8*>(rgb_pixels), row_bytes, @@ -530,9 +516,14 @@ void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels( #endif break; case PIXEL_FORMAT_NV12: + case PIXEL_FORMAT_NV21: + case PIXEL_FORMAT_UYVY: + case PIXEL_FORMAT_YUY2: case PIXEL_FORMAT_ARGB: case PIXEL_FORMAT_XRGB: - case PIXEL_FORMAT_UYVY: + case PIXEL_FORMAT_RGB24: + case PIXEL_FORMAT_RGB32: + case PIXEL_FORMAT_MJPEG: case PIXEL_FORMAT_UNKNOWN: NOTREACHED(); } diff --git a/media/blink/video_frame_compositor.cc b/media/blink/video_frame_compositor.cc index cc6d207..a8a4169 100644 --- a/media/blink/video_frame_compositor.cc +++ b/media/blink/video_frame_compositor.cc @@ -20,16 +20,21 @@ const int kBackgroundRenderingTimeoutMs = 250; static bool IsOpaque(const scoped_refptr<VideoFrame>& frame) { switch (frame->format()) { case PIXEL_FORMAT_UNKNOWN: - case PIXEL_FORMAT_YV12: case PIXEL_FORMAT_I420: + case PIXEL_FORMAT_YV12: case PIXEL_FORMAT_YV16: case PIXEL_FORMAT_YV24: case PIXEL_FORMAT_NV12: - case PIXEL_FORMAT_XRGB: + case PIXEL_FORMAT_NV21: case PIXEL_FORMAT_UYVY: + case PIXEL_FORMAT_YUY2: + case PIXEL_FORMAT_XRGB: + case PIXEL_FORMAT_RGB24: + case PIXEL_FORMAT_MJPEG: return true; case PIXEL_FORMAT_YV12A: case PIXEL_FORMAT_ARGB: + case PIXEL_FORMAT_RGB32: break; } return false; diff --git a/media/capture/content/screen_capture_device_core.cc b/media/capture/content/screen_capture_device_core.cc index 53dd3818..d99ca5a 100644 --- a/media/capture/content/screen_capture_device_core.cc +++ b/media/capture/content/screen_capture_device_core.cc @@ -42,11 +42,9 @@ void ScreenCaptureDeviceCore::AllocateAndStart( return; } - if (!(params.requested_format.pixel_format == - VIDEO_CAPTURE_PIXEL_FORMAT_I420 && + if (!(params.requested_format.pixel_format == PIXEL_FORMAT_I420 && params.requested_format.pixel_storage == PIXEL_STORAGE_CPU) && - !(params.requested_format.pixel_format == - VIDEO_CAPTURE_PIXEL_FORMAT_ARGB && + !(params.requested_format.pixel_format == 
PIXEL_FORMAT_ARGB && params.requested_format.pixel_storage == PIXEL_STORAGE_TEXTURE)) { const std::string error_msg = base::StringPrintf( "unsupported format: %s", diff --git a/media/capture/content/thread_safe_capture_oracle.cc b/media/capture/content/thread_safe_capture_oracle.cc index 5bedc4a..22b1dcf 100644 --- a/media/capture/content/thread_safe_capture_oracle.cc +++ b/media/capture/content/thread_safe_capture_oracle.cc @@ -72,8 +72,8 @@ bool ThreadSafeCaptureOracle::ObserveEventAndDecideCapture( client_->ReserveOutputBuffer(coded_size, (params_.requested_format.pixel_storage != media::PIXEL_STORAGE_TEXTURE) - ? media::VIDEO_CAPTURE_PIXEL_FORMAT_I420 - : media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB, + ? media::PIXEL_FORMAT_I420 + : media::PIXEL_FORMAT_ARGB, params_.requested_format.pixel_storage)); // Get the current buffer pool utilization and attenuate it: The utilization // reported to the oracle is in terms of a maximum sustainable amount (not the diff --git a/media/capture/video/android/video_capture_device_android.cc b/media/capture/video/android/video_capture_device_android.cc index 9df39c4..69d8b05 100644 --- a/media/capture/video/android/video_capture_device_android.cc +++ b/media/capture/video/android/video_capture_device_android.cc @@ -79,8 +79,7 @@ void VideoCaptureDeviceAndroid::AllocateAndStart( capture_format_.frame_rate = Java_VideoCapture_queryFrameRate(env, j_capture_.obj()); capture_format_.pixel_format = GetColorspace(); - DCHECK_NE(capture_format_.pixel_format, - media::VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN); + DCHECK_NE(capture_format_.pixel_format, media::PIXEL_FORMAT_UNKNOWN); CHECK(capture_format_.frame_size.GetArea() > 0); CHECK(!(capture_format_.frame_size.width() % 2)); CHECK(!(capture_format_.frame_size.height() % 2)); @@ -175,20 +174,20 @@ void VideoCaptureDeviceAndroid::OnError(JNIEnv* env, SetErrorState(base::android::ConvertJavaStringToUTF8(env, message)); } -VideoCapturePixelFormat VideoCaptureDeviceAndroid::GetColorspace() { 
+VideoPixelFormat VideoCaptureDeviceAndroid::GetColorspace() { JNIEnv* env = AttachCurrentThread(); - int current_capture_colorspace = + const int current_capture_colorspace = Java_VideoCapture_getColorspace(env, j_capture_.obj()); switch (current_capture_colorspace) { case ANDROID_IMAGE_FORMAT_YV12: - return media::VIDEO_CAPTURE_PIXEL_FORMAT_YV12; + return media::PIXEL_FORMAT_YV12; case ANDROID_IMAGE_FORMAT_YUV_420_888: - return media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + return media::PIXEL_FORMAT_I420; case ANDROID_IMAGE_FORMAT_NV21: - return media::VIDEO_CAPTURE_PIXEL_FORMAT_NV21; + return media::PIXEL_FORMAT_NV21; case ANDROID_IMAGE_FORMAT_UNKNOWN: default: - return media::VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN; + return media::PIXEL_FORMAT_UNKNOWN; } } diff --git a/media/capture/video/android/video_capture_device_android.h b/media/capture/video/android/video_capture_device_android.h index 2089919..ccab5e2 100644 --- a/media/capture/video/android/video_capture_device_android.h +++ b/media/capture/video/android/video_capture_device_android.h @@ -69,7 +69,7 @@ class MEDIA_EXPORT VideoCaptureDeviceAndroid : public VideoCaptureDevice { kError // Hit error. User needs to recover by destroying the object. 
}; - VideoCapturePixelFormat GetColorspace(); + VideoPixelFormat GetColorspace(); void SetErrorState(const std::string& reason); // Prevent racing on accessing |state_| and |client_| since both could be diff --git a/media/capture/video/android/video_capture_device_factory_android.cc b/media/capture/video/android/video_capture_device_factory_android.cc index fca882e..cce951b 100644 --- a/media/capture/video/android/video_capture_device_factory_android.cc +++ b/media/capture/video/android/video_capture_device_factory_android.cc @@ -102,15 +102,15 @@ void VideoCaptureDeviceFactoryAndroid::GetDeviceSupportedFormats( base::android::ScopedJavaLocalRef<jobject> format( env, env->GetObjectArrayElement(collected_formats.obj(), i)); - VideoCapturePixelFormat pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN; + VideoPixelFormat pixel_format = + media::PIXEL_FORMAT_UNKNOWN; switch (media::Java_VideoCaptureFactory_getCaptureFormatPixelFormat( env, format.obj())) { case VideoCaptureDeviceAndroid::ANDROID_IMAGE_FORMAT_YV12: - pixel_format = media::VIDEO_CAPTURE_PIXEL_FORMAT_YV12; + pixel_format = media::PIXEL_FORMAT_YV12; break; case VideoCaptureDeviceAndroid::ANDROID_IMAGE_FORMAT_NV21: - pixel_format = media::VIDEO_CAPTURE_PIXEL_FORMAT_NV21; + pixel_format = media::PIXEL_FORMAT_NV21; break; default: continue; diff --git a/media/capture/video/fake_video_capture_device.cc b/media/capture/video/fake_video_capture_device.cc index 59f6625..6016c8e 100644 --- a/media/capture/video/fake_video_capture_device.cc +++ b/media/capture/video/fake_video_capture_device.cc @@ -100,20 +100,20 @@ void FakeVideoCaptureDevice::AllocateAndStart( if (buffer_ownership_ == BufferOwnership::CLIENT_BUFFERS) { if (planarity_ == BufferPlanarity::PACKED) { capture_format_.pixel_storage = PIXEL_STORAGE_CPU; - capture_format_.pixel_format = VIDEO_CAPTURE_PIXEL_FORMAT_ARGB; + capture_format_.pixel_format = PIXEL_FORMAT_ARGB; DVLOG(1) << "starting with client argb buffers"; } else if (planarity_ == 
BufferPlanarity::TRIPLANAR) { capture_format_.pixel_storage = PIXEL_STORAGE_GPUMEMORYBUFFER; - capture_format_.pixel_format = VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_format_.pixel_format = PIXEL_FORMAT_I420; DVLOG(1) << "starting with gmb I420 buffers"; } } else if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) { capture_format_.pixel_storage = PIXEL_STORAGE_CPU; - capture_format_.pixel_format = VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_format_.pixel_format = PIXEL_FORMAT_I420; DVLOG(1) << "starting with own I420 buffers"; } - if (capture_format_.pixel_format == VIDEO_CAPTURE_PIXEL_FORMAT_I420) { + if (capture_format_.pixel_format == PIXEL_FORMAT_I420) { fake_frame_.reset(new uint8[VideoFrame::AllocationSize( PIXEL_FORMAT_I420, capture_format_.frame_size)]); } @@ -177,7 +177,7 @@ void FakeVideoCaptureDevice::CaptureUsingClientBuffers( DCHECK(capture_buffer->data()) << "Buffer has NO backing memory"; if (capture_format_.pixel_storage == PIXEL_STORAGE_GPUMEMORYBUFFER && - capture_format_.pixel_format == media::VIDEO_CAPTURE_PIXEL_FORMAT_I420) { + capture_format_.pixel_format == media::PIXEL_FORMAT_I420) { // Since SkBitmap expects a packed&continuous memory region for I420, we // need to use |fake_frame_| to draw onto. 
memset(fake_frame_.get(), 0, capture_format_.ImageAllocationSize()); @@ -196,7 +196,7 @@ void FakeVideoCaptureDevice::CaptureUsingClientBuffers( } } else { DCHECK_EQ(capture_format_.pixel_storage, PIXEL_STORAGE_CPU); - DCHECK_EQ(capture_format_.pixel_format, VIDEO_CAPTURE_PIXEL_FORMAT_ARGB); + DCHECK_EQ(capture_format_.pixel_format, PIXEL_FORMAT_ARGB); uint8_t* data_ptr = static_cast<uint8_t*>(capture_buffer->data()); memset(data_ptr, 0, capture_buffer->mapped_size()); DrawPacman(true /* use_argb */, data_ptr, frame_count_, diff --git a/media/capture/video/fake_video_capture_device_factory.cc b/media/capture/video/fake_video_capture_device_factory.cc index 205e6ae..d6403ee 100644 --- a/media/capture/video/fake_video_capture_device_factory.cc +++ b/media/capture/video/fake_video_capture_device_factory.cc @@ -80,8 +80,8 @@ void FakeVideoCaptureDeviceFactory::GetDeviceSupportedFormats( gfx::Size(1920, 1080)}; supported_formats->clear(); for (const auto& size : supported_sizes) { - supported_formats->push_back(VideoCaptureFormat( - size, frame_rate, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420)); + supported_formats->push_back( + VideoCaptureFormat(size, frame_rate, media::PIXEL_FORMAT_I420)); } } diff --git a/media/capture/video/fake_video_capture_device_unittest.cc b/media/capture/video/fake_video_capture_device_unittest.cc index c73f5888..e323fe3 100644 --- a/media/capture/video/fake_video_capture_device_unittest.cc +++ b/media/capture/video/fake_video_capture_device_unittest.cc @@ -79,11 +79,11 @@ class MockClient : public VideoCaptureDevice::Client { // Virtual methods for capturing using Client's Buffers. 
scoped_ptr<Buffer> ReserveOutputBuffer(const gfx::Size& dimensions, - media::VideoCapturePixelFormat format, + media::VideoPixelFormat format, media::VideoPixelStorage storage) { - EXPECT_TRUE((format == media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB && + EXPECT_TRUE((format == media::PIXEL_FORMAT_ARGB && storage == media::PIXEL_STORAGE_CPU) || - (format == media::VIDEO_CAPTURE_PIXEL_FORMAT_I420 && + (format == media::PIXEL_FORMAT_I420 && storage == media::PIXEL_STORAGE_GPUMEMORYBUFFER)); EXPECT_GT(dimensions.GetArea(), 0); const VideoCaptureFormat frame_format(dimensions, 0.0, format); @@ -100,7 +100,7 @@ class MockClient : public VideoCaptureDevice::Client { const scoped_refptr<media::VideoFrame>& frame, const base::TimeTicks& timestamp) { VideoCaptureFormat format(frame->natural_size(), 30.0, - VIDEO_CAPTURE_PIXEL_FORMAT_I420); + PIXEL_FORMAT_I420); frame_cb_.Run(format); } @@ -216,23 +216,19 @@ TEST_F(FakeVideoCaptureDeviceTest, GetDeviceSupportedFormats) { ASSERT_EQ(supported_formats.size(), 4u); EXPECT_EQ(supported_formats[0].frame_size.width(), 320); EXPECT_EQ(supported_formats[0].frame_size.height(), 240); - EXPECT_EQ(supported_formats[0].pixel_format, - VIDEO_CAPTURE_PIXEL_FORMAT_I420); + EXPECT_EQ(supported_formats[0].pixel_format, PIXEL_FORMAT_I420); EXPECT_GE(supported_formats[0].frame_rate, 20.0); EXPECT_EQ(supported_formats[1].frame_size.width(), 640); EXPECT_EQ(supported_formats[1].frame_size.height(), 480); - EXPECT_EQ(supported_formats[1].pixel_format, - VIDEO_CAPTURE_PIXEL_FORMAT_I420); + EXPECT_EQ(supported_formats[1].pixel_format, PIXEL_FORMAT_I420); EXPECT_GE(supported_formats[1].frame_rate, 20.0); EXPECT_EQ(supported_formats[2].frame_size.width(), 1280); EXPECT_EQ(supported_formats[2].frame_size.height(), 720); - EXPECT_EQ(supported_formats[2].pixel_format, - VIDEO_CAPTURE_PIXEL_FORMAT_I420); + EXPECT_EQ(supported_formats[2].pixel_format, PIXEL_FORMAT_I420); EXPECT_GE(supported_formats[2].frame_rate, 20.0); 
EXPECT_EQ(supported_formats[3].frame_size.width(), 1920); EXPECT_EQ(supported_formats[3].frame_size.height(), 1080); - EXPECT_EQ(supported_formats[3].pixel_format, - VIDEO_CAPTURE_PIXEL_FORMAT_I420); + EXPECT_EQ(supported_formats[3].pixel_format, PIXEL_FORMAT_I420); EXPECT_GE(supported_formats[3].frame_rate, 20.0); } } diff --git a/media/capture/video/file_video_capture_device.cc b/media/capture/video/file_video_capture_device.cc index e78942b..b7c902e 100644 --- a/media/capture/video/file_video_capture_device.cc +++ b/media/capture/video/file_video_capture_device.cc @@ -48,7 +48,7 @@ void ParseY4MRational(const base::StringPiece& token, void ParseY4MTags(const std::string& file_header, media::VideoCaptureFormat* video_format) { media::VideoCaptureFormat format; - format.pixel_format = media::VIDEO_CAPTURE_PIXEL_FORMAT_I420; + format.pixel_format = media::PIXEL_FORMAT_I420; size_t index = 0; size_t blank_position = 0; base::StringPiece token; @@ -230,7 +230,7 @@ bool MjpegFileParser::Initialize(media::VideoCaptureFormat* capture_format) { } VideoCaptureFormat format; - format.pixel_format = media::VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG; + format.pixel_format = media::PIXEL_FORMAT_MJPEG; format.frame_size.set_width(result.frame_header.visible_width); format.frame_size.set_height(result.frame_header.visible_height); format.frame_rate = kMJpegFrameRate; diff --git a/media/capture/video/linux/v4l2_capture_delegate.cc b/media/capture/video/linux/v4l2_capture_delegate.cc index 8010193..88f5075 100644 --- a/media/capture/video/linux/v4l2_capture_delegate.cc +++ b/media/capture/video/linux/v4l2_capture_delegate.cc @@ -39,28 +39,28 @@ const int kTypicalFramerate = 30; // This list is ordered by precedence of use -- but see caveats for MJPEG. 
static struct { uint32_t fourcc; - VideoCapturePixelFormat pixel_format; + VideoPixelFormat pixel_format; size_t num_planes; } const kSupportedFormatsAndPlanarity[] = { - {V4L2_PIX_FMT_YUV420, VIDEO_CAPTURE_PIXEL_FORMAT_I420, 1}, - {V4L2_PIX_FMT_YUYV, VIDEO_CAPTURE_PIXEL_FORMAT_YUY2, 1}, - {V4L2_PIX_FMT_UYVY, VIDEO_CAPTURE_PIXEL_FORMAT_UYVY, 1}, - {V4L2_PIX_FMT_RGB24, VIDEO_CAPTURE_PIXEL_FORMAT_RGB24, 1}, + {V4L2_PIX_FMT_YUV420, PIXEL_FORMAT_I420, 1}, + {V4L2_PIX_FMT_YUYV, PIXEL_FORMAT_YUY2, 1}, + {V4L2_PIX_FMT_UYVY, PIXEL_FORMAT_UYVY, 1}, + {V4L2_PIX_FMT_RGB24, PIXEL_FORMAT_RGB24, 1}, #if !defined(OS_OPENBSD) // TODO(mcasas): add V4L2_PIX_FMT_YVU420M when available in bots. - {V4L2_PIX_FMT_YUV420M, VIDEO_CAPTURE_PIXEL_FORMAT_I420, 3}, + {V4L2_PIX_FMT_YUV420M, PIXEL_FORMAT_I420, 3}, #endif // MJPEG is usually sitting fairly low since we don't want to have to // decode. // However, is needed for large resolutions due to USB bandwidth // limitations, // so GetListOfUsableFourCcs() can duplicate it on top, see that method. - {V4L2_PIX_FMT_MJPEG, VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG, 1}, + {V4L2_PIX_FMT_MJPEG, PIXEL_FORMAT_MJPEG, 1}, // JPEG works as MJPEG on some gspca webcams from field reports, see // https://code.google.com/p/webrtc/issues/detail?id=529, put it as the // least // preferred format. - {V4L2_PIX_FMT_JPEG, VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG, 1}, + {V4L2_PIX_FMT_JPEG, PIXEL_FORMAT_MJPEG, 1}, }; // static @@ -95,17 +95,17 @@ size_t V4L2CaptureDelegate::GetNumPlanesForFourCc(uint32_t fourcc) { } // static -VideoCapturePixelFormat V4L2CaptureDelegate::V4l2FourCcToChromiumPixelFormat( +VideoPixelFormat V4L2CaptureDelegate::V4l2FourCcToChromiumPixelFormat( uint32_t v4l2_fourcc) { for (const auto& fourcc_and_pixel_format : kSupportedFormatsAndPlanarity) { if (fourcc_and_pixel_format.fourcc == v4l2_fourcc) return fourcc_and_pixel_format.pixel_format; } // Not finding a pixel format is OK during device capabilities enumeration. 
- // Let the caller decide if VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN is an error or + // Let the caller decide if PIXEL_FORMAT_UNKNOWN is an error or // not. DVLOG(1) << "Unsupported pixel format: " << FourccToString(v4l2_fourcc); - return VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN; + return PIXEL_FORMAT_UNKNOWN; } // static @@ -224,9 +224,9 @@ void V4L2CaptureDelegate::AllocateAndStart( SetErrorState("Failed to set video capture format"); return; } - const VideoCapturePixelFormat pixel_format = + const VideoPixelFormat pixel_format = V4l2FourCcToChromiumPixelFormat(video_fmt_.fmt.pix.pixelformat); - if (pixel_format == VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN) { + if (pixel_format == PIXEL_FORMAT_UNKNOWN) { SetErrorState("Unsupported pixel format"); return; } diff --git a/media/capture/video/linux/v4l2_capture_delegate.h b/media/capture/video/linux/v4l2_capture_delegate.h index 45919d4..1bb1735 100644 --- a/media/capture/video/linux/v4l2_capture_delegate.h +++ b/media/capture/video/linux/v4l2_capture_delegate.h @@ -31,9 +31,8 @@ class V4L2CaptureDelegate // Retrieves the #planes for a given |fourcc|, or 0 if unknown. static size_t GetNumPlanesForFourCc(uint32_t fourcc); - // Returns the Chrome pixel format for |v4l2_fourcc| or - // VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN. - static VideoCapturePixelFormat V4l2FourCcToChromiumPixelFormat( + // Returns the Chrome pixel format for |v4l2_fourcc| or PIXEL_FORMAT_UNKNOWN. 
+ static VideoPixelFormat V4l2FourCcToChromiumPixelFormat( uint32_t v4l2_fourcc); // Composes a list of usable and supported pixel formats, in order of diff --git a/media/capture/video/linux/v4l2_capture_delegate_multi_plane.cc b/media/capture/video/linux/v4l2_capture_delegate_multi_plane.cc index 1068252..6aab0de 100644 --- a/media/capture/video/linux/v4l2_capture_delegate_multi_plane.cc +++ b/media/capture/video/linux/v4l2_capture_delegate_multi_plane.cc @@ -63,7 +63,7 @@ void V4L2CaptureDelegateMultiPlane::SetPayloadSize( void V4L2CaptureDelegateMultiPlane::SendBuffer( const scoped_refptr<BufferTracker>& buffer_tracker, const v4l2_format& format) const { - DCHECK_EQ(capture_format().pixel_format, VIDEO_CAPTURE_PIXEL_FORMAT_I420); + DCHECK_EQ(capture_format().pixel_format, PIXEL_FORMAT_I420); const size_t y_stride = format.fmt.pix_mp.plane_fmt[0].bytesperline; const size_t u_stride = format.fmt.pix_mp.plane_fmt[1].bytesperline; const size_t v_stride = format.fmt.pix_mp.plane_fmt[2].bytesperline; diff --git a/media/capture/video/linux/video_capture_device_factory_linux.cc b/media/capture/video/linux/video_capture_device_factory_linux.cc index 5ea299a..f7dfa53 100644 --- a/media/capture/video/linux/video_capture_device_factory_linux.cc +++ b/media/capture/video/linux/video_capture_device_factory_linux.cc @@ -96,7 +96,7 @@ static void GetSupportedFormatsForV4L2BufferType( VideoCaptureDeviceLinux::V4l2FourCcToChromiumPixelFormat( v4l2_format.pixelformat); - if (supported_format.pixel_format == VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN) + if (supported_format.pixel_format == PIXEL_FORMAT_UNKNOWN) continue; v4l2_frmsizeenum frame_size = {}; diff --git a/media/capture/video/linux/video_capture_device_linux.cc b/media/capture/video/linux/video_capture_device_linux.cc index 7d39f75..f4ca749 100644 --- a/media/capture/video/linux/video_capture_device_linux.cc +++ b/media/capture/video/linux/video_capture_device_linux.cc @@ -43,12 +43,12 @@ static bool ReadIdFile(const std::string 
path, std::string* id) { // Translates Video4Linux pixel formats to Chromium pixel formats. // static -VideoCapturePixelFormat +VideoPixelFormat VideoCaptureDeviceLinux::V4l2FourCcToChromiumPixelFormat(uint32 v4l2_fourcc) { return V4L2CaptureDelegate::V4l2FourCcToChromiumPixelFormat(v4l2_fourcc); } -// Gets a list of usable Four CC formats prioritised. +// Gets a list of usable Four CC formats prioritized. // static std::list<uint32_t> VideoCaptureDeviceLinux::GetListOfUsableFourCCs( bool favour_mjpeg) { diff --git a/media/capture/video/linux/video_capture_device_linux.h b/media/capture/video/linux/video_capture_device_linux.h index 2eb2eaf..61e2cba 100644 --- a/media/capture/video/linux/video_capture_device_linux.h +++ b/media/capture/video/linux/video_capture_device_linux.h @@ -25,7 +25,7 @@ class V4L2CaptureDelegate; // Linux V4L2 implementation of VideoCaptureDevice. class VideoCaptureDeviceLinux : public VideoCaptureDevice { public: - static VideoCapturePixelFormat V4l2FourCcToChromiumPixelFormat( + static VideoPixelFormat V4l2FourCcToChromiumPixelFormat( uint32 v4l2_fourcc); static std::list<uint32_t> GetListOfUsableFourCCs(bool favour_mjpeg); @@ -44,7 +44,7 @@ class VideoCaptureDeviceLinux : public VideoCaptureDevice { static int TranslatePowerLineFrequencyToV4L2(int frequency); // Internal delegate doing the actual capture setting, buffer allocation and - // circulacion with the V4L2 API. Created and deleted in the thread where + // circulation with the V4L2 API. Created and deleted in the thread where // VideoCaptureDeviceLinux lives but otherwise operating on |v4l2_thread_|. 
scoped_refptr<V4L2CaptureDelegate> capture_impl_; diff --git a/media/capture/video/mac/video_capture_device_avfoundation_mac.mm b/media/capture/video/mac/video_capture_device_avfoundation_mac.mm index 54ed686..deb1408 100644 --- a/media/capture/video/mac/video_capture_device_avfoundation_mac.mm +++ b/media/capture/video/mac/video_capture_device_avfoundation_mac.mm @@ -8,6 +8,7 @@ #include "base/logging.h" #include "base/mac/foundation_util.h" +#include "media/base/video_capture_types.h" #include "media/capture/video/mac/video_capture_device_mac.h" #include "ui/gfx/geometry/size.h" @@ -16,17 +17,17 @@ static const int kMjpegWidthThreshold = 640; static const int kMjpegHeightThreshold = 480; // This function translates Mac Core Video pixel formats to Chromium pixel -// formats. Chromium pixel formats are sorted in order of preference. -media::VideoCapturePixelFormat FourCCToChromiumPixelFormat(FourCharCode code) { +// formats. +media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) { switch (code) { case kCVPixelFormatType_422YpCbCr8: - return media::VIDEO_CAPTURE_PIXEL_FORMAT_UYVY; + return media::PIXEL_FORMAT_UYVY; case CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs: - return media::VIDEO_CAPTURE_PIXEL_FORMAT_YUY2; + return media::PIXEL_FORMAT_YUY2; case CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML: - return media::VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG; + return media::PIXEL_FORMAT_MJPEG; default: - return media::VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN; + return media::PIXEL_FORMAT_UNKNOWN; } } @@ -61,7 +62,7 @@ media::VideoCapturePixelFormat FourCCToChromiumPixelFormat(FourCharCode code) { } + (void)getDevice:(const media::VideoCaptureDevice::Name&)name - supportedFormats:(media::VideoCaptureFormats*)formats { + supportedFormats:(media::VideoCaptureFormats*)formats { NSArray* devices = [AVCaptureDeviceGlue devices]; CrAVCaptureDevice* device = nil; for (device in devices) { @@ -73,10 +74,9 @@ media::VideoCapturePixelFormat 
FourCCToChromiumPixelFormat(FourCharCode code) { for (CrAVCaptureDeviceFormat* format in device.formats) { // MediaSubType is a CMPixelFormatType but can be used as CVPixelFormatType // as well according to CMFormatDescription.h - const media::VideoCapturePixelFormat pixelFormat = - FourCCToChromiumPixelFormat( - CoreMediaGlue::CMFormatDescriptionGetMediaSubType( - [format formatDescription])); + const media::VideoPixelFormat pixelFormat = FourCCToChromiumPixelFormat( + CoreMediaGlue::CMFormatDescriptionGetMediaSubType( + [format formatDescription])); CoreMediaGlue::CMVideoDimensions dimensions = CoreMediaGlue::CMVideoFormatDescriptionGetDimensions( @@ -204,9 +204,11 @@ media::VideoCapturePixelFormat FourCCToChromiumPixelFormat(FourCharCode code) { best_fourcc = fourcc; break; } + // Compare according to Chromium preference. - if (FourCCToChromiumPixelFormat(fourcc) < - FourCCToChromiumPixelFormat(best_fourcc)) { + if (media::VideoCaptureFormat::ComparePixelFormatPreference( + FourCCToChromiumPixelFormat(fourcc), + FourCCToChromiumPixelFormat(best_fourcc))) { best_fourcc = fourcc; } } @@ -218,7 +220,7 @@ media::VideoCapturePixelFormat FourCCToChromiumPixelFormat(FourCharCode code) { // yes/no and preserve aspect ratio yes/no when scaling. Currently we set // cropping and preservation. NSDictionary* videoSettingsDictionary = @{ - (id)kCVPixelBufferWidthKey : @(width), (id) + (id) kCVPixelBufferWidthKey : @(width), (id) kCVPixelBufferHeightKey : @(height), (id) kCVPixelBufferPixelFormatTypeKey : @(best_fourcc), AVFoundationGlue::AVVideoScalingModeKey() : @@ -279,8 +281,8 @@ media::VideoCapturePixelFormat FourCCToChromiumPixelFormat(FourCharCode code) { // |captureOutput| is called by the capture device to deliver a new frame. 
- (void)captureOutput:(CrAVCaptureOutput*)captureOutput - didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer - fromConnection:(CrAVCaptureConnection*)connection { +didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer + fromConnection:(CrAVCaptureConnection*)connection { // AVFoundation calls from a number of threads, depending on, at least, if // Chrome is on foreground or background. Sample the actual thread here. callback_thread_checker_.DetachFromThread(); @@ -339,9 +341,10 @@ media::VideoCapturePixelFormat FourCCToChromiumPixelFormat(FourCharCode code) { - (void)onVideoError:(NSNotification*)errorNotification { NSError* error = base::mac::ObjCCast<NSError>([[errorNotification userInfo] objectForKey:AVFoundationGlue::AVCaptureSessionErrorKey()]); - [self sendErrorString: - [NSString stringWithFormat:@"%@: %@", [error localizedDescription], - [error localizedFailureReason]]]; + [self sendErrorString:[NSString + stringWithFormat:@"%@: %@", + [error localizedDescription], + [error localizedFailureReason]]]; } - (void)sendErrorString:(NSString*)error { diff --git a/media/capture/video/mac/video_capture_device_decklink_mac.mm b/media/capture/video/mac/video_capture_device_decklink_mac.mm index ddf69ad..c73cb40 100644 --- a/media/capture/video/mac/video_capture_device_decklink_mac.mm +++ b/media/capture/video/mac/video_capture_device_decklink_mac.mm @@ -234,14 +234,14 @@ HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived( uint8* video_data = NULL; video_frame->GetBytes(reinterpret_cast<void**>(&video_data)); - media::VideoCapturePixelFormat pixel_format = - media::VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN; + media::VideoPixelFormat pixel_format = + media::PIXEL_FORMAT_UNKNOWN; switch (video_frame->GetPixelFormat()) { case bmdFormat8BitYUV: // A.k.a. 
'2vuy'; - pixel_format = media::VIDEO_CAPTURE_PIXEL_FORMAT_UYVY; + pixel_format = media::PIXEL_FORMAT_UYVY; break; case bmdFormat8BitARGB: - pixel_format = media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB; + pixel_format = media::PIXEL_FORMAT_ARGB; break; default: SendErrorString("Unsupported pixel format"); @@ -421,7 +421,7 @@ void VideoCaptureDeviceDeckLinkMac::EnumerateDeviceCapabilities( const media::VideoCaptureFormat format( gfx::Size(display_mode->GetWidth(), display_mode->GetHeight()), GetDisplayModeFrameRate(display_mode), - VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN); + PIXEL_FORMAT_UNKNOWN); supported_formats->push_back(format); DVLOG(2) << device.name() << " " << VideoCaptureFormat::ToString(format); display_mode.Release(); diff --git a/media/capture/video/mac/video_capture_device_factory_mac.mm b/media/capture/video/mac/video_capture_device_factory_mac.mm index 896c290..3d7c795 100644 --- a/media/capture/video/mac/video_capture_device_factory_mac.mm +++ b/media/capture/video/mac/video_capture_device_factory_mac.mm @@ -193,7 +193,7 @@ void VideoCaptureDeviceFactoryMac::GetDeviceSupportedFormats( gfx::Size(kBlacklistedCameras[i].capture_width, kBlacklistedCameras[i].capture_height), kBlacklistedCameras[i].capture_frame_rate, - media::VIDEO_CAPTURE_PIXEL_FORMAT_UYVY)); + media::PIXEL_FORMAT_UYVY)); break; } } diff --git a/media/capture/video/mac/video_capture_device_mac.mm b/media/capture/video/mac/video_capture_device_mac.mm index df47e47..8f71b68 100644 --- a/media/capture/video/mac/video_capture_device_mac.mm +++ b/media/capture/video/mac/video_capture_device_mac.mm @@ -390,7 +390,7 @@ void VideoCaptureDeviceMac::AllocateAndStart( std::min(params.requested_format.frame_rate, kMaxFrameRate)); // Leave the pixel format selection to AVFoundation/QTKit. The pixel format // will be passed to |ReceiveFrame|. 
- capture_format_.pixel_format = VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN; + capture_format_.pixel_format = PIXEL_FORMAT_UNKNOWN; // QTKit: Set the capture resolution only if this is VGA or smaller, otherwise // leave it unconfigured and start capturing: QTKit will produce frames at the diff --git a/media/capture/video/mac/video_capture_device_qtkit_mac.mm b/media/capture/video/mac/video_capture_device_qtkit_mac.mm index 2fc0b32..1ea4c9e 100644 --- a/media/capture/video/mac/video_capture_device_qtkit_mac.mm +++ b/media/capture/video/mac/video_capture_device_qtkit_mac.mm @@ -305,7 +305,7 @@ media::VideoCaptureFormat captureFormat( gfx::Size(frameWidth, frameHeight), frameRate_, - media::VIDEO_CAPTURE_PIXEL_FORMAT_UYVY); + media::PIXEL_FORMAT_UYVY); // The aspect ratio dictionary is often missing, in which case we report // a pixel aspect ratio of 0:0. diff --git a/media/capture/video/video_capture_device.h b/media/capture/video/video_capture_device.h index eabefab..33601336 100644 --- a/media/capture/video/video_capture_device.h +++ b/media/capture/video/video_capture_device.h @@ -230,7 +230,7 @@ class MEDIA_EXPORT VideoCaptureDevice { // object is destroyed or returned. virtual scoped_ptr<Buffer> ReserveOutputBuffer( const gfx::Size& dimensions, - VideoCapturePixelFormat format, + VideoPixelFormat format, VideoPixelStorage storage) = 0; // Captured new video data, held in |frame| or |buffer|, respectively for diff --git a/media/capture/video/video_capture_device_unittest.cc b/media/capture/video/video_capture_device_unittest.cc index 44bcaca..b5d1dfc 100644 --- a/media/capture/video/video_capture_device_unittest.cc +++ b/media/capture/video/video_capture_device_unittest.cc @@ -99,7 +99,7 @@ class MockClient : public VideoCaptureDevice::Client { // Trampoline methods to workaround GMOCK problems with scoped_ptr<>. 
scoped_ptr<Buffer> ReserveOutputBuffer( const gfx::Size& dimensions, - media::VideoCapturePixelFormat format, + media::VideoPixelFormat format, media::VideoPixelStorage storage) override { DoReserveOutputBuffer(); NOTREACHED() << "This should never be called"; @@ -198,7 +198,7 @@ class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> { const VideoCaptureFormat& last_format() const { return last_format_; } scoped_ptr<VideoCaptureDevice::Name> GetFirstDeviceNameSupportingPixelFormat( - const VideoCapturePixelFormat& pixel_format) { + const VideoPixelFormat& pixel_format) { names_ = EnumerateDevices(); if (names_->empty()) { DVLOG(1) << "No camera available."; @@ -215,9 +215,9 @@ class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> { } } } - DVLOG_IF(1, pixel_format != VIDEO_CAPTURE_PIXEL_FORMAT_MAX) + DVLOG_IF(1, pixel_format != PIXEL_FORMAT_MAX) << "No camera can capture the" - << " format: " << VideoCaptureFormat::PixelFormatToString(pixel_format); + << " format: " << VideoPixelFormatToString(pixel_format); return scoped_ptr<VideoCaptureDevice::Name>(); } @@ -286,8 +286,7 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_OpenInvalidDevice) { VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(640, 480); capture_params.requested_format.frame_rate = 30; - capture_params.requested_format.pixel_format = - VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420; device->AllocateAndStart(capture_params, client_.Pass()); device->StopAndDeAllocate(); } @@ -318,13 +317,13 @@ TEST_P(VideoCaptureDeviceTest, CaptureWithSize) { capture_params.requested_format.frame_size.SetSize(width, height); capture_params.requested_format.frame_rate = 30.0f; capture_params.requested_format.pixel_format = - VIDEO_CAPTURE_PIXEL_FORMAT_I420; + PIXEL_FORMAT_I420; device->AllocateAndStart(capture_params, client_.Pass()); // Get captured video frames. 
WaitForCapturedFrame(); EXPECT_EQ(last_format().frame_size.width(), width); EXPECT_EQ(last_format().frame_size.height(), height); - if (last_format().pixel_format != VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG) + if (last_format().pixel_format != PIXEL_FORMAT_MJPEG) EXPECT_EQ(size.GetArea(), last_format().frame_size.GetArea()); device->StopAndDeAllocate(); } @@ -352,13 +351,13 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_AllocateBadSize) { capture_params.requested_format.frame_size.SetSize(637, 472); capture_params.requested_format.frame_rate = 35; capture_params.requested_format.pixel_format = - VIDEO_CAPTURE_PIXEL_FORMAT_I420; + PIXEL_FORMAT_I420; device->AllocateAndStart(capture_params, client_.Pass()); WaitForCapturedFrame(); device->StopAndDeAllocate(); EXPECT_EQ(last_format().frame_size.width(), input_size.width()); EXPECT_EQ(last_format().frame_size.height(), input_size.height()); - if (last_format().pixel_format != VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG) + if (last_format().pixel_format != PIXEL_FORMAT_MJPEG) EXPECT_EQ(input_size.GetArea(), last_format().frame_size.GetArea()); } @@ -390,8 +389,7 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_ReAllocateCamera) { VideoCaptureParams capture_params; capture_params.requested_format.frame_size = resolution; capture_params.requested_format.frame_rate = 30; - capture_params.requested_format.pixel_format = - VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420; device->AllocateAndStart(capture_params, client_.Pass()); device->StopAndDeAllocate(); } @@ -400,8 +398,7 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_ReAllocateCamera) { VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(320, 240); capture_params.requested_format.frame_rate = 30; - capture_params.requested_format.pixel_format = - VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420; ResetWithNewClient(); scoped_ptr<VideoCaptureDevice> device( @@ -430,8 +427,7 @@ 
TEST_F(VideoCaptureDeviceTest, DeAllocateCameraWhileRunning) { VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(640, 480); capture_params.requested_format.frame_rate = 30; - capture_params.requested_format.pixel_format = - VIDEO_CAPTURE_PIXEL_FORMAT_I420; + capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420; device->AllocateAndStart(capture_params, client_.Pass()); // Get captured video frames. WaitForCapturedFrame(); @@ -444,7 +440,7 @@ TEST_F(VideoCaptureDeviceTest, DeAllocateCameraWhileRunning) { // Start the camera in 720p to capture MJPEG instead of a raw format. TEST_F(VideoCaptureDeviceTest, MAYBE_CaptureMjpeg) { scoped_ptr<VideoCaptureDevice::Name> name = - GetFirstDeviceNameSupportingPixelFormat(VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG); + GetFirstDeviceNameSupportingPixelFormat(PIXEL_FORMAT_MJPEG); if (!name) { DVLOG(1) << "No camera supports MJPEG format. Exiting test."; return; @@ -458,25 +454,24 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_CaptureMjpeg) { VideoCaptureParams capture_params; capture_params.requested_format.frame_size.SetSize(1280, 720); capture_params.requested_format.frame_rate = 30; - capture_params.requested_format.pixel_format = - VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG; + capture_params.requested_format.pixel_format = PIXEL_FORMAT_MJPEG; device->AllocateAndStart(capture_params, client_.Pass()); // Get captured video frames. WaitForCapturedFrame(); // Verify we get MJPEG from the device. Not all devices can capture 1280x720 // @ 30 fps, so we don't care about the exact resolution we get. 
- EXPECT_EQ(last_format().pixel_format, VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG); + EXPECT_EQ(last_format().pixel_format, PIXEL_FORMAT_MJPEG); EXPECT_GE(static_cast<size_t>(1280 * 720), last_format().ImageAllocationSize()); device->StopAndDeAllocate(); } TEST_F(VideoCaptureDeviceTest, GetDeviceSupportedFormats) { - // Use VIDEO_CAPTURE_PIXEL_FORMAT_MAX to iterate all device names for testing + // Use PIXEL_FORMAT_MAX to iterate all device names for testing // GetDeviceSupportedFormats(). scoped_ptr<VideoCaptureDevice::Name> name = - GetFirstDeviceNameSupportingPixelFormat(VIDEO_CAPTURE_PIXEL_FORMAT_MAX); - // Verify no camera returned for VIDEO_CAPTURE_PIXEL_FORMAT_MAX. Nothing else + GetFirstDeviceNameSupportingPixelFormat(PIXEL_FORMAT_MAX); + // Verify no camera returned for PIXEL_FORMAT_MAX. Nothing else // to test here // since we cannot forecast the hardware capabilities. ASSERT_FALSE(name); diff --git a/media/capture/video/win/capability_list_win.cc b/media/capture/video/win/capability_list_win.cc index db6e986..32cbb61 100644 --- a/media/capture/video/win/capability_list_win.cc +++ b/media/capture/video/win/capability_list_win.cc @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
+#include "media/base/video_capture_types.h" #include "media/capture/video/win/capability_list_win.h" #include <algorithm> @@ -36,7 +37,8 @@ static bool CompareCapability(const VideoCaptureFormat& requested, if (diff_fps_lhs != diff_fps_rhs) return diff_fps_lhs < diff_fps_rhs; - return lhs.pixel_format < rhs.pixel_format; + return VideoCaptureFormat::ComparePixelFormatPreference(lhs.pixel_format, + rhs.pixel_format); } const CapabilityWin& GetBestMatchedCapability( diff --git a/media/capture/video/win/sink_filter_win.cc b/media/capture/video/win/sink_filter_win.cc index 2a36746..8e3f910 100644 --- a/media/capture/video/win/sink_filter_win.cc +++ b/media/capture/video/win/sink_filter_win.cc @@ -33,7 +33,7 @@ SinkFilter::SinkFilter(SinkFilterObserver* observer) : input_pin_(NULL) { input_pin_ = new SinkInputPin(this, observer); } -void SinkFilter::SetRequestedMediaFormat(VideoCapturePixelFormat pixel_format, +void SinkFilter::SetRequestedMediaFormat(VideoPixelFormat pixel_format, float frame_rate, const BITMAPINFOHEADER& info_header) { input_pin_->SetRequestedMediaFormat(pixel_format, frame_rate, info_header); diff --git a/media/capture/video/win/sink_filter_win.h b/media/capture/video/win/sink_filter_win.h index d3fa99a..f718f01 100644 --- a/media/capture/video/win/sink_filter_win.h +++ b/media/capture/video/win/sink_filter_win.h @@ -36,7 +36,7 @@ class __declspec(uuid("88cdbbdc-a73b-4afa-acbf-15d5e2ce12c3")) SinkFilter public: explicit SinkFilter(SinkFilterObserver* observer); - void SetRequestedMediaFormat(VideoCapturePixelFormat pixel_format, + void SetRequestedMediaFormat(VideoPixelFormat pixel_format, float frame_rate, const BITMAPINFOHEADER& info_header); // Returns the format that is negotiated when this diff --git a/media/capture/video/win/sink_input_pin_win.cc b/media/capture/video/win/sink_input_pin_win.cc index 240ed9c..9c515a3 100644 --- a/media/capture/video/win/sink_input_pin_win.cc +++ b/media/capture/video/win/sink_input_pin_win.cc @@ -25,7 +25,7 @@ 
SinkInputPin::SinkInputPin(IBaseFilter* filter, SinkFilterObserver* observer) } void SinkInputPin::SetRequestedMediaFormat( - VideoCapturePixelFormat pixel_format, + VideoPixelFormat pixel_format, float frame_rate, const BITMAPINFOHEADER& info_header) { requested_pixel_format_ = pixel_format; @@ -33,7 +33,7 @@ void SinkInputPin::SetRequestedMediaFormat( requested_info_header_ = info_header; resulting_format_.frame_size.SetSize(0, 0); resulting_format_.frame_rate = 0; - resulting_format_.pixel_format = VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN; + resulting_format_.pixel_format = PIXEL_FORMAT_UNKNOWN; } const VideoCaptureFormat& SinkInputPin::ResultingFormat() { @@ -67,27 +67,27 @@ bool SinkInputPin::IsMediaTypeValid(const AM_MEDIA_TYPE* media_type) { } if (sub_type == kMediaSubTypeI420 && pvi->bmiHeader.biCompression == MAKEFOURCC('I', '4', '2', '0')) { - resulting_format_.pixel_format = VIDEO_CAPTURE_PIXEL_FORMAT_I420; + resulting_format_.pixel_format = PIXEL_FORMAT_I420; return true; } if (sub_type == MEDIASUBTYPE_YUY2 && pvi->bmiHeader.biCompression == MAKEFOURCC('Y', 'U', 'Y', '2')) { - resulting_format_.pixel_format = VIDEO_CAPTURE_PIXEL_FORMAT_YUY2; + resulting_format_.pixel_format = PIXEL_FORMAT_YUY2; return true; } if (sub_type == MEDIASUBTYPE_MJPG && pvi->bmiHeader.biCompression == MAKEFOURCC('M', 'J', 'P', 'G')) { - resulting_format_.pixel_format = VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG; + resulting_format_.pixel_format = PIXEL_FORMAT_MJPEG; return true; } if (sub_type == MEDIASUBTYPE_RGB24 && pvi->bmiHeader.biCompression == BI_RGB) { - resulting_format_.pixel_format = VIDEO_CAPTURE_PIXEL_FORMAT_RGB24; + resulting_format_.pixel_format = PIXEL_FORMAT_RGB24; return true; } if (sub_type == MEDIASUBTYPE_RGB32 && pvi->bmiHeader.biCompression == BI_RGB) { - resulting_format_.pixel_format = VIDEO_CAPTURE_PIXEL_FORMAT_RGB32; + resulting_format_.pixel_format = PIXEL_FORMAT_RGB32; return true; } return false; @@ -113,7 +113,7 @@ bool SinkInputPin::GetValidMediaType(int index, 
AM_MEDIA_TYPE* media_type) { media_type->formattype = FORMAT_VideoInfo; media_type->bTemporalCompression = FALSE; - if (requested_pixel_format_ == VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG) { + if (requested_pixel_format_ == PIXEL_FORMAT_MJPEG) { // If the requested pixel format is MJPEG, accept only MJPEG. // This is ok since the capabilities of the capturer have been // enumerated and we know that it is supported. diff --git a/media/capture/video/win/sink_input_pin_win.h b/media/capture/video/win/sink_input_pin_win.h index b0d2f16..3a2bf7b 100644 --- a/media/capture/video/win/sink_input_pin_win.h +++ b/media/capture/video/win/sink_input_pin_win.h @@ -23,7 +23,7 @@ class SinkInputPin : public PinBase { public: SinkInputPin(IBaseFilter* filter, SinkFilterObserver* observer); - void SetRequestedMediaFormat(VideoCapturePixelFormat pixel_format, + void SetRequestedMediaFormat(VideoPixelFormat pixel_format, float frame_rate, const BITMAPINFOHEADER& info_header); // Returns the capability that is negotiated when this @@ -39,7 +39,7 @@ class SinkInputPin : public PinBase { private: ~SinkInputPin() override; - VideoCapturePixelFormat requested_pixel_format_; + VideoPixelFormat requested_pixel_format_; float requested_frame_rate_; BITMAPINFOHEADER requested_info_header_; VideoCaptureFormat resulting_format_; diff --git a/media/capture/video/win/video_capture_device_factory_win.cc b/media/capture/video/win/video_capture_device_factory_win.cc index 8f1ea11..150ef6e 100644 --- a/media/capture/video/win/video_capture_device_factory_win.cc +++ b/media/capture/video/win/video_capture_device_factory_win.cc @@ -278,7 +278,7 @@ static void GetDeviceSupportedFormatsDirectShow(const Name& device, format.pixel_format = VideoCaptureDeviceWin::TranslateMediaSubtypeToPixelFormat( media_type->subtype); - if (format.pixel_format == VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN) + if (format.pixel_format == PIXEL_FORMAT_UNKNOWN) continue; VIDEOINFOHEADER* h = 
reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat); diff --git a/media/capture/video/win/video_capture_device_mf_win.cc b/media/capture/video/win/video_capture_device_mf_win.cc index 8d3b104..c55cf4a 100644 --- a/media/capture/video/win/video_capture_device_mf_win.cc +++ b/media/capture/video/win/video_capture_device_mf_win.cc @@ -156,18 +156,18 @@ class MFReaderCallback final // static bool VideoCaptureDeviceMFWin::FormatFromGuid(const GUID& guid, - VideoCapturePixelFormat* format) { + VideoPixelFormat* format) { struct { const GUID& guid; - const VideoCapturePixelFormat format; + const VideoPixelFormat format; } static const kFormatMap[] = { - {MFVideoFormat_I420, VIDEO_CAPTURE_PIXEL_FORMAT_I420}, - {MFVideoFormat_YUY2, VIDEO_CAPTURE_PIXEL_FORMAT_YUY2}, - {MFVideoFormat_UYVY, VIDEO_CAPTURE_PIXEL_FORMAT_UYVY}, - {MFVideoFormat_RGB24, VIDEO_CAPTURE_PIXEL_FORMAT_RGB24}, - {MFVideoFormat_ARGB32, VIDEO_CAPTURE_PIXEL_FORMAT_ARGB}, - {MFVideoFormat_MJPG, VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG}, - {MFVideoFormat_YV12, VIDEO_CAPTURE_PIXEL_FORMAT_YV12}, + {MFVideoFormat_I420, PIXEL_FORMAT_I420}, + {MFVideoFormat_YUY2, PIXEL_FORMAT_YUY2}, + {MFVideoFormat_UYVY, PIXEL_FORMAT_UYVY}, + {MFVideoFormat_RGB24, PIXEL_FORMAT_RGB24}, + {MFVideoFormat_ARGB32, PIXEL_FORMAT_ARGB}, + {MFVideoFormat_MJPG, PIXEL_FORMAT_MJPEG}, + {MFVideoFormat_YV12, PIXEL_FORMAT_YV12}, }; for (int i = 0; i < arraysize(kFormatMap); ++i) { diff --git a/media/capture/video/win/video_capture_device_mf_win.h b/media/capture/video/win/video_capture_device_mf_win.h index baff97f..7894864 100644 --- a/media/capture/video/win/video_capture_device_mf_win.h +++ b/media/capture/video/win/video_capture_device_mf_win.h @@ -32,7 +32,7 @@ const DWORD kFirstVideoStream = class MEDIA_EXPORT VideoCaptureDeviceMFWin : public base::NonThreadSafe, public VideoCaptureDevice { public: - static bool FormatFromGuid(const GUID& guid, VideoCapturePixelFormat* format); + static bool FormatFromGuid(const GUID& guid, VideoPixelFormat* 
format); explicit VideoCaptureDeviceMFWin(const Name& device_name); ~VideoCaptureDeviceMFWin() override; diff --git a/media/capture/video/win/video_capture_device_win.cc b/media/capture/video/win/video_capture_device_win.cc index 20d3179..7d27010 100644 --- a/media/capture/video/win/video_capture_device_win.cc +++ b/media/capture/video/win/video_capture_device_win.cc @@ -147,21 +147,21 @@ ScopedComPtr<IPin> VideoCaptureDeviceWin::GetPin(IBaseFilter* filter, } // static -VideoCapturePixelFormat +VideoPixelFormat VideoCaptureDeviceWin::TranslateMediaSubtypeToPixelFormat( const GUID& sub_type) { static struct { const GUID& sub_type; - VideoCapturePixelFormat format; + VideoPixelFormat format; } pixel_formats[] = { - {kMediaSubTypeI420, VIDEO_CAPTURE_PIXEL_FORMAT_I420}, - {MEDIASUBTYPE_IYUV, VIDEO_CAPTURE_PIXEL_FORMAT_I420}, - {MEDIASUBTYPE_RGB24, VIDEO_CAPTURE_PIXEL_FORMAT_RGB24}, - {MEDIASUBTYPE_YUY2, VIDEO_CAPTURE_PIXEL_FORMAT_YUY2}, - {MEDIASUBTYPE_MJPG, VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG}, - {MEDIASUBTYPE_UYVY, VIDEO_CAPTURE_PIXEL_FORMAT_UYVY}, - {MEDIASUBTYPE_ARGB32, VIDEO_CAPTURE_PIXEL_FORMAT_ARGB}, - {kMediaSubTypeHDYC, VIDEO_CAPTURE_PIXEL_FORMAT_UYVY}, + {kMediaSubTypeI420, PIXEL_FORMAT_I420}, + {MEDIASUBTYPE_IYUV, PIXEL_FORMAT_I420}, + {MEDIASUBTYPE_RGB24, PIXEL_FORMAT_RGB24}, + {MEDIASUBTYPE_YUY2, PIXEL_FORMAT_YUY2}, + {MEDIASUBTYPE_MJPG, PIXEL_FORMAT_MJPEG}, + {MEDIASUBTYPE_UYVY, PIXEL_FORMAT_UYVY}, + {MEDIASUBTYPE_ARGB32, PIXEL_FORMAT_ARGB}, + {kMediaSubTypeHDYC, PIXEL_FORMAT_UYVY}, }; for (size_t i = 0; i < arraysize(pixel_formats); ++i) { if (sub_type == pixel_formats[i].sub_type) @@ -172,7 +172,7 @@ VideoCaptureDeviceWin::TranslateMediaSubtypeToPixelFormat( StringFromGUID2(sub_type, guid_str, arraysize(guid_str)); DVLOG(2) << "Device (also) supports an unknown media type " << guid_str; #endif - return VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN; + return PIXEL_FORMAT_UNKNOWN; } void VideoCaptureDeviceWin::ScopedMediaType::Free() { @@ -500,7 +500,7 @@ bool 
VideoCaptureDeviceWin::CreateCapabilityMap() { VideoCaptureFormat format; format.pixel_format = TranslateMediaSubtypeToPixelFormat(media_type->subtype); - if (format.pixel_format == VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN) + if (format.pixel_format == PIXEL_FORMAT_UNKNOWN) continue; VIDEOINFOHEADER* h = diff --git a/media/capture/video/win/video_capture_device_win.h b/media/capture/video/win/video_capture_device_win.h index af171b3..fb1cc39 100644 --- a/media/capture/video/win/video_capture_device_win.h +++ b/media/capture/video/win/video_capture_device_win.h @@ -59,7 +59,7 @@ class VideoCaptureDeviceWin : public base::NonThreadSafe, PIN_DIRECTION pin_dir, REFGUID category, REFGUID major_type); - static VideoCapturePixelFormat TranslateMediaSubtypeToPixelFormat( + static VideoPixelFormat TranslateMediaSubtypeToPixelFormat( const GUID& sub_type); explicit VideoCaptureDeviceWin(const Name& device_name); diff --git a/media/filters/ffmpeg_demuxer.cc b/media/filters/ffmpeg_demuxer.cc index c64e102..2d37630 100644 --- a/media/filters/ffmpeg_demuxer.cc +++ b/media/filters/ffmpeg_demuxer.cc @@ -143,7 +143,7 @@ static void RecordVideoCodecStats(const VideoDecoderConfig& video_config, video_config.visible_rect().width()); UmaHistogramAspectRatio("Media.VideoVisibleAspectRatio", video_config.visible_rect()); - UMA_HISTOGRAM_ENUMERATION("Media.VideoFramePixelFormat", + UMA_HISTOGRAM_ENUMERATION("Media.VideoPixelFormatUnion", video_config.format(), PIXEL_FORMAT_MAX + 1); UMA_HISTOGRAM_ENUMERATION("Media.VideoFrameColorSpace", video_config.color_space(), COLOR_SPACE_MAX + 1); diff --git a/media/mojo/interfaces/media_types.mojom b/media/mojo/interfaces/media_types.mojom index d015da6..3e106f93 100644 --- a/media/mojo/interfaces/media_types.mojom +++ b/media/mojo/interfaces/media_types.mojom @@ -93,16 +93,21 @@ enum SampleFormat { // Kept in sync with media::VideoPixelFormat via static_asserts. 
enum VideoFormat { UNKNOWN = 0, - YV12, I420, + YV12, YV16, YV12A, YV24, NV12, + NV21, + UYVY, + YUY2, ARGB, XRGB, - UYVY, - FORMAT_MAX = UYVY, + RGB24, + RGB32, + MJPEG, + FORMAT_MAX = MJPEG, }; // Kept in sync with media::ColorSpace via static_asserts. diff --git a/media/mojo/services/media_type_converters.cc b/media/mojo/services/media_type_converters.cc index e228f25..8b32124 100644 --- a/media/mojo/services/media_type_converters.cc +++ b/media/mojo/services/media_type_converters.cc @@ -132,15 +132,20 @@ ASSERT_ENUM_EQ_RAW(DemuxerStream::Status, ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_UNKNOWN, VIDEO_FORMAT_UNKNOWN); -ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_YV12, VIDEO_FORMAT_YV12); ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_I420, VIDEO_FORMAT_I420); +ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_YV12, VIDEO_FORMAT_YV12); ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_YV16, VIDEO_FORMAT_YV16); ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_YV12A, VIDEO_FORMAT_YV12A); ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_YV24, VIDEO_FORMAT_YV24); ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_NV12, VIDEO_FORMAT_NV12); +ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_NV21, VIDEO_FORMAT_NV21); +ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_UYVY, VIDEO_FORMAT_UYVY); +ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_YUY2, VIDEO_FORMAT_YUY2); ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_ARGB, VIDEO_FORMAT_ARGB); ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_XRGB, VIDEO_FORMAT_XRGB); -ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_UYVY, VIDEO_FORMAT_UYVY); +ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_RGB24, VIDEO_FORMAT_RGB24); +ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_RGB32, VIDEO_FORMAT_RGB32); +ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_MJPEG, VIDEO_FORMAT_MJPEG); ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_MAX, VIDEO_FORMAT_FORMAT_MAX); // ColorSpace. 
diff --git a/media/video/gpu_memory_buffer_video_frame_pool.cc b/media/video/gpu_memory_buffer_video_frame_pool.cc index 41de9d9..c1ff36c 100644 --- a/media/video/gpu_memory_buffer_video_frame_pool.cc +++ b/media/video/gpu_memory_buffer_video_frame_pool.cc @@ -255,9 +255,14 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::CreateHardwareFrame( case PIXEL_FORMAT_YV16: case PIXEL_FORMAT_YV24: case PIXEL_FORMAT_NV12: + case PIXEL_FORMAT_NV21: + case PIXEL_FORMAT_UYVY: + case PIXEL_FORMAT_YUY2: case PIXEL_FORMAT_ARGB: case PIXEL_FORMAT_XRGB: - case PIXEL_FORMAT_UYVY: + case PIXEL_FORMAT_RGB24: + case PIXEL_FORMAT_RGB32: + case PIXEL_FORMAT_MJPEG: case PIXEL_FORMAT_UNKNOWN: frame_ready_cb.Run(video_frame); return; diff --git a/tools/metrics/histograms/histograms.xml b/tools/metrics/histograms/histograms.xml index 878508a..294352d 100644 --- a/tools/metrics/histograms/histograms.xml +++ b/tools/metrics/histograms/histograms.xml @@ -17605,6 +17605,9 @@ http://cs/file:chrome/histograms.xml - but prefer this file for new entries. </histogram> <histogram name="Media.VideoFramePixelFormat" enum="VideoFramePixelFormat"> + <obsolete> + Replaced by Media.VideoPixelFormatUnion 08/2015. + </obsolete> <owner>mcasas@chromium.org</owner> <owner>watk@chromium.org</owner> <summary>Pixel format used in HTML5 video. Emitted on video load.</summary> @@ -17618,6 +17621,14 @@ http://cs/file:chrome/histograms.xml - but prefer this file for new entries. <summary>Pixel format used in HTML5 video. Emitted on video load.</summary> </histogram> +<histogram name="Media.VideoPixelFormatUnion" enum="VideoPixelFormatUnion"> + <owner>mcasas@chromium.org</owner> + <owner>emircan@chromium.org</owner> + <summary> + Pixel format used in capture and HTML5 video. Emitted on video load. 
+ </summary> +</histogram> + <histogram name="Media.VideoRenderer.CadenceChanges" units="changes"> <owner>dalecurtis@chromium.org</owner> <summary> @@ -53870,6 +53881,9 @@ http://cs/file:chrome/histograms.xml - but prefer this file for new entries. </enum> <enum name="CapturePixelFormat" type="int"> + <obsolete> + Deprecated as of 08/2015. + </obsolete> <int value="0" label="UNKNOWN"/> <int value="1" label="I420"/> <int value="2" label="YUY2"/> @@ -73320,6 +73334,9 @@ To add a new entry, add it with any value and run test to compute valid value. </enum> <enum name="VideoFramePixelFormat" type="int"> + <obsolete> + Deprecated as of 08/2015. Substituted by VideoPixelFormatUnion. + </obsolete> <int value="0" label="UNKNOWN"/> <int value="1" label="YV12"/> <int value="2" label="I420"/> @@ -73350,6 +73367,24 @@ To add a new entry, add it with any value and run test to compute valid value. <int value="11" label="YV12HD"/> </enum> +<enum name="VideoPixelFormatUnion" type="int"> + <int value="0" label="UNKNOWN"/> + <int value="1" label="I420"/> + <int value="2" label="YV12"/> + <int value="3" label="YV16"/> + <int value="4" label="YV12A"/> + <int value="5" label="YV24"/> + <int value="6" label="NV12"/> + <int value="7" label="NV21"/> + <int value="8" label="UYVY"/> + <int value="9" label="YUY2"/> + <int value="10" label="ARGB"/> + <int value="11" label="XRGB"/> + <int value="12" label="RGB24"/> + <int value="13" label="RGB32"/> + <int value="14" label="MJPEG"/> +</enum> + <enum name="VideoPlayerCastAPIExtensionStatus" type="int"> <int value="0" label="Skipped (Cast extension is unavailable)"/> <int value="1" label="Installation failed"/> |