author     perkj@chromium.org <perkj@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2014-05-22 17:24:00 +0000
committer  perkj@chromium.org <perkj@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2014-05-22 17:24:00 +0000
commit     ff993f9b82f03a23113fd2ce90cda6f276ae148c (patch)
tree       8b1e3bb20a48437ad1dd36eba8d94547d8f170dd /content/renderer
parent     714b1bc31b98f73a8c218c40ba6eaaf72ca03b1e (diff)
Change MediaStreamVideoSource to output different resolutions to different tracks depending on the track constraints.
This CL introduces a new class, VideoTrackAdapter. The adapter is responsible for forwarding frames on the IO-thread from a MediaStreamVideoSource to the tracks connected to the source. It can wrap media::VideoFrames with a new visible_rect and natural_size to match the max width and height as well as the min and max aspect ratio of each track. Note that this does not yet work for textures.

It also changes the WebrtcVideoCaptureAdapter to use libyuv::Scale instead of libyuv::I420Copy. This is needed so that the video frames sent on a PeerConnection can be both cropped and scaled.

BUG=346616
Review URL: https://codereview.chromium.org/246433006
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@272214 0039d316-1c4b-4281-b951-d872f2087c98
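To make the per-track adaptation concrete, the standalone sketch below mirrors the size computation the new VideoFrameResolutionAdapter performs before wrapping a frame: clamp to the track's max width/height, then shrink one dimension so the result stays within the min/max aspect-ratio bounds. ComputeAdaptedSize and the Size struct are illustrative names, not part of the CL; the example values correspond to the TwoTracksWith720AndMaxAspectRatio4To3 unit test added in this change.

#include <algorithm>
#include <cstdio>
#include <limits>

struct Size { int width; int height; };

// Illustrative only: computes the output size a track would get for an input
// frame, given the track's max width/height and aspect-ratio bounds.
Size ComputeAdaptedSize(Size input, Size max_size,
                        double min_aspect_ratio, double max_aspect_ratio) {
  int width = std::min(input.width, max_size.width);
  int height = std::min(input.height, max_size.height);

  double ratio = static_cast<double>(width) / height;
  double target =
      std::min(std::max(ratio, min_aspect_ratio), max_aspect_ratio);

  if (ratio < target) {
    // Too tall for the requested aspect ratio: shrink the height, rounded up
    // to an even value as the adapter does.
    height = (static_cast<int>(height * ratio / target) + 1) & ~1;
  } else if (ratio > target) {
    // Too wide: shrink the width, again rounded up to an even value.
    width = (static_cast<int>(width * target / ratio) + 1) & ~1;
  }
  return {width, height};
}

int main() {
  const int kNoMax = std::numeric_limits<int>::max();
  // A 1280x720 capture delivered to a track with maxAspectRatio 4:3 and no
  // max width/height should come out as 960x720.
  Size out =
      ComputeAdaptedSize({1280, 720}, {kNoMax, kNoMax}, 0.0, 640.0 / 480);
  std::printf("%dx%d\n", out.width, out.height);  // Prints "960x720".
  return 0;
}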
Diffstat (limited to 'content/renderer')
-rw-r--r--  content/renderer/media/media_stream_constraints_util.cc | 61
-rw-r--r--  content/renderer/media/media_stream_constraints_util.h | 34
-rw-r--r--  content/renderer/media/media_stream_video_capture_source_unittest.cc | 2
-rw-r--r--  content/renderer/media/media_stream_video_source.cc | 299
-rw-r--r--  content/renderer/media/media_stream_video_source.h | 26
-rw-r--r--  content/renderer/media/media_stream_video_source_unittest.cc | 198
-rw-r--r--  content/renderer/media/media_stream_video_track.cc | 1
-rw-r--r--  content/renderer/media/mock_media_stream_video_source.cc | 1
-rw-r--r--  content/renderer/media/video_frame_deliverer.h | 2
-rw-r--r--  content/renderer/media/video_track_adapter.cc | 335
-rw-r--r--  content/renderer/media/video_track_adapter.h | 88
-rw-r--r--  content/renderer/media/webrtc/media_stream_remote_video_source.h | 1
-rw-r--r--  content/renderer/media/webrtc/media_stream_remote_video_source_unittest.cc | 2
-rw-r--r--  content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc | 80
-rw-r--r--  content/renderer/media/webrtc/webrtc_video_capturer_adapter_unittest.cc | 31
15 files changed, 896 insertions, 265 deletions
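The WebrtcVideoCapturerAdapter change noted in the commit message relies on libyuv::I420Scale (from libyuv/scale.h, which the diff below switches to including) being able to crop and scale in one pass: the source plane pointers are offset to the crop origin and the crop size is passed as the source size. The helper below is a hypothetical sketch of that pattern, not the code in this CL, and the bilinear filter choice is only an example.

#include "third_party/libyuv/include/libyuv/scale.h"

#include <cstdint>

// Hypothetical helper: crop an I420 frame to (crop_x, crop_y, crop_width,
// crop_height) and scale the cropped region to dst_width x dst_height.
void CropAndScaleI420(const uint8_t* src_y, int src_stride_y,
                      const uint8_t* src_u, int src_stride_u,
                      const uint8_t* src_v, int src_stride_v,
                      int crop_x, int crop_y, int crop_width, int crop_height,
                      uint8_t* dst_y, int dst_stride_y,
                      uint8_t* dst_u, int dst_stride_u,
                      uint8_t* dst_v, int dst_stride_v,
                      int dst_width, int dst_height) {
  // I420 chroma planes are subsampled by 2 in both dimensions, so the crop
  // offsets are halved for U and V.
  const uint8_t* crop_y_plane = src_y + crop_y * src_stride_y + crop_x;
  const uint8_t* crop_u_plane =
      src_u + (crop_y / 2) * src_stride_u + crop_x / 2;
  const uint8_t* crop_v_plane =
      src_v + (crop_y / 2) * src_stride_v + crop_x / 2;

  libyuv::I420Scale(crop_y_plane, src_stride_y,
                    crop_u_plane, src_stride_u,
                    crop_v_plane, src_stride_v,
                    crop_width, crop_height,
                    dst_y, dst_stride_y,
                    dst_u, dst_stride_u,
                    dst_v, dst_stride_v,
                    dst_width, dst_height,
                    libyuv::kFilterBilinear);
}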
diff --git a/content/renderer/media/media_stream_constraints_util.cc b/content/renderer/media/media_stream_constraints_util.cc
index de1856e..ddc223d 100644
--- a/content/renderer/media/media_stream_constraints_util.cc
+++ b/content/renderer/media/media_stream_constraints_util.cc
@@ -25,26 +25,26 @@ bool ConvertStringToBoolean(const std::string& string, bool* value) {
} // namespace
bool GetConstraintValueAsBoolean(const blink::WebMediaConstraints& constraints,
- const std::string& key,
+ const std::string& name,
bool* value) {
- return GetMandatoryConstraintValueAsBoolean(constraints, key, value) ||
- GetOptionalConstraintValueAsBoolean(constraints, key, value);
+ return GetMandatoryConstraintValueAsBoolean(constraints, name, value) ||
+ GetOptionalConstraintValueAsBoolean(constraints, name, value);
}
bool GetConstraintValueAsInteger(const blink::WebMediaConstraints& constraints,
- const std::string& key,
+ const std::string& name,
int* value) {
- return GetMandatoryConstraintValueAsInteger(constraints, key, value) ||
- GetOptionalConstraintValueAsInteger(constraints, key, value);
+ return GetMandatoryConstraintValueAsInteger(constraints, name, value) ||
+ GetOptionalConstraintValueAsInteger(constraints, name, value);
}
bool GetConstraintValueAsString(const blink::WebMediaConstraints& constraints,
- const std::string& key,
+ const std::string& name,
std::string* value) {
blink::WebString value_str;
- base::string16 key_16 = base::UTF8ToUTF16(key);
- if (!constraints.getMandatoryConstraintValue(key_16, value_str) &&
- !constraints.getOptionalConstraintValue(key_16, value_str)) {
+ base::string16 name_16 = base::UTF8ToUTF16(name);
+ if (!constraints.getMandatoryConstraintValue(name_16, value_str) &&
+ !constraints.getOptionalConstraintValue(name_16, value_str)) {
return false;
}
@@ -54,10 +54,10 @@ bool GetConstraintValueAsString(const blink::WebMediaConstraints& constraints,
bool GetMandatoryConstraintValueAsBoolean(
const blink::WebMediaConstraints& constraints,
- const std::string& key,
+ const std::string& name,
bool* value) {
blink::WebString value_str;
- if (!constraints.getMandatoryConstraintValue(base::UTF8ToUTF16(key),
+ if (!constraints.getMandatoryConstraintValue(base::UTF8ToUTF16(name),
value_str)) {
return false;
}
@@ -67,10 +67,10 @@ bool GetMandatoryConstraintValueAsBoolean(
bool GetMandatoryConstraintValueAsInteger(
const blink::WebMediaConstraints& constraints,
- const std::string& key,
+ const std::string& name,
int* value) {
blink::WebString value_str;
- if (!constraints.getMandatoryConstraintValue(base::UTF8ToUTF16(key),
+ if (!constraints.getMandatoryConstraintValue(base::UTF8ToUTF16(name),
value_str)) {
return false;
}
@@ -78,12 +78,24 @@ bool GetMandatoryConstraintValueAsInteger(
return base::StringToInt(value_str.utf8(), value);
}
+bool GetMandatoryConstraintValueAsDouble(
+ const blink::WebMediaConstraints& constraints,
+ const std::string& name,
+ double* value) {
+ blink::WebString value_str;
+ if (!constraints.getMandatoryConstraintValue(base::UTF8ToUTF16(name),
+ value_str)) {
+ return false;
+ }
+ return base::StringToDouble(value_str.utf8(), value);
+}
+
bool GetOptionalConstraintValueAsBoolean(
const blink::WebMediaConstraints& constraints,
- const std::string& key,
+ const std::string& name,
bool* value) {
blink::WebString value_str;
- if (!constraints.getOptionalConstraintValue(base::UTF8ToUTF16(key),
+ if (!constraints.getOptionalConstraintValue(base::UTF8ToUTF16(name),
value_str)) {
return false;
}
@@ -93,10 +105,10 @@ bool GetOptionalConstraintValueAsBoolean(
bool GetOptionalConstraintValueAsInteger(
const blink::WebMediaConstraints& constraints,
- const std::string& key,
+ const std::string& name,
int* value) {
blink::WebString value_str;
- if (!constraints.getOptionalConstraintValue(base::UTF8ToUTF16(key),
+ if (!constraints.getOptionalConstraintValue(base::UTF8ToUTF16(name),
value_str)) {
return false;
}
@@ -104,4 +116,17 @@ bool GetOptionalConstraintValueAsInteger(
return base::StringToInt(value_str.utf8(), value);
}
+bool GetOptionalConstraintValueAsDouble(
+ const blink::WebMediaConstraints& constraints,
+ const std::string& name,
+ double* value) {
+ blink::WebString value_str;
+ if (!constraints.getOptionalConstraintValue(base::UTF8ToUTF16(name),
+ value_str)) {
+ return false;
+ }
+
+ return base::StringToDouble(value_str.utf8(), value);
+}
+
} // namespace content
diff --git a/content/renderer/media/media_stream_constraints_util.h b/content/renderer/media/media_stream_constraints_util.h
index 706ed4f..2068588 100644
--- a/content/renderer/media/media_stream_constraints_util.h
+++ b/content/renderer/media/media_stream_constraints_util.h
@@ -16,58 +16,72 @@ class WebString;
namespace content {
-// Method to get boolean value of constraint with |key| from constraints.
+// Method to get boolean value of constraint with |name| from constraints.
// Returns true if the constraint is specified in either mandatory or optional
// constraints.
bool CONTENT_EXPORT GetConstraintValueAsBoolean(
const blink::WebMediaConstraints& constraints,
- const std::string& key,
+ const std::string& name,
bool* value);
-// Method to get int value of constraint with |key| from constraints.
+// Method to get int value of constraint with |name| from constraints.
// Returns true if the constraint is specified in either mandatory or Optional
// constraints.
bool CONTENT_EXPORT GetConstraintValueAsInteger(
const blink::WebMediaConstraints& constraints,
- const std::string& key,
+ const std::string& name,
int* value);
-// Method to get std::string value of constraint with |key| from constraints.
+// Method to get std::string value of constraint with |name| from constraints.
// Returns true if the constraint is specified in either mandatory or Optional
// constraints.
bool CONTENT_EXPORT GetConstraintValueAsString(
const blink::WebMediaConstraints& constraints,
- const std::string& key,
+ const std::string& name,
std::string* value);
-// Method to get boolean value of constraint with |key| from the
+// Method to get boolean value of constraint with |name| from the
// mandatory constraints.
bool CONTENT_EXPORT GetMandatoryConstraintValueAsBoolean(
const blink::WebMediaConstraints& constraints,
const std::string& name,
bool* value);
-// Method to get int value of constraint with |key| from the
+// Method to get int value of constraint with |name| from the
// mandatory constraints.
bool CONTENT_EXPORT GetMandatoryConstraintValueAsInteger(
const blink::WebMediaConstraints& constraints,
const std::string& name,
int* value);
-// Method to get bool value of constraint with |key| from the
+// Method to get double value of constraint with |name| from the
+// mandatory constraints.
+bool CONTENT_EXPORT GetMandatoryConstraintValueAsDouble(
+ const blink::WebMediaConstraints& constraints,
+ const std::string& name,
+ double* value);
+
+// Method to get bool value of constraint with |name| from the
// optional constraints.
bool CONTENT_EXPORT GetOptionalConstraintValueAsBoolean(
const blink::WebMediaConstraints& constraints,
const std::string& name,
bool* value);
-// Method to get int value of constraint with |key| from the
+// Method to get int value of constraint with |name| from the
// optional constraints.
bool CONTENT_EXPORT GetOptionalConstraintValueAsInteger(
const blink::WebMediaConstraints& constraints,
const std::string& name,
int* value);
+// Method to get double value of constraint with |name| from the
+// optional constraints.
+bool CONTENT_EXPORT GetOptionalConstraintValueAsDouble(
+ const blink::WebMediaConstraints& constraints,
+ const std::string& name,
+ double* value);
+
} // namespace content
#endif // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_CONSTRAINTS_UTIL_H_
diff --git a/content/renderer/media/media_stream_video_capture_source_unittest.cc b/content/renderer/media/media_stream_video_capture_source_unittest.cc
index a26abb1..65deacb 100644
--- a/content/renderer/media/media_stream_video_capture_source_unittest.cc
+++ b/content/renderer/media/media_stream_video_capture_source_unittest.cc
@@ -69,7 +69,7 @@ class MediaStreamVideoCapturerSourceTest : public testing::Test {
void OnConstraintsApplied(MediaStreamSource* source, bool success) {
}
- base::MessageLoop message_loop_;
+ base::MessageLoopForUI message_loop_;
scoped_ptr<ChildProcess> child_process_;
blink::WebMediaStreamSource webkit_source_;
MediaStreamVideoCapturerSource* source_; // owned by webkit_source.
diff --git a/content/renderer/media/media_stream_video_source.cc b/content/renderer/media/media_stream_video_source.cc
index 26b0b5a..8608311 100644
--- a/content/renderer/media/media_stream_video_source.cc
+++ b/content/renderer/media/media_stream_video_source.cc
@@ -14,7 +14,7 @@
#include "content/child/child_process.h"
#include "content/renderer/media/media_stream_constraints_util.h"
#include "content/renderer/media/media_stream_video_track.h"
-#include "content/renderer/media/video_frame_deliverer.h"
+#include "content/renderer/media/video_track_adapter.h"
namespace content {
@@ -54,6 +54,75 @@ const char kGooglePrefix[] = "goog";
// input frame height of max 360 * kMaxCropFactor pixels is accepted.
const int kMaxCropFactor = 2;
+// Returns true if |constraint| has mandatory constraints.
+bool HasMandatoryConstraints(const blink::WebMediaConstraints& constraints) {
+ blink::WebVector<blink::WebMediaConstraint> mandatory_constraints;
+ constraints.getMandatoryConstraints(mandatory_constraints);
+ return !mandatory_constraints.isEmpty();
+}
+
+// Retrieve the desired max width and height from |constraints|. If not set,
+// the |desired_width| and |desired_height| are set to
+// std::numeric_limits<int>::max();
+// If either max width or height is set as a mandatory constraint, the optional
+// constraints are not checked.
+void GetDesiredMaxWidthAndHeight(const blink::WebMediaConstraints& constraints,
+ int* desired_width, int* desired_height) {
+ *desired_width = std::numeric_limits<int>::max();
+ *desired_height = std::numeric_limits<int>::max();
+
+ bool mandatory = GetMandatoryConstraintValueAsInteger(
+ constraints,
+ MediaStreamVideoSource::kMaxWidth,
+ desired_width);
+ mandatory |= GetMandatoryConstraintValueAsInteger(
+ constraints,
+ MediaStreamVideoSource::kMaxHeight,
+ desired_height);
+ if (mandatory)
+ return;
+
+ GetOptionalConstraintValueAsInteger(constraints,
+ MediaStreamVideoSource::kMaxWidth,
+ desired_width);
+ GetOptionalConstraintValueAsInteger(constraints,
+ MediaStreamVideoSource::kMaxHeight,
+ desired_height);
+}
+
+// Retrieve the desired max and min aspect ratio from |constraints|. If not set,
+// the |min_aspect_ratio| is set to 0 and |max_aspect_ratio| is set to
+// std::numeric_limits<double>::max();
+// If either min or max aspect ratio is set as a mandatory constraint, the
+// optional constraints are not checked.
+void GetDesiredMinAndMaxAspectRatio(
+ const blink::WebMediaConstraints& constraints,
+ double* min_aspect_ratio,
+ double* max_aspect_ratio) {
+ *min_aspect_ratio = 0;
+ *max_aspect_ratio = std::numeric_limits<double>::max();
+
+ bool mandatory = GetMandatoryConstraintValueAsDouble(
+ constraints,
+ MediaStreamVideoSource::kMinAspectRatio,
+ min_aspect_ratio);
+ mandatory |= GetMandatoryConstraintValueAsDouble(
+ constraints,
+ MediaStreamVideoSource::kMaxAspectRatio,
+ max_aspect_ratio);
+ if (mandatory)
+ return;
+
+ GetOptionalConstraintValueAsDouble(
+ constraints,
+ MediaStreamVideoSource::kMinAspectRatio,
+ min_aspect_ratio);
+ GetOptionalConstraintValueAsDouble(
+ constraints,
+ MediaStreamVideoSource::kMaxAspectRatio,
+ max_aspect_ratio);
+}
+
// Returns true if |constraint| is fulfilled. |format| can be changed
// changed by a constraint. Ie - the frame rate can be changed by setting
// maxFrameRate.
@@ -87,22 +156,10 @@ bool UpdateFormatForConstraint(
if (constraint_name == MediaStreamVideoSource::kMinAspectRatio ||
constraint_name == MediaStreamVideoSource::kMaxAspectRatio) {
- double double_value = 0;
- base::StringToDouble(constraint_value, &double_value);
-
- // The aspect ratio in |constraint.m_value| has been converted to a string
- // and back to a double, so it may have a rounding error.
- // E.g if the value 1/3 is converted to a string, the string will not have
- // infinite length.
- // We add a margin of 0.0005 which is high enough to detect the same aspect
- // ratio but small enough to avoid matching wrong aspect ratios.
- const double kRoundingTruncation = 0.0005;
- double ratio = static_cast<double>(format->frame_size.width()) /
- format->frame_size.height();
- if (constraint_name == MediaStreamVideoSource::kMinAspectRatio)
- return (double_value <= ratio + kRoundingTruncation);
- // Subtract 0.0005 to avoid rounding problems. Same as above.
- return (double_value >= ratio - kRoundingTruncation);
+ // These constraints are handled by cropping if the camera outputs the wrong
+ // aspect ratio.
+ double value;
+ return base::StringToDouble(constraint_value, &value);
}
int value;
@@ -172,13 +229,37 @@ media::VideoCaptureFormats FilterFormats(
return supported_formats;
}
+ double max_aspect_ratio;
+ double min_aspect_ratio;
+ GetDesiredMinAndMaxAspectRatio(constraints,
+ &min_aspect_ratio,
+ &max_aspect_ratio);
+
+ if (min_aspect_ratio > max_aspect_ratio || max_aspect_ratio < 0.05f) {
+ DLOG(WARNING) << "Wrong requested aspect ratio.";
+ return media::VideoCaptureFormats();
+ }
+
+ int min_width = 0;
+ GetMandatoryConstraintValueAsInteger(constraints,
+ MediaStreamVideoSource::kMinWidth,
+ &min_width);
+ int min_height = 0;
+ GetMandatoryConstraintValueAsInteger(constraints,
+ MediaStreamVideoSource::kMinHeight,
+ &min_height);
+ int max_width;
+ int max_height;
+ GetDesiredMaxWidthAndHeight(constraints, &max_width, &max_height);
+
+ if (min_width > max_width || min_height > max_height)
+ return media::VideoCaptureFormats();
+
blink::WebVector<blink::WebMediaConstraint> mandatory;
blink::WebVector<blink::WebMediaConstraint> optional;
constraints.getMandatoryConstraints(mandatory);
constraints.getOptionalConstraints(optional);
-
media::VideoCaptureFormats candidates = supported_formats;
-
for (size_t i = 0; i < mandatory.size(); ++i)
FilterFormatsByConstraint(mandatory[i], true, &candidates);
@@ -203,33 +284,6 @@ media::VideoCaptureFormats FilterFormats(
return candidates;
}
-// Returns true if |constraint| has mandatory constraints.
-bool HasMandatoryConstraints(const blink::WebMediaConstraints& constraints) {
- blink::WebVector<blink::WebMediaConstraint> mandatory_constraints;
- constraints.getMandatoryConstraints(mandatory_constraints);
- return !mandatory_constraints.isEmpty();
-}
-
-// Retrieve the desired max width and height from |constraints|.
-void GetDesiredMaxWidthAndHeight(const blink::WebMediaConstraints& constraints,
- int* desired_width, int* desired_height) {
- bool mandatory = GetMandatoryConstraintValueAsInteger(
- constraints, MediaStreamVideoSource::kMaxWidth, desired_width);
- mandatory |= GetMandatoryConstraintValueAsInteger(
- constraints, MediaStreamVideoSource::kMaxHeight, desired_height);
- // Skip the optional constraints if any of the mandatory constraint is
- // specified.
- if (mandatory)
- return;
-
- GetOptionalConstraintValueAsInteger(constraints,
- MediaStreamVideoSource::kMaxWidth,
- desired_width);
- GetOptionalConstraintValueAsInteger(constraints,
- MediaStreamVideoSource::kMaxHeight,
- desired_height);
-}
-
const media::VideoCaptureFormat& GetBestFormatBasedOnArea(
const media::VideoCaptureFormats& formats,
int area) {
@@ -255,109 +309,21 @@ const media::VideoCaptureFormat& GetBestFormatBasedOnArea(
void GetBestCaptureFormat(
const media::VideoCaptureFormats& formats,
const blink::WebMediaConstraints& constraints,
- media::VideoCaptureFormat* capture_format,
- gfx::Size* max_frame_output_size) {
+ media::VideoCaptureFormat* capture_format) {
DCHECK(!formats.empty());
- DCHECK(max_frame_output_size);
- int max_width = std::numeric_limits<int>::max();
- int max_height = std::numeric_limits<int>::max();;
+ int max_width;
+ int max_height;
GetDesiredMaxWidthAndHeight(constraints, &max_width, &max_height);
*capture_format = GetBestFormatBasedOnArea(
formats,
std::min(max_width, MediaStreamVideoSource::kDefaultWidth) *
std::min(max_height, MediaStreamVideoSource::kDefaultHeight));
-
- max_frame_output_size->set_width(max_width);
- max_frame_output_size->set_height(max_height);
-}
-
-// Empty method used for keeping a reference to the original media::VideoFrame
-// in MediaStreamVideoSource::FrameDeliverer::DeliverFrameOnIO if cropping is
-// needed. The reference to |frame| is kept in the closure that calls this
-// method.
-void ReleaseOriginalFrame(
- const scoped_refptr<media::VideoFrame>& frame) {
}
} // anonymous namespace
-// Helper class used for delivering video frames to all registered tracks
-// on the IO-thread.
-class MediaStreamVideoSource::FrameDeliverer : public VideoFrameDeliverer {
- public:
- FrameDeliverer(
- const scoped_refptr<base::MessageLoopProxy>& io_message_loop)
- : VideoFrameDeliverer(io_message_loop) {
- }
-
- // Register |callback| to receive video frames of max size
- // |max_frame_output_size| on the IO thread.
- // TODO(perkj): Currently |max_frame_output_size| must be the same for all
- // |callbacks|.
- void AddCallback(void* id,
- const VideoCaptureDeliverFrameCB& callback,
- const gfx::Size& max_frame_output_size) {
- DCHECK(thread_checker().CalledOnValidThread());
- io_message_loop()->PostTask(
- FROM_HERE,
- base::Bind(
- &FrameDeliverer::AddCallbackWithResolutionOnIO,
- this, id, callback, max_frame_output_size));
- }
-
- virtual void DeliverFrameOnIO(
- const scoped_refptr<media::VideoFrame>& frame,
- const media::VideoCaptureFormat& format) OVERRIDE {
- DCHECK(io_message_loop()->BelongsToCurrentThread());
- TRACE_EVENT0("video", "MediaStreamVideoSource::DeliverFrameOnIO");
- if (max_output_size_.IsEmpty())
- return; // Frame received before the output has been decided.
-
- scoped_refptr<media::VideoFrame> video_frame(frame);
- const gfx::Size& visible_size = frame->visible_rect().size();
- if (visible_size.width() > max_output_size_.width() ||
- visible_size.height() > max_output_size_.height()) {
- // If |frame| is not the size that is expected, we need to crop it by
- // providing a new |visible_rect|. The new visible rect must be within the
- // original |visible_rect|.
- gfx::Rect output_rect = frame->visible_rect();
- output_rect.ClampToCenteredSize(max_output_size_);
- // TODO(perkj): Allow cropping of textures once http://crbug/362521 is
- // fixed.
- if (frame->format() != media::VideoFrame::NATIVE_TEXTURE) {
- video_frame = media::VideoFrame::WrapVideoFrame(
- frame,
- output_rect,
- output_rect.size(),
- base::Bind(&ReleaseOriginalFrame, frame));
- }
- }
- VideoFrameDeliverer::DeliverFrameOnIO(video_frame, format);
- }
-
- protected:
- virtual ~FrameDeliverer() {
- }
-
- void AddCallbackWithResolutionOnIO(
- void* id,
- const VideoCaptureDeliverFrameCB& callback,
- const gfx::Size& max_frame_output_size) {
- DCHECK(io_message_loop()->BelongsToCurrentThread());
- // Currently we only support one frame output size.
- DCHECK(!max_frame_output_size.IsEmpty() &&
- (max_output_size_.IsEmpty() ||
- max_output_size_ == max_frame_output_size));
- max_output_size_ = max_frame_output_size;
- VideoFrameDeliverer::AddCallbackOnIO(id, callback);
- }
-
- private:
- gfx::Size max_output_size_;
-};
-
// static
MediaStreamVideoSource* MediaStreamVideoSource::GetVideoSource(
const blink::WebMediaStreamSource& source) {
@@ -375,9 +341,8 @@ bool MediaStreamVideoSource::IsConstraintSupported(const std::string& name) {
MediaStreamVideoSource::MediaStreamVideoSource()
: state_(NEW),
- frame_deliverer_(
- new MediaStreamVideoSource::FrameDeliverer(
- ChildProcess::current()->io_message_loop_proxy())),
+ track_adapter_(new VideoTrackAdapter(
+ ChildProcess::current()->io_message_loop_proxy())),
weak_factory_(this) {
}
@@ -440,9 +405,20 @@ void MediaStreamVideoSource::RemoveTrack(MediaStreamVideoTrack* video_track) {
std::find(tracks_.begin(), tracks_.end(), video_track);
DCHECK(it != tracks_.end());
tracks_.erase(it);
- // Call |RemoveCallback| here even if adding the track has failed and
- // frame_deliverer_->AddCallback has not been called.
- frame_deliverer_->RemoveCallback(video_track);
+
+ // Check if |video_track| is waiting for applying new constraints and remove
+ // the request in that case.
+ for (std::vector<RequestedConstraints>::iterator it =
+ requested_constraints_.begin();
+ it != requested_constraints_.end(); ++it) {
+ if (it->track == video_track) {
+ requested_constraints_.erase(it);
+ break;
+ }
+ }
+ // Call |frame_adapter_->RemoveTrack| here even if adding the track has
+ // failed and |frame_adapter_->AddCallback| has not been called.
+ track_adapter_->RemoveTrack(video_track);
if (tracks_.empty())
StopSource();
@@ -450,7 +426,8 @@ void MediaStreamVideoSource::RemoveTrack(MediaStreamVideoTrack* video_track) {
const scoped_refptr<base::MessageLoopProxy>&
MediaStreamVideoSource::io_message_loop() const {
- return frame_deliverer_->io_message_loop();
+ DCHECK(CalledOnValidThread());
+ return track_adapter_->io_message_loop();
}
void MediaStreamVideoSource::DoStopSource() {
@@ -470,9 +447,7 @@ void MediaStreamVideoSource::OnSupportedFormats(
supported_formats_ = formats;
if (!FindBestFormatWithConstraints(supported_formats_,
- &current_format_,
- &max_frame_output_size_,
- &current_constraints_)) {
+ &current_format_)) {
SetReadyState(blink::WebMediaStreamSource::ReadyStateEnded);
// This object can be deleted after calling FinalizeAddTrack. See comment
// in the header file.
@@ -490,15 +465,12 @@ void MediaStreamVideoSource::OnSupportedFormats(
params.requested_format = current_format_;
StartSourceImpl(
params,
- base::Bind(&MediaStreamVideoSource::FrameDeliverer::DeliverFrameOnIO,
- frame_deliverer_));
+ base::Bind(&VideoTrackAdapter::DeliverFrameOnIO, track_adapter_));
}
bool MediaStreamVideoSource::FindBestFormatWithConstraints(
const media::VideoCaptureFormats& formats,
- media::VideoCaptureFormat* best_format,
- gfx::Size* max_frame_output_size,
- blink::WebMediaConstraints* resulting_constraints) {
+ media::VideoCaptureFormat* best_format) {
// Find the first constraints that we can fulfill.
for (std::vector<RequestedConstraints>::iterator request_it =
requested_constraints_.begin();
@@ -511,9 +483,6 @@ bool MediaStreamVideoSource::FindBestFormatWithConstraints(
// we will start with whatever format is native to the source.
if (formats.empty() && !HasMandatoryConstraints(requested_constraints)) {
*best_format = media::VideoCaptureFormat();
- *resulting_constraints = requested_constraints;
- *max_frame_output_size = gfx::Size(std::numeric_limits<int>::max(),
- std::numeric_limits<int>::max());
return true;
}
media::VideoCaptureFormats filtered_formats =
@@ -522,9 +491,7 @@ bool MediaStreamVideoSource::FindBestFormatWithConstraints(
// A request with constraints that can be fulfilled.
GetBestCaptureFormat(filtered_formats,
requested_constraints,
- best_format,
- max_frame_output_size);
- *resulting_constraints= requested_constraints;
+ best_format);
return true;
}
}
@@ -566,11 +533,23 @@ void MediaStreamVideoSource::FinalizeAddTrack() {
((!current_format_.IsValid() && !HasMandatoryConstraints(
it->constraints)) ||
!FilterFormats(it->constraints, formats).empty());
+
if (success) {
- frame_deliverer_->AddCallback(it->track, it->frame_callback,
- max_frame_output_size_);
+ int max_width;
+ int max_height;
+ GetDesiredMaxWidthAndHeight(it->constraints, &max_width, &max_height);
+ double max_aspect_ratio;
+ double min_aspect_ratio;
+ GetDesiredMinAndMaxAspectRatio(it->constraints,
+ &min_aspect_ratio,
+ &max_aspect_ratio);
+ track_adapter_->AddTrack(it->track,it->frame_callback,
+ max_width, max_height,
+ min_aspect_ratio, max_aspect_ratio);
}
+
DVLOG(3) << "FinalizeAddTrack() success " << success;
+
if (!it->callback.is_null())
it->callback.Run(this, success);
}
diff --git a/content/renderer/media/media_stream_video_source.h b/content/renderer/media/media_stream_video_source.h
index 34b9170..8a93838 100644
--- a/content/renderer/media/media_stream_video_source.h
+++ b/content/renderer/media/media_stream_video_source.h
@@ -10,11 +10,11 @@
#include "base/compiler_specific.h"
#include "base/memory/weak_ptr.h"
+#include "base/message_loop/message_loop.h"
#include "base/threading/non_thread_safe.h"
#include "content/common/content_export.h"
#include "content/common/media/video_capture.h"
#include "content/renderer/media/media_stream_source.h"
-#include "content/renderer/media/video_frame_deliverer.h"
#include "media/base/video_frame.h"
#include "media/video/capture/video_capture_types.h"
#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
@@ -24,6 +24,7 @@
namespace content {
class MediaStreamVideoTrack;
+class VideoTrackAdapter;
// MediaStreamVideoSource is an interface used for sending video frames to a
// MediaStreamVideoTrack.
@@ -38,8 +39,7 @@ class MediaStreamVideoTrack;
// the source implementation must call OnSupportedFormats.
// MediaStreamVideoSource then match the constraints provided in AddTrack with
// the formats and call StartSourceImpl. The source implementation must call
-// OnStartDone when the underlying source has been started or failed to
-// start.
+// OnStartDone when the underlying source has been started or failed to start.
class CONTENT_EXPORT MediaStreamVideoSource
: public MediaStreamSource,
NON_EXPORTED_BASE(public base::NonThreadSafe) {
@@ -128,14 +128,10 @@ class CONTENT_EXPORT MediaStreamVideoSource
// Finds the first constraints in |requested_constraints_| that can be
// fulfilled. |best_format| is set to the video resolution that can be
- // fulfilled. |frame_output_size| is the requested frame size after cropping.
- // |resulting_constraints| is set to the found constraints in
- // |requested_constraints_|.
+ // fulfilled.
bool FindBestFormatWithConstraints(
const media::VideoCaptureFormats& formats,
- media::VideoCaptureFormat* best_format,
- gfx::Size* frame_output_size,
- blink::WebMediaConstraints* resulting_constraints);
+ media::VideoCaptureFormat* best_format);
// Trigger all cached callbacks from AddTrack. AddTrack is successful
// if the capture delegate has started and the constraints provided in
@@ -149,10 +145,6 @@ class CONTENT_EXPORT MediaStreamVideoSource
State state_;
media::VideoCaptureFormat current_format_;
- blink::WebMediaConstraints current_constraints_;
- // |max_frame_output_size_| is the maximum frame size allowed by
- // |current_constraints_|.
- gfx::Size max_frame_output_size_;
struct RequestedConstraints {
RequestedConstraints(MediaStreamVideoTrack* track,
@@ -170,12 +162,10 @@ class CONTENT_EXPORT MediaStreamVideoSource
media::VideoCaptureFormats supported_formats_;
- // |FrameDeliverer| is an internal helper object used for delivering video
- // frames using callbacks to all registered tracks on the IO thread.
- class FrameDeliverer;
- scoped_refptr<FrameDeliverer> frame_deliverer_;
+ // |track_adapter_| delivers video frames to the tracks on the IO-thread.
+ scoped_refptr<VideoTrackAdapter> track_adapter_;
- // Tracks that currently are receiving video frames.
+ // Tracks that currently are connected to this source.
std::vector<MediaStreamVideoTrack*> tracks_;
// NOTE: Weak pointers must be invalidated before all other member variables.
diff --git a/content/renderer/media/media_stream_video_source_unittest.cc b/content/renderer/media/media_stream_video_source_unittest.cc
index d580dd5..81e1626 100644
--- a/content/renderer/media/media_stream_video_source_unittest.cc
+++ b/content/renderer/media/media_stream_video_source_unittest.cc
@@ -94,7 +94,7 @@ class MediaStreamVideoSourceTest
MockMediaStreamVideoSource* mock_source() { return mock_source_; }
- // Test that the source crops to the requested max width and
+ // Test that the source crops/scales to the requested width and
// height even though the camera delivers a larger frame.
void TestSourceCropFrame(int capture_width,
int capture_height,
@@ -130,6 +130,64 @@ class MediaStreamVideoSourceTest
run_loop.Run();
}
+ void DeliverVideoFrameAndWaitForTwoRenderers(
+ int width,
+ int height,
+ MockMediaStreamVideoSink* sink1,
+ MockMediaStreamVideoSink* sink2) {
+ base::RunLoop run_loop;
+ base::Closure quit_closure = run_loop.QuitClosure();
+ EXPECT_CALL(*sink1, OnVideoFrame());
+ EXPECT_CALL(*sink2, OnVideoFrame()).WillOnce(
+ RunClosure(quit_closure));
+ scoped_refptr<media::VideoFrame> frame =
+ media::VideoFrame::CreateBlackFrame(gfx::Size(width, height));
+ mock_source()->DeliverVideoFrame(frame);
+ run_loop.Run();
+ }
+
+ void TestTwoTracksWithDifferentConstraints(
+ const blink::WebMediaConstraints& constraints1,
+ const blink::WebMediaConstraints& constraints2,
+ int capture_width,
+ int capture_height,
+ int expected_width1,
+ int expected_height1,
+ int expected_width2,
+ int expected_height2) {
+ blink::WebMediaStreamTrack track1 =
+ CreateTrackAndStartSource(constraints1, capture_width, capture_height,
+ MediaStreamVideoSource::kDefaultFrameRate);
+
+ blink::WebMediaStreamTrack track2 =
+ CreateTrack("dummy", constraints2);
+
+ MockMediaStreamVideoSink sink1;
+ MediaStreamVideoSink::AddToVideoTrack(&sink1, sink1.GetDeliverFrameCB(),
+ track1);
+ EXPECT_EQ(0, sink1.number_of_frames());
+
+ MockMediaStreamVideoSink sink2;
+ MediaStreamVideoSink::AddToVideoTrack(&sink2, sink2.GetDeliverFrameCB(),
+ track2);
+ EXPECT_EQ(0, sink2.number_of_frames());
+
+ DeliverVideoFrameAndWaitForTwoRenderers(capture_width,
+ capture_height,
+ &sink1,
+ &sink2);
+
+ EXPECT_EQ(1, sink1.number_of_frames());
+ EXPECT_EQ(expected_width1, sink1.frame_size().width());
+ EXPECT_EQ(expected_height1, sink1.frame_size().height());
+
+ EXPECT_EQ(1, sink2.number_of_frames());
+ EXPECT_EQ(expected_width2, sink2.frame_size().width());
+ EXPECT_EQ(expected_height2, sink2.frame_size().height());
+
+ MediaStreamVideoSink::RemoveFromVideoTrack(&sink1, track1);
+ MediaStreamVideoSink::RemoveFromVideoTrack(&sink2, track2);
+ }
void ReleaseTrackAndSourceOnAddTrackCallback(
const blink::WebMediaStreamTrack& track_to_release) {
@@ -151,8 +209,8 @@ class MediaStreamVideoSourceTest
track_to_release_.reset();
}
}
- scoped_ptr<ChildProcess> child_process_;
base::MessageLoopForUI message_loop_;
+ scoped_ptr<ChildProcess> child_process_;
blink::WebMediaStreamTrack track_to_release_;
int number_of_successful_constraints_applied_;
int number_of_failed_constraints_applied_;
@@ -246,14 +304,53 @@ TEST_F(MediaStreamVideoSourceTest, MandatoryAspectRatio4To3) {
640.0 / 480);
factory.AddOptional(MediaStreamVideoSource::kMinWidth, 1280);
- CreateTrackAndStartSource(factory.CreateWebMediaConstraints(), 640, 480, 30);
+ TestSourceCropFrame(1280, 720,
+ factory.CreateWebMediaConstraints(), 960, 720);
+}
+
+// Test that AddTrack succeeds if the mandatory min aspect ratio it set to 2.
+TEST_F(MediaStreamVideoSourceTest, MandatoryAspectRatio2) {
+ MockMediaConstraintFactory factory;
+ factory.AddMandatory(MediaStreamVideoSource::kMinAspectRatio, 2);
+
+ TestSourceCropFrame(MediaStreamVideoSource::kDefaultWidth,
+ MediaStreamVideoSource::kDefaultHeight,
+ factory.CreateWebMediaConstraints(), 640, 320);
}
-// Test that AddTrack fail if the mandatory aspect ratio
-// is set higher than supported.
-TEST_F(MediaStreamVideoSourceTest, MandatoryAspectRatioTooHigh) {
+TEST_F(MediaStreamVideoSourceTest, MinAspectRatioLargerThanMaxAspectRatio) {
MockMediaConstraintFactory factory;
factory.AddMandatory(MediaStreamVideoSource::kMinAspectRatio, 2);
+ factory.AddMandatory(MediaStreamVideoSource::kMaxAspectRatio, 1);
+ blink::WebMediaStreamTrack track = CreateTrack(
+ "123", factory.CreateWebMediaConstraints());
+ mock_source()->CompleteGetSupportedFormats();
+ EXPECT_EQ(1, NumberOfFailedConstraintsCallbacks());
+}
+
+TEST_F(MediaStreamVideoSourceTest, MaxAspectRatioZero) {
+ MockMediaConstraintFactory factory;
+ factory.AddOptional(MediaStreamVideoSource::kMaxAspectRatio, 0);
+ blink::WebMediaStreamTrack track = CreateTrack(
+ "123", factory.CreateWebMediaConstraints());
+ mock_source()->CompleteGetSupportedFormats();
+ EXPECT_EQ(1, NumberOfFailedConstraintsCallbacks());
+}
+
+TEST_F(MediaStreamVideoSourceTest, MinWidthLargerThanMaxWidth) {
+ MockMediaConstraintFactory factory;
+ factory.AddMandatory(MediaStreamVideoSource::kMinWidth, 640);
+ factory.AddMandatory(MediaStreamVideoSource::kMaxWidth, 320);
+ blink::WebMediaStreamTrack track = CreateTrack(
+ "123", factory.CreateWebMediaConstraints());
+ mock_source()->CompleteGetSupportedFormats();
+ EXPECT_EQ(1, NumberOfFailedConstraintsCallbacks());
+}
+
+TEST_F(MediaStreamVideoSourceTest, MinHeightLargerThanMaxHeight) {
+ MockMediaConstraintFactory factory;
+ factory.AddMandatory(MediaStreamVideoSource::kMinHeight, 480);
+ factory.AddMandatory(MediaStreamVideoSource::kMaxHeight, 360);
blink::WebMediaStreamTrack track = CreateTrack(
"123", factory.CreateWebMediaConstraints());
mock_source()->CompleteGetSupportedFormats();
@@ -278,7 +375,7 @@ TEST_F(MediaStreamVideoSourceTest, ReleaseTrackAndSourceOnSuccessCallBack) {
// source during the callback if adding a track fails.
TEST_F(MediaStreamVideoSourceTest, ReleaseTrackAndSourceOnFailureCallBack) {
MockMediaConstraintFactory factory;
- factory.AddMandatory(MediaStreamVideoSource::kMinAspectRatio, 2);
+ factory.AddMandatory(MediaStreamVideoSource::kMinWidth, 99999);
{
blink::WebMediaStreamTrack track =
CreateTrack("123", factory.CreateWebMediaConstraints());
@@ -405,6 +502,15 @@ TEST_F(MediaStreamVideoSourceTest, DeliverCroppedVideoFrame637359) {
TestSourceCropFrame(640, 480, factory.CreateWebMediaConstraints(), 637, 359);
}
+TEST_F(MediaStreamVideoSourceTest, DeliverCroppedVideoFrame320320) {
+ MockMediaConstraintFactory factory;
+ factory.AddMandatory(MediaStreamVideoSource::kMaxWidth, 320);
+ factory.AddMandatory(MediaStreamVideoSource::kMaxHeight, 320);
+ factory.AddMandatory(MediaStreamVideoSource::kMinHeight, 320);
+ factory.AddMandatory(MediaStreamVideoSource::kMinWidth, 320);
+ TestSourceCropFrame(640, 480, factory.CreateWebMediaConstraints(), 320, 320);
+}
+
TEST_F(MediaStreamVideoSourceTest, DeliverSmallerSizeWhenTooLargeMax) {
MockMediaConstraintFactory factory;
factory.AddOptional(MediaStreamVideoSource::kMaxWidth, 1920);
@@ -415,6 +521,84 @@ TEST_F(MediaStreamVideoSourceTest, DeliverSmallerSizeWhenTooLargeMax) {
1280, 720);
}
+TEST_F(MediaStreamVideoSourceTest, TwoTracksWithVGAAndWVGA) {
+ MockMediaConstraintFactory factory1;
+ factory1.AddOptional(MediaStreamVideoSource::kMaxWidth, 640);
+ factory1.AddOptional(MediaStreamVideoSource::kMaxHeight, 480);
+
+ MockMediaConstraintFactory factory2;
+ factory2.AddOptional(MediaStreamVideoSource::kMaxHeight, 360);
+
+ TestTwoTracksWithDifferentConstraints(factory1.CreateWebMediaConstraints(),
+ factory2.CreateWebMediaConstraints(),
+ 640, 480,
+ 640, 480,
+ 640, 360);
+}
+
+TEST_F(MediaStreamVideoSourceTest, TwoTracksWith720AndWVGA) {
+ MockMediaConstraintFactory factory1;
+ factory1.AddOptional(MediaStreamVideoSource::kMinWidth, 1280);
+ factory1.AddOptional(MediaStreamVideoSource::kMinHeight, 720);
+
+
+ MockMediaConstraintFactory factory2;
+ factory2.AddMandatory(MediaStreamVideoSource::kMaxWidth, 640);
+ factory2.AddMandatory(MediaStreamVideoSource::kMaxHeight, 360);
+
+ TestTwoTracksWithDifferentConstraints(factory1.CreateWebMediaConstraints(),
+ factory2.CreateWebMediaConstraints(),
+ 1280, 720,
+ 1280, 720,
+ 640, 360);
+}
+
+TEST_F(MediaStreamVideoSourceTest, TwoTracksWith720AndW700H700) {
+ MockMediaConstraintFactory factory1;
+ factory1.AddOptional(MediaStreamVideoSource::kMinWidth, 1280);
+ factory1.AddOptional(MediaStreamVideoSource::kMinHeight, 720);
+
+ MockMediaConstraintFactory factory2;
+ factory2.AddMandatory(MediaStreamVideoSource::kMaxWidth, 700);
+ factory2.AddMandatory(MediaStreamVideoSource::kMaxHeight, 700);
+
+ TestTwoTracksWithDifferentConstraints(factory1.CreateWebMediaConstraints(),
+ factory2.CreateWebMediaConstraints(),
+ 1280, 720,
+ 1280, 720,
+ 700, 700);
+}
+
+TEST_F(MediaStreamVideoSourceTest, TwoTracksWith720AndMaxAspectRatio4To3) {
+ MockMediaConstraintFactory factory1;
+ factory1.AddOptional(MediaStreamVideoSource::kMinWidth, 1280);
+ factory1.AddOptional(MediaStreamVideoSource::kMinHeight, 720);
+
+ MockMediaConstraintFactory factory2;
+ factory2.AddMandatory(MediaStreamVideoSource::kMaxAspectRatio, 640.0 / 480);
+
+ TestTwoTracksWithDifferentConstraints(factory1.CreateWebMediaConstraints(),
+ factory2.CreateWebMediaConstraints(),
+ 1280, 720,
+ 1280, 720,
+ 960, 720);
+}
+
+TEST_F(MediaStreamVideoSourceTest, TwoTracksWithVgaAndMinAspectRatio) {
+ MockMediaConstraintFactory factory1;
+ factory1.AddOptional(MediaStreamVideoSource::kMaxWidth, 640);
+ factory1.AddOptional(MediaStreamVideoSource::kMaxHeight, 480);
+
+ MockMediaConstraintFactory factory2;
+ factory2.AddMandatory(MediaStreamVideoSource::kMinAspectRatio, 640.0 / 360);
+
+ TestTwoTracksWithDifferentConstraints(factory1.CreateWebMediaConstraints(),
+ factory2.CreateWebMediaConstraints(),
+ 640, 480,
+ 640, 480,
+ 640, 360);
+}
+
// Test that a source can change the frame resolution on the fly and that
// tracks sinks get the new frame size unless constraints force the frame to be
// cropped.
diff --git a/content/renderer/media/media_stream_video_track.cc b/content/renderer/media/media_stream_video_track.cc
index 7598cd2..cbb2869 100644
--- a/content/renderer/media/media_stream_video_track.cc
+++ b/content/renderer/media/media_stream_video_track.cc
@@ -123,6 +123,7 @@ MediaStreamVideoTrack::MediaStreamVideoTrack(
}
MediaStreamVideoTrack::~MediaStreamVideoTrack() {
+ DCHECK(thread_checker_.CalledOnValidThread());
Stop();
DVLOG(3) << "~MediaStreamVideoTrack()";
}
diff --git a/content/renderer/media/mock_media_stream_video_source.cc b/content/renderer/media/mock_media_stream_video_source.cc
index 12bddb4..ce060f6 100644
--- a/content/renderer/media/mock_media_stream_video_source.cc
+++ b/content/renderer/media/mock_media_stream_video_source.cc
@@ -84,7 +84,6 @@ void MockMediaStreamVideoSource::DeliverVideoFrameOnIO(
const scoped_refptr<media::VideoFrame>& frame,
media::VideoCaptureFormat format,
const VideoCaptureDeliverFrameCB& frame_callback) {
- DCHECK(io_message_loop()->BelongsToCurrentThread());
frame_callback.Run(frame, format);
}
diff --git a/content/renderer/media/video_frame_deliverer.h b/content/renderer/media/video_frame_deliverer.h
index e8819b4..0649d81 100644
--- a/content/renderer/media/video_frame_deliverer.h
+++ b/content/renderer/media/video_frame_deliverer.h
@@ -20,7 +20,7 @@ namespace content {
// VideoFrameDeliverer is a helper class used for registering
// VideoCaptureDeliverFrameCB on the main render thread to receive video frames
// on the IO-thread.
-// Its used by MediaStreamVideoTrack and MediaStreamVideoSource.
+// Its used by MediaStreamVideoTrack.
class VideoFrameDeliverer
: public base::RefCountedThreadSafe<VideoFrameDeliverer> {
public:
diff --git a/content/renderer/media/video_track_adapter.cc b/content/renderer/media/video_track_adapter.cc
new file mode 100644
index 0000000..46223f8
--- /dev/null
+++ b/content/renderer/media/video_track_adapter.cc
@@ -0,0 +1,335 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/renderer/media/video_track_adapter.h"
+
+#include <algorithm>
+#include <limits>
+#include <utility>
+
+#include "base/bind.h"
+#include "base/debug/trace_event.h"
+#include "base/location.h"
+#include "media/base/video_util.h"
+
+namespace content {
+
+namespace {
+
+// Empty method used for keeping a reference to the original media::VideoFrame
+// in VideoFrameResolutionAdapter::DeliverFrame if cropping is needed.
+// The reference to |frame| is kept in the closure that calls this method.
+void ReleaseOriginalFrame(
+ const scoped_refptr<media::VideoFrame>& frame) {
+}
+
+void ResetCallbackOnMainRenderThread(
+ scoped_ptr<VideoCaptureDeliverFrameCB> callback) {
+ // |callback| will be deleted when this exits.
+}
+
+} // anonymous namespace
+
+// VideoFrameResolutionAdapter is created on and lives on
+// on the IO-thread. It does the resolution adaptation and delivers frames to
+// all registered tracks on the IO-thread.
+// All method calls must be on the IO-thread.
+class VideoTrackAdapter::VideoFrameResolutionAdapter
+ : public base::RefCountedThreadSafe<VideoFrameResolutionAdapter> {
+ public:
+ VideoFrameResolutionAdapter(
+ scoped_refptr<base::SingleThreadTaskRunner> render_message_loop,
+ int max_width,
+ int max_height,
+ double min_aspect_ratio,
+ double max_aspect_ratio);
+
+ // Add |callback| to receive video frames on the IO-thread.
+ // |callback| will however be released on the main render thread.
+ void AddCallback(const MediaStreamVideoTrack* track,
+ const VideoCaptureDeliverFrameCB& callback);
+
+ // Removes |callback| associated with |track| from receiving video frames if
+ // |track| has been added. It is ok to call RemoveCallback even if the |track|
+ // has not been added. The |callback| is released on the main render thread.
+ void RemoveCallback(const MediaStreamVideoTrack* track);
+
+ void DeliverFrame(const scoped_refptr<media::VideoFrame>& frame,
+ const media::VideoCaptureFormat& format);
+
+ // Returns true if all arguments match with the output of this adapter.
+ bool ConstraintsMatch(int max_width,
+ int max_height,
+ double min_aspect_ratio,
+ double max_aspect_ratio) const;
+
+ bool IsEmpty() const;
+
+ private:
+ virtual ~VideoFrameResolutionAdapter();
+ friend class base::RefCountedThreadSafe<VideoFrameResolutionAdapter>;
+
+ virtual void DoDeliverFrame(
+ const scoped_refptr<media::VideoFrame>& frame,
+ const media::VideoCaptureFormat& format);
+
+ // Bound to the IO-thread.
+ base::ThreadChecker io_thread_checker_;
+
+ // The task runner where we will release VideoCaptureDeliverFrameCB
+ // registered in AddCallback.
+ scoped_refptr<base::SingleThreadTaskRunner> renderer_task_runner_;
+
+ gfx::Size max_frame_size_;
+ double min_aspect_ratio_;
+ double max_aspect_ratio_;
+
+ typedef std::pair<const void*, VideoCaptureDeliverFrameCB>
+ VideoIdCallbackPair;
+ std::vector<VideoIdCallbackPair> callbacks_;
+
+ DISALLOW_COPY_AND_ASSIGN(VideoFrameResolutionAdapter);
+};
+
+VideoTrackAdapter::
+VideoFrameResolutionAdapter::VideoFrameResolutionAdapter(
+ scoped_refptr<base::SingleThreadTaskRunner> render_message_loop,
+ int max_width,
+ int max_height,
+ double min_aspect_ratio,
+ double max_aspect_ratio)
+ : renderer_task_runner_(render_message_loop),
+ max_frame_size_(max_width, max_height),
+ min_aspect_ratio_(min_aspect_ratio),
+ max_aspect_ratio_(max_aspect_ratio) {
+ DCHECK(renderer_task_runner_);
+ DCHECK(io_thread_checker_.CalledOnValidThread());
+ DCHECK_GE(max_aspect_ratio_, min_aspect_ratio_);
+ CHECK_NE(0, max_aspect_ratio_);
+ DVLOG(3) << "VideoFrameResolutionAdapter("
+ << "{ max_width =" << max_width << "}, "
+ << "{ max_height =" << max_height << "}, "
+ << "{ min_aspect_ratio =" << min_aspect_ratio << "}, "
+ << "{ max_aspect_ratio_ =" << max_aspect_ratio_ << "}) ";
+}
+
+VideoTrackAdapter::
+VideoFrameResolutionAdapter::~VideoFrameResolutionAdapter() {
+ DCHECK(io_thread_checker_.CalledOnValidThread());
+ DCHECK(callbacks_.empty());
+}
+
+void VideoTrackAdapter::VideoFrameResolutionAdapter::DeliverFrame(
+ const scoped_refptr<media::VideoFrame>& frame,
+ const media::VideoCaptureFormat& format) {
+ DCHECK(io_thread_checker_.CalledOnValidThread());
+ // TODO(perkj): Allow cropping / scaling of textures once
+ // http://crbug/362521 is fixed.
+ if (frame->format() == media::VideoFrame::NATIVE_TEXTURE) {
+ DoDeliverFrame(frame, format);
+ return;
+ }
+ scoped_refptr<media::VideoFrame> video_frame(frame);
+ double input_ratio =
+ static_cast<double>(frame->natural_size().width()) /
+ frame->natural_size().height();
+
+ // If |frame| has larger width or height than requested, or the aspect ratio
+ // does not match the requested, we want to create a wrapped version of this
+ // frame with a size that fulfills the constraints.
+ if (frame->natural_size().width() > max_frame_size_.width() ||
+ frame->natural_size().height() > max_frame_size_.height() ||
+ input_ratio > max_aspect_ratio_ ||
+ input_ratio < min_aspect_ratio_) {
+ int desired_width = std::min(max_frame_size_.width(),
+ frame->natural_size().width());
+ int desired_height = std::min(max_frame_size_.height(),
+ frame->natural_size().height());
+
+ double resulting_ratio =
+ static_cast<double>(desired_width) / desired_height;
+ double requested_ratio = resulting_ratio;
+
+ if (requested_ratio > max_aspect_ratio_)
+ requested_ratio = max_aspect_ratio_;
+ else if (requested_ratio < min_aspect_ratio_)
+ requested_ratio = min_aspect_ratio_;
+
+ if (resulting_ratio < requested_ratio) {
+ desired_height = static_cast<int>((desired_height * resulting_ratio) /
+ requested_ratio);
+ // Make sure we scale to an even height to avoid rounding errors
+ desired_height = (desired_height + 1) & ~1;
+ } else if (resulting_ratio > requested_ratio) {
+ desired_width = static_cast<int>((desired_width * requested_ratio) /
+ resulting_ratio);
+ // Make sure we scale to an even width to avoid rounding errors.
+ desired_width = (desired_width + 1) & ~1;
+ }
+
+ gfx::Size desired_size(desired_width, desired_height);
+
+ // Get the largest centered rectangle with the same aspect ratio of
+ // |desired_size| that fits entirely inside of |frame->visible_rect()|.
+ // This will be the rect we need to crop the original frame to.
+ // From this rect, the original frame can be scaled down to |desired_size|.
+ gfx::Rect region_in_frame =
+ media::ComputeLetterboxRegion(frame->visible_rect(), desired_size);
+
+ video_frame = media::VideoFrame::WrapVideoFrame(
+ frame,
+ region_in_frame,
+ desired_size,
+ base::Bind(&ReleaseOriginalFrame, frame));
+
+ DVLOG(3) << "desired size " << desired_size.ToString()
+ << " output natural size "
+ << video_frame->natural_size().ToString()
+ << " output visible rect "
+ << video_frame->visible_rect().ToString();
+ }
+ DoDeliverFrame(video_frame, format);
+}
+
+void VideoTrackAdapter::
+VideoFrameResolutionAdapter::DoDeliverFrame(
+ const scoped_refptr<media::VideoFrame>& frame,
+ const media::VideoCaptureFormat& format) {
+ DCHECK(io_thread_checker_.CalledOnValidThread());
+ for (std::vector<VideoIdCallbackPair>::const_iterator it = callbacks_.begin();
+ it != callbacks_.end(); ++it) {
+ it->second.Run(frame, format);
+ }
+}
+
+void VideoTrackAdapter::VideoFrameResolutionAdapter::AddCallback(
+ const MediaStreamVideoTrack* track,
+ const VideoCaptureDeliverFrameCB& callback) {
+ DCHECK(io_thread_checker_.CalledOnValidThread());
+ callbacks_.push_back(std::make_pair(track, callback));
+}
+
+void VideoTrackAdapter::VideoFrameResolutionAdapter::RemoveCallback(
+ const MediaStreamVideoTrack* track) {
+ DCHECK(io_thread_checker_.CalledOnValidThread());
+ std::vector<VideoIdCallbackPair>::iterator it = callbacks_.begin();
+ for (; it != callbacks_.end(); ++it) {
+ if (it->first == track) {
+ // Make sure the VideoCaptureDeliverFrameCB is released on the main
+ // render thread since it was added on the main render thread in
+ // VideoTrackAdapter::AddTrack.
+ scoped_ptr<VideoCaptureDeliverFrameCB> callback(
+ new VideoCaptureDeliverFrameCB(it->second));
+ callbacks_.erase(it);
+ renderer_task_runner_->PostTask(
+ FROM_HERE, base::Bind(&ResetCallbackOnMainRenderThread,
+ base::Passed(&callback)));
+
+ return;
+ }
+ }
+}
+
+bool VideoTrackAdapter::VideoFrameResolutionAdapter::ConstraintsMatch(
+ int max_width,
+ int max_height,
+ double min_aspect_ratio,
+ double max_aspect_ratio) const {
+ DCHECK(io_thread_checker_.CalledOnValidThread());
+ return max_frame_size_.width() == max_width &&
+ max_frame_size_.height() == max_height &&
+ min_aspect_ratio_ == min_aspect_ratio &&
+ max_aspect_ratio_ == max_aspect_ratio;
+}
+
+bool VideoTrackAdapter::VideoFrameResolutionAdapter::IsEmpty() const {
+ DCHECK(io_thread_checker_.CalledOnValidThread());
+ return callbacks_.empty();
+}
+
+VideoTrackAdapter::VideoTrackAdapter(
+ const scoped_refptr<base::MessageLoopProxy>& io_message_loop)
+ : io_message_loop_(io_message_loop),
+ renderer_task_runner_(base::MessageLoopProxy::current()) {
+ DCHECK(io_message_loop_);
+}
+
+VideoTrackAdapter::~VideoTrackAdapter() {
+ DCHECK(adapters_.empty());
+}
+
+void VideoTrackAdapter::AddTrack(const MediaStreamVideoTrack* track,
+ VideoCaptureDeliverFrameCB frame_callback,
+ int max_width,
+ int max_height,
+ double min_aspect_ratio,
+ double max_aspect_ratio) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ io_message_loop_->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoTrackAdapter::AddTrackOnIO,
+ this, track, frame_callback, max_width, max_height,
+ min_aspect_ratio, max_aspect_ratio));
+}
+
+void VideoTrackAdapter::AddTrackOnIO(
+ const MediaStreamVideoTrack* track,
+ VideoCaptureDeliverFrameCB frame_callback,
+ int max_width,
+ int max_height,
+ double min_aspect_ratio,
+ double max_aspect_ratio) {
+ DCHECK(io_message_loop_->BelongsToCurrentThread());
+ scoped_refptr<VideoFrameResolutionAdapter> adapter;
+ for (FrameAdapters::const_iterator it = adapters_.begin();
+ it != adapters_.end(); ++it) {
+ if ((*it)->ConstraintsMatch(max_width, max_height, min_aspect_ratio,
+ max_aspect_ratio)) {
+ adapter = it->get();
+ break;
+ }
+ }
+ if (!adapter) {
+ adapter = new VideoFrameResolutionAdapter(renderer_task_runner_,
+ max_width,
+ max_height,
+ min_aspect_ratio,
+ max_aspect_ratio);
+ adapters_.push_back(adapter);
+ }
+
+ adapter->AddCallback(track, frame_callback);
+}
+
+void VideoTrackAdapter::RemoveTrack(const MediaStreamVideoTrack* track) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ io_message_loop_->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoTrackAdapter::RemoveTrackOnIO, this, track));
+}
+
+void VideoTrackAdapter::RemoveTrackOnIO(const MediaStreamVideoTrack* track) {
+ DCHECK(io_message_loop_->BelongsToCurrentThread());
+ for (FrameAdapters::iterator it = adapters_.begin();
+ it != adapters_.end(); ++it) {
+ (*it)->RemoveCallback(track);
+ if ((*it)->IsEmpty()) {
+ adapters_.erase(it);
+ break;
+ }
+ }
+}
+
+void VideoTrackAdapter::DeliverFrameOnIO(
+ const scoped_refptr<media::VideoFrame>& frame,
+ const media::VideoCaptureFormat& format) {
+ DCHECK(io_message_loop_->BelongsToCurrentThread());
+ TRACE_EVENT0("video", "VideoTrackAdapter::DeliverFrameOnIO");
+ for (FrameAdapters::iterator it = adapters_.begin();
+ it != adapters_.end(); ++it) {
+ (*it)->DeliverFrame(frame, format);
+ }
+}
+
+} // namespace content
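The crop rectangle used above comes from media::ComputeLetterboxRegion(): the largest centered rectangle inside the frame's visible_rect() whose aspect ratio matches the desired output size. The standalone sketch below (illustrative names, not the media/base implementation) shows the same computation with integer arithmetic.

#include <cstdio>

struct Rect { int x; int y; int width; int height; };

// Largest centered sub-rectangle of |visible| with the aspect ratio of
// desired_w x desired_h. Cross-multiplication avoids floating point.
Rect CenteredCropWithAspect(Rect visible, int desired_w, int desired_h) {
  long long lhs = static_cast<long long>(visible.width) * desired_h;
  long long rhs = static_cast<long long>(desired_w) * visible.height;
  int crop_w = visible.width;
  int crop_h = visible.height;
  if (lhs > rhs) {
    // Source is wider than the target aspect ratio: shrink the width.
    crop_w = static_cast<int>(rhs / desired_h);
  } else if (lhs < rhs) {
    // Source is taller than the target aspect ratio: shrink the height.
    crop_h = static_cast<int>(lhs / desired_w);
  }
  return {visible.x + (visible.width - crop_w) / 2,
          visible.y + (visible.height - crop_h) / 2,
          crop_w, crop_h};
}

int main() {
  // Cropping a 640x480 visible rect for a 640x360 (16:9) target yields a
  // 640x360 region centered vertically at y = 60; that region is then scaled
  // to the desired size.
  Rect r = CenteredCropWithAspect({0, 0, 640, 480}, 640, 360);
  std::printf("%d,%d %dx%d\n", r.x, r.y, r.width, r.height);  // 0,60 640x360
  return 0;
}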
diff --git a/content/renderer/media/video_track_adapter.h b/content/renderer/media/video_track_adapter.h
new file mode 100644
index 0000000..7ed65a1
--- /dev/null
+++ b/content/renderer/media/video_track_adapter.h
@@ -0,0 +1,88 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_RENDERER_MEDIA_VIDEO_TRACK_ADAPTER_H_
+#define CONTENT_RENDERER_MEDIA_VIDEO_TRACK_ADAPTER_H_
+
+#include <vector>
+
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "content/renderer/media/media_stream_video_track.h"
+#include "media/base/video_frame.h"
+
+namespace content {
+
+// VideoTrackAdapter is a helper class used by MediaStreamVideoSource used for
+// adapting the video resolution from a source implementation to the resolution
+// a track requires. Different tracks can have different resolution constraints.
+// The constraints can be set as max width and height as well as max and min
+// aspect ratio.
+// Video frames are delivered to a track using a VideoCaptureDeliverFrameCB on
+// the IO-thread.
+// Adaptations is done by wrapping the original media::VideoFrame in a new
+// media::VideoFrame with a new visible_rect and natural_size.
+class VideoTrackAdapter
+ : public base::RefCountedThreadSafe<VideoTrackAdapter> {
+ public:
+ explicit VideoTrackAdapter(
+ const scoped_refptr<base::MessageLoopProxy>& io_message_loop);
+
+ // Register |track| to receive video frames in |frame_callback| with
+ // a resolution within the boundaries of the arguments.
+ // Must be called on the main render thread. |frame_callback| is guaranteed to
+ // be released on the main render thread.
+ void AddTrack(const MediaStreamVideoTrack* track,
+ VideoCaptureDeliverFrameCB frame_callback,
+ int max_width, int max_height,
+ double min_aspect_ratio,
+ double max_aspect_ratio);
+ void RemoveTrack(const MediaStreamVideoTrack* track);
+
+ // Delivers |frame| to all tracks that have registered a callback.
+ // Must be called on the IO-thread.
+ void DeliverFrameOnIO(
+ const scoped_refptr<media::VideoFrame>& frame,
+ const media::VideoCaptureFormat& format);
+
+ const scoped_refptr<base::MessageLoopProxy>& io_message_loop() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ return io_message_loop_;
+ }
+
+ private:
+ virtual ~VideoTrackAdapter();
+ friend class base::RefCountedThreadSafe<VideoTrackAdapter>;
+
+ void AddTrackOnIO(
+ const MediaStreamVideoTrack* track,
+ VideoCaptureDeliverFrameCB frame_callback,
+ int max_width, int max_height,
+ double min_aspect_ratio,
+ double max_aspect_ratio);
+ void RemoveTrackOnIO(const MediaStreamVideoTrack* track);
+
+ // |thread_checker_| is bound to the main render thread.
+ base::ThreadChecker thread_checker_;
+
+ scoped_refptr<base::MessageLoopProxy> io_message_loop_;
+
+ // |renderer_task_runner_| is used to ensure that
+ // VideoCaptureDeliverFrameCB is released on the main render thread.
+ scoped_refptr<base::SingleThreadTaskRunner> renderer_task_runner_;
+
+ // VideoFrameResolutionAdapter is an inner class that is created on the main
+ // render thread but operates on the IO-thread. It does the resolution
+ // adaptation and delivers frames to all registered tracks on the IO-thread.
+ class VideoFrameResolutionAdapter;
+ typedef std::vector<scoped_refptr<VideoFrameResolutionAdapter> >
+ FrameAdapters;
+ FrameAdapters adapters_;
+
+ DISALLOW_COPY_AND_ASSIGN(VideoTrackAdapter);
+};
+
+} // namespace content
+
+#endif // CONTENT_RENDERER_MEDIA_VIDEO_TRACK_ADAPTER_H_
diff --git a/content/renderer/media/webrtc/media_stream_remote_video_source.h b/content/renderer/media/webrtc/media_stream_remote_video_source.h
index e7d65dd..76041ee 100644
--- a/content/renderer/media/webrtc/media_stream_remote_video_source.h
+++ b/content/renderer/media/webrtc/media_stream_remote_video_source.h
@@ -5,6 +5,7 @@
#ifndef CONTENT_RENDERER_MEDIA_WEBRTC_MEDIA_STREAM_REMOTE_VIDEO_SOURCE_H_
#define CONTENT_RENDERER_MEDIA_WEBRTC_MEDIA_STREAM_REMOTE_VIDEO_SOURCE_H_
+#include "base/threading/thread_checker.h"
#include "content/common/content_export.h"
#include "content/renderer/media/media_stream_video_source.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
diff --git a/content/renderer/media/webrtc/media_stream_remote_video_source_unittest.cc b/content/renderer/media/webrtc/media_stream_remote_video_source_unittest.cc
index b23addc..c6b50ec 100644
--- a/content/renderer/media/webrtc/media_stream_remote_video_source_unittest.cc
+++ b/content/renderer/media/webrtc/media_stream_remote_video_source_unittest.cc
@@ -92,8 +92,8 @@ class MediaStreamRemoteVideoSourceTest
++number_of_failed_constraints_applied_;
}
- scoped_ptr<ChildProcess> child_process_;
base::MessageLoopForUI message_loop_;
+ scoped_ptr<ChildProcess> child_process_;
scoped_ptr<MockPeerConnectionDependencyFactory> mock_factory_;
scoped_refptr<webrtc::VideoTrackInterface> webrtc_video_track_;
// |remote_source_| is owned by |webkit_source_|.
diff --git a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc
index eb56669..603edb3 100644
--- a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc
+++ b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc
@@ -8,7 +8,7 @@
#include "base/debug/trace_event.h"
#include "base/memory/aligned_memory.h"
#include "media/base/video_frame.h"
-#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
namespace content {
@@ -84,14 +84,22 @@ bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat(
void WebRtcVideoCapturerAdapter::OnFrameCaptured(
const scoped_refptr<media::VideoFrame>& frame) {
DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK(media::VideoFrame::I420 == frame->format() ||
- media::VideoFrame::YV12 == frame->format());
+ TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured");
+ if (!(media::VideoFrame::I420 == frame->format() ||
+ media::VideoFrame::YV12 == frame->format())) {
+ // Some types of sources support textures as output. Since connecting
+ // sources and sinks does not check the format, we simply ignore formats
+ // that we cannot handle.
+ NOTREACHED();
+ return;
+ }
+
if (first_frame_timestamp_ == media::kNoTimestamp())
first_frame_timestamp_ = frame->timestamp();
cricket::CapturedFrame captured_frame;
- captured_frame.width = frame->visible_rect().width();
- captured_frame.height = frame->visible_rect().height();
+ captured_frame.width = frame->natural_size().width();
+ captured_frame.height = frame->natural_size().height();
// cricket::CapturedFrame time is in nanoseconds.
captured_frame.elapsed_time =
(frame->timestamp() - first_frame_timestamp_).InMicroseconds() *
@@ -104,10 +112,10 @@ void WebRtcVideoCapturerAdapter::OnFrameCaptured(
// TODO(perkj):
// Libjingle expects contiguous layout of image planes as input.
// The only format where that is true in Chrome is I420 where the
- // coded_size == visible_rect().size().
+ // coded_size == natural_size().
if (frame->format() != media::VideoFrame::I420 ||
- frame->coded_size() != frame->visible_rect().size()) {
- // Cropping and or switching UV planes is needed.
+ frame->coded_size() != frame->natural_size()) {
+ // Cropping/scaling and/or switching UV planes is needed.
UpdateI420Buffer(frame);
captured_frame.data = buffer_;
captured_frame.data_size = buffer_size_;
@@ -128,26 +136,25 @@ void WebRtcVideoCapturerAdapter::OnFrameCaptured(
void WebRtcVideoCapturerAdapter::UpdateI420Buffer(
const scoped_refptr<media::VideoFrame>& src) {
DCHECK(thread_checker_.CalledOnValidThread());
- const int src_width = src->coded_size().width();
- const int src_height = src->coded_size().height();
- const int dst_width = src->visible_rect().width();
- const int dst_height = src->visible_rect().height();
- DCHECK(src_width >= dst_width && src_height >= dst_height);
+ const int dst_width = src->natural_size().width();
+ const int dst_height = src->natural_size().height();
+ DCHECK(src->visible_rect().width() >= dst_width &&
+ src->visible_rect().height() >= dst_height);
- const int horiz_crop = src->visible_rect().x();
- const int vert_crop = src->visible_rect().y();
+ const gfx::Rect& visible_rect = src->visible_rect();
const uint8* src_y = src->data(media::VideoFrame::kYPlane) +
- (src_width * vert_crop + horiz_crop);
- const int center = (src_width + 1) / 2;
+ visible_rect.y() * src->stride(media::VideoFrame::kYPlane) +
+ visible_rect.x();
const uint8* src_u = src->data(media::VideoFrame::kUPlane) +
- (center * vert_crop + horiz_crop) / 2;
+ visible_rect.y() / 2 * src->stride(media::VideoFrame::kUPlane) +
+ visible_rect.x() / 2;
const uint8* src_v = src->data(media::VideoFrame::kVPlane) +
- (center * vert_crop + horiz_crop) / 2;
+ visible_rect.y() / 2 * src->stride(media::VideoFrame::kVPlane) +
+ visible_rect.x() / 2;
const size_t dst_size =
- media::VideoFrame::AllocationSize(src->format(),
- src->visible_rect().size());
+ media::VideoFrame::AllocationSize(src->format(), src->natural_size());
if (dst_size != buffer_size_) {
base::AlignedFree(buffer_);
@@ -164,20 +171,23 @@ void WebRtcVideoCapturerAdapter::UpdateI420Buffer(
const int dst_halfheight = (dst_height + 1) / 2;
uint8* dst_v = dst_u + dst_halfwidth * dst_halfheight;
- libyuv::I420Copy(src_y,
- src->stride(media::VideoFrame::kYPlane),
- src_u,
- src->stride(media::VideoFrame::kUPlane),
- src_v,
- src->stride(media::VideoFrame::kVPlane),
- dst_y,
- dst_stride_y,
- dst_u,
- dst_halfwidth,
- dst_v,
- dst_halfwidth,
- dst_width,
- dst_height);
+ libyuv::I420Scale(src_y,
+ src->stride(media::VideoFrame::kYPlane),
+ src_u,
+ src->stride(media::VideoFrame::kUPlane),
+ src_v,
+ src->stride(media::VideoFrame::kVPlane),
+ visible_rect.width(),
+ visible_rect.height(),
+ dst_y,
+ dst_stride_y,
+ dst_u,
+ dst_halfwidth,
+ dst_v,
+ dst_halfwidth,
+ dst_width,
+ dst_height,
+ libyuv::kFilterBilinear);
}
} // namespace content
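
The change above replaces libyuv::I420Copy with libyuv::I420Scale so a frame can be cropped to its visible_rect and then scaled to its natural_size in one pass. A standalone sketch of that crop-then-scale pattern is shown below (buffer layout and the CropAndScaleI420 helper are assumptions for illustration, not code from this patch): the Y/U/V plane pointers are offset to the top-left of the crop rectangle, then I420Scale resizes the cropped region into the destination planes.

// Illustration only. Assumes an even crop origin so the subsampled chroma
// planes stay aligned with the luma plane.
#include <stdint.h>
#include "third_party/libyuv/include/libyuv/scale.h"

void CropAndScaleI420(const uint8_t* src_y, int src_stride_y,
                      const uint8_t* src_u, int src_stride_u,
                      const uint8_t* src_v, int src_stride_v,
                      int crop_x, int crop_y,
                      int crop_width, int crop_height,
                      uint8_t* dst_y, int dst_stride_y,
                      uint8_t* dst_u, int dst_stride_u,
                      uint8_t* dst_v, int dst_stride_v,
                      int dst_width, int dst_height) {
  // Step the plane pointers to the crop origin; chroma is subsampled 2x2.
  const uint8_t* crop_src_y = src_y + crop_y * src_stride_y + crop_x;
  const uint8_t* crop_src_u = src_u + crop_y / 2 * src_stride_u + crop_x / 2;
  const uint8_t* crop_src_v = src_v + crop_y / 2 * src_stride_v + crop_x / 2;
  // Scale the cropped region into the destination, as the adapter does.
  libyuv::I420Scale(crop_src_y, src_stride_y,
                    crop_src_u, src_stride_u,
                    crop_src_v, src_stride_v,
                    crop_width, crop_height,
                    dst_y, dst_stride_y,
                    dst_u, dst_stride_u,
                    dst_v, dst_stride_v,
                    dst_width, dst_height,
                    libyuv::kFilterBilinear);
}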
diff --git a/content/renderer/media/webrtc/webrtc_video_capturer_adapter_unittest.cc b/content/renderer/media/webrtc/webrtc_video_capturer_adapter_unittest.cc
index bd817ab..250124d 100644
--- a/content/renderer/media/webrtc/webrtc_video_capturer_adapter_unittest.cc
+++ b/content/renderer/media/webrtc/webrtc_video_capturer_adapter_unittest.cc
@@ -25,22 +25,23 @@ class WebRtcVideoCapturerAdapterTest
void TestSourceCropFrame(int capture_width,
int capture_height,
- int expected_width,
- int expected_height) {
- const int visible_width = std::min(capture_width, expected_width);
- const int horiz_crop = ((capture_width - visible_width) / 2);
- const int visible_height = std::min(capture_height, expected_height);
- const int vert_crop = ((expected_height - visible_height) / 2);
+ int cropped_width,
+ int cropped_height,
+ int natural_width,
+ int natural_height) {
+ const int horiz_crop = ((capture_width - cropped_width) / 2);
+ const int vert_crop = ((capture_height - cropped_height) / 2);
gfx::Size coded_size(capture_width, capture_height);
- gfx::Rect view_rect(horiz_crop, vert_crop, visible_width, visible_height);
+ gfx::Size natural_size(natural_width, natural_height);
+ gfx::Rect view_rect(horiz_crop, vert_crop, cropped_width, cropped_height);
scoped_refptr<media::VideoFrame> frame =
media::VideoFrame::CreateFrame(media::VideoFrame::I420,
- coded_size, view_rect, coded_size,
+ coded_size, view_rect, natural_size,
base::TimeDelta());
adapter_.OnFrameCaptured(frame);
- EXPECT_EQ(expected_width, output_frame_width_);
- EXPECT_EQ(expected_height, output_frame_height_);
+ EXPECT_EQ(natural_width, output_frame_width_);
+ EXPECT_EQ(natural_height, output_frame_height_);
}
protected:
void OnFrameCaptured(cricket::VideoCapturer* capturer,
@@ -56,11 +57,15 @@ class WebRtcVideoCapturerAdapterTest
};
TEST_F(WebRtcVideoCapturerAdapterTest, CropFrameTo640360) {
- TestSourceCropFrame(640, 480, 640, 360);
+ TestSourceCropFrame(640, 480, 640, 360, 640, 360);
}
-TEST_F(WebRtcVideoCapturerAdapterTest, CropFrameTo732489) {
- TestSourceCropFrame(1280, 720, 731, 489);
+TEST_F(WebRtcVideoCapturerAdapterTest, CropFrameTo320320) {
+ TestSourceCropFrame(640, 480, 480, 480, 320, 320);
+}
+
+TEST_F(WebRtcVideoCapturerAdapterTest, Scale720To640360) {
+ TestSourceCropFrame(1280, 720, 1280, 720, 640, 360);
}
} // namespace content
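
To make the new test parameters concrete, this is the frame geometry the CropFrameTo320320 case sets up, following the centered-crop arithmetic in TestSourceCropFrame (values recomputed here for illustration; this is not additional test code):

// CropFrameTo320320: capture 640x480, crop to 480x480, scale to 320x320.
//   horiz_crop = (640 - 480) / 2 = 80
//   vert_crop  = (480 - 480) / 2 = 0
gfx::Size coded_size(640, 480);
gfx::Rect visible_rect(80, 0, 480, 480);  // centered 480x480 crop
gfx::Size natural_size(320, 320);         // resolution reported downstream
scoped_refptr<media::VideoFrame> frame =
    media::VideoFrame::CreateFrame(media::VideoFrame::I420, coded_size,
                                   visible_rect, natural_size,
                                   base::TimeDelta());

OnFrameCaptured() is then expected to report 320x320, i.e. the natural_size rather than the visible_rect size, which is exactly what the switch from visible_rect() to natural_size() in webrtc_video_capturer_adapter.cc provides.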