summaryrefslogtreecommitdiffstats
path: root/content
diff options
context:
space:
mode:
Diffstat (limited to 'content')
-rw-r--r--content/browser/media/webrtc_browsertest.cc24
-rw-r--r--content/renderer/media/media_stream_center.cc19
-rw-r--r--content/renderer/media/media_stream_center.h8
-rw-r--r--content/renderer/media/media_stream_dependency_factory.cc146
-rw-r--r--content/renderer/media/media_stream_dependency_factory.h8
-rw-r--r--content/renderer/media/media_stream_dependency_factory_unittest.cc59
-rw-r--r--content/renderer/media/mock_media_stream_dependency_factory.cc16
-rw-r--r--content/test/data/media/getusermedia.html76
-rw-r--r--content/test/data/media/getusermedia_and_stop.html18
-rw-r--r--content/test/data/media/peerconnection-call.html98
-rw-r--r--content/test/data/media/webrtc_test_utilities.js80
11 files changed, 379 insertions, 173 deletions
diff --git a/content/browser/media/webrtc_browsertest.cc b/content/browser/media/webrtc_browsertest.cc
index fd6c169..fe52307 100644
--- a/content/browser/media/webrtc_browsertest.cc
+++ b/content/browser/media/webrtc_browsertest.cc
@@ -47,7 +47,7 @@ class WebrtcBrowserTest: public ContentBrowserTest {
// see that the success callback is called. If the error callback is called or
// none of the callbacks are called the tests will simply time out and fail.
IN_PROC_BROWSER_TEST_F(WebrtcBrowserTest, GetVideoStreamAndStop) {
- GURL url(test_server()->GetURL("files/media/getusermedia_and_stop.html"));
+ GURL url(test_server()->GetURL("files/media/getusermedia.html"));
NavigateToURL(shell(), url);
EXPECT_TRUE(ExecuteJavascript("getUserMedia({video: true});"));
@@ -56,7 +56,7 @@ IN_PROC_BROWSER_TEST_F(WebrtcBrowserTest, GetVideoStreamAndStop) {
}
IN_PROC_BROWSER_TEST_F(WebrtcBrowserTest, GetAudioAndVideoStreamAndStop) {
- GURL url(test_server()->GetURL("files/media/getusermedia_and_stop.html"));
+ GURL url(test_server()->GetURL("files/media/getusermedia.html"));
NavigateToURL(shell(), url);
EXPECT_TRUE(ExecuteJavascript("getUserMedia({video: true, audio: true});"));
@@ -64,6 +64,15 @@ IN_PROC_BROWSER_TEST_F(WebrtcBrowserTest, GetAudioAndVideoStreamAndStop) {
ExpectTitle("OK");
}
+IN_PROC_BROWSER_TEST_F(WebrtcBrowserTest, GetAudioAndVideoStreamAndClone) {
+ GURL url(test_server()->GetURL("files/media/getusermedia.html"));
+ NavigateToURL(shell(), url);
+
+ EXPECT_TRUE(ExecuteJavascript("getUserMediaAndClone();"));
+
+ ExpectTitle("OK");
+}
+
// These tests will make a complete PeerConnection-based call and verify that
// video is playing for the call.
IN_PROC_BROWSER_TEST_F(WebrtcBrowserTest, CanSetupVideoCall) {
@@ -147,5 +156,16 @@ IN_PROC_BROWSER_TEST_F(WebrtcBrowserTest,
ExpectTitle("OK");
}
+// This test will make a PeerConnection-based call and send a new Video
+// MediaStream that has been created based on a MediaStream created with
+// getUserMedia.
+IN_PROC_BROWSER_TEST_F(WebrtcBrowserTest, CallWithNewVideoMediaStream) {
+ GURL url(test_server()->GetURL("files/media/peerconnection-call.html"));
+ NavigateToURL(shell(), url);
+
+ EXPECT_TRUE(ExecuteJavascript("callWithNewVideoMediaStream();"));
+ ExpectTitle("OK");
+}
+
} // namespace content
diff --git a/content/renderer/media/media_stream_center.cc b/content/renderer/media/media_stream_center.cc
index 29f2363..14b0327 100644
--- a/content/renderer/media/media_stream_center.cc
+++ b/content/renderer/media/media_stream_center.cc
@@ -32,7 +32,6 @@ static webrtc::MediaStreamInterface* GetNativeMediaStream(
return extra_data->stream();
}
-
static webrtc::MediaStreamTrackInterface* GetNativeMediaStreamTrack(
const WebKit::WebMediaStream& stream,
const WebKit::WebMediaStreamTrack& component) {
@@ -99,4 +98,22 @@ void MediaStreamCenter::didCreateMediaStream(
rtc_factory_->CreateNativeLocalMediaStream(&stream);
}
+bool MediaStreamCenter::didAddMediaStreamTrack(
+ const WebKit::WebMediaStream& stream,
+ const WebKit::WebMediaStreamTrack& track) {
+ if (!rtc_factory_)
+ return false;
+
+ return rtc_factory_->AddNativeMediaStreamTrack(stream, track);
+}
+
+bool MediaStreamCenter::didRemoveMediaStreamTrack(
+ const WebKit::WebMediaStream& stream,
+ const WebKit::WebMediaStreamTrack& track) {
+ if (!rtc_factory_)
+ return false;
+
+ return rtc_factory_->RemoveNativeMediaStreamTrack(stream, track);
+}
+
} // namespace content
diff --git a/content/renderer/media/media_stream_center.h b/content/renderer/media/media_stream_center.h
index bd125b5..de5f2e1 100644
--- a/content/renderer/media/media_stream_center.h
+++ b/content/renderer/media/media_stream_center.h
@@ -42,6 +42,14 @@ class CONTENT_EXPORT MediaStreamCenter
virtual void didCreateMediaStream(
WebKit::WebMediaStream& stream) OVERRIDE;
+ virtual bool didAddMediaStreamTrack(
+ const WebKit::WebMediaStream& stream,
+ const WebKit::WebMediaStreamTrack& track) OVERRIDE;
+
+ virtual bool didRemoveMediaStreamTrack(
+ const WebKit::WebMediaStream& stream,
+ const WebKit::WebMediaStreamTrack& track) OVERRIDE;
+
private:
// |rtc_factory_| is a weak pointer and is owned by the RenderThreadImpl.
// It is valid as long as RenderThreadImpl exist.
diff --git a/content/renderer/media/media_stream_dependency_factory.cc b/content/renderer/media/media_stream_dependency_factory.cc
index 4e24b58..1a27722 100644
--- a/content/renderer/media/media_stream_dependency_factory.cc
+++ b/content/renderer/media/media_stream_dependency_factory.cc
@@ -66,7 +66,7 @@ class WebAudioConstraints : public RTCMediaConstraints {
}
}
- virtual ~WebAudioConstraints() {};
+ virtual ~WebAudioConstraints() {}
};
class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
@@ -309,92 +309,118 @@ void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
WebKit::WebMediaStream* description) {
DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()";
if (!EnsurePeerConnectionFactory()) {
- DVLOG(1) << "EnsurePeerConnectionFactory() failed!";
- return;
+ DVLOG(1) << "EnsurePeerConnectionFactory() failed!";
+ return;
}
std::string label = UTF16ToUTF8(description->label());
scoped_refptr<webrtc::MediaStreamInterface> native_stream =
CreateLocalMediaStream(label);
+ MediaStreamExtraData* extra_data = new MediaStreamExtraData(native_stream,
+ true);
+ description->setExtraData(extra_data);
// Add audio tracks.
WebKit::WebVector<WebKit::WebMediaStreamTrack> audio_tracks;
- description->audioSources(audio_tracks);
-
- bool start_stream = false;
+ description->audioTracks(audio_tracks);
for (size_t i = 0; i < audio_tracks.size(); ++i) {
- WebKit::WebMediaStreamSource source = audio_tracks[i].source();
+ AddNativeMediaStreamTrack(*description, audio_tracks[i]);
+ }
- // See if we're adding a WebAudio MediaStream.
+ // Add video tracks.
+ WebKit::WebVector<WebKit::WebMediaStreamTrack> video_tracks;
+ description->videoTracks(video_tracks);
+ for (size_t i = 0; i < video_tracks.size(); ++i) {
+ AddNativeMediaStreamTrack(*description, video_tracks[i]);
+ }
+}
+
+void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
+ WebKit::WebMediaStream* description,
+ const MediaStreamExtraData::StreamStopCallback& stream_stop) {
+ CreateNativeLocalMediaStream(description);
+
+ MediaStreamExtraData* extra_data =
+ static_cast<MediaStreamExtraData*>(description->extraData());
+ extra_data->SetLocalStreamStopCallback(stream_stop);
+}
+
+bool MediaStreamDependencyFactory::AddNativeMediaStreamTrack(
+ const WebKit::WebMediaStream& stream,
+ const WebKit::WebMediaStreamTrack& track) {
+ MediaStreamExtraData* extra_data =
+ static_cast<MediaStreamExtraData*>(stream.extraData());
+ webrtc::MediaStreamInterface* native_stream = extra_data->stream();
+ DCHECK(native_stream);
+
+ WebKit::WebMediaStreamSource source = track.source();
+ MediaStreamSourceExtraData* source_data =
+ static_cast<MediaStreamSourceExtraData*>(source.extraData());
+
+ if (!source_data) {
if (source.requiresAudioConsumer()) {
+ // We're adding a WebAudio MediaStream.
// TODO(crogers, xians): In reality we should be able to send a unique
- // audio stream to each PeerConnection separately. But currently WebRTC
- // is only able to handle a global audio stream sent to ALL peers.
-
- // Create a special source where default WebAudio constraints are used.
- if (!CreateWebAudioSource(&source)) {
+ // audio stream to each PeerConnection separately.
+ // Currently WebRTC is only able to handle a global audio stream sent to
+ // ALL peers. Create a special source where default WebAudio constraints
+ // are used.
+ if (CreateWebAudioSource(&source)) {
+ source_data =
+ static_cast<MediaStreamSourceExtraData*>(source.extraData());
+ } else {
LOG(ERROR) << "Failed to create WebAudio source";
- continue;
+ return false;
}
- }
-
- MediaStreamSourceExtraData* source_data =
- static_cast<MediaStreamSourceExtraData*>(source.extraData());
-
- if (!source_data) {
+ } else {
// TODO(perkj): Implement support for sources from
// remote MediaStreams.
NOTIMPLEMENTED();
- continue;
+ return false;
}
-
- scoped_refptr<webrtc::AudioTrackInterface> audio_track(
- CreateLocalAudioTrack(UTF16ToUTF8(audio_tracks[i].id()),
- source_data->local_audio_source()));
- native_stream->AddTrack(audio_track);
- audio_track->set_enabled(audio_tracks[i].isEnabled());
- start_stream = true;
}
- if (start_stream && GetWebRtcAudioDevice()) {
- WebRtcAudioCapturer* capturer = GetWebRtcAudioDevice()->capturer();
- capturer->Start();
- }
+ WebKit::WebMediaStreamSource::Type type = track.source().type();
+ DCHECK(type == WebKit::WebMediaStreamSource::TypeAudio ||
+ type == WebKit::WebMediaStreamSource::TypeVideo);
- // Add video tracks.
- WebKit::WebVector<WebKit::WebMediaStreamTrack> video_tracks;
- description->videoSources(video_tracks);
- for (size_t i = 0; i < video_tracks.size(); ++i) {
- const WebKit::WebMediaStreamSource& source = video_tracks[i].source();
- MediaStreamSourceExtraData* source_data =
- static_cast<MediaStreamSourceExtraData*>(source.extraData());
- if (!source_data || !source_data->video_source()) {
- // TODO(perkj): Implement support for sources from remote MediaStreams.
- NOTIMPLEMENTED();
- continue;
+ std::string track_id = UTF16ToUTF8(track.id());
+ if (source.type() == WebKit::WebMediaStreamSource::TypeAudio) {
+ // TODO(henrika,xians): Refactor how an audio track is created to harmonize
+ // with video tracks.
+ scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ CreateLocalAudioTrack(track_id, source_data->local_audio_source()));
+ audio_track->set_enabled(track.isEnabled());
+ if (GetWebRtcAudioDevice()) {
+ WebRtcAudioCapturer* capturer = GetWebRtcAudioDevice()->capturer();
+ if (!capturer->is_recording())
+ capturer->Start();
}
-
+ return native_stream->AddTrack(audio_track);
+ } else {
scoped_refptr<webrtc::VideoTrackInterface> video_track(
- CreateLocalVideoTrack(UTF16ToUTF8(video_tracks[i].id()),
- source_data->video_source()));
-
- native_stream->AddTrack(video_track);
- video_track->set_enabled(video_tracks[i].isEnabled());
+ CreateLocalVideoTrack(track_id, source_data->video_source()));
+ video_track->set_enabled(track.isEnabled());
+ return native_stream->AddTrack(video_track);
}
-
- MediaStreamExtraData* extra_data = new MediaStreamExtraData(native_stream,
- true);
- description->setExtraData(extra_data);
}
-void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
- WebKit::WebMediaStream* description,
- const MediaStreamExtraData::StreamStopCallback& stream_stop) {
- CreateNativeLocalMediaStream(description);
-
+bool MediaStreamDependencyFactory::RemoveNativeMediaStreamTrack(
+ const WebKit::WebMediaStream& stream,
+ const WebKit::WebMediaStreamTrack& track) {
MediaStreamExtraData* extra_data =
- static_cast<MediaStreamExtraData*>(description->extraData());
- extra_data->SetLocalStreamStopCallback(stream_stop);
+ static_cast<MediaStreamExtraData*>(stream.extraData());
+ webrtc::MediaStreamInterface* native_stream = extra_data->stream();
+ DCHECK(native_stream);
+
+ WebKit::WebMediaStreamSource::Type type = track.source().type();
+ DCHECK(type == WebKit::WebMediaStreamSource::TypeAudio ||
+ type == WebKit::WebMediaStreamSource::TypeVideo);
+
+ std::string track_id = UTF16ToUTF8(track.id());
+ return type == WebKit::WebMediaStreamSource::TypeAudio ?
+ native_stream->RemoveTrack(native_stream->FindAudioTrack(track_id)) :
+ native_stream->RemoveTrack(native_stream->FindVideoTrack(track_id));
}
bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() {
diff --git a/content/renderer/media/media_stream_dependency_factory.h b/content/renderer/media/media_stream_dependency_factory.h
index 27e4f4a..2bdd8ff 100644
--- a/content/renderer/media/media_stream_dependency_factory.h
+++ b/content/renderer/media/media_stream_dependency_factory.h
@@ -89,6 +89,14 @@ class CONTENT_EXPORT MediaStreamDependencyFactory
WebKit::WebMediaStream* description,
const MediaStreamExtraData::StreamStopCallback& stream_stop);
+ // Adds a libjingle representation of a MediaStreamTrack to |stream| based
+ // on the source of |track|.
+ bool AddNativeMediaStreamTrack(const WebKit::WebMediaStream& stream,
+ const WebKit::WebMediaStreamTrack& track);
+
+ bool RemoveNativeMediaStreamTrack(const WebKit::WebMediaStream& stream,
+ const WebKit::WebMediaStreamTrack& track);
+
// Asks the libjingle PeerConnection factory to create a libjingle
// PeerConnection object.
// The PeerConnection object is owned by PeerConnectionHandler.
diff --git a/content/renderer/media/media_stream_dependency_factory_unittest.cc b/content/renderer/media/media_stream_dependency_factory_unittest.cc
index a44f134..879c425 100644
--- a/content/renderer/media/media_stream_dependency_factory_unittest.cc
+++ b/content/renderer/media/media_stream_dependency_factory_unittest.cc
@@ -49,10 +49,9 @@ class MediaStreamDependencyFactoryTest : public ::testing::Test {
dependency_factory_.reset(new MockMediaStreamDependencyFactory());
}
- WebKit::WebMediaStream CreateWebKitMediaStream(bool audio,
- bool video) {
+ WebKit::WebMediaStream CreateWebKitMediaStream(bool audio, bool video) {
WebKit::WebVector<WebKit::WebMediaStreamSource> audio_sources(
- audio ? static_cast<size_t>(1) : 0);
+ audio ? static_cast<size_t>(1) : 0);
WebKit::WebVector<WebKit::WebMediaStreamSource> video_sources(
video ? static_cast<size_t>(1) : 0);
@@ -108,6 +107,17 @@ class MediaStreamDependencyFactoryTest : public ::testing::Test {
EXPECT_TRUE(observer.description() == descriptor);
}
+ void VerifyMediaStream(const WebKit::WebMediaStream& stream_desc,
+ size_t num_audio_tracks,
+ size_t num_video_tracks) {
+ content::MediaStreamExtraData* extra_data =
+ static_cast<content::MediaStreamExtraData*>(stream_desc.extraData());
+ ASSERT_TRUE(extra_data && extra_data->stream());
+ EXPECT_TRUE(extra_data->is_local());
+ EXPECT_EQ(num_audio_tracks, extra_data->stream()->GetAudioTracks().size());
+ EXPECT_EQ(num_video_tracks, extra_data->stream()->GetVideoTracks().size());
+ }
+
protected:
scoped_ptr<MockMediaStreamDependencyFactory> dependency_factory_;
};
@@ -124,13 +134,7 @@ TEST_F(MediaStreamDependencyFactoryTest, CreateNativeMediaStream) {
CreateNativeSources(&stream_desc);
dependency_factory_->CreateNativeLocalMediaStream(&stream_desc);
-
- content::MediaStreamExtraData* extra_data =
- static_cast<content::MediaStreamExtraData*>(stream_desc.extraData());
- ASSERT_TRUE(extra_data && extra_data->stream());
- EXPECT_TRUE(extra_data->is_local());
- EXPECT_EQ(1u, extra_data->stream()->GetAudioTracks().size());
- EXPECT_EQ(1u, extra_data->stream()->GetVideoTracks().size());
+ VerifyMediaStream(stream_desc, 1, 1);
}
// Test that we don't crash if a MediaStream is created in WebKit with unknown
@@ -153,12 +157,35 @@ TEST_F(MediaStreamDependencyFactoryTest, CreateNativeMediaStreamWithoutSource) {
EXPECT_TRUE(dependency_factory_->EnsurePeerConnectionFactory());
dependency_factory_->CreateNativeLocalMediaStream(&stream_desc);
- MediaStreamExtraData* extra_data = static_cast<MediaStreamExtraData*>(
- stream_desc.extraData());
- ASSERT_TRUE(extra_data && extra_data->stream());
- EXPECT_TRUE(extra_data->is_local());
- EXPECT_EQ(0u, extra_data->stream()->GetVideoTracks().size());
- EXPECT_EQ(0u, extra_data->stream()->GetAudioTracks().size());
+ VerifyMediaStream(stream_desc, 0, 0);
+}
+
+TEST_F(MediaStreamDependencyFactoryTest, AddAndRemoveNativeTrack) {
+ WebKit::WebMediaStream stream_desc = CreateWebKitMediaStream(true, true);
+ CreateNativeSources(&stream_desc);
+
+ dependency_factory_->CreateNativeLocalMediaStream(&stream_desc);
+ VerifyMediaStream(stream_desc, 1, 1);
+
+ WebKit::WebVector<WebKit::WebMediaStreamTrack> audio_tracks;
+ stream_desc.audioTracks(audio_tracks);
+ EXPECT_TRUE(dependency_factory_->RemoveNativeMediaStreamTrack(
+ stream_desc, audio_tracks[0]));
+ VerifyMediaStream(stream_desc, 0, 1);
+
+ EXPECT_TRUE(dependency_factory_->AddNativeMediaStreamTrack(
+ stream_desc, audio_tracks[0]));
+ VerifyMediaStream(stream_desc, 1, 1);
+
+ WebKit::WebVector<WebKit::WebMediaStreamTrack> video_tracks;
+ stream_desc.videoTracks(video_tracks);
+ EXPECT_TRUE(dependency_factory_->RemoveNativeMediaStreamTrack(
+ stream_desc, video_tracks[0]));
+ VerifyMediaStream(stream_desc, 1, 0);
+
+ EXPECT_TRUE(dependency_factory_->AddNativeMediaStreamTrack(
+ stream_desc, video_tracks[0]));
+ VerifyMediaStream(stream_desc, 1, 1);
}
} // namespace content
diff --git a/content/renderer/media/mock_media_stream_dependency_factory.cc b/content/renderer/media/mock_media_stream_dependency_factory.cc
index f2656c0..3920a71 100644
--- a/content/renderer/media/mock_media_stream_dependency_factory.cc
+++ b/content/renderer/media/mock_media_stream_dependency_factory.cc
@@ -50,12 +50,20 @@ class MockMediaStream : public webrtc::MediaStreamInterface {
return true;
}
virtual bool RemoveTrack(AudioTrackInterface* track) OVERRIDE {
- NOTIMPLEMENTED();
- return false;
+ AudioTrackVector::iterator it = FindTrack(&audio_track_vector_,
+ track->id());
+ if (it == audio_track_vector_.end())
+ return false;
+ audio_track_vector_.erase(it);
+ return true;
}
virtual bool RemoveTrack(VideoTrackInterface* track) OVERRIDE {
- NOTIMPLEMENTED();
- return false;
+ VideoTrackVector::iterator it = FindTrack(&video_track_vector_,
+ track->id());
+ if (it == video_track_vector_.end())
+ return false;
+ video_track_vector_.erase(it);
+ return true;
}
virtual std::string label() const OVERRIDE { return label_; }
virtual AudioTrackVector GetAudioTracks() OVERRIDE {
diff --git a/content/test/data/media/getusermedia.html b/content/test/data/media/getusermedia.html
new file mode 100644
index 0000000..429dd66
--- /dev/null
+++ b/content/test/data/media/getusermedia.html
@@ -0,0 +1,76 @@
+<html>
+<head>
+ <script type="text/javascript" src="webrtc_test_utilities.js"></script>
+ <script type="text/javascript">
+ $ = function(id) {
+ return document.getElementById(id);
+ };
+
+ var gLocalStream = null;
+
+ setAllEventsOccuredHandler(function() {
+ gLocalStream.stop();
+ document.title = 'OK';
+ });
+
+  // This tests that a MediaStream can be created and a local preview
+ // rendered.
+ function getUserMedia(constraints) {
+ navigator.webkitGetUserMedia(constraints, displayAndWaitForVideo,
+ failedCallback);
+ }
+
+  // This tests that a MediaStream can be cloned and that the clone can
+ // be rendered.
+ function getUserMediaAndClone() {
+ navigator.webkitGetUserMedia({video: true, audio: true},
+ createAndRenderClone, failedCallback);
+ }
+
+ function failedCallback(error) {
+ document.title = 'GetUserMedia call failed with code ' + error.code;
+ }
+
+ function displayAndWaitForVideo(stream) {
+ gLocalStream = stream;
+ var localStreamUrl = webkitURL.createObjectURL(stream);
+ $('local-view').src = localStreamUrl;
+ waitForVideo('local-view');
+ }
+
+ function createAndRenderClone(stream) {
+ gLocalStream = stream;
+    // TODO(perkj): --use-fake-device-for-media-stream does not currently
+    // work with audio devices, and not all bots have a microphone.
+ new_stream = new webkitMediaStream();
+ new_stream.addTrack(stream.getVideoTracks()[0]);
+ expectEquals(new_stream.getVideoTracks().length, 1);
+ if (stream.getAudioTracks().length > 0) {
+ new_stream.addTrack(stream.getAudioTracks()[0]);
+ expectEquals(new_stream.getAudioTracks().length, 1);
+ new_stream.removeTrack(new_stream.getAudioTracks()[0]);
+ expectEquals(new_stream.getAudioTracks().length, 0);
+ }
+
+ var newStreamUrl = webkitURL.createObjectURL(new_stream);
+ $('local-view').src = newStreamUrl;
+ waitForVideo('local-view');
+ }
+
+ </script>
+</head>
+<body>
+ <table border="0">
+ <tr>
+ <td>Local Preview</td>
+ </tr>
+ <tr>
+ <td><video width="320" height="240" id="local-view"
+ autoplay="autoplay"></video></td>
+ <!-- Canvases are named after their corresponding video elements. -->
+ <td><canvas width="320" height="240" id="local-view-canvas"
+ style="display:none"></canvas></td>
+ </tr>
+ </table>
+</body>
+</html> \ No newline at end of file
diff --git a/content/test/data/media/getusermedia_and_stop.html b/content/test/data/media/getusermedia_and_stop.html
deleted file mode 100644
index f1c43be..0000000
--- a/content/test/data/media/getusermedia_and_stop.html
+++ /dev/null
@@ -1,18 +0,0 @@
-<html>
-<head>
- <script type="text/javascript">
- function getUserMedia(constraints) {
- navigator.webkitGetUserMedia(constraints, okCallback, failedCallback);
- }
-
- function failedCallback(error) {
- document.title = 'GetUserMedia call failed with code ' + error.code;
- }
-
- function okCallback(stream) {
- stream.stop();
- document.title = 'OK';
- }
- </script>
-</head>
-</html> \ No newline at end of file
diff --git a/content/test/data/media/peerconnection-call.html b/content/test/data/media/peerconnection-call.html
index 7581b72..39b01b5 100644
--- a/content/test/data/media/peerconnection-call.html
+++ b/content/test/data/media/peerconnection-call.html
@@ -1,28 +1,22 @@
<html>
<head>
+ <script type="text/javascript" src="webrtc_test_utilities.js"></script>
<script type="text/javascript">
$ = function(id) {
return document.getElementById(id);
};
- // These must match with how the video and canvas tags are declared in html.
- const VIDEO_TAG_WIDTH = 320;
- const VIDEO_TAG_HEIGHT = 240;
-
var gFirstConnection = null;
var gSecondConnection = null;
var gTestWithoutMsidAndBundle = false;
- // Number of test events to occur before the test pass. When the test pass,
- // the document title change to OK.
- var gNumberOfExpectedEvents = 0;
-
- // Number of events that currently have occured.
- var gNumberOfEvents = 0;
-
var gLocalStream = null;
var gSentTones = '';
+ setAllEventsOccuredHandler(function() {
+ document.title = 'OK';
+ });
+
// Test that we can setup call with an audio and video track.
function call(constraints) {
createConnections(null);
@@ -109,6 +103,16 @@
// Do the DTMF test after we have received video.
detectVideoIn('remote-view-2', onCallEstablished);
}
+
+ // Test call with a new Video MediaStream that has been created based on a
+ // stream generated by getUserMedia.
+ function callWithNewVideoMediaStream() {
+ createConnections(null);
+ navigator.webkitGetUserMedia({audio:true, video:true},
+ createNewVideoStreamAndAddToBothConnections, printGetUserMediaError);
+ waitForVideo('remote-view-1');
+ waitForVideo('remote-view-2');
+ }
// This function is used for setting up a test that:
// 1. Creates a data channel on |gFirstConnection| and sends data to
@@ -155,7 +159,7 @@
secondDataChannel.send(sendDataString);
}
}
- }
+ }
function onToneChange(tone) {
gSentTones += tone.tone;
@@ -203,6 +207,15 @@
gFirstConnection.addStream(localStream);
negotiate();
}
+
+ // Called if getUserMedia succeeds when we want to send a modified
+ // MediaStream. A new MediaStream is created and the video track from
+ // |localStream| is added.
+ function createNewVideoStreamAndAddToBothConnections(localStream) {
+ var new_stream = new webkitMediaStream();
+ new_stream.addTrack(localStream.getVideoTracks()[0]);
+ addStreamToBothConnectionsAndNegotiate(new_stream);
+ }
function negotiate() {
gFirstConnection.createOffer(onOfferCreated);
@@ -275,66 +288,7 @@
var remoteVideo = $(target);
remoteVideo.src = remoteStreamUrl;
}
-
- // TODO(phoglund): perhaps use the video detector in chrome/test/data/webrtc/?
- function detectVideoIn(videoElementName, callback) {
- var width = VIDEO_TAG_WIDTH;
- var height = VIDEO_TAG_HEIGHT;
- var videoElement = $(videoElementName);
- var canvas = $(videoElementName + '-canvas');
- var waitVideo = setInterval(function() {
- var context = canvas.getContext('2d');
- context.drawImage(videoElement, 0, 0, width, height);
- var pixels = context.getImageData(0, 0, width, height).data;
-
- if (isVideoPlaying(pixels, width, height)) {
- clearInterval(waitVideo);
- callback();
- }
- }, 100);
- }
-
- function waitForVideo(videoElement) {
- document.title = 'Waiting for video...';
- addExpectedEvent();
- detectVideoIn(videoElement, function () { eventOccured(); });
- }
-
- // This very basic video verification algorithm will be satisfied if any
- // pixels are nonzero in a small sample area in the middle. It relies on the
- // assumption that a video element with null source just presents zeroes.
- function isVideoPlaying(pixels, width, height) {
- // Sample somewhere near the middle of the image.
- var middle = width * height / 2;
- for (var i = 0; i < 20; i++) {
- if (pixels[middle + i] > 0) {
- return true;
- }
- }
- return false;
- }
-
-
- // This function matches |left| and |right| and throws an exception if the
- // values don't match.
- function expectEquals(left, right) {
- if (left != right) {
- var s = "expectEquals failed left: " + left + " right: " + right;
- document.title = s;
- throw s;
- }
- }
-
- function addExpectedEvent() {
- ++gNumberOfExpectedEvents;
- }
-
- function eventOccured() {
- ++gNumberOfEvents;
- if (gNumberOfEvents == gNumberOfExpectedEvents) {
- document.title = 'OK';
- }
- }
+
</script>
</head>
<body>
diff --git a/content/test/data/media/webrtc_test_utilities.js b/content/test/data/media/webrtc_test_utilities.js
new file mode 100644
index 0000000..dae549f
--- /dev/null
+++ b/content/test/data/media/webrtc_test_utilities.js
@@ -0,0 +1,80 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// These must match with how the video and canvas tags are declared in html.
+const VIDEO_TAG_WIDTH = 320;
+const VIDEO_TAG_HEIGHT = 240;
+
+// Number of test events to occur before the test passes. When the test passes,
+// the function gAllEventsOccured is called.
+var gNumberOfExpectedEvents = 0;
+
+// Number of events that currently have occurred.
+var gNumberOfEvents = 0;
+
+var gAllEventsOccured = function () {};
+
+// Use this function to set a function that will be called once all expected
+// events have occurred.
+function setAllEventsOccuredHandler(handler) {
+ gAllEventsOccured = handler;
+}
+
+function detectVideoIn(videoElementName, callback) {
+ var width = VIDEO_TAG_WIDTH;
+ var height = VIDEO_TAG_HEIGHT;
+ var videoElement = $(videoElementName);
+ var canvas = $(videoElementName + '-canvas');
+ var waitVideo = setInterval(function() {
+ var context = canvas.getContext('2d');
+ context.drawImage(videoElement, 0, 0, width, height);
+ var pixels = context.getImageData(0, 0, width, height).data;
+
+ if (isVideoPlaying(pixels, width, height)) {
+ clearInterval(waitVideo);
+ callback();
+ }
+ }, 100);
+}
+
+function waitForVideo(videoElement) {
+ document.title = 'Waiting for video...';
+ addExpectedEvent();
+ detectVideoIn(videoElement, function () { eventOccured(); });
+}
+
+function addExpectedEvent() {
+ ++gNumberOfExpectedEvents;
+}
+
+function eventOccured() {
+ ++gNumberOfEvents;
+ if (gNumberOfEvents == gNumberOfExpectedEvents) {
+ gAllEventsOccured();
+ }
+}
+
+// This very basic video verification algorithm will be satisfied if any
+// pixels are nonzero in a small sample area in the middle. It relies on the
+// assumption that a video element with null source just presents zeroes.
+function isVideoPlaying(pixels, width, height) {
+ // Sample somewhere near the middle of the image.
+ var middle = width * height / 2;
+ for (var i = 0; i < 20; i++) {
+ if (pixels[middle + i] > 0) {
+ return true;
+ }
+ }
+ return false;
+}
+
+// This function matches |left| and |right| and throws an exception if the
+// values don't match.
+function expectEquals(left, right) {
+ if (left != right) {
+ var s = "expectEquals failed left: " + left + " right: " + right;
+ document.title = s;
+ throw s;
+ }
+} \ No newline at end of file