author:    perkj@chromium.org <perkj@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-04-12 12:30:17 +0000
committer: perkj@chromium.org <perkj@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-04-12 12:30:17 +0000
commit:    6db74a79053d941ca5803fc6d363860aeda874e7 (patch)
tree:      e2e886f0036793568decd54ac3f40eb107f243a9 /content/test
parent:    fa893ac0228308da04ea8452cf3af5a650221f0d (diff)
Hook up the MediaStream glue for adding and removing tracks to an existing MediaStream.
https://code.google.com/p/webrtc/issues/detail?id=382
This CL refactors the way Chrome creates the native representation of MediaStreams by splitting the creation of MediaStreams and tracks into two functions, one for MediaStreams and one for tracks (AddNativeLocalMediaTrack).
AddNativeLocalMediaTrack is hooked up to MediaStreamCenter::didAddMediaStreamTrack to allow adding tracks to existing MediaStreams.
Two content_browsertests are added for testing adding and removing tracks from MediaStreams.
Review URL: https://chromiumcodereview.appspot.com/13496009
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@193908 0039d316-1c4b-4281-b951-d872f2087c98
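For reference, the path this CL wires up is driven from JavaScript roughly as the new tests do. A minimal sketch (not part of the CL; the element id, constraints, and prefixed API names mirror the test pages and are illustrative only):

```javascript
// Build a second MediaStream from an existing one. addTrack() on the new
// stream is what now reaches the native glue via
// MediaStreamCenter::didAddMediaStreamTrack -> AddNativeLocalMediaTrack;
// removeTrack() exercises the corresponding remove path.
navigator.webkitGetUserMedia({audio: true, video: true}, function(stream) {
  var newStream = new webkitMediaStream();
  newStream.addTrack(stream.getVideoTracks()[0]);          // add a video track
  if (stream.getAudioTracks().length > 0) {
    newStream.addTrack(stream.getAudioTracks()[0]);        // add an audio track
    newStream.removeTrack(newStream.getAudioTracks()[0]);  // and remove it again
  }
  // Render the modified stream in a <video id="preview"> element (assumed).
  document.getElementById('preview').src = webkitURL.createObjectURL(newStream);
}, function(error) {
  document.title = 'getUserMedia failed: ' + error.code;
});
```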
Diffstat (limited to 'content/test')
-rw-r--r--  content/test/data/media/getusermedia.html          | 76
-rw-r--r--  content/test/data/media/getusermedia_and_stop.html | 18
-rw-r--r--  content/test/data/media/peerconnection-call.html   | 98
-rw-r--r--  content/test/data/media/webrtc_test_utilities.js   | 80
4 files changed, 182 insertions, 90 deletions
diff --git a/content/test/data/media/getusermedia.html b/content/test/data/media/getusermedia.html
new file mode 100644
index 0000000..429dd66
--- /dev/null
+++ b/content/test/data/media/getusermedia.html
@@ -0,0 +1,76 @@
+<html>
+<head>
+  <script type="text/javascript" src="webrtc_test_utilities.js"></script>
+  <script type="text/javascript">
+  $ = function(id) {
+    return document.getElementById(id);
+  };
+
+  var gLocalStream = null;
+
+  setAllEventsOccuredHandler(function() {
+    gLocalStream.stop();
+    document.title = 'OK';
+  });
+
+  // This test that a MediaStream can be created and a local preview
+  // rendered.
+  function getUserMedia(constraints) {
+    navigator.webkitGetUserMedia(constraints, displayAndWaitForVideo,
+                                 failedCallback);
+  }
+
+  // This test that a MediaStream can be cloned and that the clone can
+  // be rendered.
+  function getUserMediaAndClone() {
+    navigator.webkitGetUserMedia({video: true, audio: true},
+        createAndRenderClone, failedCallback);
+  }
+
+  function failedCallback(error) {
+    document.title = 'GetUserMedia call failed with code ' + error.code;
+  }
+
+  function displayAndWaitForVideo(stream) {
+    gLocalStream = stream;
+    var localStreamUrl = webkitURL.createObjectURL(stream);
+    $('local-view').src = localStreamUrl;
+    waitForVideo('local-view');
+  }
+
+  function createAndRenderClone(stream) {
+    gLocalStream = stream;
+    // TODO(perkj): --use-fake-device-for-media-stream do not currently
+    // work with audio devices and not all bots has a microphone.
+    new_stream = new webkitMediaStream();
+    new_stream.addTrack(stream.getVideoTracks()[0]);
+    expectEquals(new_stream.getVideoTracks().length, 1);
+    if (stream.getAudioTracks().length > 0) {
+      new_stream.addTrack(stream.getAudioTracks()[0]);
+      expectEquals(new_stream.getAudioTracks().length, 1);
+      new_stream.removeTrack(new_stream.getAudioTracks()[0]);
+      expectEquals(new_stream.getAudioTracks().length, 0);
+    }
+
+    var newStreamUrl = webkitURL.createObjectURL(new_stream);
+    $('local-view').src = newStreamUrl;
+    waitForVideo('local-view');
+  }
+
+  </script>
+</head>
+<body>
+  <table border="0">
+    <tr>
+      <td>Local Preview</td>
+    </tr>
+    <tr>
+      <td><video width="320" height="240" id="local-view"
+          autoplay="autoplay"></video></td>
+      <!-- Canvases are named after their corresponding video elements. -->
+      <td><canvas width="320" height="240" id="local-view-canvas"
+          style="display:none"></canvas></td>
+    </tr>
+  </table>
+</body>
+</html>
\ No newline at end of file
diff --git a/content/test/data/media/getusermedia_and_stop.html b/content/test/data/media/getusermedia_and_stop.html
deleted file mode 100644
index f1c43be..0000000
--- a/content/test/data/media/getusermedia_and_stop.html
+++ /dev/null
@@ -1,18 +0,0 @@
-<html>
-<head>
-  <script type="text/javascript">
-  function getUserMedia(constraints) {
-    navigator.webkitGetUserMedia(constraints, okCallback, failedCallback);
-  }
-
-  function failedCallback(error) {
-    document.title = 'GetUserMedia call failed with code ' + error.code;
-  }
-
-  function okCallback(stream) {
-    stream.stop();
-    document.title = 'OK';
-  }
-  </script>
-</head>
-</html>
\ No newline at end of file
diff --git a/content/test/data/media/peerconnection-call.html b/content/test/data/media/peerconnection-call.html
index 7581b72..39b01b5 100644
--- a/content/test/data/media/peerconnection-call.html
+++ b/content/test/data/media/peerconnection-call.html
@@ -1,28 +1,22 @@
 <html>
 <head>
+  <script type="text/javascript" src="webrtc_test_utilities.js"></script>
   <script type="text/javascript">
   $ = function(id) {
     return document.getElementById(id);
   };
 
-  // These must match with how the video and canvas tags are declared in html.
-  const VIDEO_TAG_WIDTH = 320;
-  const VIDEO_TAG_HEIGHT = 240;
-
   var gFirstConnection = null;
   var gSecondConnection = null;
   var gTestWithoutMsidAndBundle = false;
 
-  // Number of test events to occur before the test pass. When the test pass,
-  // the document title change to OK.
-  var gNumberOfExpectedEvents = 0;
-
-  // Number of events that currently have occured.
-  var gNumberOfEvents = 0;
-
   var gLocalStream = null;
   var gSentTones = '';
 
+  setAllEventsOccuredHandler(function() {
+    document.title = 'OK';
+  });
+
   // Test that we can setup call with an audio and video track.
   function call(constraints) {
     createConnections(null);
@@ -109,6 +103,16 @@
     // Do the DTMF test after we have received video.
     detectVideoIn('remote-view-2', onCallEstablished);
   }
+
+  // Test call with a new Video MediaStream that has been created based on a
+  // stream generated by getUserMedia.
+  function callWithNewVideoMediaStream() {
+    createConnections(null);
+    navigator.webkitGetUserMedia({audio:true, video:true},
+        createNewVideoStreamAndAddToBothConnections, printGetUserMediaError);
+    waitForVideo('remote-view-1');
+    waitForVideo('remote-view-2');
+  }
 
   // This function is used for setting up a test that:
   // 1. Creates a data channel on |gFirstConnection| and sends data to
@@ -155,7 +159,7 @@
         secondDataChannel.send(sendDataString);
      }
    }
-  } 
+  }
 
   function onToneChange(tone) {
     gSentTones += tone.tone;
@@ -203,6 +207,15 @@
     gFirstConnection.addStream(localStream);
     negotiate();
   }
+
+  // Called if getUserMedia succeeds when we want to send a modified
+  // MediaStream. A new MediaStream is created and the video track from
+  // |localStream| is added.
+  function createNewVideoStreamAndAddToBothConnections(localStream) {
+    var new_stream = new webkitMediaStream();
+    new_stream.addTrack(localStream.getVideoTracks()[0]);
+    addStreamToBothConnectionsAndNegotiate(new_stream);
+  }
 
   function negotiate() {
     gFirstConnection.createOffer(onOfferCreated);
@@ -275,66 +288,7 @@
     var remoteVideo = $(target);
     remoteVideo.src = remoteStreamUrl;
   }
-
-  // TODO(phoglund): perhaps use the video detector in chrome/test/data/webrtc/?
-  function detectVideoIn(videoElementName, callback) {
-    var width = VIDEO_TAG_WIDTH;
-    var height = VIDEO_TAG_HEIGHT;
-    var videoElement = $(videoElementName);
-    var canvas = $(videoElementName + '-canvas');
-    var waitVideo = setInterval(function() {
-      var context = canvas.getContext('2d');
-      context.drawImage(videoElement, 0, 0, width, height);
-      var pixels = context.getImageData(0, 0, width, height).data;
-
-      if (isVideoPlaying(pixels, width, height)) {
-        clearInterval(waitVideo);
-        callback();
-      }
-    }, 100);
-  }
-
-  function waitForVideo(videoElement) {
-    document.title = 'Waiting for video...';
-    addExpectedEvent();
-    detectVideoIn(videoElement, function () { eventOccured(); });
-  }
-
-  // This very basic video verification algorithm will be satisfied if any
-  // pixels are nonzero in a small sample area in the middle. It relies on the
-  // assumption that a video element with null source just presents zeroes.
-  function isVideoPlaying(pixels, width, height) {
-    // Sample somewhere near the middle of the image.
-    var middle = width * height / 2;
-    for (var i = 0; i < 20; i++) {
-      if (pixels[middle + i] > 0) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-
-  // This function matches |left| and |right| and throws an exception if the
-  // values don't match.
-  function expectEquals(left, right) {
-    if (left != right) {
-      var s = "expectEquals failed left: " + left + " right: " + right;
-      document.title = s;
-      throw s;
-    }
-  }
-
-  function addExpectedEvent() {
-    ++gNumberOfExpectedEvents;
-  }
-
-  function eventOccured() {
-    ++gNumberOfEvents;
-    if (gNumberOfEvents == gNumberOfExpectedEvents) {
-      document.title = 'OK';
-    }
-  }
+
   </script>
 </head>
 <body>
diff --git a/content/test/data/media/webrtc_test_utilities.js b/content/test/data/media/webrtc_test_utilities.js
new file mode 100644
index 0000000..dae549f
--- /dev/null
+++ b/content/test/data/media/webrtc_test_utilities.js
@@ -0,0 +1,80 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// These must match with how the video and canvas tags are declared in html.
+const VIDEO_TAG_WIDTH = 320;
+const VIDEO_TAG_HEIGHT = 240;
+
+// Number of test events to occur before the test pass. When the test pass,
+// the function gAllEventsOccured is called.
+var gNumberOfExpectedEvents = 0;
+
+// Number of events that currently have occurred.
+var gNumberOfEvents = 0;
+
+var gAllEventsOccured = function () {};
+
+// Use this function to set a function that will be called once all expected
+// events has occurred.
+function setAllEventsOccuredHandler(handler) {
+  gAllEventsOccured = handler;
+}
+
+function detectVideoIn(videoElementName, callback) {
+  var width = VIDEO_TAG_WIDTH;
+  var height = VIDEO_TAG_HEIGHT;
+  var videoElement = $(videoElementName);
+  var canvas = $(videoElementName + '-canvas');
+  var waitVideo = setInterval(function() {
+    var context = canvas.getContext('2d');
+    context.drawImage(videoElement, 0, 0, width, height);
+    var pixels = context.getImageData(0, 0, width, height).data;
+
+    if (isVideoPlaying(pixels, width, height)) {
+      clearInterval(waitVideo);
+      callback();
+    }
+  }, 100);
+}
+
+function waitForVideo(videoElement) {
+  document.title = 'Waiting for video...';
+  addExpectedEvent();
+  detectVideoIn(videoElement, function () { eventOccured(); });
+}
+
+function addExpectedEvent() {
+  ++gNumberOfExpectedEvents;
+}
+
+function eventOccured() {
+  ++gNumberOfEvents;
+  if (gNumberOfEvents == gNumberOfExpectedEvents) {
+    gAllEventsOccured();
+  }
+}
+
+// This very basic video verification algorithm will be satisfied if any
+// pixels are nonzero in a small sample area in the middle. It relies on the
+// assumption that a video element with null source just presents zeroes.
+function isVideoPlaying(pixels, width, height) {
+  // Sample somewhere near the middle of the image.
+  var middle = width * height / 2;
+  for (var i = 0; i < 20; i++) {
+    if (pixels[middle + i] > 0) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// This function matches |left| and |right| and throws an exception if the
+// values don't match.
+function expectEquals(left, right) {
+  if (left != right) {
+    var s = "expectEquals failed left: " + left + " right: " + right;
+    document.title = s;
+    throw s;
+  }
+}
\ No newline at end of file
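As a usage note (not part of the diff): a page built on webrtc_test_utilities.js is expected to define a $() lookup helper and a matching <video>/<canvas> element pair, then drive the helpers roughly as sketched below. The element ids and constraints are illustrative.

```javascript
// Hypothetical test page script: each waitForVideo() call registers one
// expected event; once every registered event has fired, the handler
// installed below runs and flips document.title to 'OK', which the
// content_browsertest waits for.
$ = function(id) { return document.getElementById(id); };  // needed by detectVideoIn()

setAllEventsOccuredHandler(function() {
  document.title = 'OK';
});

function runTest() {
  navigator.webkitGetUserMedia({video: true}, function(stream) {
    $('local-view').src = webkitURL.createObjectURL(stream);
    waitForVideo('local-view');  // passes once nonzero pixels are sampled
  }, function(error) {
    document.title = 'getUserMedia failed: ' + error.code;
  });
}
```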