author    | hclam@chromium.org <hclam@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-02-12 17:31:30 +0000
committer | hclam@chromium.org <hclam@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-02-12 17:31:30 +0000
commit    | 29cb8a8f2bd44af0f27e9add0a3bb445cdb9a28f (patch)
tree      | 9b5d8cb7e9a692d22c2188780701693ec825b739 /chrome
parent    | 8a82269dafa5aca936588a6d9a7b74acfc8565c9 (diff)
Implement events for cast.streaming.rtpStream API
This change implements:
cast.streaming.rtpStream.onStarted
cast.streaming.rtpStream.onStopped
cast.streaming.rtpStream.onError
The above event handlers are now called when a stream starts, stops, or
encounters an error.
CastStreamingApiTest is updated to listen for these events instead of
relying on a timeout.
BUG=301920
Review URL: https://codereview.chromium.org/143243003
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@250740 0039d316-1c4b-4281-b951-d872f2087c98
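For reference, a minimal sketch (not part of this change) of how an extension would observe the new events; it assumes stream ids and params were already obtained via chrome.cast.streaming.session.create and tabCapture, as in the updated basics.js test below, and audioId/audioParams are placeholders:

  var rtpStream = chrome.cast.streaming.rtpStream;
  rtpStream.onStarted.addListener(function(streamId) {
    console.log('RTP stream ' + streamId + ' started.');
  });
  rtpStream.onStopped.addListener(function(streamId) {
    console.log('RTP stream ' + streamId + ' stopped.');
  });
  rtpStream.onError.addListener(function(streamId, message) {
    // onError is dispatched with the stream id and an error message.
    console.log('RTP stream ' + streamId + ' error: ' + message);
  });
  // audioId/audioParams stand in for values obtained from session.create.
  rtpStream.start(audioId, audioParams);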
Diffstat (limited to 'chrome')
-rw-r--r-- | chrome/renderer/extensions/cast_streaming_native_handler.cc | 47
-rw-r--r-- | chrome/renderer/extensions/cast_streaming_native_handler.h  |  4
-rw-r--r-- | chrome/renderer/extensions/chrome_v8_context.cc             | 12
-rw-r--r-- | chrome/renderer/extensions/chrome_v8_context.h              |  2
-rw-r--r-- | chrome/renderer/media/cast_rtp_stream.cc                    | 60
-rw-r--r-- | chrome/renderer/media/cast_rtp_stream.h                     | 17
-rw-r--r-- | chrome/test/data/extensions/api_test/cast_streaming/basics.js | 52
7 files changed, 175 insertions(+), 19 deletions(-)
diff --git a/chrome/renderer/extensions/cast_streaming_native_handler.cc b/chrome/renderer/extensions/cast_streaming_native_handler.cc
index 8170f63..71fd3e9 100644
--- a/chrome/renderer/extensions/cast_streaming_native_handler.cc
+++ b/chrome/renderer/extensions/cast_streaming_native_handler.cc
@@ -236,6 +236,38 @@ void CastStreamingNativeHandler::CallCreateCallback(
   create_callback_.reset();
 }
 
+void CastStreamingNativeHandler::CallStartCallback(int stream_id) {
+  v8::Isolate* isolate = context()->isolate();
+  v8::HandleScope handle_scope(isolate);
+  v8::Context::Scope context_scope(context()->v8_context());
+  v8::Handle<v8::Array> event_args = v8::Array::New(isolate, 1);
+  event_args->Set(0, v8::Integer::New(isolate, stream_id));
+  context()->DispatchEvent("cast.streaming.rtpStream.onStarted", event_args);
+}
+
+void CastStreamingNativeHandler::CallStopCallback(int stream_id) {
+  v8::Isolate* isolate = context()->isolate();
+  v8::HandleScope handle_scope(isolate);
+  v8::Context::Scope context_scope(context()->v8_context());
+  v8::Handle<v8::Array> event_args = v8::Array::New(isolate, 1);
+  event_args->Set(0, v8::Integer::New(isolate, stream_id));
+  context()->DispatchEvent("cast.streaming.rtpStream.onStopped", event_args);
+}
+
+void CastStreamingNativeHandler::CallErrorCallback(int stream_id,
+                                                   const std::string& message) {
+  v8::Isolate* isolate = context()->isolate();
+  v8::HandleScope handle_scope(isolate);
+  v8::Context::Scope context_scope(context()->v8_context());
+  v8::Handle<v8::Array> event_args = v8::Array::New(isolate, 2);
+  event_args->Set(0, v8::Integer::New(isolate, stream_id));
+  event_args->Set(
+      1,
+      v8::String::NewFromUtf8(
+          isolate, message.data(), v8::String::kNormalString, message.size()));
+  context()->DispatchEvent("cast.streaming.rtpStream.onError", event_args);
+}
+
 void CastStreamingNativeHandler::DestroyCastRtpStream(
     const v8::FunctionCallbackInfo<v8::Value>& args) {
   CHECK_EQ(1, args.Length());
@@ -303,7 +335,20 @@ void CastStreamingNativeHandler::StartCastRtpStream(
   v8::Isolate* isolate = context()->v8_context()->GetIsolate();
   if (!ToCastRtpParamsOrThrow(isolate, *params, &cast_params))
     return;
-  transport->Start(cast_params);
+
+  base::Closure start_callback =
+      base::Bind(&CastStreamingNativeHandler::CallStartCallback,
+                 weak_factory_.GetWeakPtr(),
+                 transport_id);
+  base::Closure stop_callback =
+      base::Bind(&CastStreamingNativeHandler::CallStopCallback,
+                 weak_factory_.GetWeakPtr(),
+                 transport_id);
+  CastRtpStream::ErrorCallback error_callback =
+      base::Bind(&CastStreamingNativeHandler::CallErrorCallback,
+                 weak_factory_.GetWeakPtr(),
+                 transport_id);
+  transport->Start(cast_params, start_callback, stop_callback, error_callback);
 }
 
 void CastStreamingNativeHandler::StopCastRtpStream(
diff --git a/chrome/renderer/extensions/cast_streaming_native_handler.h b/chrome/renderer/extensions/cast_streaming_native_handler.h
index 02d8efa..9f41a92 100644
--- a/chrome/renderer/extensions/cast_streaming_native_handler.h
+++ b/chrome/renderer/extensions/cast_streaming_native_handler.h
@@ -54,6 +54,10 @@ class CastStreamingNativeHandler : public ObjectBackedNativeHandler {
                           scoped_ptr<CastRtpStream> stream2,
                           scoped_ptr<CastUdpTransport> udp_transport);
 
+  void CallStartCallback(int stream_id);
+  void CallStopCallback(int stream_id);
+  void CallErrorCallback(int stream_id, const std::string& message);
+
   // Gets the RTP stream or UDP transport indexed by an ID.
   // If not found, returns NULL and throws a V8 exception.
   CastRtpStream* GetRtpStreamOrThrow(int stream_id) const;
diff --git a/chrome/renderer/extensions/chrome_v8_context.cc b/chrome/renderer/extensions/chrome_v8_context.cc
index 631a8b2..cb3b068 100644
--- a/chrome/renderer/extensions/chrome_v8_context.cc
+++ b/chrome/renderer/extensions/chrome_v8_context.cc
@@ -16,6 +16,7 @@
 #include "content/public/renderer/v8_value_converter.h"
 #include "extensions/common/extension.h"
 #include "extensions/common/extension_api.h"
+#include "extensions/common/extension_urls.h"
 #include "third_party/WebKit/public/web/WebFrame.h"
 #include "third_party/WebKit/public/web/WebScopedMicrotaskSuppression.h"
 #include "third_party/WebKit/public/web/WebView.h"
@@ -119,6 +120,17 @@ Feature::Availability ChromeV8Context::GetAvailability(
       GetURL());
 }
 
+void ChromeV8Context::DispatchEvent(const char* event_name,
+                                    v8::Handle<v8::Array> args) const {
+  v8::HandleScope handle_scope(isolate());
+  v8::Context::Scope context_scope(v8_context());
+
+  v8::Handle<v8::Value> argv[] = {
+      v8::String::NewFromUtf8(isolate(), event_name), args};
+  module_system_->CallModuleMethod(
+      kEventBindings, "dispatchEvent", arraysize(argv), argv);
+}
+
 void ChromeV8Context::DispatchOnUnloadEvent() {
   module_system_->CallModuleMethod("unload_event", "dispatch");
 }
diff --git a/chrome/renderer/extensions/chrome_v8_context.h b/chrome/renderer/extensions/chrome_v8_context.h
index dcdcc901..ff44de8 100644
--- a/chrome/renderer/extensions/chrome_v8_context.h
+++ b/chrome/renderer/extensions/chrome_v8_context.h
@@ -99,6 +99,8 @@ class ChromeV8Context : public RequestSender::Source {
                       int argc,
                       v8::Handle<v8::Value> argv[]) const;
 
+  void DispatchEvent(const char* event_name, v8::Handle<v8::Array> args) const;
+
   // Fires the onunload event on the unload_event module.
   void DispatchOnUnloadEvent();
diff --git a/chrome/renderer/media/cast_rtp_stream.cc b/chrome/renderer/media/cast_rtp_stream.cc
index 06ffe7b..4824b5e 100644
--- a/chrome/renderer/media/cast_rtp_stream.cc
+++ b/chrome/renderer/media/cast_rtp_stream.cc
@@ -13,6 +13,7 @@
 #include "content/public/renderer/media_stream_video_sink.h"
 #include "content/public/renderer/render_thread.h"
 #include "media/base/audio_bus.h"
+#include "media/base/bind_to_current_loop.h"
 #include "media/cast/cast_config.h"
 #include "media/cast/cast_defines.h"
 #include "media/cast/cast_sender.h"
@@ -114,9 +115,13 @@ void DeleteAudioBus(scoped_ptr<media::AudioBus> audio_bus) {
 class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>,
                       public content::MediaStreamVideoSink {
  public:
-  explicit CastVideoSink(const blink::WebMediaStreamTrack& track)
+  // |track| provides data for this sink.
+  // |error_callback| is called if video formats don't match.
+  CastVideoSink(const blink::WebMediaStreamTrack& track,
+                const CastRtpStream::ErrorCallback& error_callback)
       : track_(track),
         sink_added_(false),
+        error_callback_(error_callback),
         render_thread_task_runner_(content::RenderThread::Get()
                                        ->GetMessageLoop()
                                        ->message_loop_proxy()) {}
@@ -153,6 +158,7 @@ class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>,
   blink::WebMediaStreamTrack track_;
   scoped_refptr<media::cast::FrameInput> frame_input_;
   bool sink_added_;
+  CastRtpStream::ErrorCallback error_callback_;
   scoped_refptr<base::SingleThreadTaskRunner> render_thread_task_runner_;
 
   DISALLOW_COPY_AND_ASSIGN(CastVideoSink);
@@ -165,9 +171,13 @@ class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>,
 class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>,
                       public content::MediaStreamAudioSink {
  public:
-  explicit CastAudioSink(const blink::WebMediaStreamTrack& track)
+  // |track| provides data for this sink.
+  // |error_callback| is called if audio formats don't match.
+  CastAudioSink(const blink::WebMediaStreamTrack& track,
+                const CastRtpStream::ErrorCallback& error_callback)
       : track_(track),
         sink_added_(false),
+        error_callback_(error_callback),
         weak_factory_(this),
         render_thread_task_runner_(content::RenderThread::Get()
                                        ->GetMessageLoop()
@@ -191,6 +201,7 @@ class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>,
 
     // TODO(hclam): Pass in the accurate capture time to have good
     // audio / video sync.
+    // TODO(hclam): We shouldn't hop through the render thread.
     // Bounce the call from the real-time audio thread to the render thread.
     // Needed since frame_input_ can be changed runtime by the render thread.
     media::AudioBus* const audio_bus_ptr = audio_bus.get();
@@ -230,6 +241,7 @@ class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>,
   blink::WebMediaStreamTrack track_;
   scoped_refptr<media::cast::FrameInput> frame_input_;
   bool sink_added_;
+  CastRtpStream::ErrorCallback error_callback_;
   base::WeakPtrFactory<CastAudioSink> weak_factory_;
   scoped_refptr<base::SingleThreadTaskRunner> render_thread_task_runner_;
@@ -264,12 +276,9 @@ CastRtpParams::CastRtpParams() {
 CastRtpParams::~CastRtpParams() {
 }
 
-CastRtpStream::CastRtpStream(
-    const blink::WebMediaStreamTrack& track,
-    const scoped_refptr<CastSession>& session)
-    : track_(track),
-      cast_session_(session) {
-}
+CastRtpStream::CastRtpStream(const blink::WebMediaStreamTrack& track,
+                             const scoped_refptr<CastSession>& session)
+    : track_(track), cast_session_(session), weak_factory_(this) {}
 
 CastRtpStream::~CastRtpStream() {
 }
@@ -285,35 +294,60 @@ CastRtpParams CastRtpStream::GetParams() {
   return params_;
 }
 
-void CastRtpStream::Start(const CastRtpParams& params) {
+void CastRtpStream::Start(const CastRtpParams& params,
+                          const base::Closure& start_callback,
+                          const base::Closure& stop_callback,
+                          const ErrorCallback& error_callback) {
+  stop_callback_ = stop_callback;
+  error_callback_ = error_callback;
+
   if (IsAudio()) {
     AudioSenderConfig config;
     if (!ToAudioSenderConfig(params, &config)) {
-      DVLOG(1) << "Invalid parameters for audio.";
+      DidEncounterError("Invalid parameters for audio.");
+      return;
     }
-    audio_sink_.reset(new CastAudioSink(track_));
+    // In case of error we have to go through DidEncounterError() to stop
+    // the streaming after reporting the error.
+    audio_sink_.reset(new CastAudioSink(
+        track_,
+        media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError,
+                                            weak_factory_.GetWeakPtr()))));
     cast_session_->StartAudio(
         config,
         base::Bind(&CastAudioSink::AddToTrack, audio_sink_->AsWeakPtr()));
+    start_callback.Run();
   } else {
     VideoSenderConfig config;
     if (!ToVideoSenderConfig(params, &config)) {
-      DVLOG(1) << "Invalid parameters for video.";
+      DidEncounterError("Invalid parameters for video.");
+      return;
    }
-    video_sink_.reset(new CastVideoSink(track_));
+    // See the code for audio above for explanation of callbacks.
+    video_sink_.reset(new CastVideoSink(
+        track_,
+        media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError,
+                                            weak_factory_.GetWeakPtr()))));
     cast_session_->StartVideo(
         config,
         base::Bind(&CastVideoSink::AddToTrack, video_sink_->AsWeakPtr()));
+    start_callback.Run();
   }
 }
 
 void CastRtpStream::Stop() {
   audio_sink_.reset();
   video_sink_.reset();
+  stop_callback_.Run();
 }
 
 bool CastRtpStream::IsAudio() const {
   return track_.source().type() == blink::WebMediaStreamSource::TypeAudio;
 }
+
+void CastRtpStream::DidEncounterError(const std::string& message) {
+  error_callback_.Run(message);
+  Stop();
+}
diff --git a/chrome/renderer/media/cast_rtp_stream.h b/chrome/renderer/media/cast_rtp_stream.h
index a9fe5ed..f3a06c0 100644
--- a/chrome/renderer/media/cast_rtp_stream.h
+++ b/chrome/renderer/media/cast_rtp_stream.h
@@ -9,8 +9,10 @@
 #include <vector>
 
 #include "base/basictypes.h"
+#include "base/callback.h"
 #include "base/memory/ref_counted.h"
 #include "base/memory/scoped_ptr.h"
+#include "base/memory/weak_ptr.h"
 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
 
 class CastAudioSink;
@@ -90,6 +92,8 @@ struct CastRtpParams {
 // stream.
 class CastRtpStream {
  public:
+  typedef base::Callback<void(const std::string&)> ErrorCallback;
+
   CastRtpStream(const blink::WebMediaStreamTrack& track,
                 const scoped_refptr<CastSession>& session);
   ~CastRtpStream();
@@ -102,7 +106,13 @@ class CastRtpStream {
 
   // Begin encoding of media stream and then submit the encoded streams
   // to underlying transport.
-  void Start(const CastRtpParams& params);
+  // When the stream is started |start_callback| is called.
+  // When the stream is stopped |stop_callback| is called.
+  // When there is an error |error_callback| is called with a message.
+  void Start(const CastRtpParams& params,
+             const base::Closure& start_callback,
+             const base::Closure& stop_callback,
+             const ErrorCallback& error_callback);
 
   // Stop encoding.
   void Stop();
@@ -112,11 +122,16 @@ class CastRtpStream {
   // track is a video track.
   bool IsAudio() const;
 
+  void DidEncounterError(const std::string& message);
+
   blink::WebMediaStreamTrack track_;
   const scoped_refptr<CastSession> cast_session_;
   scoped_ptr<CastAudioSink> audio_sink_;
   scoped_ptr<CastVideoSink> video_sink_;
   CastRtpParams params_;
+  base::WeakPtrFactory<CastRtpStream> weak_factory_;
+  base::Closure stop_callback_;
+  ErrorCallback error_callback_;
 
   DISALLOW_COPY_AND_ASSIGN(CastRtpStream);
 };
diff --git a/chrome/test/data/extensions/api_test/cast_streaming/basics.js b/chrome/test/data/extensions/api_test/cast_streaming/basics.js
index 3c8e410..d6d0bf4 100644
--- a/chrome/test/data/extensions/api_test/cast_streaming/basics.js
+++ b/chrome/test/data/extensions/api_test/cast_streaming/basics.js
@@ -8,6 +8,35 @@ var udpTransport = chrome.cast.streaming.udpTransport;
 var createSession = chrome.cast.streaming.session.create;
 var pass = chrome.test.callbackPass;
 
+function TestStateMachine(stream, audioId, videoId, udpId) {
+  this.stream = stream;
+  this.audioId = audioId;
+  this.videoId = videoId;
+  this.udpId = udpId;
+  this.audioStarted = false;
+  this.videoStarted = false;
+  this.audioStopped = false;
+  this.videoStopped = false;
+}
+
+TestStateMachine.prototype.onStarted = function(id) {
+  if (id == this.audioId)
+    this.audioStarted = true;
+  if (id == this.videoId)
+    this.videoStarted = true;
+  if (this.audioStarted && this.videoStarted)
+    this.onAllStarted();
+}
+
+TestStateMachine.prototype.onStopped = function(id) {
+  if (id == this.audioId)
+    this.audioStopped = true;
+  if (id == this.videoId)
+    this.videoStopped = true;
+  if (this.audioStopped && this.videoStopped)
+    this.onAllStopped();
+}
+
 chrome.test.runTests([
   function rtpStreamStart() {
     console.log("[TEST] rtpStreamStart");
@@ -19,23 +48,38 @@ chrome.test.runTests([
         stream.getVideoTracks()[0],
         pass(function(stream, audioId, videoId, udpId) {
           console.log("Starting.");
+          var stateMachine = new TestStateMachine(stream,
+                                                  audioId,
+                                                  videoId,
+                                                  udpId);
           var audioParams = rtpStream.getSupportedParams(audioId)[0];
           var videoParams = rtpStream.getSupportedParams(videoId)[0];
+          chrome.test.assertEq(audioParams.payload.codecName, "OPUS");
+          chrome.test.assertEq(videoParams.payload.codecName, "VP8");
          udpTransport.setDestination(udpId, {address: "127.0.0.1", port: 2344});
-          rtpStream.start(audioId, audioParams);
-          rtpStream.start(videoId, videoParams);
-          window.setTimeout(pass(function() {
+          rtpStream.onStarted.addListener(
+              stateMachine.onStarted.bind(stateMachine));
+          stateMachine.onAllStarted =
+              pass(function(audioId, videoId) {
                 console.log("Stopping.");
                 rtpStream.stop(audioId);
                 rtpStream.stop(videoId);
+              }.bind(null, audioId, videoId));
+          rtpStream.onStopped.addListener(
+              stateMachine.onStopped.bind(stateMachine));
+          stateMachine.onAllStopped =
+              pass(function(stream, audioId, videoId, udpId) {
+                console.log("Destroying.");
                 rtpStream.destroy(audioId);
                 rtpStream.destroy(videoId);
                 udpTransport.destroy(udpId);
                 chrome.test.assertEq(audioParams.payload.codecName, "OPUS");
                 chrome.test.assertEq(videoParams.payload.codecName, "VP8");
                 chrome.test.succeed();
-          }), 0);
+              }.bind(null, stream, audioId, videoId, udpId));
+          rtpStream.start(audioId, audioParams);
+          rtpStream.start(videoId, videoParams);
        }.bind(null, stream)));
   }));
  },
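The updated test does not exercise onError. Based on CastRtpStream::DidEncounterError() above, which reports the error and then calls Stop(), an extension should expect onError to be followed by onStopped for the same stream id. A minimal sketch of observing that ordering (the lastError bookkeeping map is hypothetical, not part of the API):

  var rtpStream = chrome.cast.streaming.rtpStream;
  var lastError = {};  // Hypothetical map from stream id to error message.
  rtpStream.onError.addListener(function(streamId, message) {
    lastError[streamId] = message;
  });
  rtpStream.onStopped.addListener(function(streamId) {
    if (lastError[streamId])
      console.log('Stream ' + streamId + ' stopped after error: ' + lastError[streamId]);
    else
      console.log('Stream ' + streamId + ' stopped normally.');
  });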