1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
|
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <limits.h>
#include <stddef.h>
#include <stdint.h>
#include <algorithm>
#include <climits>
#include <cstdarg>
#include <cstdio>
#include <deque>
#include <map>
#include <string>
#include <utility>
#include "base/at_exit.h"
#include "base/command_line.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "base/message_loop/message_loop.h"
#include "base/synchronization/lock.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/thread.h"
#include "base/time/default_tick_clock.h"
#include "base/timer/timer.h"
#include "media/audio/audio_io.h"
#include "media/audio/audio_manager.h"
#include "media/audio/audio_parameters.h"
#include "media/audio/fake_audio_log_factory.h"
#include "media/base/audio_bus.h"
#include "media/base/channel_layout.h"
#include "media/base/video_frame.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/cast_receiver.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/net/udp_transport.h"
#include "media/cast/test/utility/audio_utility.h"
#include "media/cast/test/utility/barcode.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/in_process_receiver.h"
#include "media/cast/test/utility/input_builder.h"
#include "media/cast/test/utility/standalone_cast_environment.h"
#include "net/base/net_util.h"
#if defined(USE_X11)
#include "media/cast/test/linux_output_window.h"
#endif // defined(USE_X11)
namespace media {
namespace cast {
// Settings chosen to match default sender settings.
#define DEFAULT_SEND_PORT "0"
#define DEFAULT_RECEIVE_PORT "2344"
#define DEFAULT_SEND_IP "0.0.0.0"
#define DEFAULT_AUDIO_FEEDBACK_SSRC "2"
#define DEFAULT_AUDIO_INCOMING_SSRC "1"
#define DEFAULT_AUDIO_PAYLOAD_TYPE "127"
#define DEFAULT_VIDEO_FEEDBACK_SSRC "12"
#define DEFAULT_VIDEO_INCOMING_SSRC "11"
#define DEFAULT_VIDEO_PAYLOAD_TYPE "96"
#if defined(USE_X11)
const char* kVideoWindowWidth = "1280";
const char* kVideoWindowHeight = "720";
#endif // defined(USE_X11)
void GetPorts(uint16_t* tx_port, uint16_t* rx_port) {
test::InputBuilder tx_input(
"Enter send port.", DEFAULT_SEND_PORT, 1, 65535);
*tx_port = static_cast<uint16_t>(tx_input.GetIntInput());
test::InputBuilder rx_input(
"Enter receive port.", DEFAULT_RECEIVE_PORT, 1, 65535);
*rx_port = static_cast<uint16_t>(rx_input.GetIntInput());
}
// Interactively prompts for an IP address and re-prompts until the input is
// either the default value or has exactly three '.' separators (a rough
// dotted-quad sanity check; full validation happens later via
// net::ParseIPLiteralToNumber in the caller).
std::string GetIpAddress(const std::string& display_text) {
  test::InputBuilder input(display_text, DEFAULT_SEND_IP, INT_MIN, INT_MAX);
  std::string ip_address = input.GetStringInput();
  while (std::count(ip_address.begin(), ip_address.end(), '.') != 3 &&
         ip_address != DEFAULT_SEND_IP) {
    ip_address = input.GetStringInput();
  }
  return ip_address;
}
// Interactively fills in the audio SSRC pair on |audio_config|: the SSRC this
// receiver reports itself as (receiver_ssrc) and the SSRC it expects from the
// sender (sender_ssrc).
void GetAudioSsrcs(FrameReceiverConfig* audio_config) {
  test::InputBuilder sender_prompt("Choose audio sender SSRC.",
                                   DEFAULT_AUDIO_FEEDBACK_SSRC, 1, INT_MAX);
  audio_config->receiver_ssrc = sender_prompt.GetIntInput();
  test::InputBuilder receiver_prompt("Choose audio receiver SSRC.",
                                     DEFAULT_AUDIO_INCOMING_SSRC, 1, INT_MAX);
  audio_config->sender_ssrc = receiver_prompt.GetIntInput();
}
// Interactively fills in the video SSRC pair on |video_config|; mirrors
// GetAudioSsrcs() but with the video defaults.
void GetVideoSsrcs(FrameReceiverConfig* video_config) {
  test::InputBuilder sender_prompt("Choose video sender SSRC.",
                                   DEFAULT_VIDEO_FEEDBACK_SSRC, 1, INT_MAX);
  video_config->receiver_ssrc = sender_prompt.GetIntInput();
  test::InputBuilder receiver_prompt("Choose video receiver SSRC.",
                                     DEFAULT_VIDEO_INCOMING_SSRC, 1, INT_MAX);
  video_config->sender_ssrc = receiver_prompt.GetIntInput();
}
#if defined(USE_X11)
// Interactively queries the render window dimensions.  The accepted ranges
// (144x176 through 1920x1080) match the resolutions the sender can produce.
void GetWindowSize(int* width, int* height) {
  test::InputBuilder width_prompt("Choose window width.", kVideoWindowWidth,
                                  144, 1920);
  *width = width_prompt.GetIntInput();
  test::InputBuilder height_prompt("Choose window height.", kVideoWindowHeight,
                                   176, 1080);
  *height = height_prompt.GetIntInput();
}
#endif  // defined(USE_X11)
// Interactively chooses the RTP payload type expected on incoming audio
// packets.  The accepted range spans from the default video payload type
// (low end) up to the default audio payload type (high end).
void GetAudioPayloadtype(FrameReceiverConfig* audio_config) {
  test::InputBuilder payload_prompt("Choose audio receiver payload type.",
                                    DEFAULT_AUDIO_PAYLOAD_TYPE,
                                    kDefaultRtpVideoPayloadType /* low_range */,
                                    kDefaultRtpAudioPayloadType /* high_range */);
  const int payload_type = payload_prompt.GetIntInput();
  audio_config->rtp_payload_type = payload_type;
}
// Builds the complete audio receiver configuration: starts from the library
// defaults, then layers on the interactive SSRC and payload-type choices.
FrameReceiverConfig GetAudioReceiverConfig() {
  FrameReceiverConfig config = GetDefaultAudioReceiverConfig();
  GetAudioSsrcs(&config);
  GetAudioPayloadtype(&config);
  config.rtp_max_delay_ms = 300;  // Chosen to match default sender settings.
  return config;
}
// Interactively chooses the RTP payload type expected on incoming video
// packets; same accepted range as for audio (default video payload type
// through default audio payload type).
void GetVideoPayloadtype(FrameReceiverConfig* video_config) {
  test::InputBuilder payload_prompt("Choose video receiver payload type.",
                                    DEFAULT_VIDEO_PAYLOAD_TYPE,
                                    kDefaultRtpVideoPayloadType /* low_range */,
                                    kDefaultRtpAudioPayloadType /* high_range */);
  const int payload_type = payload_prompt.GetIntInput();
  video_config->rtp_payload_type = payload_type;
}
// Builds the complete video receiver configuration: library defaults plus the
// interactive SSRC and payload-type choices.
FrameReceiverConfig GetVideoReceiverConfig() {
  FrameReceiverConfig config = GetDefaultVideoReceiverConfig();
  GetVideoSsrcs(&config);
  GetVideoPayloadtype(&config);
  config.rtp_max_delay_ms = 300;  // Chosen to match default sender settings.
  return config;
}
// Translates a Cast FrameReceiverConfig into Chromium AudioParameters for the
// output stream: low-latency PCM, 32 bits per sample, with a buffer holding
// 10 ms of audio (rtp_timebase is samples-per-second, so divide by 100).
AudioParameters ToAudioParameters(const FrameReceiverConfig& config) {
  const int frames_per_10ms_buffer = config.rtp_timebase / 100;
  return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
                         GuessChannelLayout(config.channels),
                         config.rtp_timebase, 32, frames_per_10ms_buffer);
}
// An InProcessReceiver that renders video frames to a LinuxOutputWindow and
// audio frames via Chromium's audio stack.
//
// InProcessReceiver pushes audio and video frames to this subclass, and these
// frames are pushed into a queue. Then, for audio, the Chromium audio stack
// will make polling calls on a separate, unknown thread whereby audio frames
// are pulled out of the audio queue as needed. For video, however, NaivePlayer
// is responsible for scheduling updates to the screen itself. For both, the
// queues are pruned (i.e., received frames are skipped) when the system is not
// able to play back as fast as frames are entering the queue.
//
// This is NOT a good reference implementation for a Cast receiver player since:
// 1. It only skips frames to handle slower-than-expected playout, or halts
// playback to handle frame underruns.
// 2. It makes no attempt to synchronize the timing of playout of the video
// frames with the audio frames.
// 3. It does nothing to smooth or hide discontinuities in playback due to
// timing issues or missing frames.
class NaivePlayer : public InProcessReceiver,
                    public AudioOutputStream::AudioSourceCallback {
 public:
  NaivePlayer(const scoped_refptr<CastEnvironment>& cast_environment,
              const net::IPEndPoint& local_end_point,
              const net::IPEndPoint& remote_end_point,
              const FrameReceiverConfig& audio_config,
              const FrameReceiverConfig& video_config,
              int window_width,
              int window_height)
      : InProcessReceiver(cast_environment,
                          local_end_point,
                          remote_end_point,
                          audio_config,
                          video_config),
        // Maximum age is the duration of 3 video frames. 3 was chosen
        // arbitrarily, but seems to work well.
        max_frame_age_(base::TimeDelta::FromSeconds(1) * 3 /
                       video_config.target_frame_rate),
#if defined(USE_X11)
        render_(0, 0, window_width, window_height, "Cast_receiver"),
#endif  // defined(USE_X11)
        num_video_frames_processed_(0),
        num_audio_frames_processed_(0),
        currently_playing_audio_frame_start_(-1),
        // BUGFIX: |last_audio_frame_no_| was previously left uninitialized,
        // so the first |frame_no == last_audio_frame_no_ + 1| comparison in
        // OnAudioFrame() read an indeterminate value.  Initialize it to the
        // same sentinel used on decode failure (-2); since |frame_no| is a
        // uint16_t, it can never equal -2 + 1, so the first decoded frame is
        // never mistaken for a continuation.
        last_audio_frame_no_(-2) {}

  ~NaivePlayer() final {}

  void Start() final {
    // Audio output must be started on the AudioManager's own thread.
    AudioManager::Get()->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&NaivePlayer::StartAudioOutputOnAudioManagerThread,
                   base::Unretained(this)));
    // Note: No need to wait for audio polling to start since the push-and-pull
    // mechanism is synchronized via the |audio_playout_queue_|.
    InProcessReceiver::Start();
  }

  void Stop() final {
    // First, stop audio output to the Chromium audio stack.  Block until the
    // stream is fully closed so OnMoreData() can no longer be invoked.
    base::WaitableEvent done(false, false);
    DCHECK(!AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
    AudioManager::Get()->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&NaivePlayer::StopAudioOutputOnAudioManagerThread,
                   base::Unretained(this),
                   &done));
    done.Wait();

    // Now, stop receiving new frames.
    InProcessReceiver::Stop();

    // Finally, clear out any frames remaining in the queues.  The audio queue
    // holds raw owning AudioBus pointers, so each must be explicitly deleted.
    while (!audio_playout_queue_.empty()) {
      const scoped_ptr<AudioBus> to_be_deleted(
          audio_playout_queue_.front().second);
      audio_playout_queue_.pop_front();
    }
    video_playout_queue_.clear();
  }

 private:
  // Opens and starts the audio output stream.  Must run on the AudioManager
  // thread.  On failure, audio playback is disabled but video continues.
  void StartAudioOutputOnAudioManagerThread() {
    DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
    DCHECK(!audio_output_stream_);
    audio_output_stream_.reset(AudioManager::Get()->MakeAudioOutputStreamProxy(
        ToAudioParameters(audio_config()), ""));
    if (audio_output_stream_.get() && audio_output_stream_->Open()) {
      audio_output_stream_->Start(this);
    } else {
      LOG(ERROR) << "Failed to open an audio output stream. "
                 << "Audio playback disabled.";
      audio_output_stream_.reset();
    }
  }

  // Stops and closes the audio output stream, then signals |done|.  Must run
  // on the AudioManager thread.
  void StopAudioOutputOnAudioManagerThread(base::WaitableEvent* done) {
    DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
    if (audio_output_stream_.get()) {
      audio_output_stream_->Stop();
      audio_output_stream_->Close();
      audio_output_stream_.reset();
    }
    done->Signal();
  }

  ////////////////////////////////////////////////////////////////////
  // InProcessReceiver finals.

  // Queues a received video frame for playout and records its barcode-encoded
  // frame number (used by CheckAVSync()).  Runs on the MAIN cast thread.
  void OnVideoFrame(const scoped_refptr<VideoFrame>& video_frame,
                    const base::TimeTicks& playout_time,
                    bool is_continuous) final {
    DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));
    LOG_IF(WARNING, !is_continuous)
        << "Video: Discontinuity in received frames.";
    video_playout_queue_.push_back(std::make_pair(playout_time, video_frame));
    ScheduleVideoPlayout();
    uint16_t frame_no;
    if (media::cast::test::DecodeBarcode(video_frame, &frame_no)) {
      video_play_times_.insert(
          std::pair<uint16_t, base::TimeTicks>(frame_no, playout_time));
    } else {
      VLOG(2) << "Barcode decode failed!";
    }
  }

  // Queues a received audio frame for playout and records its timestamp for
  // A/V sync measurement.  Runs on the MAIN cast thread; the queue is shared
  // with the audio polling thread, hence |audio_lock_|.
  void OnAudioFrame(scoped_ptr<AudioBus> audio_frame,
                    const base::TimeTicks& playout_time,
                    bool is_continuous) final {
    DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));
    LOG_IF(WARNING, !is_continuous)
        << "Audio: Discontinuity in received frames.";
    base::AutoLock auto_lock(audio_lock_);
    uint16_t frame_no;
    if (media::cast::DecodeTimestamp(audio_frame->channel(0),
                                     audio_frame->frames(),
                                     &frame_no)) {
      // Since there are lots of audio packets with the same frame_no,
      // we really want to make sure that we get the playout_time from
      // the first one. If is_continuous is true, then it's possible
      // that we already missed the first one.
      if (is_continuous && frame_no == last_audio_frame_no_ + 1) {
        audio_play_times_.insert(
            std::pair<uint16_t, base::TimeTicks>(frame_no, playout_time));
      }
      last_audio_frame_no_ = frame_no;
    } else {
      VLOG(2) << "Audio decode failed!";
      last_audio_frame_no_ = -2;
    }
    // Ownership of the AudioBus transfers to the queue (raw pointer); it is
    // reclaimed by PopOneAudioFrame() or Stop().
    audio_playout_queue_.push_back(
        std::make_pair(playout_time, audio_frame.release()));
  }

  // End of InProcessReceiver finals.
  ////////////////////////////////////////////////////////////////////

  ////////////////////////////////////////////////////////////////////
  // AudioSourceCallback implementation.

  // Fills |dest| with queued audio, pruning frames too old to play and
  // zero-filling on underrun.  Always reports a fully-populated buffer.
  int OnMoreData(AudioBus* dest,
                 uint32_t total_bytes_delay,
                 uint32_t frames_skipped) final {
    // Note: This method is being invoked by a separate thread unknown to us
    // (i.e., outside of CastEnvironment).

    int samples_remaining = dest->frames();

    while (samples_remaining > 0) {
      // Get next audio frame ready for playout.
      if (!currently_playing_audio_frame_.get()) {
        base::AutoLock auto_lock(audio_lock_);

        // Prune the queue, skipping entries that are too old.
        // TODO(miu): Use |total_bytes_delay| to account for audio buffering
        // delays upstream.
        const base::TimeTicks earliest_time_to_play =
            cast_env()->Clock()->NowTicks() - max_frame_age_;
        while (!audio_playout_queue_.empty() &&
               audio_playout_queue_.front().first < earliest_time_to_play) {
          PopOneAudioFrame(true);
        }
        if (audio_playout_queue_.empty())
          break;

        currently_playing_audio_frame_ = PopOneAudioFrame(false);
        currently_playing_audio_frame_start_ = 0;
      }

      // Copy some or all of the samples in |currently_playing_audio_frame_| to
      // |dest|. Once all samples in |currently_playing_audio_frame_| have been
      // consumed, release it.
      const int num_samples_to_copy =
          std::min(samples_remaining,
                   currently_playing_audio_frame_->frames() -
                       currently_playing_audio_frame_start_);
      currently_playing_audio_frame_->CopyPartialFramesTo(
          currently_playing_audio_frame_start_,
          num_samples_to_copy,
          0,
          dest);
      samples_remaining -= num_samples_to_copy;
      currently_playing_audio_frame_start_ += num_samples_to_copy;
      if (currently_playing_audio_frame_start_ ==
          currently_playing_audio_frame_->frames()) {
        currently_playing_audio_frame_.reset();
      }
    }

    // If |dest| has not been fully filled, then an underrun has occurred; and
    // fill the remainder of |dest| with zeros.
    if (samples_remaining > 0) {
      // Note: Only logging underruns after the first frame has been received.
      LOG_IF(WARNING, currently_playing_audio_frame_start_ != -1)
          << "Audio: Playback underrun of " << samples_remaining << " samples!";
      dest->ZeroFramesPartial(dest->frames() - samples_remaining,
                              samples_remaining);
    }

    return dest->frames();
  }

  void OnError(AudioOutputStream* stream) final {
    LOG(ERROR) << "AudioOutputStream reports an error. "
               << "Playback is unlikely to continue.";
  }

  // End of AudioSourceCallback implementation.
  ////////////////////////////////////////////////////////////////////

  // Prunes too-old video frames, then arms |video_playout_timer_| to fire at
  // the playout time of the head of the queue (or stops it if empty).
  void ScheduleVideoPlayout() {
    DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));

    // Prune the queue, skipping entries that are too old.
    const base::TimeTicks now = cast_env()->Clock()->NowTicks();
    const base::TimeTicks earliest_time_to_play = now - max_frame_age_;
    while (!video_playout_queue_.empty() &&
           video_playout_queue_.front().first < earliest_time_to_play) {
      PopOneVideoFrame(true);
    }

    // If the queue is not empty, schedule playout of its first frame.
    if (video_playout_queue_.empty()) {
      video_playout_timer_.Stop();
    } else {
      video_playout_timer_.Start(
          FROM_HERE,
          video_playout_queue_.front().first - now,
          base::Bind(&NaivePlayer::PlayNextVideoFrame,
                     base::Unretained(this)));
    }
  }

  // Timer callback: renders the head of the video queue, schedules the next
  // playout, and updates the A/V sync measurement.
  void PlayNextVideoFrame() {
    DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));
    if (!video_playout_queue_.empty()) {
      const scoped_refptr<VideoFrame> video_frame = PopOneVideoFrame(false);
#if defined(USE_X11)
      render_.RenderFrame(video_frame);
#endif  // defined(USE_X11)
    }
    ScheduleVideoPlayout();
    CheckAVSync();
  }

  // Removes and returns the head of the video queue, logging whether it was
  // skipped or played (and how late).  Precondition: queue is non-empty.
  scoped_refptr<VideoFrame> PopOneVideoFrame(bool is_being_skipped) {
    DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));

    if (is_being_skipped) {
      VLOG(1) << "VideoFrame[" << num_video_frames_processed_
              << " (dt=" << (video_playout_queue_.front().first -
                             last_popped_video_playout_time_).InMicroseconds()
              << " usec)]: Skipped.";
    } else {
      VLOG(1) << "VideoFrame[" << num_video_frames_processed_
              << " (dt=" << (video_playout_queue_.front().first -
                             last_popped_video_playout_time_).InMicroseconds()
              << " usec)]: Playing "
              << (cast_env()->Clock()->NowTicks() -
                  video_playout_queue_.front().first).InMicroseconds()
              << " usec later than intended.";
    }

    last_popped_video_playout_time_ = video_playout_queue_.front().first;
    const scoped_refptr<VideoFrame> ret = video_playout_queue_.front().second;
    video_playout_queue_.pop_front();
    ++num_video_frames_processed_;
    return ret;
  }

  // Removes and returns the head of the audio queue, taking back ownership of
  // the AudioBus.  Caller must hold |audio_lock_|; queue must be non-empty.
  scoped_ptr<AudioBus> PopOneAudioFrame(bool was_skipped) {
    audio_lock_.AssertAcquired();

    if (was_skipped) {
      VLOG(1) << "AudioFrame[" << num_audio_frames_processed_
              << " (dt=" << (audio_playout_queue_.front().first -
                             last_popped_audio_playout_time_).InMicroseconds()
              << " usec)]: Skipped.";
    } else {
      VLOG(1) << "AudioFrame[" << num_audio_frames_processed_
              << " (dt=" << (audio_playout_queue_.front().first -
                             last_popped_audio_playout_time_).InMicroseconds()
              << " usec)]: Playing "
              << (cast_env()->Clock()->NowTicks() -
                  audio_playout_queue_.front().first).InMicroseconds()
              << " usec later than intended.";
    }

    last_popped_audio_playout_time_ = audio_playout_queue_.front().first;
    scoped_ptr<AudioBus> ret(audio_playout_queue_.front().second);
    audio_playout_queue_.pop_front();
    ++num_audio_frames_processed_;
    return ret;
  }

  // Once enough frame numbers have been decoded from both streams, computes
  // the average audio-vs-video playout offset and logs it.  Positive values
  // mean audio is running behind video.
  void CheckAVSync() {
    if (video_play_times_.size() > 30 &&
        audio_play_times_.size() > 30) {
      size_t num_events = 0;
      base::TimeDelta delta;
      std::map<uint16_t, base::TimeTicks>::iterator audio_iter, video_iter;
      for (video_iter = video_play_times_.begin();
           video_iter != video_play_times_.end();
           ++video_iter) {
        audio_iter = audio_play_times_.find(video_iter->first);
        if (audio_iter != audio_play_times_.end()) {
          num_events++;
          // Positive values means audio is running behind video.
          delta += audio_iter->second - video_iter->second;
        }
      }

      if (num_events > 30) {
        VLOG(0) << "Audio behind by: "
                << (delta / num_events).InMilliseconds()
                << "ms";
        video_play_times_.clear();
        audio_play_times_.clear();
      }
    } else if (video_play_times_.size() + audio_play_times_.size() > 500) {
      // We are decoding audio or video timestamps, but not both, clear it out.
      video_play_times_.clear();
      audio_play_times_.clear();
    }
  }

  // Frames in the queue older than this (relative to NowTicks()) will be
  // dropped (i.e., playback is falling behind).
  const base::TimeDelta max_frame_age_;

  // Outputs created, started, and destroyed by this NaivePlayer.
#if defined(USE_X11)
  test::LinuxOutputWindow render_;
#endif  // defined(USE_X11)
  scoped_ptr<AudioOutputStream> audio_output_stream_;

  // Video playout queue.  Only accessed on the MAIN cast thread.
  typedef std::pair<base::TimeTicks, scoped_refptr<VideoFrame> >
      VideoQueueEntry;
  std::deque<VideoQueueEntry> video_playout_queue_;
  base::TimeTicks last_popped_video_playout_time_;
  int64_t num_video_frames_processed_;

  base::OneShotTimer video_playout_timer_;

  // Audio playout queue, synchronized by |audio_lock_|.  The AudioBus
  // pointers are owned by the queue entries.
  base::Lock audio_lock_;
  typedef std::pair<base::TimeTicks, AudioBus*> AudioQueueEntry;
  std::deque<AudioQueueEntry> audio_playout_queue_;
  base::TimeTicks last_popped_audio_playout_time_;
  int64_t num_audio_frames_processed_;

  // These must only be used on the audio thread calling OnMoreData().
  scoped_ptr<AudioBus> currently_playing_audio_frame_;
  int currently_playing_audio_frame_start_;

  // Frame-number -> playout-time records used by CheckAVSync().
  std::map<uint16_t, base::TimeTicks> audio_play_times_;
  std::map<uint16_t, base::TimeTicks> video_play_times_;
  // Last decoded audio frame number, or -2 if the last decode failed (also
  // the initial value, so the first frame never looks like a continuation).
  int32_t last_audio_frame_no_;
};
} // namespace cast
} // namespace media
int main(int argc, char** argv) {
base::AtExitManager at_exit;
base::CommandLine::Init(argc, argv);
InitLogging(logging::LoggingSettings());
scoped_refptr<media::cast::CastEnvironment> cast_environment(
new media::cast::StandaloneCastEnvironment);
// Start up Chromium audio system.
media::FakeAudioLogFactory fake_audio_log_factory_;
const scoped_ptr<media::AudioManager> audio_manager(
media::AudioManager::Create(&fake_audio_log_factory_));
CHECK(media::AudioManager::Get());
media::cast::FrameReceiverConfig audio_config =
media::cast::GetAudioReceiverConfig();
media::cast::FrameReceiverConfig video_config =
media::cast::GetVideoReceiverConfig();
// Determine local and remote endpoints.
uint16_t remote_port, local_port;
media::cast::GetPorts(&remote_port, &local_port);
if (!local_port) {
LOG(ERROR) << "Invalid local port.";
return 1;
}
std::string remote_ip_address = media::cast::GetIpAddress("Enter remote IP.");
std::string local_ip_address = media::cast::GetIpAddress("Enter local IP.");
net::IPAddressNumber remote_ip_number;
net::IPAddressNumber local_ip_number;
if (!net::ParseIPLiteralToNumber(remote_ip_address, &remote_ip_number)) {
LOG(ERROR) << "Invalid remote IP address.";
return 1;
}
if (!net::ParseIPLiteralToNumber(local_ip_address, &local_ip_number)) {
LOG(ERROR) << "Invalid local IP address.";
return 1;
}
net::IPEndPoint remote_end_point(remote_ip_number, remote_port);
net::IPEndPoint local_end_point(local_ip_number, local_port);
// Create and start the player.
int window_width = 0;
int window_height = 0;
#if defined(USE_X11)
media::cast::GetWindowSize(&window_width, &window_height);
#endif // defined(USE_X11)
media::cast::NaivePlayer player(cast_environment,
local_end_point,
remote_end_point,
audio_config,
video_config,
window_width,
window_height);
player.Start();
base::MessageLoop().Run(); // Run forever (i.e., until SIGTERM).
NOTREACHED();
return 0;
}
|