path: root/content/test/data/media/peerconnection-call.html
<html>
<head>
  <script type="text/javascript">
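  // Shorthand for document.getElementById().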
  $ = function(id) {
    return document.getElementById(id);
  };

  // These must match how the video and canvas tags are declared in the HTML.
  const VIDEO_TAG_WIDTH = 320;
  const VIDEO_TAG_HEIGHT = 240;

  var gFirstConnection = null;
  var gSecondConnection = null;
  var gTestWithoutMsidAndBundle = false;

  // Number of test events that must occur before the test passes. When the
  // test passes, the document title changes to OK.
  var gNumberOfExpectedEvents = 0;

  // Number of events that have occurred so far.
  var gNumberOfEvents = 0;

  var gLocalStream = null;
  var gSentTones = '';

  // Test that we can set up a call with an audio and video track.
  function call(constraints) {
    createConnections(null);
    navigator.webkitGetUserMedia(constraints,
      addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

  // First negotiates a call without streams on either connection, and then
  // adds a stream to peer connection 1 which gets sent to peer connection 2.
  function makeEmptyCallThenAddOneStreamAndRenegotiate(constraints) {
    createConnections(null);
    negotiate();
    navigator.webkitGetUserMedia(constraints,
      addStreamToTheFirstConnectionAndNegotiate, printGetUserMediaError);
    // Only the first connection is sending here.
    waitForVideo('remote-view-2');
  }

  // Test that we can set up a call with an audio and video track and
  // simulate that the remote peer doesn't support MSID.
  function callWithoutMsidAndBundle() {
    createConnections(null);
    gTestWithoutMsidAndBundle = true;
    navigator.webkitGetUserMedia({audio:true, video:true},
      addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

  // Test only a data channel.
  function callWithDataOnly() {
    createConnections({optional:[{RtpDataChannels: true}]});
    setupDataChannel();
    gFirstConnection.createOffer(onOfferCreated);
  }

  // Test call with audio, video and a data channel.
  function callWithDataAndMedia() {
    createConnections({optional:[{RtpDataChannels: true}]});
    setupDataChannel();
    navigator.webkitGetUserMedia({audio:true, video:true},
      addStreamToBothConnectionsAndNegotiate,
      printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

  // Test call with a data channel and later add audio and video.
  function callWithDataAndLaterAddMedia() {
    createConnections({optional:[{RtpDataChannels: true}]});
    setupDataChannel();
    gFirstConnection.createOffer(onOfferCreated);

    navigator.webkitGetUserMedia({audio:true, video:true},
      addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

  // Test that we can set up a call and send DTMF.
  function callAndSendDtmf(tones) {
    createConnections(null);
    navigator.webkitGetUserMedia({audio:true, video:true},
      addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    var onCallEstablished = function() {
      // Send DTMF tones.
      var localAudioTrack = gLocalStream.getAudioTracks()[0];
      var dtmfSender = gFirstConnection.createDTMFSender(localAudioTrack);
      dtmfSender.ontonechange = onToneChange;
      dtmfSender.insertDTMF(tones);
      // Wait for the DTMF tones callback.
      document.title = 'Waiting for dtmf...';
      addExpectedEvent();
      var waitDtmf = setInterval(function() {
        if (gSentTones == tones) {
          clearInterval(waitDtmf);
          eventOccured();
        }
      }, 100);
    }

    // Do the DTMF test after we have received video.
    detectVideoIn('remote-view-2', onCallEstablished);
  }

  // This function is used for setting up a test that:
  // 1. Creates a data channel on |gFirstConnection| and sends data to
  //    |gSecondConnection|.
  // 2. When data is received on |gSecondConnection| a message
  //    is sent to |gFirstConnection|.
  // 3. When data is received on |gFirstConnection|, the data
  //    channel is closed. The test passes when the state transition completes.
  function setupDataChannel() {
    var sendDataString = "send some text on a data channel.";
    var firstDataChannel = gFirstConnection.createDataChannel(
        "sendDataChannel", {reliable: false});
    expectEquals('connecting', firstDataChannel.readyState);

    // When |firstDataChannel| transitions to the open state, send a text
    // string.
    firstDataChannel.onopen = function() {
      expectEquals('open', firstDataChannel.readyState);
      firstDataChannel.send(sendDataString);
    }

    // When |firstDataChannel| receives a message, close the channel and
    // initiate a new offer/answer exchange to complete the closure.
    firstDataChannel.onmessage = function(event) {
      expectEquals(event.data, sendDataString);
      firstDataChannel.close();
      gFirstConnection.createOffer(onOfferCreated);
    }

    // When |firstDataChannel| transitions to the closed state, the test passes.
    addExpectedEvent();
    firstDataChannel.onclose = function() {
      expectEquals('closed', firstDataChannel.readyState);
      eventOccured();
    }

    // Event handler for when |gSecondConnection| receives a new data channel.
    gSecondConnection.ondatachannel = function (event) {
      var secondDataChannel = event.channel;

      // When |secondDataChannel| receives a message, send a message back.
      secondDataChannel.onmessage = function(event) {
        expectEquals(event.data, sendDataString);
        expectEquals('open', secondDataChannel.readyState);
        secondDataChannel.send(sendDataString);
      }
    }
  }

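  // DTMF callback: accumulates the tones reported by the DTMFSender so that
  // callAndSendDtmf() can compare them against the tones it inserted.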
  function onToneChange(tone) {
    gSentTones += tone.tone;
    document.title = gSentTones;
  }

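  // Creates the two peer connections under test. Both connections live in
  // this page, so offers, answers and ICE candidates are exchanged through
  // direct JavaScript calls rather than a real signaling channel.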
  function createConnections(constraints) {
    gFirstConnection = new webkitRTCPeerConnection(null, constraints);
    gFirstConnection.onicecandidate = onIceCandidateToFirst;
    gFirstConnection.onaddstream = function(event) {
      onRemoteStream(event, 'remote-view-1');
    }
    expectEquals('stable', gFirstConnection.signalingState);

    gSecondConnection = new webkitRTCPeerConnection(null, constraints);
    gSecondConnection.onicecandidate = onIceCandidateToSecond;
    gSecondConnection.onaddstream = function(event) {
      onRemoteStream(event, 'remote-view-2');
    }
  }

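  // Shows |localStream| in the local preview tag and stores it in
  // |gLocalStream| for later use (for example by callAndSendDtmf()).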
  function displayAndRemember(localStream) {
    var localStreamUrl = webkitURL.createObjectURL(localStream);
    $('local-view').src = localStreamUrl;

    gLocalStream = localStream;
  }

  // Called if getUserMedia fails.
  function printGetUserMediaError(error) {
    document.title = 'getUserMedia request failed with code ' + error.code;
  }

  // Called if getUserMedia succeeds and we want to send from both connections.
  function addStreamToBothConnectionsAndNegotiate(localStream) {
    displayAndRemember(localStream);
    gFirstConnection.addStream(localStream);
    gSecondConnection.addStream(localStream);
    negotiate();
  }

  // Called if getUserMedia succeeds when we want to send from one connection.
  function addStreamToTheFirstConnectionAndNegotiate(localStream) {
    displayAndRemember(localStream);
    gFirstConnection.addStream(localStream);
    negotiate();
  }

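  // Kicks off an offer/answer exchange from |gFirstConnection|.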
  function negotiate() {
    gFirstConnection.createOffer(onOfferCreated);
  }

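  // The offer from |gFirstConnection| is set as its local description and
  // handed directly to |gSecondConnection|; the answer flows back the same
  // way via onAnswerCreated() and receiveAnswer().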
  function onOfferCreated(offer) {
    gFirstConnection.setLocalDescription(offer);
    expectEquals('have-local-offer', gFirstConnection.signalingState);
    receiveOffer(offer.sdp);
  }

  function receiveOffer(offerSdp) {
    if (gTestWithoutMsidAndBundle) {
      offerSdp = removeMsidAndBundle(offerSdp);
    }

    var parsedOffer = new RTCSessionDescription({ type: 'offer',
                                                  sdp: offerSdp });
    gSecondConnection.setRemoteDescription(parsedOffer);
    gSecondConnection.createAnswer(onAnswerCreated);
    expectEquals('have-remote-offer', gSecondConnection.signalingState);
  }

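  // Strips the msid-semantic, BUNDLE group, a=mid and a=ssrc lines from the
  // SDP to simulate a remote peer that does not support MSID and BUNDLE.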
  function removeMsidAndBundle(offerSdp) {
    offerSdp = offerSdp.replace(/a=msid-semantic.*\r\n/g, '');
    offerSdp = offerSdp.replace('a=group:BUNDLE audio video\r\n', '');
    offerSdp = offerSdp.replace('a=mid:audio\r\n', '');
    offerSdp = offerSdp.replace('a=mid:video\r\n', '');
    offerSdp = offerSdp.replace(/a=ssrc.*\r\n/g, '');
    return offerSdp;
  }

  function onAnswerCreated(answer) {
    gSecondConnection.setLocalDescription(answer);
    expectEquals('stable', gSecondConnection.signalingState);
    receiveAnswer(answer.sdp);
  }

  function receiveAnswer(answerSdp) {
    if (gTestWithoutMsidAndBundle) {
      answerSdp = removeMsidAndBundle(answerSdp);
    }
    var parsedAnswer = new RTCSessionDescription({ type: 'answer',
                                                   sdp: answerSdp });
    gFirstConnection.setRemoteDescription(parsedAnswer);
    expectEquals('stable', gFirstConnection.signalingState);
  }

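  // ICE candidates gathered by one connection are added directly to the
  // other connection.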
  function onIceCandidateToFirst(event) {
    if (event.candidate) {
      var candidate = new RTCIceCandidate(event.candidate);
      gSecondConnection.addIceCandidate(candidate);
    }
  }

  function onIceCandidateToSecond(event) {
    if (event.candidate) {
      var candidate = new RTCIceCandidate(event.candidate);
      gFirstConnection.addIceCandidate(candidate);
    }
  }

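  // Attaches a remote stream received on a connection to the video tag named
  // by |target|.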
  function onRemoteStream(e, target) {
    if (gTestWithoutMsidAndBundle && e.stream.label != "default") {
      document.title = 'a default remote stream was expected but instead ' +
          e.stream.label + ' was received.';
      return;
    }
    var remoteStreamUrl = webkitURL.createObjectURL(e.stream);
    var remoteVideo = $(target);
    remoteVideo.src = remoteStreamUrl;
  }

  // TODO(phoglund): perhaps use the video detector in chrome/test/data/webrtc/?
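  // Periodically draws the frames of |videoElementName| onto its hidden
  // canvas and invokes |callback| once the sampled pixels indicate that video
  // is playing.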
  function detectVideoIn(videoElementName, callback) {
    var width = VIDEO_TAG_WIDTH;
    var height = VIDEO_TAG_HEIGHT;
    var videoElement = $(videoElementName);
    var canvas = $(videoElementName + '-canvas');
    var waitVideo = setInterval(function() {
      var context = canvas.getContext('2d');
      context.drawImage(videoElement, 0, 0, width, height);
      var pixels = context.getImageData(0, 0, width, height).data;

      if (isVideoPlaying(pixels, width, height)) {
        clearInterval(waitVideo);
        callback();
      }
    }, 100);
  }

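  // Registers an expected event and fulfills it once video is detected
  // playing in the given video tag.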
  function waitForVideo(videoElement) {
    document.title = 'Waiting for video...';
    addExpectedEvent();
    detectVideoIn(videoElement, function () { eventOccured(); });
  }

  // This very basic video verification algorithm will be satisfied if any
  // pixels are nonzero in a small sample area of the frame. It relies on the
  // assumption that a video element with a null source just presents zeroes.
  function isVideoPlaying(pixels, width, height) {
    // Sample a short run of bytes in the RGBA pixel data. With four bytes per
    // pixel this offset lands roughly an eighth of the way into the frame
    // rather than in the middle, but any nonzero byte is enough here.
    var middle = width * height / 2;
    for (var i = 0; i < 20; i++) {
      if (pixels[middle + i] > 0) {
        return true;
      }
    }
    return false;
  }


  // This function compares |left| and |right| and throws an exception if the
  // values don't match.
  function expectEquals(left, right) {
    if (left != right) {
      var s = "expectEquals failed left: " + left + " right: " + right;
      document.title = s;
      throw s;
    }
  }

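  // Event bookkeeping: tests register the events they expect with
  // addExpectedEvent() and report them with eventOccured(); once all expected
  // events have occurred the document title is set to OK.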
  function addExpectedEvent() {
    ++gNumberOfExpectedEvents;
  }

  function eventOccured() {
    ++gNumberOfEvents;
    if (gNumberOfEvents == gNumberOfExpectedEvents) {
      document.title = 'OK';
    }
  }
  </script>
</head>
<body>
  <table border="0">
    <tr>
      <td>Local Preview</td>
      <td>Remote Stream for Connection 1</td>
      <td>Remote Stream for Connection 2</td>
    </tr>
    <tr>
      <td><video width="320" height="240" id="local-view"
          autoplay="autoplay"></video></td>
      <td><video width="320" height="240" id="remote-view-1"
          autoplay="autoplay"></video></td>
      <td><video width="320" height="240" id="remote-view-2"
          autoplay="autoplay"></video></td>
      <!-- Canvases are named after their corresponding video elements. -->
      <td><canvas width="320" height="240" id="remote-view-1-canvas"
          style="display:none"></canvas></td>
      <td><canvas width="320" height="240" id="remote-view-2-canvas"
          style="display:none"></canvas></td>
    </tr>
  </table>
</body>
</html>