<html>
<head>
  <script type="text/javascript" src="webrtc_test_utilities.js"></script>
  <script type="text/javascript" src="webrtc_test_audio.js"></script>
  <script type="text/javascript">
  $ = function(id) {
    return document.getElementById(id);
  };

  var gFirstConnection = null;
  var gSecondConnection = null;
  var gTestWithoutMsid = false;
  var gLocalStream = null;
  var gSentTones = '';

  var gRemoteStreams = {};

  // Default transform functions, overridden by some test cases.
  var transformSdp = function(sdp) { return sdp; };
  var transformRemoteSdp = function(sdp) { return sdp; };
  var transformCandidate = function(candidate) { return candidate; };
  var onLocalDescriptionError = function(error) { failTest(error); };
  var onRemoteDescriptionError = function(error) { failTest(error); };

  // Temporary measure to be able to force iSAC 16K where needed, particularly
  // on Android. This applies to every test, which is why it's implemented
  // like this.
  var maybeForceIsac16K = function(sdp) { return sdp; };
  function forceIsac16KInSdp() {
    maybeForceIsac16K = function(sdp) {
      sdp = sdp.replace(/m=audio (\d+) RTP\/SAVPF.*\r\n/g,
                        'm=audio $1 RTP/SAVPF 103 126\r\n');
      sdp = sdp.replace('a=fmtp:111 minptime=10', 'a=fmtp:103 minptime=10');
      if (sdp.search('a=rtpmap:103 ISAC/16000') == -1)
        failTest('Missing iSAC 16K codec on Android; cannot force codec.');
      return sdp;
    };
    sendValueToTest('isac-forced');
  }

  // When using external SDES, the crypto key is chosen by JavaScript.
  var EXTERNAL_SDES_LINES = {
    'audio': 'a=crypto:1 AES_CM_128_HMAC_SHA1_80 ' +
        'inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR',
    'video': 'a=crypto:1 AES_CM_128_HMAC_SHA1_80 ' +
        'inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj',
    'data': 'a=crypto:1 AES_CM_128_HMAC_SHA1_80 ' +
        'inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj'
  };

  // When using GICE, the ICE credentials can be chosen by JavaScript.
  var EXTERNAL_GICE_UFRAG = '1234567890123456';
  var EXTERNAL_GICE_PWD = '123456789012345678901234';

  setAllEventsOccuredHandler(reportTestSuccess);

  // Test that we can set up a call with an audio and a video track.
  function call(constraints) {
    createConnections(null);
    navigator.webkitGetUserMedia(constraints,
      addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

  // Test that we can set up a call with an audio and a video track, and check
  // that the video resolution is as expected.
  function callAndExpectResolution(constraints,
                                   expected_width,
                                   expected_height) {
    createConnections(null);
    navigator.webkitGetUserMedia(constraints,
      addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    waitForVideoWithResolution('remote-view-1',
                               expected_width,
                               expected_height);
    waitForVideoWithResolution('remote-view-2',
                               expected_width,
                               expected_height);
  }


  // First negotiates a call without streams on either connection, then adds a
  // stream to peer connection 1 which gets sent to peer connection 2. We must
  // wait for the first negotiation to complete before starting the second one,
  // which is why we wait until the connection is stable before re-negotiating.
  function callEmptyThenAddOneStreamAndRenegotiate(constraints) {
    createConnections(null);
    negotiate();
    waitForConnectionToStabilize(gFirstConnection, function() {
      navigator.webkitGetUserMedia(constraints,
        addStreamToTheFirstConnectionAndNegotiate, printGetUserMediaError);
      // Only the first connection is sending here.
      waitForVideo('remote-view-2');
    });
  }

  // First makes a call between pc1 and pc2, and then makes a call between pc3
  // and pc4. The stream sent from pc3 to pc4 is the stream received on pc1.
  // The stream sent from pc4 to pc3 is cloned from the stream received on pc2
  // to test that cloning of remote video tracks works as intended.
  function callAndForwardRemoteStream(constraints) {
    createConnections(null);
    navigator.webkitGetUserMedia(constraints,
                                 addStreamToBothConnectionsAndNegotiate,
                                 printGetUserMediaError);
    var gotRemoteStream1 = false;
    var gotRemoteStream2 = false;

    var onRemoteStream1 = function() {
      gotRemoteStream1 = true;
      maybeCallEstablished();
    }

    var onRemoteStream2 = function() {
      gotRemoteStream2 = true;
      maybeCallEstablished();
    }

    var maybeCallEstablished = function() {
      if (gotRemoteStream1 && gotRemoteStream2) {
        onCallEstablished();
      }
    }

    var onCallEstablished = function() {
      var thirdConnection = createConnection(null, 'remote-view-3');
      thirdConnection.addStream(gRemoteStreams['remote-view-1']);

      var fourthConnection = createConnection(null, 'remote-view-4');
      fourthConnection.addStream(gRemoteStreams['remote-view-2'].clone());

      negotiateBetween(thirdConnection, fourthConnection);

      waitForVideo('remote-view-3');
      waitForVideo('remote-view-4');
    }

    // Do the forwarding after we have received video.
    detectVideoPlaying('remote-view-1', onRemoteStream1);
    detectVideoPlaying('remote-view-2', onRemoteStream2);
  }

  // Test that we can set up a call with an audio and a video track and
  // simulate that the remote peer doesn't support MSID.
  function callWithoutMsidAndBundle() {
    createConnections(null);
    transformSdp = removeBundle;
    transformRemoteSdp = removeMsid;
    gTestWithoutMsid = true;
    navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

  // Test that we can't set up a call with an unsupported video codec.
  function negotiateUnsupportedVideoCodec() {
    createConnections(null);
    transformSdp = removeVideoCodec;

    onLocalDescriptionError = function(error) {
      var expectedMsg = 'Failed to set local offer sdp:' +
          ' Session error code: ERROR_CONTENT. Session error description:' +
              ' Failed to set video receive codecs..';
      assertEquals(expectedMsg, error);
      reportTestSuccess();
    };
    navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
  }

  // Test that we can't set up a call if one peer does not support encryption.
  function negotiateNonCryptoCall() {
    createConnections(null);
    transformSdp = removeCrypto;
    onLocalDescriptionError = function(error) {
      var expectedMsg = 'Failed to set local offer sdp:' +
          ' Called with SDP without DTLS fingerprint.';

      assertEquals(expectedMsg, error);
      reportTestSuccess();
    };
    navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
  }

  // Test that we can negotiate a call with an SDP offer that includes a
  // b=AS:XX line to control audio and video bandwidth.
  function negotiateOfferWithBLine() {
    createConnections(null);
    transformSdp = addBandwithControl;
    navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

  // Test that we can set up a call with legacy settings.
  function callWithLegacySdp() {
    transformSdp = function(sdp) {
      return removeBundle(useGice(useExternalSdes(sdp)));
    };
    transformCandidate = addGiceCredsToCandidate;
    createConnections({
      'mandatory': {'RtpDataChannels': true, 'DtlsSrtpKeyAgreement': false}
    });
    setupDataChannel({reliable: false});
    navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

  // Test only a data channel.
  function callWithDataOnly() {
    createConnections({optional:[{RtpDataChannels: true}]});
    setupDataChannel({reliable: false});
    negotiate();
  }

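  // Test an SCTP-based data channel only, with no media.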
  function callWithSctpDataOnly() {
    createConnections({optional: [{DtlsSrtpKeyAgreement: true}]});
    setupSctpDataChannel({reliable: true});
    negotiate();
  }

  // Test call with audio, video and a data channel.
  function callWithDataAndMedia() {
    createConnections({optional:[{RtpDataChannels: true}]});
    setupDataChannel({reliable: false});
    navigator.webkitGetUserMedia({audio: true, video: true},
      addStreamToBothConnectionsAndNegotiate,
      printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

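  // Test call with audio, video and an SCTP-based data channel.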
  function callWithSctpDataAndMedia() {
    createConnections({optional: [{DtlsSrtpKeyAgreement: true}]});
    setupSctpDataChannel({reliable: true});
    navigator.webkitGetUserMedia({audio: true, video: true},
      addStreamToBothConnectionsAndNegotiate,
      printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }


  // Test call with a data channel and later add audio and video.
  function callWithDataAndLaterAddMedia() {
    createConnections({optional:[{RtpDataChannels: true}]});
    setupDataChannel({reliable: false});
    negotiate();

    // Set an event handler for when the data channel has been closed.
    setAllEventsOccuredHandler(function() {
      // When the video is flowing the test is done.
      setAllEventsOccuredHandler(reportTestSuccess);
      navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
      waitForVideo('remote-view-1');
      waitForVideo('remote-view-2');
    });
  }

  // Test that we can set up a call and send DTMF.
  function callAndSendDtmf(tones) {
    createConnections(null);
    navigator.webkitGetUserMedia({audio: true, video: true},
      addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    var onCallEstablished = function() {
      // Send DTMF tones.
      var localAudioTrack = gLocalStream.getAudioTracks()[0];
      var dtmfSender = gFirstConnection.createDTMFSender(localAudioTrack);
      dtmfSender.ontonechange = onToneChange;
      dtmfSender.insertDTMF(tones);
      // Wait for the DTMF tones callback.
      addExpectedEvent();
      var waitDtmf = setInterval(function() {
        if (gSentTones == tones) {
          clearInterval(waitDtmf);
          eventOccured();
        }
      }, 100);
    }

    // Do the DTMF test after we have received video.
    detectVideoPlaying('remote-view-2', onCallEstablished);
  }

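  // Enables or disables the remote video track received on |peerConnection|.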
  function enableRemoteVideo(peerConnection, enabled) {
    var remoteStream = peerConnection.getRemoteStreams()[0];
    var remoteVideoTrack = remoteStream.getVideoTracks()[0];
    remoteVideoTrack.enabled = enabled;
  }

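  // Enables or disables the remote audio track received on |peerConnection|.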
  function enableRemoteAudio(peerConnection, enabled) {
    var remoteStream = peerConnection.getRemoteStreams()[0];
    var remoteAudioTrack = remoteStream.getAudioTracks()[0];
    remoteAudioTrack.enabled = enabled;
  }

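  // Sets up a call and verifies that audio is playing by sampling audio
  // levels on |gSecondConnection|.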
  function callAndEnsureAudioIsPlaying(beLenient, constraints) {
    createConnections(null);
    navigator.webkitGetUserMedia(constraints,
      addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);

    // Wait until we have gathered samples and can conclude if audio is playing.
    addExpectedEvent();
    var onCallEstablished = function() {
      // Gather 50 samples per second for 2 seconds.
      gatherAudioLevelSamples(gSecondConnection, 100, 50, function(samples) {
        verifyAudioIsPlaying(samples, beLenient);
        eventOccured();
      });
    };

    waitForConnectionToStabilize(gFirstConnection, onCallEstablished);
  }

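  // Sets up a call with audio playing, mutes the remote audio track and
  // verifies that the received audio goes silent.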
  function callAndEnsureAudioTrackMutingWorks(beLenient) {
    callAndEnsureAudioIsPlaying(beLenient, {audio: true, video: true});
    setAllEventsOccuredHandler(function() {
      // Call is up, now mute the track and check everything goes silent (give
      // it a small delay though, we don't expect it to happen instantly).
      enableRemoteAudio(gSecondConnection, false);

      setTimeout(function() {
        gatherAudioLevelSamples(gSecondConnection, 100, 50, function(samples) {
          verifyIsSilent(samples);
          reportTestSuccess();
        });
      }, 500);
    });
  }

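  // Sets up a call with audio playing, mutes and then unmutes the remote
  // audio track, and verifies that audio comes back after the unmute.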
  function callAndEnsureAudioTrackUnmutingWorks(beLenient) {
    callAndEnsureAudioIsPlaying(beLenient, {audio: true, video: true});
    setAllEventsOccuredHandler(function() {
      // Mute, wait a while, unmute, verify audio gets back up.
      // (Also, ensure video muting doesn't affect audio).
      enableRemoteAudio(gSecondConnection, false);
      enableRemoteVideo(gSecondConnection, false);

      setTimeout(function() {
        enableRemoteAudio(gSecondConnection, true);
      }, 500);

      setTimeout(function() {
        // Sample for four seconds here; it can take a bit of time for audio to
        // get back up after the unmute.
        gatherAudioLevelSamples(gSecondConnection, 200, 50, function(samples) {
          verifyAudioIsPlaying(samples, beLenient);
          reportTestSuccess();
        });
      }, 1500);
    });
  }

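  // Sets up a call, mutes the remote video track and verifies that video
  // stops playing, then unmutes it and verifies that video plays again.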
  function callAndEnsureVideoTrackMutingWorks() {
    createConnections(null);
    navigator.webkitGetUserMedia({audio: true, video: true},
      addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);

    addExpectedEvent();
    detectVideoPlaying('remote-view-2', function() {
      // Disable the receiver's remote media stream. Video should stop.
      // (Also, ensure muting audio doesn't affect video).
      enableRemoteVideo(gSecondConnection, false);
      enableRemoteAudio(gSecondConnection, false);

      detectVideoStopped('remote-view-2', function() {
        // Video has stopped: unmute and succeed if it starts playing again.
        enableRemoteVideo(gSecondConnection, true);
        detectVideoPlaying('remote-view-2', eventOccured);
      })
    });
  }

  // Test call with a new Video MediaStream that has been created based on a
  // stream generated by getUserMedia.
  function callWithNewVideoMediaStream() {
    createConnections(null);
    navigator.webkitGetUserMedia({audio: true, video: true},
        createNewVideoStreamAndAddToBothConnections, printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

  // Test call with a new Video MediaStream that has been created based on a
  // stream generated by getUserMedia. When video is flowing, an audio track
  // is added to the sent stream and the video track is removed. This tests
  // that adding and removing remote tracks on an existing MediaStream works.
  function callWithNewVideoMediaStreamLaterSwitchToAudio() {
    createConnections(null);
    navigator.webkitGetUserMedia({audio: true, video: true},
        createNewVideoStreamAndAddToBothConnections, printGetUserMediaError);

    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');

    // Set an event handler for when video is playing.
    setAllEventsOccuredHandler(function() {
      // Add an audio track to the local stream, remove the video track and
      // then renegotiate. But first, set up the expectations.
      var local_stream = gFirstConnection.getLocalStreams()[0];

      var remote_stream_1 = gFirstConnection.getRemoteStreams()[0];
      // Add an expected event that onaddtrack will be called on the remote
      // MediaStream received on gFirstConnection when the audio track is
      // received.
      addExpectedEvent();
      remote_stream_1.onaddtrack = function(){
        assertEquals(remote_stream_1.getAudioTracks()[0].id,
                     local_stream.getAudioTracks()[0].id);
        eventOccured();
      }

      // Add an expectation that the received video track is removed from
      // gFirstConnection.
      addExpectedEvent();
      remote_stream_1.onremovetrack = function() {
        eventOccured();
      }

      // Add an expected event that onaddtrack will be called on the remote
      // MediaStream received on gSecondConnection when the audio track is
      // received.
      var remote_stream_2 = gSecondConnection.getRemoteStreams()[0];
      addExpectedEvent();
      remote_stream_2.onaddtrack = function() {
        assertEquals(remote_stream_2.getAudioTracks()[0].id,
                     local_stream.getAudioTracks()[0].id);
        eventOccured();
      }

      // Add an expectation that the received video track is removed from
      // gSecondConnection.
      addExpectedEvent();
      remote_stream_2.onremovetrack = function() {
        eventOccured();
      }
      // When all the above events have occurred, the test passes.
      setAllEventsOccuredHandler(reportTestSuccess);

      local_stream.addTrack(gLocalStream.getAudioTracks()[0]);
      local_stream.removeTrack(local_stream.getVideoTracks()[0]);
      negotiate();
    });
  }

  // This function is used for setting up a test that:
  // 1. Creates a data channel on |gFirstConnection| and sends data to
  //    |gSecondConnection|.
  // 2. When data is received on |gSecondConnection| a message
  //    is sent to |gFirstConnection|.
  // 3. When data is received on |gFirstConnection|, the data
  //    channel is closed. The test passes when the state transition completes.
  function setupDataChannel(params) {
    var sendDataString = "send some text on a data channel.";
    var firstDataChannel = gFirstConnection.createDataChannel(
        "sendDataChannel", params);
    assertEquals('connecting', firstDataChannel.readyState);

    // When |firstDataChannel| transitions to the open state, send a text
    // string.
    firstDataChannel.onopen = function() {
      assertEquals('open', firstDataChannel.readyState);
      if (firstDataChannel.reliable) {
        firstDataChannel.send(sendDataString);
      } else {
        sendDataRepeatedlyUntilClosed(firstDataChannel);
      }
    }

    // When |firstDataChannel| receives a message, close the channel and
    // initiate a new offer/answer exchange to complete the closure.
    firstDataChannel.onmessage = function(event) {
      assertEquals(event.data, sendDataString);
      firstDataChannel.close();
      negotiate();
    }

    // When |firstDataChannel| transitions to the closed state, the test
    // passes.
    addExpectedEvent();
    firstDataChannel.onclose = function() {
      assertEquals('closed', firstDataChannel.readyState);
      eventOccured();
    }

    // Event handler for when |gSecondConnection| receives a new data channel.
    gSecondConnection.ondatachannel = function (event) {
      var secondDataChannel = event.channel;

      // When |secondDataChannel| receives a message, send a message back.
      secondDataChannel.onmessage = function(event) {
        assertEquals(event.data, sendDataString);
        console.log("gSecondConnection received data");
        if (secondDataChannel.reliable) {
          // If we're reliable we will just send one message over the channel,
          // and therefore channel one's message handler cannot have shut us
          // down already.
          assertEquals('open', secondDataChannel.readyState);
          secondDataChannel.send(sendDataString);
        } else {
          // If unreliable, this could be one in a series of messages and it
          // is possible we already replied (which will close our channel).
          sendDataRepeatedlyUntilClosed(secondDataChannel);
        }
      }
    }

    // Sends |sendDataString| on |dataChannel| every 200ms as long as
    // |dataChannel| is open.
    function sendDataRepeatedlyUntilClosed(dataChannel) {
      var sendTimer = setInterval(function() {
        if (dataChannel.readyState == 'open')
          dataChannel.send(sendDataString);
        else
          clearInterval(sendTimer);
      }, 200);
    }
  }

  // SCTP data channel setup is slightly different from RTP-based channels.
  // Due to a bug in libjingle we can't send data immediately after the channel
  // becomes open, so for SCTP we send data from the second channel once the
  // ondatachannel event is received. The data flow is therefore 2 -> 1 -> 2.
  function setupSctpDataChannel(params) {
    var sendDataString = "send some text on a data channel.";
    var firstDataChannel = gFirstConnection.createDataChannel(
        "sendDataChannel", params);
    assertEquals('connecting', firstDataChannel.readyState);

    // When |firstDataChannel| transitions to the open state, verify the state.
    firstDataChannel.onopen = function() {
      assertEquals('open', firstDataChannel.readyState);
    }

    // When |firstDataChannel| receives a message, send it back so the data
    // flows 2 -> 1 -> 2.
    firstDataChannel.onmessage = function(event) {
      assertEquals('open', firstDataChannel.readyState);
      assertEquals(event.data, sendDataString);
      firstDataChannel.send(sendDataString);
    }


    // Event handler for when |gSecondConnection| receives a new data channel.
    gSecondConnection.ondatachannel = function (event) {
      var secondDataChannel = event.channel;
      secondDataChannel.onopen = function() {
        secondDataChannel.send(sendDataString);
      }

      // When |secondDataChannel| receives a message, close the channel and
      // initiate a new offer/answer exchange to complete the closure.
      secondDataChannel.onmessage = function(event) {
        assertEquals(event.data, sendDataString);
        assertEquals('open', secondDataChannel.readyState);
        secondDataChannel.close();
        negotiate();
      }

      // When |secondDataChannel| transitions to the closed state, the test
      // passes.
      addExpectedEvent();
      secondDataChannel.onclose = function() {
        assertEquals('closed', secondDataChannel.readyState);
        eventOccured();
      }
    }
  }

  // Test call with a stream that has been created by getUserMedia: clone the
  // stream and send both the original and the clone over the same peer
  // connection.
  function addTwoMediaStreamsToOneConnection() {
    createConnections(null);
    navigator.webkitGetUserMedia({audio: true, video: true},
        CloneStreamAndAddTwoStreamstoOneConnection, printGetUserMediaError);
  }

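  // Called by the DTMF sender for each tone that has been played; accumulates
  // the played tones in |gSentTones|.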
  function onToneChange(tone) {
    gSentTones += tone.tone;
  }

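  // Creates |gFirstConnection| and |gSecondConnection|, rendering their
  // remote streams in 'remote-view-1' and 'remote-view-2' respectively.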
  function createConnections(constraints) {
    gFirstConnection = createConnection(constraints, 'remote-view-1');
    assertEquals('stable', gFirstConnection.signalingState);

    gSecondConnection = createConnection(constraints, 'remote-view-2');
    assertEquals('stable', gSecondConnection.signalingState);
  }

  function createConnection(constraints, remoteView) {
    var pc = new webkitRTCPeerConnection(null, constraints);
    pc.onaddstream = function(event) {
      onRemoteStream(event, remoteView);
    }
    return pc;
  }

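  // Shows |localStream| in the local preview video element and stores it in
  // |gLocalStream|.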
  function displayAndRemember(localStream) {
    var localStreamUrl = URL.createObjectURL(localStream);
    $('local-view').src = localStreamUrl;

    gLocalStream = localStream;
  }

  // Called if getUserMedia fails.
  function printGetUserMediaError(error) {
    var message = 'getUserMedia request unexpectedly failed:';
    if (error.constraintName)
      message += ' could not satisfy constraint ' + error.constraintName;
    else
      message += ' devices not working/user denied access.';
    failTest(message);
  }

  // Called if getUserMedia succeeds and we want to send from both connections.
  function addStreamToBothConnectionsAndNegotiate(localStream) {
    displayAndRemember(localStream);
    gFirstConnection.addStream(localStream);
    gSecondConnection.addStream(localStream);
    negotiate();
  }

  // Called if getUserMedia succeeds when we want to send from one connection.
  function addStreamToTheFirstConnectionAndNegotiate(localStream) {
    displayAndRemember(localStream);
    gFirstConnection.addStream(localStream);
    negotiate();
  }

  function verifyHasOneAudioAndVideoTrack(stream) {
    assertEquals(1, stream.getAudioTracks().length);
    assertEquals(1, stream.getVideoTracks().length);
  }

  // Called if getUserMedia succeeds; clones the stream and sends the original
  // and the cloned stream from one peer connection.
  function CloneStreamAndAddTwoStreamstoOneConnection(localStream) {
    displayAndRemember(localStream);

    var clonedStream = null;
    if (typeof localStream.clone === "function") {
      clonedStream = localStream.clone();
    } else {
      clonedStream = new webkitMediaStream(localStream);
    }

    gFirstConnection.addStream(localStream);
    gFirstConnection.addStream(clonedStream);

    // Verify the local streams are correct.
    assertEquals(2, gFirstConnection.getLocalStreams().length);
    verifyHasOneAudioAndVideoTrack(gFirstConnection.getLocalStreams()[0]);
    verifyHasOneAudioAndVideoTrack(gFirstConnection.getLocalStreams()[1]);

    // The remote side should receive two streams. After that, verify the
    // remote side has the correct number of streams and tracks.
    addExpectedEvent();
    addExpectedEvent();
    gSecondConnection.onaddstream = function(event) {
      eventOccured();
    }
    setAllEventsOccuredHandler(function() {
      // Negotiation complete, verify remote streams on the receiving side.
      assertEquals(2, gSecondConnection.getRemoteStreams().length);
      verifyHasOneAudioAndVideoTrack(gSecondConnection.getRemoteStreams()[0]);
      verifyHasOneAudioAndVideoTrack(gSecondConnection.getRemoteStreams()[1]);

      reportTestSuccess();
    });

    negotiate();
  }

  // Called if getUserMedia succeeds when we want to send a modified
  // MediaStream. A new MediaStream is created and the video track from
  // |localStream| is added.
  function createNewVideoStreamAndAddToBothConnections(localStream) {
    displayAndRemember(localStream);
    var new_stream = new webkitMediaStream();
    new_stream.addTrack(localStream.getVideoTracks()[0]);
    gFirstConnection.addStream(new_stream);
    gSecondConnection.addStream(new_stream);
    negotiate();
  }

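  // Runs an offer/answer exchange between the two default connections.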
  function negotiate() {
    negotiateBetween(gFirstConnection, gSecondConnection);
  }

  function negotiateBetween(caller, callee) {
    console.log("Negotiating call...");
    // Not stable = negotiation is ongoing. The behavior of re-negotiating while
    // a negotiation is ongoing is more or less undefined, so avoid this.
    if (caller.signalingState != 'stable' || callee.signalingState != 'stable')
      throw 'You can only negotiate when the connection is stable!';

    connectOnIceCandidate(caller, callee);

    caller.createOffer(
        function (offer) {
          onOfferCreated(offer, caller, callee);
        });
  }

  function onOfferCreated(offer, caller, callee) {
    offer.sdp = maybeForceIsac16K(transformSdp(offer.sdp));
    caller.setLocalDescription(offer, function() {
      assertEquals('have-local-offer', caller.signalingState);
      receiveOffer(offer.sdp, caller, callee);
    }, onLocalDescriptionError);
  }

  function receiveOffer(offerSdp, caller, callee) {
    console.log("Receiving offer...");
    offerSdp = transformRemoteSdp(offerSdp);

    var parsedOffer = new RTCSessionDescription({ type: 'offer',
                                                  sdp: offerSdp });
    callee.setRemoteDescription(parsedOffer, function() {},
                                onRemoteDescriptionError);
    callee.createAnswer(function (answer) {
                          onAnswerCreated(answer, caller, callee);
                        });
    assertEquals('have-remote-offer', callee.signalingState);
  }

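  // SDP transform: strips MSID-related lines (msid-semantic, mid and ssrc) to
  // emulate a remote peer that doesn't support MSID.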
  function removeMsid(offerSdp) {
    offerSdp = offerSdp.replace(/a=msid-semantic.*\r\n/g, '');
    offerSdp = offerSdp.replace('a=mid:audio\r\n', '');
    offerSdp = offerSdp.replace('a=mid:video\r\n', '');
    offerSdp = offerSdp.replace(/a=ssrc.*\r\n/g, '');
    return offerSdp;
  }

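  // SDP transform: renames the VP8 codec so the offer contains no supported
  // video codec.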
  function removeVideoCodec(offerSdp) {
    offerSdp = offerSdp.replace('a=rtpmap:100 VP8/90000\r\n',
                                'a=rtpmap:100 XVP8/90000\r\n');
    return offerSdp;
  }

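  // SDP transform: invalidates the SDES crypto lines and strips the DTLS
  // fingerprint.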
  function removeCrypto(offerSdp) {
    offerSdp = offerSdp.replace(/a=crypto.*\r\n/g, 'a=Xcrypto\r\n');
    offerSdp = offerSdp.replace(/a=fingerprint.*\r\n/g, '');
    return offerSdp;
  }

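  // SDP transform: adds b=AS bandwidth lines to the audio and video sections.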
  function addBandwithControl(offerSdp) {
    offerSdp = offerSdp.replace('a=mid:audio\r\n', 'a=mid:audio\r\n'+
                                'b=AS:16\r\n');
    offerSdp = offerSdp.replace('a=mid:video\r\n', 'a=mid:video\r\n'+
                                'b=AS:512\r\n');
    return offerSdp;
  }

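  // SDP transform: removes the BUNDLE group line.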
  function removeBundle(sdp) {
    return sdp.replace(/a=group:BUNDLE .*\r\n/g, '');
  }

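  // SDP transform: advertises google-ice and replaces the ICE ufrag/pwd with
  // the external GICE credentials.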
  function useGice(sdp) {
    sdp = sdp.replace(/t=.*\r\n/g, function(subString) {
      return subString + 'a=ice-options:google-ice\r\n';
    });
    sdp = sdp.replace(/a=ice-ufrag:.*\r\n/g,
                      'a=ice-ufrag:' + EXTERNAL_GICE_UFRAG + '\r\n');
    sdp = sdp.replace(/a=ice-pwd:.*\r\n/g,
                      'a=ice-pwd:' + EXTERNAL_GICE_PWD + '\r\n');
    return sdp;
  }

  function useExternalSdes(sdp) {
    // Remove current crypto specification.
    sdp = sdp.replace(/a=crypto.*\r\n/g, '');
    sdp = sdp.replace(/a=fingerprint.*\r\n/g, '');
    // Add external crypto.  This is not compatible with |removeMsid|.
    sdp = sdp.replace(/a=mid:(\w+)\r\n/g, function(subString, group) {
      return subString + EXTERNAL_SDES_LINES[group] + '\r\n';
    });
    return sdp;
  }

  function onAnswerCreated(answer, caller, callee) {
    answer.sdp = maybeForceIsac16K(transformSdp(answer.sdp));
    callee.setLocalDescription(answer,
                               function () {
                                 assertEquals('stable', callee.signalingState);
                               },
                               onLocalDescriptionError);
    receiveAnswer(answer.sdp, caller);
  }

  function receiveAnswer(answerSdp, caller) {
    console.log("Receiving answer...");
    answerSdp = transformRemoteSdp(answerSdp);
    var parsedAnswer = new RTCSessionDescription({ type: 'answer',
                                                   sdp: answerSdp });
    caller.setRemoteDescription(parsedAnswer,
                                function() {
                                  assertEquals('stable', caller.signalingState);
                                },
                                onRemoteDescriptionError);
  }

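  // Wires up ICE candidate exchange between |caller| and |callee|.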
  function connectOnIceCandidate(caller, callee) {
    caller.onicecandidate = function(event) { onIceCandidate(event, callee); }
    callee.onicecandidate = function(event) { onIceCandidate(event, caller); }
  }

  function addGiceCredsToCandidate(candidate) {
    return candidate.trimRight() +
        ' username ' + EXTERNAL_GICE_UFRAG + ' password ' + EXTERNAL_GICE_PWD;
  }

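  // Applies |transformCandidate| to a gathered candidate and hands it to the
  // other connection.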
  function onIceCandidate(event, target) {
    if (event.candidate) {
      var candidate = new RTCIceCandidate(event.candidate);
      candidate.candidate = transformCandidate(candidate.candidate);
      target.addIceCandidate(candidate);
    }
  }

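  // Renders the received remote stream in the video element named by |target|
  // and remembers it in |gRemoteStreams|.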
  function onRemoteStream(e, target) {
    console.log("Receiving remote stream...");
    if (gTestWithoutMsid && e.stream.id != "default") {
      failTest('a default remote stream was expected but instead ' +
          e.stream.id + ' was received.');
    }
    gRemoteStreams[target] = e.stream;
    var remoteStreamUrl = URL.createObjectURL(e.stream);
    var remoteVideo = $(target);
    remoteVideo.src = remoteStreamUrl;
  }

  </script>
</head>
<body>
  <table border="0">
    <tr>
      <td>Local Preview</td>
      <td>Remote Stream for Connection 1</td>
      <td>Remote Stream for Connection 2</td>
      <td>Remote Stream for Connection 3</td>
      <td>Remote Stream for Connection 4</td>
    </tr>
    <tr>
      <td><video width="320" height="240" id="local-view"
          autoplay="autoplay"></video></td>
      <td><video width="320" height="240" id="remote-view-1"
          autoplay="autoplay"></video></td>
      <td><video width="320" height="240" id="remote-view-2"
          autoplay="autoplay"></video></td>
      <td><video width="320" height="240" id="remote-view-3"
          autoplay="autoplay"></video></td>
      <td><video width="320" height="240" id="remote-view-4"
          autoplay="autoplay"></video></td>
      <!-- Canvases are named after their corresponding video elements. -->
      <td><canvas width="320" height="240" id="remote-view-1-canvas"
          style="display:none"></canvas></td>
      <td><canvas width="320" height="240" id="remote-view-2-canvas"
          style="display:none"></canvas></td>
      <td><canvas width="320" height="240" id="remote-view-3-canvas"
          style="display:none"></canvas></td>
      <td><canvas width="320" height="240" id="remote-view-4-canvas"
          style="display:none"></canvas></td>
    </tr>
  </table>
</body>
</html>