<html>
<head>
<script type="text/javascript" src="webrtc_test_utilities.js"></script>
<script type="text/javascript">
$ = function(id) {
return document.getElementById(id);
};
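// Global state shared by the test cases below.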
var gFirstConnection = null;
var gSecondConnection = null;
var gTestWithoutMsidAndBundle = false;
var gLocalStream = null;
var gSentTones = '';
setAllEventsOccuredHandler(function() {
document.title = 'OK';
});
// Test that we can set up a call with an audio and a video track.
function call(constraints) {
createConnections(null);
navigator.webkitGetUserMedia(constraints,
addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
waitForVideo('remote-view-1');
waitForVideo('remote-view-2');
}
// First negotiates a call without streams on either connection, then adds a
// stream to peer connection 1, which gets sent to peer connection 2. We must
// wait for the first negotiation to complete before starting the second one,
// which is why we wait until the connection is stable before re-negotiating.
function callEmptyThenAddOneStreamAndRenegotiate(constraints) {
createConnections(null);
negotiate();
waitForConnectionToStabilize(gFirstConnection);
navigator.webkitGetUserMedia(constraints,
addStreamToTheFirstConnectionAndNegotiate, printGetUserMediaError);
// Only the first connection is sending here.
waitForVideo('remote-view-2');
}
// Test that we can set up a call with an audio and a video track and
// simulate that the remote peer doesn't support MSID.
function callWithoutMsidAndBundle() {
createConnections(null);
gTestWithoutMsidAndBundle = true;
navigator.webkitGetUserMedia({audio:true, video:true},
addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
waitForVideo('remote-view-1');
waitForVideo('remote-view-2');
}
// Test only a data channel.
function callWithDataOnly() {
createConnections({optional:[{RtpDataChannels: true}]});
setupDataChannel();
negotiate();
}
// Test call with audio, video and a data channel.
function callWithDataAndMedia() {
createConnections({optional:[{RtpDataChannels: true}]});
setupDataChannel();
navigator.webkitGetUserMedia({audio:true, video:true},
addStreamToBothConnectionsAndNegotiate,
printGetUserMediaError);
waitForVideo('remote-view-1');
waitForVideo('remote-view-2');
}
// Test call with a data channel and later add audio and video.
function callWithDataAndLaterAddMedia() {
createConnections({optional:[{RtpDataChannels: true}]});
setupDataChannel();
negotiate();
// Set an event handler for when the data channel has been closed.
setAllEventsOccuredHandler(function() {
// When the video is flowing, the test is done.
setAllEventsOccuredHandler(function() {
document.title = 'OK';
});
navigator.webkitGetUserMedia({audio:true, video:true},
addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
waitForVideo('remote-view-1');
waitForVideo('remote-view-2');
});
}
// Test that we can set up a call and send DTMF.
function callAndSendDtmf(tones) {
createConnections(null);
navigator.webkitGetUserMedia({audio:true, video:true},
addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
var onCallEstablished = function() {
// Send DTMF tones.
var localAudioTrack = gLocalStream.getAudioTracks()[0];
var dtmfSender = gFirstConnection.createDTMFSender(localAudioTrack);
dtmfSender.ontonechange = onToneChange;
dtmfSender.insertDTMF(tones);
// Wait for the DTMF tones callback.
document.title = 'Waiting for dtmf...';
addExpectedEvent();
var waitDtmf = setInterval(function() {
if (gSentTones == tones) {
clearInterval(waitDtmf);
eventOccured();
}
}, 100);
}
// Do the DTMF test after we have received video.
detectVideoIn('remote-view-2', onCallEstablished);
}
// Test a call with a new video MediaStream created from a stream generated
// by getUserMedia.
function callWithNewVideoMediaStream() {
createConnections(null);
navigator.webkitGetUserMedia({audio:true, video:true},
createNewVideoStreamAndAddToBothConnections, printGetUserMediaError);
waitForVideo('remote-view-1');
waitForVideo('remote-view-2');
}
// Test a call with a new video MediaStream created from a stream generated
// by getUserMedia. When video is flowing, an audio track is added to the sent
// stream and the video track is removed. This tests that adding and removing
// remote tracks on an existing MediaStream works.
function callWithNewVideoMediaStreamLaterSwitchToAudio() {
createConnections(null);
navigator.webkitGetUserMedia({audio:true, video:true},
createNewVideoStreamAndAddToBothConnections, printGetUserMediaError);
waitForVideo('remote-view-1');
waitForVideo('remote-view-2');
// Set an event handler for when video is playing.
setAllEventsOccuredHandler(function() {
// Add an audio track to the local stream, remove the video track and then
// renegotiate. But first, set up the expectations.
var local_stream = gFirstConnection.getLocalStreams()[0];
var remote_stream_1 = gFirstConnection.getRemoteStreams()[0];
// Add an expected event that onaddtrack will be called on the remote
// mediastream received on gFirstConnection when the audio track is
// received.
addExpectedEvent();
remote_stream_1.onaddtrack = function() {
expectEquals(remote_stream_1.getAudioTracks()[0].id,
local_stream.getAudioTracks()[0].id);
eventOccured();
}
// Add an expectation that the received video track is removed from
// gFirstConnection.
addExpectedEvent();
remote_stream_1.onremovetrack = function() {
eventOccured();
}
// Add an expected event that onaddtrack will be called on the remote
// mediastream received on gSecondConnection when the audio track is
// received.
var remote_stream_2 = gSecondConnection.getRemoteStreams()[0];
addExpectedEvent();
remote_stream_2.onaddtrack = function() {
expectEquals(remote_stream_2.getAudioTracks()[0].id,
local_stream.getAudioTracks()[0].id);
eventOccured();
}
// Add an expectation that the received video track is removed from
// gSecondConnection.
addExpectedEvent();
remote_stream_2.onremovetrack = function() {
eventOccured();
}
// When all the above events have occurred, the test passes.
setAllEventsOccuredHandler(function() { document.title = 'OK'; });
local_stream.addTrack(gLocalStream.getAudioTracks()[0]);
local_stream.removeTrack(local_stream.getVideoTracks()[0]);
negotiate();
}); // End of setAllEventsOccuredHandler.
}
// This function sets up a test that:
// 1. Creates a data channel on |gFirstConnection| and sends data to
// |gSecondConnection|.
// 2. When data is received on |gSecondConnection|, a message
// is sent back to |gFirstConnection|.
// 3. When data is received on |gFirstConnection|, the data
// channel is closed. The test passes when the state transition completes.
function setupDataChannel() {
var sendDataString = "send some text on a data channel.";
var firstDataChannel = gFirstConnection.createDataChannel(
"sendDataChannel", {reliable : false});
expectEquals('connecting', firstDataChannel.readyState);
// When |firstDataChannel| transitions to the open state, send a text string.
firstDataChannel.onopen = function() {
expectEquals('open', firstDataChannel.readyState);
firstDataChannel.send(sendDataString);
}
// When |firstDataChannel| receives a message, close the channel and
// initiate a new offer/answer exchange to complete the closure.
firstDataChannel.onmessage = function(event) {
expectEquals(event.data, sendDataString);
firstDataChannel.close();
negotiate();
}
// When |firstDataChannel| transitions to the closed state, the test passes.
addExpectedEvent();
firstDataChannel.onclose = function() {
expectEquals('closed', firstDataChannel.readyState);
eventOccured();
}
// Event handler for when |gSecondConnection| receives a new data channel.
gSecondConnection.ondatachannel = function (event) {
var secondDataChannel = event.channel;
// When |secondDataChannel| receives a message, send a message back.
secondDataChannel.onmessage = function(event) {
expectEquals(event.data, sendDataString);
expectEquals('open', secondDataChannel.readyState);
secondDataChannel.send(sendDataString);
}
}
}
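// DTMF tone-change callback; accumulates the tones that have been sent.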
function onToneChange(tone) {
gSentTones += tone.tone;
document.title = gSentTones;
}
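// Creates the two peer connections and hooks up their ICE candidate and
// remote stream handlers.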
function createConnections(constraints) {
gFirstConnection = new webkitRTCPeerConnection(null, constraints);
gFirstConnection.onicecandidate = onIceCandidateToFirst;
gFirstConnection.onaddstream = function(event) {
onRemoteStream(event, 'remote-view-1');
}
expectEquals('stable', gFirstConnection.signalingState);
gSecondConnection = new webkitRTCPeerConnection(null, constraints);
gSecondConnection.onicecandidate = onIceCandidateToSecond;
gSecondConnection.onaddstream = function(event) {
onRemoteStream(event, 'remote-view-2');
}
}
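// Shows |localStream| in the local preview video element and remembers it
// in |gLocalStream|.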
function displayAndRemember(localStream) {
var localStreamUrl = webkitURL.createObjectURL(localStream);
$('local-view').src = localStreamUrl;
gLocalStream = localStream;
}
// Called if getUserMedia fails.
function printGetUserMediaError(error) {
document.title = 'getUserMedia request failed with code ' + error.code;
}
// Called if getUserMedia succeeds and we want to send from both connections.
function addStreamToBothConnectionsAndNegotiate(localStream) {
displayAndRemember(localStream);
gFirstConnection.addStream(localStream);
gSecondConnection.addStream(localStream);
negotiate();
}
// Called if getUserMedia succeeds when we want to send from one connection.
function addStreamToTheFirstConnectionAndNegotiate(localStream) {
displayAndRemember(localStream);
gFirstConnection.addStream(localStream);
negotiate();
}
// Called if getUserMedia succeeds when we want to send a modified
// MediaStream. A new MediaStream is created and the video track from
// |localStream| is added.
function createNewVideoStreamAndAddToBothConnections(localStream) {
displayAndRemember(localStream);
var new_stream = new webkitMediaStream();
new_stream.addTrack(localStream.getVideoTracks()[0]);
gFirstConnection.addStream(new_stream);
gSecondConnection.addStream(new_stream);
negotiate();
}
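// Starts an offer/answer exchange by creating an offer on |gFirstConnection|.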
function negotiate() {
// A signaling state other than 'stable' means a negotiation is ongoing. The
// behavior of re-negotiating while a negotiation is ongoing is more or less
// undefined, so avoid it.
if (gFirstConnection.signalingState != 'stable')
throw 'You can only negotiate when the connection is stable!';
gFirstConnection.createOffer(onOfferCreated);
}
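// Sets the created offer as the local description of |gFirstConnection| and
// hands its SDP to the answering side.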
function onOfferCreated(offer) {
gFirstConnection.setLocalDescription(offer);
expectEquals('have-local-offer', gFirstConnection.signalingState);
receiveOffer(offer.sdp);
}
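// Applies the offer SDP as the remote description of |gSecondConnection| and
// creates an answer.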
function receiveOffer(offerSdp) {
if (gTestWithoutMsidAndBundle) {
offerSdp = removeMsidAndBundle(offerSdp);
}
var parsedOffer = new RTCSessionDescription({ type: 'offer',
sdp: offerSdp });
gSecondConnection.setRemoteDescription(parsedOffer);
gSecondConnection.createAnswer(onAnswerCreated);
expectEquals('have-remote-offer', gSecondConnection.signalingState);
}
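// Strips the msid-semantic, BUNDLE group, mid and ssrc lines from the SDP to
// simulate a remote peer without MSID and BUNDLE support.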
function removeMsidAndBundle(offerSdp) {
offerSdp = offerSdp.replace(/a=msid-semantic.*\r\n/g, '');
offerSdp = offerSdp.replace('a=group:BUNDLE audio video\r\n', '');
offerSdp = offerSdp.replace('a=mid:audio\r\n', '');
offerSdp = offerSdp.replace('a=mid:video\r\n', '');
offerSdp = offerSdp.replace(/a=ssrc.*\r\n/g, '');
return offerSdp;
}
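// Sets the created answer as the local description of |gSecondConnection| and
// hands its SDP back to the offering side.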
function onAnswerCreated(answer) {
gSecondConnection.setLocalDescription(answer);
expectEquals('stable', gSecondConnection.signalingState);
receiveAnswer(answer.sdp);
}
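// Applies the answer SDP as the remote description of |gFirstConnection|,
// completing the offer/answer exchange.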
function receiveAnswer(answerSdp) {
if (gTestWithoutMsidAndBundle) {
answerSdp = removeMsidAndBundle(answerSdp);
}
var parsedAnswer = new RTCSessionDescription({ type: 'answer',
sdp: answerSdp });
gFirstConnection.setRemoteDescription(parsedAnswer);
expectEquals('stable', gFirstConnection.signalingState);
}
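// Forwards ICE candidates gathered by |gFirstConnection| to
// |gSecondConnection|.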
function onIceCandidateToFirst(event) {
if (event.candidate) {
var candidate = new RTCIceCandidate(event.candidate);
gSecondConnection.addIceCandidate(candidate);
}
}
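// Forwards ICE candidates gathered by |gSecondConnection| to
// |gFirstConnection|.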
function onIceCandidateToSecond(event) {
if (event.candidate) {
var candidate = new RTCIceCandidate(event.candidate);
gFirstConnection.addIceCandidate(candidate);
}
}
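// Renders a newly received remote stream in the video element named by
// |target|.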
function onRemoteStream(e, target) {
if (gTestWithoutMsidAndBundle && e.stream.id != "default") {
document.title = 'a default remote stream was expected but instead ' +
e.stream.id + ' was received.';
return;
}
var remoteStreamUrl = webkitURL.createObjectURL(e.stream);
var remoteVideo = $(target);
remoteVideo.src = remoteStreamUrl;
}
</script>
</head>
<body>
<table border="0">
<tr>
<td>Local Preview</td>
<td>Remote Stream for Connection 1</td>
<td>Remote Stream for Connection 2</td>
</tr>
<tr>
<td><video width="320" height="240" id="local-view"
autoplay="autoplay"></video></td>
<td><video width="320" height="240" id="remote-view-1"
autoplay="autoplay"></video></td>
<td><video width="320" height="240" id="remote-view-2"
autoplay="autoplay"></video></td>
<!-- Canvases are named after their corresponding video elements. -->
<td><canvas width="320" height="240" id="remote-view-1-canvas"
style="display:none"></canvas></td>
<td><canvas width="320" height="240" id="remote-view-2-canvas">
style="display:none"></canvas></td>
</tr>
</table>
</body>
</html>