diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..5cdbacc --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,6 @@ +WebRTC welcomes patches/pulls for features and bug fixes. + +For contributors external to Google, follow the instructions given in the [Google Individual Contributor License Agreement](https://cla.developers.google.com/about/google-individual). + +In all cases, contributors must sign a contributor license agreement before a contribution can be accepted. Please complete the agreement for an [individual](https://developers.google.com/open-source/cla/individual) or a [corporation](https://developers.google.com/open-source/cla/corporate) as appropriate. + diff --git a/css/landing_page.css b/css/landing_page.css new file mode 100644 index 0000000..9aa913b --- /dev/null +++ b/css/landing_page.css @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. + */ +a { + color: #6fa8dc; + font-weight: 300; + text-decoration: none; +} + +a:hover { + color: #3d85c6; + text-decoration: underline; +} + +a#viewSource { + display: block; + margin: 1.3em 0 0 0; + border-top: 1px solid #999; + padding: 1em 0 0 0; +} + +body { + font-family: 'Roboto', sans-serif; + margin: 0; + padding: 1em; + word-break: break-word; + font-weight: 300; +} + +div#container { + margin: 0 auto 0 auto; + max-width: 40em; + padding: 1em 1.5em 1.3em 1.5em; +} + +h1 { + border-bottom: 1px solid #ccc; + font-family: 'Roboto', sans-serif; + font-weight: 500; + margin: 0 0 0.8em 0; + padding: 0 0 0.2em 0; +} + +h2 { + color: #444; + font-size: 1em; + font-weight: 500; + line-height: 1.2em; + margin: 0 0 0.8em 0; +} + +h3 { + border-top: 1px solid #eee; + color: #666; + font-size: 0.9em; + font-weight: 500; + margin: 20px 0 10px 0; + padding: 10px 0 0 0; + white-space: nowrap; +} + +p { + color: #444; + font-weight: 300; + line-height: 1.6em; +} + +section p:last-of-type { + margin: 0; +} + +section { + border-bottom: 1px solid #eee; + margin: 0 0 30px 0; + padding: 0 0 20px 0; +} + +section:last-of-type { + border-bottom: none; + padding: 0 0 1em 0; +} + +@media screen and (max-width: 650px) { + h1 { + font-size: 24px; + } +} + +@media screen and (max-width: 550px) { + h1 { + font-size: 22px; + } +} + +@media screen and (max-width: 450px) { + h1 { + font-size: 20px; + } +} diff --git a/images/favicon.ico b/images/favicon.ico new file mode 100644 index 0000000..68d5298 Binary files /dev/null and b/images/favicon.ico differ diff --git a/index.html b/index.html new file mode 100644 index 0000000..7973676 --- /dev/null +++ b/index.html @@ -0,0 +1,87 @@ + + + + + + + + + + + + + + + + + WebRTC test pages + + + + + + + +
+ +

WebRTC test pages

+ +
+ +

This is a collection of WebRTC test pages.

+ +

Patches and issues welcome! See + CONTRIBUTING.md + for instructions. The Developer's Guide + for this repo has more information about code style, structure and validation. +

+ +
+ +
+ +

Audio and Video streams

+ +

Iframe apprtc

+ +

Iframe video

+ +

Multiple audio streams

+ +

Multiple peerconnections

+ +

Multiple video devices

+ +

Multiple video streams

+ +

Peer2peer

+ +

Peer2peer iframe

+ +

Peer2peer from video

+ +

Single audio stream

+ +

Single video stream

+ +
+ + github.com/webrtc/test-pages/tree/gh-pages + +
+ + diff --git a/src/audio-and-video/index.html b/src/audio-and-video/index.html new file mode 100644 index 0000000..9283da8 --- /dev/null +++ b/src/audio-and-video/index.html @@ -0,0 +1,45 @@ + + + + + Single Local Preview (Video and Audio) + + + + + + + + + + + + + + +
Local Preview
+ + diff --git a/src/css/main.css b/src/css/main.css new file mode 100644 index 0000000..9eba65c --- /dev/null +++ b/src/css/main.css @@ -0,0 +1,252 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. + */ +.drop-down { + width: 65%; + font-size: 10px; + white-space: nowrap +} + +.constraints { + width: 75%; + height: auto; + position: absolute; + overflow: scroll; + display: none; +} + +.float-left { + float: left; + width: 100%; +} + +.float-clear-left { + float: left; + clear: left; + width: 100%; +} + +.top-border { + border-top: 4px solid grey; +} + +.bottom-border { + border-bottom: 4px solid grey; +} + +#messages { + word-wrap: break-word; + white-space: pre-wrap; + font-size: 0.7em; +} + +#audio-source { + float: left; + width: 50%; +} + +#video-source { + margin-left: 50%; + width: 50%; +} + +#video-res { + width: 30%; +} + +#signal-server { + float: left; + width: 60%; +} + +#pc-server { + width: 98%; + margin-left: 0.1em; + margin-top: 0.1em; +} + +#peer-id-container { + margin-left: 60%; + height: 1.5em; +} + +#peer-id { + margin-top: 0.1em; + width: 7em; +} + +#pc-server-label { + width: 15%; +} + +#pc-server-container { + position: absolute; + margin: 0 0 0 12%; + width: 40%; + overflow: hidden; + height: 1.5em; +} + +#pc-constraints-left { + margin: 0.7em 0 0 0; + width: 60%; +} + +#call { + float: left; + margin: 0.7em 0 0 0; +} + +.float-left { + float: left; + width: 100%; +} + +.float-clear-left { + float: left; + clear: left; + width: 100%; +} + +.small-input { + width: 3em; +} + +.medium-input { + width: 6em; +} + +#screencapture-info { + margin: 1% auto; /* 15% from the top and centered */ + width: 100%; /* Could be more or less, depending on screen size */ +} + +a { + color: lightBlue; + font-weight: 300; + text-decoration: none; +} + +a:hover { + color: blue; + text-decoration: underline; +} + +body { + font-family: 'Roboto', sans-serif; + margin: 0; + padding: 1em; + word-wrap: break-word; +} + +button { + background-color: grey; + border: none; + border-radius: 1px; + color: white; + font-family: 'Roboto', sans-serif; + font-size: 0.8em; + margin: 0 0 1em 0; + padding: 0.2em; +} + +button:hover { + background-color: darkGrey; +} + +button.green { + background: darkGreen; + color: white; +} + +button.green:hover { + background: forestGreen; + color: white; +} + +button.red { + background: darkRed; + color: white; +} + +button.red:hover { + background: fireBrick; +} + +button.pressed { +background-color: black; +} + +div#container { + margin: 0 auto 0 auto; + max-width: 40em; + padding: 0 1.5em 1.3em 1.5em; + z-index: 2; + position: relative; +} + +h2 { + color: black; + font-size: 1em; + font-weight: 700; + line-height: 1.2em; + margin: 0 0 0.8em 0; +} + +div { + background: white; +} + +html { + /* avoid annoying page width change + when moving from the home page.*/ + overflow-y: scroll; +} + +select { + margin: 0 1em 1em 0; + position: relative; + top: -1px; +} + +video { + background: black; + width: 100%; +} + +#log { + float: left; + position: fixed; + overflow: auto; + top: 0; + left: 0; + width: 20%; + padding: 16px; + word-wrap: break-word; + z-index: 1; +} + +@media screen and (max-width: 1200px) { + div#log { + float: none; + width: 100%; + position: inherit; + padding: 0; + } + button { + padding: 0.7em; + } + button:active { + background: black; + } + #pc-server-label { + margin-top: 
5px; + } + #pc-server-container { + margin-top: 5px; + } +} diff --git a/src/iframe-apprtc/index.html b/src/iframe-apprtc/index.html new file mode 100644 index 0000000..26171bc --- /dev/null +++ b/src/iframe-apprtc/index.html @@ -0,0 +1,20 @@ + + + + + AppRTC web app in an IFRAME + + + + AppRTC in an <iframe> element:
+ + + diff --git a/src/iframe-video/index.html b/src/iframe-video/index.html new file mode 100644 index 0000000..0b2f5a9 --- /dev/null +++ b/src/iframe-video/index.html @@ -0,0 +1,19 @@ + + + + + IFRAME Single Local Preview (Video Only) + + + + + + diff --git a/src/multiple-audio/index.html b/src/multiple-audio/index.html new file mode 100644 index 0000000..9dfab96 --- /dev/null +++ b/src/multiple-audio/index.html @@ -0,0 +1,54 @@ + + + + + Multiple Local Preview (Audio Only) + + + + + + + + + + + + + + + + + + + + + + +
Sound test
+ + diff --git a/src/multiple-peerconnections/index.html b/src/multiple-peerconnections/index.html new file mode 100755 index 0000000..04d2cfd --- /dev/null +++ b/src/multiple-peerconnections/index.html @@ -0,0 +1,47 @@ + + + + + + Multiple peerconnections + + + + + + + +
+
+
+

PeerConnection

+
+
+
+
+
+
+

Remote Streams

+
+
+
+
+ + + + + diff --git a/src/multiple-peerconnections/js/main.js b/src/multiple-peerconnections/js/main.js new file mode 100755 index 0000000..9ced861 --- /dev/null +++ b/src/multiple-peerconnections/js/main.js @@ -0,0 +1,115 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. + */ + +/*jshint esversion: 6 */ + +'use strict'; + +var $ = document.getElementById.bind(document); + +var testTable = $('test-table'); +var nPeerConnectionsInput = $('num-peerconnections'); +var startTestButton = $('start-test'); +var cpuOveruseDetectionCheckbox = $('cpuoveruse-detection'); + +startTestButton.onclick = startTest; + +function logError(err) { + console.err(err); +} + +function addNewVideoElement() { + var newRow = testTable.insertRow(-1); + var newCell = newRow.insertCell(-1); + var video = document.createElement('video'); + video.autoplay = true; + newCell.appendChild(video); + return video; +} + +function PeerConnection(id, cpuOveruseDetection) { + this.id = id; + this.cpuOveruseDetection = cpuOveruseDetection; + + this.localConnection = null; + this.remoteConnection = null; + + this.remoteView = addNewVideoElement(); + + this.start = function() { + var onGetUserMediaSuccess = this.onGetUserMediaSuccess.bind(this); + navigator.mediaDevices.getUserMedia({ + audio: true, + video: true + }) + .then(onGetUserMediaSuccess) + .catch(logError); + }; + + this.onGetUserMediaSuccess = function(stream) { + // Create local peer connection. + this.localConnection = new RTCPeerConnection(null, { + 'optional': [{ + 'googCpuOveruseDetection': this.cpuOveruseDetection + }] + }); + this.localConnection.onicecandidate = (event) => { + this.onIceCandidate(this.remoteConnection, event); + }; + this.localConnection.addStream(stream); + + // Create remote peer connection. + this.remoteConnection = new RTCPeerConnection(null, { + 'optional': [{ + 'googCpuOveruseDetection': this.cpuOveruseDetection + }] + }); + this.remoteConnection.onicecandidate = (event) => { + this.onIceCandidate(this.localConnection, event); + }; + this.remoteConnection.onaddstream = (e) => { + this.remoteView.srcObject = e.stream; + }; + + // Initiate call. 
+ var onCreateOfferSuccess = this.onCreateOfferSuccess.bind(this); + this.localConnection.createOffer({ + offerToReceiveAudio: 1, + offerToReceiveVideo: 1 + }) + .then(onCreateOfferSuccess, logError); + }; + + this.onCreateOfferSuccess = function(desc) { + this.localConnection.setLocalDescription(desc); + this.remoteConnection.setRemoteDescription(desc); + + var onCreateAnswerSuccess = this.onCreateAnswerSuccess.bind(this); + this.remoteConnection.createAnswer() + .then(onCreateAnswerSuccess, logError); + }; + + this.onCreateAnswerSuccess = function(desc) { + this.remoteConnection.setLocalDescription(desc); + this.localConnection.setRemoteDescription(desc); + }; + + this.onIceCandidate = function(connection, event) { + if (event.candidate) { + connection.addIceCandidate(new RTCIceCandidate(event.candidate)); + } + }; +} + +function startTest() { + var cpuOveruseDetection = cpuOveruseDetectionCheckbox.checked; + var nPeerConnections = nPeerConnectionsInput.value; + for (var i = 0; i < nPeerConnections; ++i) { + new PeerConnection(i, cpuOveruseDetection).start(); + } +} diff --git a/src/multiple-video-devices/index.html b/src/multiple-video-devices/index.html new file mode 100644 index 0000000..b55fe94 --- /dev/null +++ b/src/multiple-video-devices/index.html @@ -0,0 +1,31 @@ + + + + + Multiple device test (Video Only) + + + +

Opens all available cameras and attaches each video stream to an individual video element.
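The page's own js/main.js (later in this patch) implements this with the legacy MediaStreamTrack.getSources API. As a rough, hedged sketch only (not part of the patch; the videoArea container id is taken from the page markup), the same flow with the modern mediaDevices API could look like:

```javascript
'use strict';

// Hedged sketch: open every video input device and attach each stream to its
// own <video> element inside the container with id="videoArea" (assumed).
async function openAllCameras() {
  const devices = await navigator.mediaDevices.enumerateDevices();
  const cameras = devices.filter((device) => device.kind === 'videoinput');
  for (const camera of cameras) {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        video: {deviceId: {exact: camera.deviceId}},
        audio: false
      });
      const video = document.createElement('video');
      video.autoplay = true;
      video.srcObject = stream;
      document.getElementById('videoArea').appendChild(video);
    } catch (e) {
      console.error('getUserMedia failed for "' + camera.label + '": ' + e.name);
    }
  }
}
```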

+
+ +
+
+ + + + + diff --git a/src/multiple-video-devices/js/main.js b/src/multiple-video-devices/js/main.js new file mode 100644 index 0000000..297bd9e --- /dev/null +++ b/src/multiple-video-devices/js/main.js @@ -0,0 +1,76 @@ +// Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + +// Use of this source code is governed by a BSD-style license +// that can be found in the LICENSE file in the root of the source +// tree. An additional intellectual property rights grant can be found +// in the file PATENTS. All contributing project authors may +// be found in the AUTHORS file in the root of the source tree. + +'use strict'; + +var deviceList = []; +var counter = 0; + +window.onload = function() { + getSources_(); +}; + +function getSources_() { + if (typeof MediaStreamTrack.getSources === 'undefined') { + alert('Your browser does not support getSources, aborting.'); + return; + } + MediaStreamTrack.getSources(function(devices) { + for (var i = 0; i < devices.length; i++) { + if (devices[i].kind === 'video') { + deviceList[i] = devices[i]; + requestVideo_(deviceList[i].id); + } + } + }); +} + +function requestVideo_(id) { + navigator.mediaDevices.getUserMedia({ + video: {optional: [{sourceId: id}]}, + audio: false}).then( + function(stream) { + getUserMediaOkCallback_(stream); + }, + getUserMediaFailedCallback_ + ); +} + +function getUserMediaFailedCallback_(error) { + alert('User media request denied with error: ' + error.name); +} + +function getUserMediaOkCallback_(stream) { + var videoArea = document.getElementById('videoArea'); + var video = document.createElement('video'); + var div = document.createElement('div'); + div.style.float = 'left'; + video.setAttribute('id', 'view' + counter); + video.width = 320; + video.height = 240; + video.autoplay = true; + div.appendChild(video); + videoArea.appendChild(div); + if (typeof stream.getVideoTracks()[0].label !== 'undefined') { + var deviceLabel = document.createElement('p'); + deviceLabel.innerHTML = stream.getVideoTracks()[0].label; + div.appendChild(deviceLabel); + } + stream.getVideoTracks()[0].addEventListener('ended', errorMessage_); + document.getElementById('view' + counter).srcObject = stream; + counter++; +} + +var errorMessage_ = function(event) { + var message = 'getUserMedia successful but ' + event.type + ' event fired ' + + 'from camera. Most likely too many cameras on the same USB ' + + 'bus/hub. Verify this by disconnecting one of the cameras ' + + 'and try again.'; + document.getElementById('messages').innerHTML += event.target.label + ': ' + + message + '

'; +}; diff --git a/src/multiple-video/index.html b/src/multiple-video/index.html new file mode 100644 index 0000000..10ff672 --- /dev/null +++ b/src/multiple-video/index.html @@ -0,0 +1,69 @@ + + + + + Multiple Local Preview (Video Only) + + + + + + + + + + + + + + + + + + + + + + + + + +
Local Preview
+ + + + diff --git a/src/peer2peer-iframe/index.html b/src/peer2peer-iframe/index.html new file mode 100644 index 0000000..8b5ab7a --- /dev/null +++ b/src/peer2peer-iframe/index.html @@ -0,0 +1,17 @@ + + + + + WebRTC IFRAME peer2peer test page + + + + + + diff --git a/src/peer2peer-video/index.html b/src/peer2peer-video/index.html new file mode 100755 index 0000000..35fcff4 --- /dev/null +++ b/src/peer2peer-video/index.html @@ -0,0 +1,27 @@ + + + + Video to peerConnection + + + +
+

Stream from video to peerConnection

+ + + + +
+ +
+
+ + + + diff --git a/src/peer2peer-video/js/main.js b/src/peer2peer-video/js/main.js new file mode 100755 index 0000000..316ea6d --- /dev/null +++ b/src/peer2peer-video/js/main.js @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. + */ + +/*jshint esversion: 6 */ + +'use strict'; + +var localVideo = document.getElementById('localVideo'); +var remoteVideo = document.getElementById('remoteVideo'); +var startButton = document.getElementById('startButton'); +startButton.onclick = start; + +var pc1; +var pc2; +var stream; + +function logError(err) { + console.error(err); +} + +function maybeCreateStream() { + if (stream) { + return; + } + if (localVideo.captureStream) { + stream = localVideo.captureStream(); + } else if (localVideo.mozCaptureStream) { + stream = localVideo.mozCaptureStream(); + } else { + console.error('captureStream() not supported'); + } +} + +function start() { + startButton.onclick = hangup; + startButton.className = 'red'; + startButton.innerHTML = 'Stop test'; + if (localVideo.readyState >= 3) { // HAVE_FUTURE_DATA + // Video is already ready to play, call maybeCreateStream in case oncanplay + // fired before we registered the event handler. + maybeCreateStream(); + } + localVideo.play(); + call(); +} + +function call() { + var servers = null; + pc1 = new RTCPeerConnection(servers); + pc1.onicecandidate = (event) => { + if (event.candidate) { + pc2.addIceCandidate(event.candidate); + } + }; + + pc2 = new RTCPeerConnection(servers); + pc2.onicecandidate = (event) => { + if (event.candidate) { + pc1.addIceCandidate(event.candidate); + } + }; + pc2.onaddstream = (event) => { + remoteVideo.srcObject = event.stream; + }; + + pc1.addStream(stream); + pc1.createOffer({ + offerToReceiveAudio: 1, + offerToReceiveVideo: 1 + }).then(onCreateOfferSuccess, logError); +} + +function onCreateOfferSuccess(desc) { + pc1.setLocalDescription(desc); + pc2.setRemoteDescription(desc); + pc2.createAnswer().then(onCreateAnswerSuccess, logError); +} + +function onCreateAnswerSuccess(desc) { + pc2.setLocalDescription(desc); + pc1.setRemoteDescription(desc); +} + +function hangup() { + pc1.close(); + pc2.close(); + pc1 = null; + pc2 = null; + startButton.onclick = start; + startButton.className = 'green'; + startButton.innerHTML = 'Start test'; + localVideo.pause(); +} diff --git a/src/peer2peer-video/output.mp4 b/src/peer2peer-video/output.mp4 new file mode 100755 index 0000000..e57ff88 Binary files /dev/null and b/src/peer2peer-video/output.mp4 differ diff --git a/src/peer2peer/extension/screencapture.zip b/src/peer2peer/extension/screencapture.zip new file mode 100644 index 0000000..3ce401c Binary files /dev/null and b/src/peer2peer/extension/screencapture.zip differ diff --git a/src/peer2peer/extension/src/background.js b/src/peer2peer/extension/src/background.js new file mode 100644 index 0000000..9d00154 --- /dev/null +++ b/src/peer2peer/extension/src/background.js @@ -0,0 +1,56 @@ +// Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. +// Use of this source code is governed by a BSD-style license +// that can be found in the LICENSE file in the root of the source +// tree. 
+ +'use strict'; + +var dataSources = ['screen', 'window']; +if (getChromeVersion() >= 50) { + dataSources.push('tab'); + dataSources.push('audio'); +} +var desktopMediaRequestId = ''; + +chrome.runtime.onConnect.addListener(function(port) { + port.onMessage.addListener(function(msg) { + if (msg.type === 'SS_UI_REQUEST') { + requestScreenSharing(port, msg); + } + + if (msg.type === 'SS_UI_CANCEL') { + cancelScreenSharing(msg); + } + }); +}); + +function requestScreenSharing(port, msg) { + // https://developer.chrome.com/extensions/desktopCapture + // params: + // - 'dataSources' Set of sources that should be shown to the user. + // - 'targetTab' Tab for which the stream is created. + // - 'streamId' String that can be passed to getUserMedia() API + desktopMediaRequestId = + chrome.desktopCapture.chooseDesktopMedia(dataSources, port.sender.tab, + function(streamId, options) { + if (streamId) { + msg.type = 'SS_DIALOG_SUCCESS'; + msg.streamId = streamId; + msg.requestAudio = options && options.canRequestAudioTrack; + } else { + msg.type = 'SS_DIALOG_CANCEL'; + } + port.postMessage(msg); + }); +} + +function cancelScreenSharing() { + if (desktopMediaRequestId) { + chrome.desktopCapture.cancelChooseDesktopMedia(desktopMediaRequestId); + } +} + +function getChromeVersion() { + var raw = navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./); + return raw ? parseInt(raw[2], 10) : -1; +} diff --git a/src/peer2peer/extension/src/content-script.js b/src/peer2peer/extension/src/content-script.js new file mode 100644 index 0000000..fcc776c --- /dev/null +++ b/src/peer2peer/extension/src/content-script.js @@ -0,0 +1,36 @@ +// Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. +// Use of this source code is governed by a BSD-style license +// that can be found in the LICENSE file in the root of the source +// tree. 
+ +'use strict'; + +// https://goo.gl/7p5VrQ +// - 'content_script' and execution env are isolated from each other +// - In order to communicate we use the DOM (window.postMessage) +// +// app.js | |content-script.js | |background.js +// window.postMessage|------->|port.postMessage |----->| port.onMessage +// | window | | port | +// webkitGetUserMedia|<------ |window.postMessage|<-----| port.postMessage +// + +var port = chrome.runtime.connect(chrome.runtime.id); + +port.onMessage.addListener(function(msg) { + window.postMessage(msg, '*'); +}); + +window.addEventListener('message', function(event) { + // We only accept messages from ourselves + if (event.source !== window) { + return; + } + + if (event.data.type && ((event.data.type === 'SS_UI_REQUEST') || + (event.data.type === 'SS_UI_CANCEL'))) { + port.postMessage(event.data); + } +}, false); + +window.postMessage({type: 'SS_PING', text: 'start'}, '*'); diff --git a/src/peer2peer/extension/src/icon.png b/src/peer2peer/extension/src/icon.png new file mode 100644 index 0000000..755653f Binary files /dev/null and b/src/peer2peer/extension/src/icon.png differ diff --git a/src/peer2peer/extension/src/manifest.json b/src/peer2peer/extension/src/manifest.json new file mode 100644 index 0000000..8b40ac3 --- /dev/null +++ b/src/peer2peer/extension/src/manifest.json @@ -0,0 +1,22 @@ +{ + "name": "Screensharing Extension", + "description": "Screensharing Extension for my app", + "version": "1.0.0", + "manifest_version": 2, + "icons": { + "128": "icon.png" + }, + "background": { + "scripts": ["background.js"] + }, + "content_scripts": [ + { + "matches": ["https://test.webrtc.org/manual/*"], + "js": ["content-script.js"] + } + ], + "permissions": [ + "desktopCapture", + "https://test.webrtc.org/manual/*" + ] +} diff --git a/src/peer2peer/help.html b/src/peer2peer/help.html new file mode 100644 index 0000000..ddddf54 --- /dev/null +++ b/src/peer2peer/help.html @@ -0,0 +1,112 @@ + + + + + WebRTC PeerConnection Manual Test Help Page + + + + + +

WebRTC PeerConnection Manual Test Help Page

+

+ The test page is intended for testing WebRTC calls. + + This is how you set up a normal call: +

+
    +
  1. Open this page in two tabs.
  2. Start the peerconnection server. Click on the question mark next
     to the 'server' field for instructions on how to do that. The easiest
     thing is to start it on localhost, but you can start it on any
     machine you like and connect to hostname:8888.
  3. Click the Connect button in both tabs.
  4. Click the Call:Negotiate button in one of the tabs. You should see a bunch
     of printouts when this happens. Note that no streams are sent to
     begin with (although you could run steps 5-6 before this step to get streams
     even in the initial call).
  5. Grant media access using the checkboxes and Request button.
  6. Add the local stream by clicking the "Add" button, in both tabs.
  7. Now you must re-negotiate the call by clicking on Negotiate again.
  8. You should now have a call up and both sides should be receiving
     media data (depending on what access you granted on the respective
     pages).
  9. You can now choose to stop, re-request, re-send or disable streams
     in any way you like, or hang up and re-start the call. You don't
     need to disconnect: that's done automatically when you close the
     page. Hanging up is NOT done automatically, though.
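Behind the Connect and Call:Negotiate buttons, src/peer2peer/js/main.js signs in to peerconnection_server over XHR and then runs a standard offer/answer exchange. The following is a condensed, hedged sketch of that negotiation rather than the page's exact code; sendToPeer stands in for the page's XHR signaling helper:

```javascript
// Hedged sketch of the offer/answer flow behind "Negotiate".
// `pc` is the local RTCPeerConnection; sendToPeer() posts the JSON-encoded
// description/candidates to the remote tab via peerconnection_server.
function negotiate(pc, sendToPeer) {
  pc.onicecandidate = (event) => {
    if (event.candidate) {
      sendToPeer(JSON.stringify(event.candidate));
    }
  };
  pc.createOffer()
    .then((offer) => pc.setLocalDescription(offer))
    .then(() => sendToPeer(JSON.stringify(pc.localDescription)))
    .catch((e) => console.error('negotiation failed: ' + e));
}

// The remote tab answers symmetrically when it receives the offer.
function answer(pc, offer, sendToPeer) {
  pc.setRemoteDescription(new RTCSessionDescription(offer))
    .then(() => pc.createAnswer())
    .then((desc) => pc.setLocalDescription(desc))
    .then(() => sendToPeer(JSON.stringify(pc.localDescription)))
    .catch((e) => console.error('answer failed: ' + e));
}
```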
+ +

+ To create a data channel: +

+
    +
  1. Make sure Chrome is started with the --enable-data-channels flag.
  2. Follow the instructions above to connect two tabs to a
     peerconnection_server.
  3. Click the Data channel: Create button in one tab. Notice the status
     changes to "connecting".
  4. Click the Call:Negotiate button. You should see the status change to
     "open" in both tabs.
  5. Enter text in the textbox next to the Send data button and then click Send
     data. Notice the text is received in the remote tab in the Received on data
     channel text box. Data can be sent in both directions.
  6. To close the channel, press the Close button followed by Negotiate. Notice
     the status changes to "closed".
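For reference, the Create / Send data / Close steps above map onto a handful of RTCDataChannel calls. This is a minimal, hedged sketch rather than the page's exact code; pc is assumed to be the already-connected RTCPeerConnection, and the remote tab receives its end of the channel through pc.ondatachannel:

```javascript
// Hedged sketch of the Create / Send data / Close steps.
// `pc` is the already-negotiated RTCPeerConnection from the call above.
var channel = pc.createDataChannel('peer2peer-test');

channel.onopen = () => console.log('DataChannel state: ' + channel.readyState);
channel.onclose = () => console.log('DataChannel state: ' + channel.readyState);
channel.onmessage = (event) => console.log('Received on data channel: ' + event.data);

// "Send data" simply forwards the textbox contents once the channel is open.
function sendData(text) {
  if (channel.readyState === 'open') {
    channel.send(text);
  }
}

// "Close" tears the channel down; the page then expects a re-negotiation.
function closeChannel() {
  channel.close();
}
```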
+ +

Detailed descriptions:

+ + + + + + diff --git a/src/peer2peer/index.html b/src/peer2peer/index.html new file mode 100644 index 0000000..b6846b2 --- /dev/null +++ b/src/peer2peer/index.html @@ -0,0 +1,208 @@ + + + + + + WebRTC peer2peer test page + + + + + + +
+ +
+ +
+
+

Remote Video

+ +
+ + +
+ +

Local Preview

+ +
+ + +
+
+
+ +
+ +
+

GetUserMedia

+
+ + + + + + +
+
+
+ +
+
+ +
+ + +
+
+
+ +
+
+ +
+
+ + + + +
+
+
+ +
+

PeerConnection

+
+
+ +
+
+ +
+
+
+ + + +
+
+
+
+ + + + + + + +
+
+ + + +
+
+ +
+

Media streams

+
+ + + + + + +
+
+ + + +
+
+ + + + + + + + + +
+ + + + + +
+
+
+ +
+

DTMF Sender

+ + + + + + + + +
+ +
+

Options

+ + + + +
+
+ +
+

Log

+ +

+
+ +
+
+ + + + diff --git a/src/peer2peer/js/main.js b/src/peer2peer/js/main.js new file mode 100644 index 0000000..459c65f --- /dev/null +++ b/src/peer2peer/js/main.js @@ -0,0 +1,1381 @@ +// Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +// Use of this source code is governed by a BSD-style license +// that can be found in the LICENSE file in the root of the source +// tree. + +// See http://dev.w3.org/2011/webrtc/editor/getusermedia.html for more +// information on getUserMedia. See +// http://dev.w3.org/2011/webrtc/editor/webrtc.html for more information on +// peerconnection and webrtc in general. + +'use strict'; + +// TODO(jansson) rewrite to classes. +// Global namespace object. +var global = {}; +global.transformOutgoingSdp = function(sdp) { return sdp; }; +// Default getUserMedia video resolution. +global.videoWidth = 1280; +global.videoHeight = 720; + +// We need a STUN server for some API calls. +var STUN_SERVER = 'stun.l.google.com:19302'; + +// Used as a shortcut for finding DOM elements by ID. +// @param {string} id is a case-sensitive string representing the unique ID of +// the element being sought. +// @return {object} id returns the element object specified as a parameter. +var $ = function(id) { + return document.getElementById(id); +}; + +// Prepopulate constraints from JS to the UI. Enumerate devices available +// via getUserMedia, register elements to be used for local storage. +window.onload = function() { + hookupDataChannelCallbacks_(); + hookupDtmfSenderCallback_(); + updateGetUserMediaConstraints(); + setupLocalStorageFieldValues(); + acceptIncomingCalls(); + setPeerConnectionConstraints(); + if ($('get-devices-onload').checked === true) { + getDevices(); + } + // Checks if the mobile UI should be used. + registerResButtonsEvents(); + screenCaptureExtensionHandler_(); +}; + +// Disconnect before the tab is closed. +window.onbeforeunload = function() { + disconnect_(); +}; + +// Handles the resolution button events. +function registerResButtonsEvents() { + var lastResButtonPressed; + var elementIdAndResolutions = [ + ['video-qvga', 320, 180], + ['video-vga', 640, 360], + ['video-hd', 1280, 720] + ]; + + function setResolution(elementAndRes) { + $(elementAndRes[0]).addEventListener('click', function() { + global.videoWidth = elementAndRes[1]; + global.videoHeight = elementAndRes[2]; + $(elementAndRes[0]).className = 'pressed'; + if (typeof lastResButtonPressed !== 'undefined') { + lastResButtonPressed.className = ''; + } + lastResButtonPressed = $(elementAndRes[0]); + updateGetUserMediaConstraints(); + }, false); + } + + for (var i in elementIdAndResolutions) { + setResolution(elementIdAndResolutions[i]); + } +} + +// TODO (jansson) Setup events using addEventListener, applies in general. +// A list of element id's to be registered for local storage. +function setupLocalStorageFieldValues() { + registerLocalStorage_('pc-server'); + registerLocalStorage_('get-devices-onload'); +} + +// Public HTML functions + +// The *Here functions are called from peer2peer.html and will make calls +// into our underlying JavaScript library with the values from the page +// (have to be named differently to avoid name clashes with existing functions). 
+/* exported getUserMediaFromHere */ +function getUserMediaFromHere() { + var constraints = $('getusermedia-constraints').value; + try { + doGetUserMedia_(constraints); + } catch (exception) { + print_('getUserMedia says: ' + exception); + } +} +/* exported editConstraints */ +function editConstraints(elementId) { + $(elementId).style.display = 'inline'; + $(elementId).style.height = '400px'; + $(elementId).style.zIndex = '9'; + $(elementId).focus(); + $(elementId).onblur = function() { + $(elementId).style.display = 'none'; + }; +} + +/* exported connectFromHere */ +function connectFromHere() { + var server = $('pc-server').value; + if ($('peer-id').value === '') { + // Generate a random name to distinguish us from other tabs: + $('peer-id').value = 'peer_' + Math.floor(Math.random() * 10000); + print_('Our name from now on will be ' + $('peer-id').value); + } + connect(server, $('peer-id').value); +} + +/* exported negotiateCallFromHere */ +function negotiateCallFromHere() { + // Set the global variables with values from our UI. + setCreateOfferConstraints(getEvaluatedJavaScript_( + $('createoffer-constraints').value)); + setCreateAnswerConstraints(getEvaluatedJavaScript_( + $('createanswer-constraints').value)); + + ensureHasPeerConnection_(); + negotiateCall_(); +} + +/* exported addLocalStreamFromHere */ +function addLocalStreamFromHere() { + ensureHasPeerConnection_(); + addLocalStream(); +} + +/* exported removeLocalStreamFromHere */ +function removeLocalStreamFromHere() { + removeLocalStream(); +} + +/* exported hangUpFromHere */ +function hangUpFromHere() { + hangUp(); + acceptIncomingCalls(); +} + +/* exported toggleRemoteVideoFromHere */ +function toggleRemoteVideoFromHere() { + toggleRemoteStream(function(remoteStream) { + return remoteStream.getVideoTracks()[0]; + }, 'video'); +} + +/* exported toggleRemoteAudioFromHere */ +function toggleRemoteAudioFromHere() { + toggleRemoteStream(function(remoteStream) { + return remoteStream.getAudioTracks()[0]; + }, 'audio'); +} +/* exported toggleLocalVideoFromHere */ +function toggleLocalVideoFromHere() { + toggleLocalStream(function(localStream) { + return localStream.getVideoTracks()[0]; + }, 'video'); +} + +/* exported toggleLocalAudioFromHere */ +function toggleLocalAudioFromHere() { + toggleLocalStream(function(localStream) { + return localStream.getAudioTracks()[0]; + }, 'audio'); +} + +/* exported stopLocalFromHere */ +function stopLocalFromHere() { + stopLocalStream(); +} + +/* exported createDataChannelFromHere */ +function createDataChannelFromHere() { + ensureHasPeerConnection_(); + createDataChannelOnPeerConnection(); +} + +/* exported closeDataChannelFromHere */ +function closeDataChannelFromHere() { + ensureHasPeerConnection_(); + closeDataChannelOnPeerConnection(); +} + +/* exported sendDataFromHere */ +function sendDataFromHere() { + var data = $('data-channel-send').value; + sendDataOnChannel(data); +} + +/* exported createDtmfSenderFromHere */ +function createDtmfSenderFromHere() { + ensureHasPeerConnection_(); + createDtmfSenderOnPeerConnection(); +} + +/* exported insertDtmfFromHere */ +function insertDtmfFromHere() { + var tones = $('dtmf-tones').value; + var duration = $('dtmf-tones-duration').value; + var gap = $('dtmf-tones-gap').value; + insertDtmfOnSender(tones, duration, gap); +} + +/* exported forceIsacChanged */ +function forceIsacChanged() { + var forceIsac = $('force-isac').checked; + if (forceIsac) { + forceIsac_(); + } else { + dontTouchSdp_(); + } +} + +// Updates the constraints in the 
getusermedia-constraints text box with a +// MediaStreamConstraints string. This string is created based on the state +// of the 'audiosrc' and 'videosrc' checkboxes. +// If device enumeration is supported and device source id's are not undefined +// they will be added to the constraints string. +function updateGetUserMediaConstraints() { + var selectedAudioDevice = $('audiosrc'); + var selectedVideoDevice = $('videosrc'); + global.constraints = {audio: $('audio').checked, + video: $('video').checked + }; + + if ($('video').checked) { + global.constraints.video = {height: global.videoHeight, + width: global.videoWidth}; + } + + if (!selectedAudioDevice.disabled && !selectedAudioDevice.disabled) { + var devices = getSourcesFromField_(selectedAudioDevice, + selectedVideoDevice); + + if ($('audio').checked) { + if (devices.audioId !== null) { + global.constraints.audio = {deviceId: devices.audioId}; + } + } + + if ($('video').checked) { + if (devices.videoId !== null) { + global.constraints.video = {deviceId: devices.videoId}; + } + } + } + + $('getusermedia-constraints').value = JSON.stringify(global.constraints, + null, ' '); + $('getusermedia-constraints').addEventListener('change', function() { + global.constraints = JSON.parse($('getusermedia-constraints').value); + }, false); + $('local-res').innerHTML = global.videoWidth + 'x' + global.videoHeight; +} + +/* exported showServerHelp */ +function showServerHelp() { + alert('You need to build and run a peerconnection_server on some ' + + 'suitable machine. To build it in chrome, just run make/ninja ' + + 'peerconnection_server. Otherwise, read in https://code.google' + + '.com/searchframe#xSWYf0NTG_Q/trunk/peerconnection/README&q=REA' + + 'DME%20package:webrtc%5C.googlecode%5C.com.'); +} + +/* exported clearLog */ +function clearLog() { + $('messages').innerHTML = ''; +} + +// Stops the local stream. +function stopLocalStream() { + if (typeof global.localStream === 'undefined') { + warning_('Tried to stop local stream, ' + + 'but media access is not granted.'); + return; + } + removeVideoTrackEvents(global.localStream); + global.localStream.getTracks().forEach(function(track) { + track.stop(); + }); +} + +// Adds the current local media stream to a peer connection. +// @param {RTCPeerConnection} peerConnection +function addLocalStreamToPeerConnection(peerConnection) { + if (typeof global.localStream === 'undefined') { + error_('Tried to add local stream to peer connection, but there is no ' + + 'stream yet.'); + } + try { + peerConnection.addStream(global.localStream, global.addStreamConstraints); + } catch (exception) { + error_('Failed to add stream with constraints ' + + global.addStreamConstraints + ': ' + exception); + } + print_('Added local stream.'); +} + +// Removes the local stream from the peer connection. +// @param {rtcpeerconnection} peerConnection +function removeLocalStreamFromPeerConnection(peerConnection) { + if (typeof global.localStream === 'undefined') { + error_('Tried to remove local stream from peer connection, but there is ' + + 'no stream yet.'); + } + try { + peerConnection.removeStream(global.localStream); + } catch (exception) { + error_('Could not remove stream: ' + exception); + } + print_('Removed local stream.'); +} + +// Enumerates the audio and video devices available in Chrome and adds the +// devices to the HTML elements with Id 'audiosrc' and 'videosrc'. +// Checks if device enumeration is supported and if the 'audiosrc' + 'videosrc' +// elements exists, if not a debug printout will be displayed. 
+// If the device label is empty, audio/video + sequence number will be used to +// populate the name. Also makes sure the children has been loaded in order +// to update the constraints. +function getDevices() { + var selectedAudioDevice = $('audiosrc'); + var selectedVideoDevice = $('videosrc'); + selectedAudioDevice.innerHTML = ''; + selectedVideoDevice.innerHTML = ''; + + if (typeof navigator.mediaDevices.enumerateDevices === 'undefined') { + selectedAudioDevice.disabled = true; + selectedVideoDevice.disabled = true; + $('get-devices').disabled = true; + $('get-devices-onload').disabled = true; + updateGetUserMediaConstraints(); + error_('enumerateDevices not found, device enumeration not supported'); + } + + navigator.mediaDevices.enumerateDevices().then(function(devices) { + for (var i = 0; i < devices.length; i++) { + var option = document.createElement('option'); + option.value = devices[i].deviceId; + option.text = devices[i].label; + + if (devices[i].kind === 'audioinput') { + if (option.text === '') { + option.text = devices[i].deviceId; + } + selectedAudioDevice.appendChild(option); + } else if (devices[i].kind === 'videoinput') { + if (option.text === '') { + option.text = devices[i].deviceId; + } + selectedVideoDevice.appendChild(option); + } else if (devices[i].kind === 'audiooutput') { + // TODO: Add output device selection. + return; + } else { + error_('Device type ' + devices[i].kind + ' not recognized, ' + + 'cannot enumerate device. Currently only device types' + + '\'audio\' and \'video\' are supported'); + updateGetUserMediaConstraints(); + } + } + }).catch(function(error) { + error_('Could not enumerateDevices: ' + error); + }); + + checkIfDeviceDropdownsArePopulated_(); +} + +function displayScreenCaptureInfo() { + if ($('screencapture-info')) { + $('screencapture-info').style.display = 'block'; + return; + } + var message = 'Please install the screen capture extension:
' + + '1. Go to chrome://extensions
' + + '2. Check: "Enable Developer mode"
' + + '3. Click: "Load the unpacked extension..."
' + + '4. Choose "extension" folder from the ' + + 'repository
' + + '5. Reload this page over https
' + + 'Note: Make sure the URL permission in manifest.json matches ' + + 'the URL for this page.'; + var startScreenCaptureButton = document.getElementById('start-screencapture'); + var messageDiv = document.createElement('div'); + messageDiv.innerHTML = message; + messageDiv.id = 'screencapture-info'; + + window.onclick = function(event) { + if (event.target === messageDiv) { + messageDiv.style.display = 'none'; + } + }; + + document.getElementById('general-gum').insertBefore(messageDiv, + startScreenCaptureButton); +} + +function screenCaptureExtensionHandler_() { + // Copied and modified from desktop capture example. + var extensionInstalled = false; + $('start-screencapture').addEventListener('click', function() { + // send screen-sharer request to content-script + window.postMessage({type: 'SS_UI_REQUEST', text: 'start'}, '*'); + if (!extensionInstalled) { + displayScreenCaptureInfo(); + } + }); + + // listen for messages from the content-script + window.addEventListener('message', function(event) { + if (event.origin !== window.location.origin) { + return; + } + + // content-script will send a 'SS_PING' msg if extension is installed + if (event.data.type && (event.data.type === 'SS_PING')) { + extensionInstalled = true; + } + + // user chose a stream + if (event.data.type && (event.data.type === 'SS_DIALOG_SUCCESS')) { + var audioConstraints = + (adapter.browserDetails.browser === 'chrome' && + adapter.browserDetails.version >= 50 && + event.data.requestAudio) ? { + mandatory: { + chromeMediaSource: 'desktop', + chromeMediaSourceId: event.data.streamId + } + } : false; + + var videoConstraints = { + mandatory: { + chromeMediaSource: 'desktop', + chromeMediaSourceId: event.data.streamId, + maxWidth: window.screen.width, + maxHeight: window.screen.height + } + }; + + var constraints = {audio: audioConstraints, video: videoConstraints}; + doGetUserMedia_(JSON.stringify(constraints)); + } + + // user clicked on 'cancel' in choose media dialog + if (event.data.type && (event.data.type === 'SS_DIALOG_CANCEL')) { + warning_('User cancelled!'); + } + }); +} + +// Sets the transform to apply just before setting the local description and +// sending to the peer. +// @param {function} transformFunction A function which takes one SDP string as +// argument and returns the modified SDP string. +function setOutgoingSdpTransform(transformFunction) { + global.transformOutgoingSdp = transformFunction; +} + +// Sets the MediaConstraints to be used for PeerConnection createAnswer() calls. +// @param {string} mediaConstraints The constraints, as defined in the +// PeerConnection JS API spec. +function setCreateAnswerConstraints(mediaConstraints) { + global.createAnswerConstraints = mediaConstraints; +} + +// Sets the MediaConstraints to be used for PeerConnection createOffer() calls. +// @param {string} mediaConstraints The constraints, as defined in the +// PeerConnection JS API spec. +function setCreateOfferConstraints(mediaConstraints) { + global.createOfferConstraints = mediaConstraints; +} + +// Sets the callback functions that will receive DataChannel readyState updates +// and received data. +// @param {function} statusCallback The function that will receive a string +// with the current DataChannel readyState. +// @param {function} dataCallback The function that will a string with data +// received from the remote peer. 
+function setDataCallbacks(statusCallback, dataCallback) { + global.dataStatusCallback = statusCallback; + global.dataCallback = dataCallback; +} + +// Sends data on an active DataChannel. +// @param {string} data The string that will be sent to the remote peer. +function sendDataOnChannel(data) { + if (typeof global.dataChannel === 'undefined') { + error_('Trying to send data, but there is no DataChannel.'); + } + global.dataChannel.send(data); +} + +// Sets the callback function that will receive DTMF sender ontonechange events. +// @param {function} ontonechange The function that will receive a string with +// the tone that has just begun playout. +function setOnToneChange(ontonechange) { + global.dtmfOnToneChange = ontonechange; +} + +// Inserts DTMF tones on an active DTMF sender. +// @param {string} tones to be sent. +// @param {string} duration duration of the tones to be sent. +// @param {string} interToneGap gap between the tones to be sent. +function insertDtmf(tones, duration, interToneGap) { + if (typeof global.dtmfSender === 'undefined') { + error_('Trying to send DTMF, but there is no DTMF sender.'); + } + global.dtmfSender.insertDTMF(tones, duration, interToneGap); +} + +function handleMessage(peerConnection, message) { + var parsedMsg = JSON.parse(message); + if (parsedMsg.type) { + var sessionDescription = new RTCSessionDescription(parsedMsg); + peerConnection.setRemoteDescription( + sessionDescription + ).then( + function() { success_('setRemoteDescription'); }, + function(error) { error_('setRemoteDescription', error); } + ); + if (sessionDescription.type === 'offer') { + print_('createAnswer with constraints: ' + + JSON.stringify(global.createAnswerConstraints, null, ' ')); + peerConnection.createAnswer( + global.createAnswerConstraints + ).then( + setLocalAndSendMessage_, + function(error) { error_('createAnswer', error); } + ); + } + return; + } else if (parsedMsg.candidate) { + var candidate = new RTCIceCandidate(parsedMsg); + peerConnection.addIceCandidate(candidate, + function() { success_('addIceCandidate'); }, + function(error) { error_('addIceCandidate', error); } + ); + return; + } + error_('unknown message received'); +} + +// Sets the peerConnection constraints based on checkboxes. +// TODO (jansson) Make it possible to use the text field for constraints like +// for getUserMedia. +function setPeerConnectionConstraints() { + // Only added optional for now. 
+ global.pcConstraints = { + optional: [] + }; + + global.pcConstraints.optional.push( + {googCpuOveruseDetection: $('cpuoveruse-detection').checked}); + + global.pcConstraints.optional.push( + {RtpDataChannels: $('data-channel-type-rtp').checked}); + + $('pc-constraints').value = JSON.stringify(global.pcConstraints, null, ' '); +} + +function createPeerConnection(stunServer) { + var servers = {iceServers: [{url: 'stun:' + stunServer}]}; + var peerConnection; + try { + peerConnection = new RTCPeerConnection(servers, global.pcConstraints); + } catch (exception) { + error_('Failed to create peer connection: ' + exception); + } + peerConnection.onaddstream = addStreamCallback_; + peerConnection.onremovestream = removeStreamCallback_; + peerConnection.onicecandidate = iceCallback_; + peerConnection.ondatachannel = onCreateDataChannelCallback_; + return peerConnection; +} + +function setupCall(peerConnection) { + print_('createOffer with constraints: ' + + JSON.stringify(global.createOfferConstraints, null, ' ')); + peerConnection.createOffer( + global.createOfferConstraints + ).then( + setLocalAndSendMessage_, + function(error) { error_('createOffer', error); } + ); +} + +function answerCall(peerConnection, message) { + handleMessage(peerConnection, message); +} + +function createDataChannel(peerConnection, label) { + if (typeof global.dataChannel !== 'undefined' && + global.dataChannel.readyState !== 'closed') { + error_('Creating DataChannel, but we already have one.'); + } + + global.dataChannel = peerConnection.createDataChannel(label, + {reliable: false}); + print_('DataChannel with label ' + global.dataChannel.label + ' initiated ' + + 'locally.'); + hookupDataChannelEvents(); +} + +function closeDataChannel() { + if (typeof global.dataChannel === 'undefined') { + error_('Closing DataChannel, but none exists.'); + } + print_('DataChannel with label ' + global.dataChannel.label + + ' is beeing closed.'); + global.dataChannel.close(); +} + +function createDtmfSender(peerConnection) { + if (typeof global.dtmfSender !== 'undefined') { + error_('Creating DTMF sender, but we already have one.'); + } + if (typeof global.localStream === 'undefined') { + error_('Creating DTMF sender but local stream is undefined.'); + } + var localAudioTrack = global.localStream.getAudioTracks()[0]; + global.dtmfSender = peerConnection.createDTMFSender(localAudioTrack); + global.dtmfSender.ontonechange = global.dtmfOnToneChange; +} + +// Connects to the provided peerconnection_server. +// @param {string} serverUrl The server URL in string form without an ending +// slash, something like http://localhost:8888. +// @param {string} clientName The name to use when connecting to the server. +function connect(serverUrl, clientName) { + if (typeof global.ourPeerId !== 'undefined') { + error_('connecting, but is already connected.'); + } + print_('Connecting to ' + serverUrl + ' as ' + clientName); + global.serverUrl = serverUrl; + global.ourClientName = clientName; + + var request = new XMLHttpRequest(); + request.open('GET', serverUrl + '/sign_in?' + clientName); + print_(serverUrl + '/sign_in?' + clientName); + request.onreadystatechange = function() { + connectCallback_(request); + }; + request.send(); +} + +// Creates a peer connection. Must be called before most other public functions +// in this file. 
+function preparePeerConnection() { + if (typeof global.peerConnection !== 'undefined') { + error_('creating peer connection, but we already have one.'); + } + global.peerConnection = createPeerConnection(STUN_SERVER); + success_('ok-peerconnection-created'); +} + +// Adds the local stream to the peer connection. You will have to re-negotiate +// the call for this to take effect in the call. +function addLocalStream() { + if (typeof global.peerConnection === 'undefined') { + error_('adding local stream, but we have no peer connection.'); + } + addLocalStreamToPeerConnection(global.peerConnection); + print_('ok-added'); +} + +// Removes the local stream from the peer connection. You will have to +// re-negotiate the call for this to take effect in the call. +function removeLocalStream() { + if (typeof global.peerConnection === 'undefined') { + error_('attempting to remove local stream, but no call is up'); + } + removeLocalStreamFromPeerConnection(global.peerConnection); + print_('ok-local-stream-removed'); +} + +// Toggles the remote audio stream's enabled state on the peer connection, given +// that a call is active. Returns ok-[typeToToggle]-toggled-to-[true/false] +// on success. +// @param {function} selectAudioOrVideoTrack A function that takes a remote +// stream as argument and returns a track (e.g. either the video or audio +// track). +// @param {function} typeToToggle Either "audio" or "video" depending on what +// the selector function selects. +function toggleRemoteStream(selectAudioOrVideoTrack, typeToToggle) { + if (typeof global.peerConnection === 'undefined') { + error_('Tried to toggle remote stream, but have no peer connection.'); + } + if (global.peerConnection.getRemoteStreams().length === 0) { + error_('Tried to toggle remote stream, but not receiving any stream.'); + } + var track = selectAudioOrVideoTrack( + global.peerConnection.getRemoteStreams()[0]); + toggle_(track, 'remote', typeToToggle); +} + +// See documentation on toggleRemoteStream (this function is the same except +// we are looking at local streams). +function toggleLocalStream(selectAudioOrVideoTrack, typeToToggle) { + if (typeof global.peerConnection === 'undefined') { + error_('Tried to toggle local stream, but have no peer connection.'); + } + if (global.peerConnection.getLocalStreams().length === 0) { + error_('Tried to toggle local stream, but there is no local stream in ' + + 'the call.'); + } + var track = selectAudioOrVideoTrack( + global.peerConnection.getLocalStreams()[0]); + toggle_(track, 'local', typeToToggle); +} + +// Hangs up a started call. Returns ok-call-hung-up on success. This tab will +// not accept any incoming calls after this call. +function hangUp() { + if (typeof global.peerConnection === 'undefined') { + error_('hanging up, but has no peer connection'); + } + if (getReadyState_() !== 'active') { + error_('hanging up, but ready state is not active (no call up).'); + } + sendToPeer(global.remotePeerId, 'BYE'); + closeCall_(); + global.acceptsIncomingCalls = false; + print_('ok-call-hung-up'); +} + +// Start accepting incoming calls. +function acceptIncomingCalls() { + global.acceptsIncomingCalls = true; +} + +// Creates a DataChannel on the current PeerConnection. Only one DataChannel can +// be created on each PeerConnection. +// Returns ok-datachannel-created on success. 
+function createDataChannelOnPeerConnection() { + if (typeof global.peerConnection === 'undefined') { + error_('Tried to create data channel, but have no peer connection.'); + } + createDataChannel(global.peerConnection, global.ourClientName); + print_('ok-datachannel-created'); +} + +// Close the DataChannel on the current PeerConnection. +// Returns ok-datachannel-close on success. +function closeDataChannelOnPeerConnection() { + if (typeof global.peerConnection === 'undefined') { + error_('Tried to close data channel, but have no peer connection.'); + } + closeDataChannel(global.peerConnection); + print_('ok-datachannel-close'); +} + +// Creates a DTMF sender on the current PeerConnection. +// Returns ok-dtmfsender-created on success. +function createDtmfSenderOnPeerConnection() { + if (typeof global.peerConnection === 'undefined') { + error_('Tried to create DTMF sender, but have no peer connection.'); + } + createDtmfSender(global.peerConnection); + print_('ok-dtmfsender-created'); +} + +// Send DTMF tones on the global.dtmfSender. +// Returns ok-dtmf-sent on success. +function insertDtmfOnSender(tones, duration, interToneGap) { + + if (typeof global.dtmfSender === 'undefined') { + error_('Tried to insert DTMF tones, but have no DTMF sender.'); + } + insertDtmf(tones, duration, interToneGap); + print_('ok-dtmf-sent'); +} + +// Sends a message to a peer through the peerconnection_server. +function sendToPeer(peer, message) { + var messageToLog = message.sdp ? message.sdp : message; + print_('Sending message ' + messageToLog + ' to peer ' + peer + '.'); + + var request = new XMLHttpRequest(); + var url = global.serverUrl + '/message?peer_id=' + global.ourPeerId + '&to=' + + peer; + request.open('POST', url, true); + request.setRequestHeader('Content-Type', 'text/plain'); + request.send(message); +} + +// @param {!string} videoElementId The ID of the video element to update. +// @param {!number} width of the video to update the video element, if width or +// height is 0, size will be taken from videoElement.videoWidth. +// @param {!number} height of the video to update the video element, if width or +// height is 0 size will be taken from the videoElement.videoHeight. +/* exported updateVideoElementSize */ +function updateVideoElementSize(videoElementId, width, height) { + var videoElement = $(videoElementId); + if (width > 0 || height > 0) { + videoElement.width = width; + videoElement.height = height; + } else { + if (videoElement.videoWidth > 0 || videoElement.videoHeight > 0) { + videoElement.width = videoElement.videoWidth; + videoElement.height = videoElement.videoHeight; + print_('Set video element "' + videoElementId + '" size to ' + + videoElement.width + 'x' + videoElement.height); + } else { + print_('"' + videoElementId + '" video stream size is 0, skipping ' + + ' resize'); + } + } + displayVideoSize(videoElement); +} + +// Disconnects from the peerconnection server. Returns ok-disconnected on +// success. +function disconnect_() { + if (typeof global.ourPeerId === 'undefined') { + return; + } + var request = new XMLHttpRequest(); + request.open('GET', global.serverUrl + '/sign_out?peer_id=' + + global.ourPeerId, true); + request.send(); + global.ourPeerId = 'undefined'; + print_('ok-disconnected'); +} + +// Returns true if we are disconnected from peerconnection_server. +function isDisconnected_() { + return global.ourPeerId === 'undefined'; +} + +// @return {!string} The current peer connection's ready state, or +// 'no-peer-connection' if there is no peer connection up. 
+// NOTE: The PeerConnection states are changing and until chromium has +// implemented the new states we have to use this interim solution of always +// assuming that the PeerConnection is 'active'. +function getReadyState_() { + if (typeof global.peerConnection === 'undefined') { + return 'no-peer-connection'; + } + return 'active'; +} + +// This function asks permission to use the webcam and mic from the browser. It +// will return ok-requested to the test. This does not mean the request was +// approved though. The test will then have to click past the dialog that +// appears in Chrome, which will run either the OK or failed callback as a +// a result. To see which callback was called, use obtainGetUserMediaResult_(). +// @param {string} constraints Defines what to be requested, with mandatory +// and optional constraints defined. The contents of this parameter depends +// on the WebRTC version. This should be JavaScript code that we eval(). +function doGetUserMedia_(constraints) { + if (!navigator.getUserMedia) { + print_('Browser does not support WebRTC.'); + return; + } + var evaluatedConstraints; + try { + evaluatedConstraints = JSON.parse(constraints); + } catch (exception) { + error_('Not valid JavaScript expression: ' + constraints); + } + + print_('Requesting doGetUserMedia: constraints: ' + constraints); + navigator.mediaDevices.getUserMedia(evaluatedConstraints) + .then(function(stream) { + global.localStream = stream; + success_('getUserMedia'); + + if (stream.getVideoTracks().length > 0) { + // Show the video element if we did request video in the getUserMedia call. + var videoElement = $('local-view'); + videoElement.srcObject = stream; + registerVideoTrackEvents(stream); + window.addEventListener('loadedmetadata', function() { + displayVideoSize(videoElement);}, true); + } + }).catch(function(error) { + error_('GetUserMedia failed with error: ' + error.name); + }); +} + +function registerVideoTrackEvents(stream) { + // Throw an error when no video is sent from camera but gUM returns OK. + stream.getVideoTracks()[0].onended = function() { + error_(stream + ' getUserMedia successful but ' + + 'MediaStreamTrack.onended event fired, no frames from camera.'); + }; + // Print information on track being muted. + stream.getVideoTracks()[0].onmute = function() { + error_(stream + ' MediaStreamTrack.onmute event has ' + + 'fired, no frames to the track.'); + }; + // Print information on track being unmuted mute. + stream.getVideoTracks()[0].onunmute = function() { + warning_(stream + ' MediaStreamTrack.onunmute event has ' + + 'fired.'); + }; +} + +function removeVideoTrackEvents(stream) { + var videoTrackevents = ['onmute', 'onunmute', 'onended']; + videoTrackevents.forEach(function(trackEvent) { + stream.getVideoTracks()[0][trackEvent] = null; + }); +} + +// Must be called after calling doGetUserMedia. +// @return {string} Returns not-called-yet if we have not yet been called back +// by WebRTC. Otherwise it returns either ok-got-stream or failed-with-error-x +// (where x is the error code from the error callback) depending on which +// callback got called by WebRTC. +function obtainGetUserMediaResult_() { + if (typeof global.requestWebcamAndMicrophoneResult === 'undefined') { + global.requestWebcamAndMicrophoneResult = ' not called yet'; + } + return global.requestWebcamAndMicrophoneResult; +} + +// Negotiates a call with the other side. This will create a peer connection on +// the other side if there isn't one. 
+
+// Negotiates a call with the other side. This will create a peer connection
+// on the other side if there isn't one.
+// To call this method we need to be aware of the other side, e.g. we must be
+// connected to peerconnection_server and we must have exactly one peer on
+// that server.
+// This method may be called any number of times. If you haven't added any
+// streams to the call, an "empty" call will result. The method will return
+// ok-negotiating immediately to the test if the negotiation was successfully
+// sent.
+function negotiateCall_() {
+  if (typeof global.peerConnection === 'undefined') {
+    error_('Negotiating call, but we have no peer connection.');
+  } else if (typeof global.ourPeerId === 'undefined') {
+    error_('Negotiating call, but not connected.');
+  } else if (typeof global.remotePeerId === 'undefined') {
+    error_('Negotiating call, but missing remote peer.');
+  }
+  setupCall(global.peerConnection);
+  print_('ok-negotiating');
+}
+
+// Provides the selected source IDs from the drop down elements passed as
+// parameters. If the audioSelect or videoSelect elements do not have any
+// HTMLOptions children, the corresponding field in the returned source
+// object will be null.
+// @param {!object} audioSelect HTML drop down element with audio devices
+// added as HTMLOptionsCollection children.
+// @param {!object} videoSelect HTML drop down element with video devices
+// added as HTMLOptionsCollection children.
+// @return {!object} source The audio and video source IDs from the devices
+// selected in the drop down menu elements.
+function getSourcesFromField_(audioSelect, videoSelect) {
+  var source = {
+    audioId: null,
+    videoId: null
+  };
+  if (audioSelect.options.length > 0) {
+    source.audioId = audioSelect.options[audioSelect.selectedIndex].value;
+  }
+  if (videoSelect.options.length > 0) {
+    source.videoId = videoSelect.options[videoSelect.selectedIndex].value;
+  }
+  return source;
+}
+
+function iceCallback_(event) {
+  if (event.candidate) {
+    sendToPeer(global.remotePeerId, JSON.stringify(event.candidate));
+  }
+}
+
+function setLocalAndSendMessage_(sessionDescription) {
+  var unmodifiedSdp = sessionDescription.sdp;
+  sessionDescription.sdp =
+      global.transformOutgoingSdp(sessionDescription.sdp);
+  global.peerConnection.setLocalDescription(
+      sessionDescription
+  ).then(
+      function() { success_('setLocalDescription'); },
+      failedSetLocalDescription
+  );
+  print_('Sending SDP message:\n' + sessionDescription.sdp);
+  sendToPeer(global.remotePeerId, JSON.stringify(sessionDescription));
+
+  function failedSetLocalDescription(error) {
+    error_('SetLocalDescription failure: ' + error + '\n' +
+        'SDP before transform:\n ' + unmodifiedSdp + '\n' +
+        'SDP after transform:\n ' + sessionDescription.sdp + '\n');
+  }
+}
+
+function addStreamCallback_(event) {
+  print_('Receiving remote stream...');
+  var videoElement = document.getElementById('remote-view');
+  videoElement.srcObject = event.stream;
+
+  window.addEventListener('loadedmetadata',
+      function() {displayVideoSize(videoElement);}, true);
+}
+
+function removeStreamCallback_() {
+  print_('Call ended.');
+  document.getElementById('remote-view').srcObject = null;
+}
+
+function onCreateDataChannelCallback_(event) {
+  if (typeof global.dataChannel !== 'undefined' &&
+      global.dataChannel.readyState !== 'closed') {
+    error_('Received DataChannel, but we already have one.');
+  }
+  global.dataChannel = event.channel;
+  print_('DataChannel with label ' + global.dataChannel.label +
+      ' initiated by remote peer.');
+  hookupDataChannelEvents();
+}
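+
+// Sketch of how the selected device IDs from getSourcesFromField_ above are
+// typically turned into getUserMedia constraints (the 'audiosrc' and
+// 'videosrc' IDs match the drop downs referenced below; the constraint shape
+// is illustrative):
+//
+//   var source = getSourcesFromField_($('audiosrc'), $('videosrc'));
+//   var constraints = {
+//     audio: source.audioId ? {deviceId: {exact: source.audioId}} : true,
+//     video: source.videoId ? {deviceId: {exact: source.videoId}} : true
+//   };
+//   doGetUserMedia_(JSON.stringify(constraints));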
+
+function hookupDataChannelEvents() {
+  global.dataChannel.onmessage = global.dataCallback;
+  global.dataChannel.onopen = onDataChannelReadyStateChange_;
+  global.dataChannel.onclose = onDataChannelReadyStateChange_;
+  // Trigger global.dataStatusCallback so an application is notified
+  // about the created data channel.
+  onDataChannelReadyStateChange_();
+}
+
+function onDataChannelReadyStateChange_() {
+  print_('DataChannel state: ' + global.dataChannel.readyState);
+  global.dataStatusCallback(global.dataChannel.readyState);
+  // Display dataChannel.id only when the dataChannel is active/open.
+  if (global.dataChannel.readyState === 'open') {
+    $('data-channel-id').value = global.dataChannel.id;
+  } else if (global.dataChannel.readyState === 'closed') {
+    $('data-channel-id').value = '';
+  }
+}
+
+// Writes the current size of the given video element to the element with ID
+// '<video element id>-size', if such an element exists.
+// @param {!Object} videoTag The video element whose size should be displayed.
+function displayVideoSize(videoTag) {
+  if (videoTag.videoWidth > 0 || videoTag.videoHeight > 0) {
+    $(videoTag.id + '-size').firstChild.data = videoTag.videoWidth + 'x' +
+        videoTag.videoHeight;
+  }
+}
+
+// Checks if the 'audiosrc' and 'videosrc' drop down menu elements have had
+// all of their children appended in order to provide device IDs to the
+// function 'updateGetUserMediaConstraints()', used in turn to populate the
+// getUserMedia constraints text box when the page has loaded.
+function checkIfDeviceDropdownsArePopulated_() {
+  if (document.addEventListener) {
+    $('audiosrc').addEventListener('DOMNodeInserted',
+        updateGetUserMediaConstraints, false);
+    $('videosrc').addEventListener('DOMNodeInserted',
+        updateGetUserMediaConstraints, false);
+  } else {
+    print_('addEventListener is not supported by your browser, cannot update ' +
+        'device source IDs automatically. Select a device from the audio' +
+        ' or video source drop down menu to update device source IDs.');
+  }
+}
+
+// Registers an input element to use local storage to remember its state
+// between sessions. Only input elements are supported.
+// @param {!string} elementId Used as the key for local storage and as the ID
+// of the element to store the state for.
+function registerLocalStorage_(elementId) {
+  var element = $(elementId);
+  if (element.tagName !== 'INPUT') {
+    error_('You can only use registerLocalStorage_ for input elements. ' +
+        'Element \"' + element.tagName + '\" is not an input element. ');
+  }
+
+  if (localStorage.getItem(element.id) === null) {
+    storeLocalStorageField_(element);
+  } else {
+    getLocalStorageField_(element);
+  }
+
+  // Registers the appropriate events for input elements.
+  if (element.type === 'checkbox') {
+    element.onclick = function() { storeLocalStorageField_(this); };
+  } else if (element.type === 'text') {
+    element.onblur = function() { storeLocalStorageField_(this); };
+  } else {
+    error_('Unsupported input type: ' + '\"' + element.type + '\"');
+  }
+}
+
+// Fetches the stored value from local storage and updates the element state.
+// @param {!Object} element Element whose id is used as the local storage key.
+function getLocalStorageField_(element) {
+  // Make sure the checkbox status matches the local storage value.
+  if (element.type === 'checkbox') {
+    element.checked = (localStorage.getItem(element.id) === 'true');
+  } else if (element.type === 'text') {
+    element.value = localStorage.getItem(element.id);
+  } else {
+    error_('Unsupported input type: ' + '\"' + element.type + '\"');
+  }
+}
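+
+// Example of wiring an input element to local storage (assuming the element
+// with this ID is an <input>; 'auto-add-stream-oncall' is referenced further
+// down in this file):
+//
+//   registerLocalStorage_('auto-add-stream-oncall');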
+
+// Stores the string value of the element object using local storage.
+// @param {!Object} element Element whose id is used as the local storage key.
+function storeLocalStorageField_(element) {
+  if (element.type === 'checkbox') {
+    localStorage.setItem(element.id, element.checked);
+  } else if (element.type === 'text') {
+    localStorage.setItem(element.id, element.value);
+  }
+}
+
+// Creates the peer connection if none is up (this is just a convenience to
+// avoid having a separate button for that).
+function ensureHasPeerConnection_() {
+  if (getReadyState_() === 'no-peer-connection') {
+    preparePeerConnection();
+  }
+}
+
+// @param {string} message Text to print.
+function print_(message) {
+  printHandler_(message, 'black');
+}
+
+// @param {string} message Text to print.
+function success_(message) {
+  printHandler_(message, 'green');
+}
+
+// @param {string} message Text to print.
+function warning_(message) {
+  printHandler_(message, 'orange');
+}
+
+// @param {string} message Text to print.
+function error_(message) {
+  printHandler_(message, 'red');
+}
+
+// Appends the message to the 'messages' log element in the given color and
+// logs it to the console. Throws if the color is 'red', i.e. for error_.
+// @param {string} message Text to print.
+// @param {string} color Color of the text.
+function printHandler_(message, color) {
+  if (color === 'green') {
+    message += ' success';
+  }
+  $('messages').innerHTML += '<span style="color:' + color + '">' + message +
+      '</span><br>';
+  if (color === 'red') {
+    throw new Error(message);
+  }
+  console.log(message);
+}
+
+// @param {string} stringRepresentation A JSON string, e.g. the PeerConnection
+// constraints from the text box.
+// @return {Object} The parsed value as a JavaScript object.
+function getEvaluatedJavaScript_(stringRepresentation) {
+  try {
+    var evaluatedJavaScript = JSON.parse(stringRepresentation);
+    return evaluatedJavaScript;
+  } catch (exception) {
+    error_('Not a valid JSON string: ' + stringRepresentation);
+  }
+}
+
+function forceIsac_() {
+  setOutgoingSdpTransform(function(sdp) {
+    // Remove all other audio codecs (not the video codecs though).
+    sdp = sdp.replace(/m=audio (\d+) UDP\/TLS\/RTP\/SAVPF.*\r\n/g,
+        'm=audio $1 UDP\/TLS\/RTP\/SAVPF 104\r\n');
+    sdp = sdp.replace('a=rtcp-fb:111 transport-cc',
+        'a=rtcp-fb:104 transport-cc');
+    sdp = sdp.replace('a=fmtp:111 minptime=10', 'a=fmtp:104 minptime=10');
+    var t = /a=rtpmap:(?!104)\d{1,3} (?!VP8|H264|VP9|red|ulpfec|rtx).*\r\n/g;
+    sdp = sdp.replace(t, '');
+    return sdp;
+  });
+}
+
+function dontTouchSdp_() {
+  setOutgoingSdpTransform(function(sdp) {
+    return sdp;
+  });
+}
+
+function hookupDataChannelCallbacks_() {
+  setDataCallbacks(function(status) {
+    $('data-channel-status').value = status;
+  },
+  function(dataMessage) {
+    print_('Received ' + dataMessage.data);
+    $('data-channel-receive').value =
+        dataMessage.data + '\n' + $('data-channel-receive').value;
+  });
+}
+
+function hookupDtmfSenderCallback_() {
+  setOnToneChange(function(tone) {
+    print_('Sent DTMF tone: ' + tone.tone);
+  });
+}
+
+function toggle_(track, localOrRemote, audioOrVideo) {
+  if (!track) {
+    error_('Tried to toggle ' + localOrRemote + ' ' + audioOrVideo +
+        ' stream, but have no such stream.');
+  }
+  track.enabled = !track.enabled;
+  print_('ok-' + audioOrVideo + '-toggled-to-' + track.enabled);
+}
+
+function connectCallback_(request) {
+  print_('Connect callback: ' + request.status + ', ' + request.readyState);
+  if (request.status === 0) {
+    print_('peerconnection_server doesn\'t seem to be up.');
+    error_('failed connecting to peerconnection_server');
+  }
+  if (request.readyState === 4 && request.status === 200) {
+    global.ourPeerId = parseOurPeerId_(request.responseText);
+    global.remotePeerId = parseRemotePeerIdIfConnected_(request.responseText);
+    startHangingGet_(global.serverUrl, global.ourPeerId);
+    print_('ok-connected');
+  }
+}
+
+function parseOurPeerId_(responseText) {
+  // According to peerconnection_server's protocol.
+  var peerList = responseText.split('\n');
+  return parseInt(peerList[0].split(',')[1]);
+}
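+
+// parseOurPeerId_ above and parseRemotePeerIdIfConnected_ below assume
+// peerconnection_server's plain text peer list, where each line has the form
+// 'name,id,connected' and the first line describes ourselves. An illustrative
+// sign_in response with one other peer already connected:
+//
+//   machine-a,1,1
+//   machine-b,2,1
+//
+// For this response parseOurPeerId_ returns 1 and
+// parseRemotePeerIdIfConnected_ returns 2.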
+
+function parseRemotePeerIdIfConnected_(responseText) {
+  var peerList = responseText.split('\n');
+  if (peerList.length === 1) {
+    // No peers have connected yet - we'll get their id later in a
+    // notification.
+    return null;
+  }
+  var remotePeerId = null;
+  for (var i = 0; i < peerList.length; i++) {
+    if (peerList[i].length === 0) {
+      continue;
+    }
+    var parsed = peerList[i].split(',');
+    var name = parsed[0];
+    var id = parseInt(parsed[1]);
+    if (id !== global.ourPeerId) {
+      print_('Found remote peer with name ' + name + ', id ' +
+          id + ' when connecting.');
+      // There should be at most one remote peer in this test.
+      if (remotePeerId !== null) {
+        error_('Expected just one remote peer in this test: ' +
+            'found several.');
+      }
+      // Found a remote peer.
+      remotePeerId = id;
+    }
+  }
+  return remotePeerId;
+}
+
+function startHangingGet_(server, ourId) {
+  if (isDisconnected_()) {
+    return;
+  }
+  var hangingGetRequest = new XMLHttpRequest();
+  hangingGetRequest.onreadystatechange = function() {
+    hangingGetCallback_(hangingGetRequest, server, ourId);
+  };
+  hangingGetRequest.ontimeout = function() {
+    hangingGetTimeoutCallback_(hangingGetRequest, server, ourId);
+  };
+  var callUrl = server + '/wait?peer_id=' + ourId;
+  print_('Sending ' + callUrl);
+  hangingGetRequest.open('GET', callUrl);
+  hangingGetRequest.send();
+}
+
+function hangingGetCallback_(hangingGetRequest, server, ourId) {
+  if (hangingGetRequest.readyState !== 4 || hangingGetRequest.status === 0) {
+    // Code 0 is not possible if the server actually responded. Ignore.
+    return;
+  }
+  if (hangingGetRequest.status !== 200) {
+    error_('Error ' + hangingGetRequest.status + ' from server: ' +
+        hangingGetRequest.statusText);
+  }
+  var targetId = readResponseHeader_(hangingGetRequest, 'Pragma');
+  if (targetId === ourId) {
+    handleServerNotification_(hangingGetRequest.responseText);
+  } else {
+    handlePeerMessage_(targetId, hangingGetRequest.responseText);
+  }
+  hangingGetRequest.abort();
+
+  restartHangingGet_(server, ourId);
+}
+
+function hangingGetTimeoutCallback_(hangingGetRequest, server, ourId) {
+  print_('Hanging GET timed out, re-issuing...');
+  hangingGetRequest.abort();
+  restartHangingGet_(server, ourId);
+}
+
+function handleServerNotification_(message) {
+  var parsed = message.split(',');
+  if (parseInt(parsed[2]) === 1) {
+    // A peer connected - this must be our remote peer, and it must mean we
+    // connected before them (except if we happened to connect to the server
+    // at precisely the same moment).
+    print_('Found remote peer with name ' + parsed[0] + ', id ' + parsed[1] +
+        ' when connecting.');
+    global.remotePeerId = parseInt(parsed[1]);
+  }
+}
+
+function closeCall_() {
+  if (typeof global.peerConnection === 'undefined') {
+    warning_('Closing call, but no call active.');
+    return;
+  }
+  global.peerConnection.close();
+  global.peerConnection = undefined;
+}
+
+function handlePeerMessage_(peerId, message) {
+  print_('Received message from peer ' + peerId + ': ' + message);
+  if (peerId !== global.remotePeerId) {
+    error_('Received notification from unknown peer ' + peerId +
+        ' (only know about ' + global.remotePeerId + ').');
+  }
+  if (message.search('BYE') === 0) {
+    print_('Received BYE from peer: closing call');
+    closeCall_();
+    return;
+  }
+  if (typeof global.peerConnection === 'undefined' &&
+      global.acceptsIncomingCalls) {
+    // The other side is calling us.
+ print_('We are being called: answer...'); + + global.peerConnection = createPeerConnection(STUN_SERVER); + + if ($('auto-add-stream-oncall') && + obtainGetUserMediaResult_() === 'ok-got-stream') { + print_('We have a local stream, so hook it up automatically.'); + addLocalStreamToPeerConnection(global.peerConnection); + } + answerCall(global.peerConnection, message); + return; + } + handleMessage(global.peerConnection, message); +} + +function restartHangingGet_(server, ourId) { + window.setTimeout(function() { + startHangingGet_(server, ourId); + }, 0); +} + +function readResponseHeader_(request, key) { + var value = request.getResponseHeader(key); + if (value === null || value.length === 0) { + error_('Received empty value ' + value + + ' for response header key ' + key + '.'); + } + return parseInt(value); +} diff --git a/src/single-audio/index.html b/src/single-audio/index.html new file mode 100644 index 0000000..7524af2 --- /dev/null +++ b/src/single-audio/index.html @@ -0,0 +1,42 @@ + + + + + Single Local Preview (Audio Only) + + + + + + + + + + + + + +
Sound test
+ + diff --git a/src/single-video/index.html b/src/single-video/index.html new file mode 100644 index 0000000..cada57e --- /dev/null +++ b/src/single-video/index.html @@ -0,0 +1,43 @@ + + + + + Single Local Preview (Video Only) + + + + + + + + + + + + + +
Local Preview
+ +