<!DOCTYPE html>
<html>
<head>
<title>Podcast</title>
<style>
video, canvas {
width: 35rem;
}
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
audio {
display: inline-block;
position: relative;
top: 9px;
width: calc(100% - 120px);
}
button {
margin: 0 20px 0 0;
width: 96px;
}
table {
border-collapse: collapse;
}
th, td {
border: 1px solid black;
}
tr:hover {
background-color: #f5f5f5;
}
div#audio {
margin: 0 0 29px 0;
}
div#audio > div {
margin: 0 0 20px 0;
}
div.label {
display: inline-block;
font-weight: 400;
width: 120px;
}
div.graph-container {
float: left;
margin: 0.5em;
width: calc(50% - 1em);
}
a#viewSource {
clear: both;
}
</style>
<script type="text/javascript">
// --- Globals shared by the call flow below. ---
// Peer connections for the in-page loopback call (pc1 sends, pc2 receives).
let pc1;
let pc2;
let localStream;
// Timeline graphing state. TimelineDataSeries / TimelineGraphView are not
// defined in this file — presumably loaded by a separate script; TODO confirm
// they are available before call() runs.
let bitrateGraph;
let bitrateSeries;
let targetBitrateSeries;
let headerrateSeries;
let packetGraph;
let packetSeries;
// Previous getStats() snapshot, used to compute per-interval deltas.
let lastResult;
// Audio-only offer: receive audio, no video.
const offerOptions = {
offerToReceiveAudio: 1,
offerToReceiveVideo: 0,
voiceActivityDetection: false
};
// Audio levels sampled since the last graph update (drained every ~500 ms).
const audioLevels = [];
let audioLevelGraph;
let audioLevelSeries;
// Enabling opus DTX is an expert option without GUI.
// eslint-disable-next-line prefer-const
let useDtx = false;
// Disabling Opus FEC is an expert option without GUI.
// eslint-disable-next-line prefer-const
let useFec = true;
// DOM references, filled in lazily by start().
let audio2;
let callButton;
let hangupButton;
let codecSelector;
// NOTE(review): dead code — the second `hangup` declaration further down this
// file shadows this one (for hoisted function declarations the later one
// wins). This copy also references `pc` and `startButton`, neither of which
// is defined anywhere in this file, so it would throw if it ever ran; it
// appears to be pasted from a different sample.
async function hangup() {
if (pc) {
pc.close();
pc = null;
}
localStream.getTracks().forEach(track => track.stop());
localStream = null;
startButton.disabled = false;
hangupButton.disabled = true;
};
// Builds a peer connection wired to a message-based signaling channel.
// NOTE(review): `pc`, `signaling` and `remoteVideo` are not declared in this
// file — this function (and the makeCall/handleOffer/handleAnswer/
// handleCandidate group) appears to be pasted from a different sample and is
// never invoked by this page's UI; it would throw on the missing globals.
function createPeerConnection() {
pc = new RTCPeerConnection();
pc.onicecandidate = e => {
// Relay each gathered ICE candidate (or the end-of-candidates marker,
// sent as candidate: null) to the remote side.
const message = {
type: 'candidate',
candidate: null,
};
if (e.candidate) {
message.candidate = e.candidate.candidate;
message.sdpMid = e.candidate.sdpMid;
message.sdpMLineIndex = e.candidate.sdpMLineIndex;
}
signaling.postMessage(message);
};
pc.ontrack = e => remoteVideo.srcObject = e.streams[0];
localStream.getTracks().forEach(track => pc.addTrack(track, localStream));
}
// Caller side: set up the connection, create an offer, announce it over the
// signaling channel, then apply it locally.
async function makeCall() {
  await createPeerConnection();
  const offer = await pc.createOffer();
  const {sdp} = offer;
  signaling.postMessage({type: 'offer', sdp});
  await pc.setLocalDescription(offer);
}
// Callee side: refuse a second offer, otherwise apply the incoming offer,
// produce an answer, publish it over signaling and apply it locally.
async function handleOffer(offer) {
  if (pc) {
    console.error('existing peerconnection');
    return;
  }
  await createPeerConnection();
  await pc.setRemoteDescription(offer);
  const answer = await pc.createAnswer();
  const {sdp} = answer;
  signaling.postMessage({type: 'answer', sdp});
  await pc.setLocalDescription(answer);
}
// Caller side: apply the answer produced by the remote peer.
async function handleAnswer(answer) {
  if (pc) {
    await pc.setRemoteDescription(answer);
  } else {
    console.error('no peerconnection');
  }
}
// Adds a remote ICE candidate to the connection. An empty candidate string
// marks end-of-candidates, in which case null is passed instead.
async function handleCandidate(candidate) {
  if (!pc) {
    console.error('no peerconnection');
    return;
  }
  const toAdd = candidate.candidate ? candidate : null;
  await pc.addIceCandidate(toAdd);
}
// Entry point for the "Do the thing" button: requests mic + camera access,
// wires up the Call/Hang Up buttons, populates the codec selection UI, and
// starts the audio-level graphing loop.
function start() {
window.navigator.mediaDevices.getUserMedia({
audio: true,
video: true,
})
.then((stream) => {
// Cache DOM references used by the other handlers in this file.
audio2 = document.querySelector('audio#audio2');
callButton = document.querySelector('button#callButton');
hangupButton = document.querySelector('button#hangupButton');
codecSelector = document.querySelector('select#codec');
hangupButton.disabled = true;
callButton.onclick = call;
hangupButton.onclick = hangup;
// We only show one way of doing this.
const codecPreferences = document.querySelector('#codecPreferences');
// NOTE(review): this const is local to this callback — it is NOT visible
// to the other top-level functions in this file that use the same name.
const supportsSetCodecPreferences = window.RTCRtpTransceiver &&
'setCodecPreferences' in window.RTCRtpTransceiver.prototype;
if (supportsSetCodecPreferences) {
// Modern path: hide the SDP-munging selector and list every audio codec
// the browser can receive, minus comfort-noise and DTMF entries.
codecSelector.style.display = 'none';
const {codecs} = RTCRtpReceiver.getCapabilities('audio');
codecs.forEach(codec => {
if (['audio/CN', 'audio/telephone-event'].includes(codec.mimeType)) {
return;
}
// Option value mirrors how the codec is matched later:
// "mimeType clockRate sdpFmtpLine" (fmtp part may be absent).
const option = document.createElement('option');
option.value = (codec.mimeType + ' ' + codec.clockRate + ' ' +
(codec.sdpFmtpLine || '')).trim();
option.innerText = option.value;
codecPreferences.appendChild(option);
});
codecPreferences.disabled = false;
} else {
codecPreferences.style.display = 'none';
}
// Change the ptime. For opus supported values are [10, 20, 40, 60].
// Expert option without GUI.
// eslint-disable-next-line no-unused-vars
async function setPtime(ptime) {
const offer = await pc1.createOffer();
await pc1.setLocalDescription(offer);
const desc = pc1.remoteDescription;
if (desc.sdp.indexOf('a=ptime:') !== -1) {
desc.sdp = desc.sdp.replace(/a=ptime:.*/, 'a=ptime:' + ptime);
} else {
desc.sdp += 'a=ptime:' + ptime + '\r\n';
}
await pc1.setRemoteDescription(desc);
}
// Poll the receiver's synchronization sources (where supported) on every
// animation frame and graph the loudest audio level per ~500 ms window.
if (window.RTCRtpReceiver && ('getSynchronizationSources' in window.RTCRtpReceiver.prototype)) {
let lastTime;
const getAudioLevel = (timestamp) => {
window.requestAnimationFrame(getAudioLevel);
if (!pc2) {
return;
}
const receiver = pc2.getReceivers().find(r => r.track.kind === 'audio');
if (!receiver) {
return;
}
const sources = receiver.getSynchronizationSources();
sources.forEach(source => {
audioLevels.push(source.audioLevel);
});
if (!lastTime) {
lastTime = timestamp;
} else if (timestamp - lastTime > 500 && audioLevels.length > 0) {
// Update graph every 500ms.
const maxAudioLevel = Math.max.apply(null, audioLevels);
audioLevelSeries.addPoint(Date.now(), maxAudioLevel);
audioLevelGraph.setDataSeries([audioLevelSeries]);
audioLevelGraph.updateEndDate();
audioLevels.length = 0;
lastTime = timestamp;
}
};
window.requestAnimationFrame(getAudioLevel);
}
/* console.log(stream);
const canvas = window.canvas = document.querySelector('canvas');
canvas.width = 480;
canvas.height = 360;
const button = document.querySelector("button.shot");
const video = document.querySelector('video');
button.onclick = function() {
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
};
window.stream = stream; // make stream available to browser console
video.srcObject = stream;
navigator.mediaDevices.enumerateDevices()
.then((devices) => {
devices.forEach((device) => {
console.log(`${device.kind}: ${device.label} id = ${device.deviceId}`);
});
})
.catch((err) => {
console.error(`${err.name}: ${err.message}`);
});
*/
})
.catch((err) => {
console.log(err);
});
}
// query getStats every second
// Polls pc1's first sender for outbound-rtp stats and appends bitrate,
// header-bitrate, target-bitrate and packet-rate points to the timeline
// graphs. Rates are computed as deltas against the previous snapshot
// (lastResult), keyed by stats report id.
window.setInterval(() => {
if (!pc1) {
return;
}
const sender = pc1.getSenders()[0];
if (!sender) {
return;
}
sender.getStats().then(res => {
res.forEach(report => {
let bytes;
let headerBytes;
let packets;
if (report.type === 'outbound-rtp') {
if (report.isRemote) {
return;
}
const now = report.timestamp;
bytes = report.bytesSent;
headerBytes = report.headerBytesSent;
packets = report.packetsSent;
if (lastResult && lastResult.has(report.id)) {
// Seconds elapsed since the previous sample of this report.
const deltaT = (now - lastResult.get(report.id).timestamp) / 1000;
// calculate bitrate (bits per second)
const bitrate = 8 * (bytes - lastResult.get(report.id).bytesSent) /
deltaT;
const headerrate = 8 * (headerBytes - lastResult.get(report.id).headerBytesSent) /
deltaT;
// append to chart
bitrateSeries.addPoint(now, bitrate);
headerrateSeries.addPoint(now, headerrate);
targetBitrateSeries.addPoint(now, report.targetBitrate);
bitrateGraph.setDataSeries([bitrateSeries, headerrateSeries, targetBitrateSeries]);
bitrateGraph.updateEndDate();
// calculate number of packets and append to chart
packetSeries.addPoint(now, (packets -
lastResult.get(report.id).packetsSent) / deltaT);
packetGraph.setDataSeries([packetSeries]);
packetGraph.updateEndDate();
}
}
});
lastResult = res;
});
}, 1000);
// getUserMedia success handler for call(): attaches the microphone track to
// pc1, kicks off offer creation, and (re)initializes the timeline graphs.
// NOTE(review): TimelineDataSeries / TimelineGraphView are not defined in
// this file — presumably provided by a separate script; verify it is loaded.
function gotStream(stream) {
hangupButton.disabled = false;
console.log('Received local stream');
localStream = stream;
const audioTracks = localStream.getAudioTracks();
if (audioTracks.length > 0) {
console.log(`Using Audio device: ${audioTracks[0].label}`);
}
localStream.getTracks().forEach(track => pc1.addTrack(track, localStream));
console.log('Adding Local Stream to peer connection');
pc1.createOffer(offerOptions)
.then(gotDescription1, onCreateSessionDescriptionError);
// Fresh graph state for this call.
bitrateSeries = new TimelineDataSeries();
bitrateGraph = new TimelineGraphView('bitrateGraph', 'bitrateCanvas');
bitrateGraph.updateEndDate();
targetBitrateSeries = new TimelineDataSeries();
targetBitrateSeries.setColor('blue');
headerrateSeries = new TimelineDataSeries();
headerrateSeries.setColor('green');
packetSeries = new TimelineDataSeries();
packetGraph = new TimelineGraphView('packetGraph', 'packetCanvas');
packetGraph.updateEndDate();
audioLevelSeries = new TimelineDataSeries();
audioLevelGraph = new TimelineGraphView('audioLevelGraph', 'audioLevelCanvas');
audioLevelGraph.updateEndDate();
}
// Failure callback for createOffer/createAnswer: log and carry on.
function onCreateSessionDescriptionError(error) {
  console.log(`Failed to create session description: ${error}`);
}
// Starts the in-page loopback call: pc1 captures the microphone and streams
// audio to pc2 in the same page. No STUN/TURN servers are configured since
// both ends live in the same browser.
function call() {
callButton.disabled = true;
codecSelector.disabled = true;
console.log('Starting call');
const servers = null;
pc1 = new RTCPeerConnection(servers);
console.log('Created local peer connection object pc1');
pc1.onicecandidate = e => onIceCandidate(pc1, e);
pc2 = new RTCPeerConnection(servers);
console.log('Created remote peer connection object pc2');
pc2.onicecandidate = e => onIceCandidate(pc2, e);
pc2.ontrack = gotRemoteStream;
console.log('Requesting local stream');
// Audio-only capture; gotStream adds the track and starts negotiation.
navigator.mediaDevices
.getUserMedia({
audio: true,
video: false
})
.then(gotStream)
.catch(e => {
alert(`getUserMedia() error: ${e.name}`);
});
}
// Applies pc1's offer locally, then hands it to pc2 and requests an answer.
// When setCodecPreferences is unavailable, falls back to munging the SDP so
// the codec chosen in the #codec <select> becomes the default.
// Fix: `supportsSetCodecPreferences` was a const local to start()'s callback
// and not visible here, so referencing it threw a ReferenceError; recompute
// the same feature check locally.
function gotDescription1(desc) {
  console.log(`Offer from pc1\n${desc.sdp}`);
  const supportsSetCodecPreferences = window.RTCRtpTransceiver &&
      'setCodecPreferences' in window.RTCRtpTransceiver.prototype;
  pc1.setLocalDescription(desc)
      .then(() => {
        if (!supportsSetCodecPreferences) {
          desc.sdp = forceChosenAudioCodec(desc.sdp);
        }
        pc2.setRemoteDescription(desc).then(() => {
          return pc2.createAnswer().then(gotDescription2, onCreateSessionDescriptionError);
        }, onSetSessionDescriptionError);
      }, onSetSessionDescriptionError);
}
// Applies pc2's answer locally, optionally munges the SDP (codec fallback,
// opus DTX, opus FEC expert flags), then feeds it back to pc1.
// Fix: `supportsSetCodecPreferences` was a const local to start()'s callback
// and not visible here, so referencing it threw a ReferenceError; recompute
// the same feature check locally.
function gotDescription2(desc) {
  console.log(`Answer from pc2\n${desc.sdp}`);
  const supportsSetCodecPreferences = window.RTCRtpTransceiver &&
      'setCodecPreferences' in window.RTCRtpTransceiver.prototype;
  pc2.setLocalDescription(desc).then(() => {
    if (!supportsSetCodecPreferences) {
      desc.sdp = forceChosenAudioCodec(desc.sdp);
    }
    if (useDtx) {
      desc.sdp = desc.sdp.replace('useinbandfec=1', 'useinbandfec=1;usedtx=1');
    }
    if (!useFec) {
      desc.sdp = desc.sdp.replace('useinbandfec=1', 'useinbandfec=0');
    }
    pc1.setRemoteDescription(desc).then(() => {}, onSetSessionDescriptionError);
  }, onSetSessionDescriptionError);
}
// Tears down the loopback call and re-enables the call UI.
// NOTE(review): this declaration shadows the earlier async `hangup` near the
// top of the script (with hoisted function declarations the later one wins);
// this is the copy actually bound to the Hang Up button.
function hangup() {
console.log('Ending call');
localStream.getTracks().forEach(track => track.stop());
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
hangupButton.disabled = true;
callButton.disabled = false;
codecSelector.disabled = false;
}
// pc2 ontrack handler: reorders codec preferences so the user's selection
// from #codecPreferences comes first, then attaches the remote stream.
// Fixes: (1) `supportsSetCodecPreferences` was a const local to start()'s
// callback and not visible here — recompute the feature check locally;
// (2) guard against findIndex returning -1, which previously corrupted the
// codec list via splice(-1, 1) / unshift(undefined); (3) the log message
// said "video codec" in this audio-only sample.
// NOTE(review): `codecPreferences` resolves via the browser's named access
// on the element with id="codecPreferences".
function gotRemoteStream(e) {
  const supportsSetCodecPreferences = window.RTCRtpTransceiver &&
      'setCodecPreferences' in window.RTCRtpTransceiver.prototype;
  if (supportsSetCodecPreferences) {
    const preferredCodec = codecPreferences.options[codecPreferences.selectedIndex];
    if (preferredCodec.value !== '') {
      // Option value format: "mimeType clockRate sdpFmtpLine" (see start()).
      const [mimeType, clockRate, sdpFmtpLine] = preferredCodec.value.split(' ');
      const {codecs} = RTCRtpReceiver.getCapabilities('audio');
      console.log(mimeType, clockRate, sdpFmtpLine);
      console.log(JSON.stringify(codecs, null, ' '));
      const selectedCodecIndex = codecs.findIndex(c => c.mimeType === mimeType && c.clockRate === parseInt(clockRate, 10) && c.sdpFmtpLine === sdpFmtpLine);
      if (selectedCodecIndex !== -1) {
        // Move the selected codec to the front of the preference list.
        const selectedCodec = codecs[selectedCodecIndex];
        codecs.splice(selectedCodecIndex, 1);
        codecs.unshift(selectedCodec);
        e.transceiver.setCodecPreferences(codecs);
        console.log('Preferred audio codec', selectedCodec);
      }
    }
  }
  if (audio2.srcObject !== e.streams[0]) {
    audio2.srcObject = e.streams[0];
    console.log('Received remote stream');
  }
}
// Given one side of the loopback call, returns the opposite side.
function getOtherPc(pc) {
  if (pc === pc1) {
    return pc2;
  }
  return pc1;
}
// Human-readable label for a peer connection, used in log output.
function getName(pc) {
  if (pc === pc1) {
    return 'pc1';
  }
  return 'pc2';
}
// Forwards a locally gathered ICE candidate to the opposite peer connection
// (loopback "signaling"), then logs it.
function onIceCandidate(pc, event) {
  const otherPc = getOtherPc(pc);
  otherPc.addIceCandidate(event.candidate)
      .then(
          () => onAddIceCandidateSuccess(pc),
          err => onAddIceCandidateError(pc, err)
      );
  const described = event.candidate ? event.candidate.candidate : '(null)';
  console.log(`${getName(pc)} ICE candidate:\n${described}`);
}
// Success callback for addIceCandidate; log only.
function onAddIceCandidateSuccess() {
console.log('AddIceCandidate success.');
}
// Failure callback for addIceCandidate; log only.
// Fix: the call site invokes this as onAddIceCandidateError(pc, err), so the
// old single-parameter signature bound `error` to the peer connection and
// logged the wrong object. Accept both arguments so the real error is
// printed (extra arguments are ignored by JavaScript, so the in-file caller
// is unaffected beyond now logging correctly).
function onAddIceCandidateError(pc, error) {
  console.log(`Failed to add ICE Candidate: ${error.toString()}`);
}
// Failure callback for setLocalDescription/setRemoteDescription; log only.
function onSetSessionDescriptionError(error) {
  console.log(`Failed to set session description: ${error}`);
}
// Munges |sdp| so the codec chosen in the #codec <select> becomes the
// default audio send codec — the fallback path for browsers without
// RTCRtpTransceiver.setCodecPreferences.
function forceChosenAudioCodec(sdp) {
return maybePreferCodec(sdp, 'audio', 'send', codecSelector.value);
}
// Copied from AppRTC's sdputils.js:
// Sets |codec| as the default |type| codec if it's present.
// The format of |codec| is 'NAME/RATE', e.g. 'opus/48000'.
// Returns the (possibly rewritten) SDP string; returns |sdp| unchanged when
// no preference is given or the m-line / codec cannot be found.
function maybePreferCodec(sdp, type, dir, codec) {
  const str = `${type} ${dir} codec`;
  if (codec === '') {
    console.log(`No preference on ${str}.`);
    return sdp;
  }
  console.log(`Prefer ${str}: ${codec}`);
  const sdpLines = sdp.split('\r\n');
  // Search for m line.
  const mLineIndex = findLine(sdpLines, 'm=', type);
  if (mLineIndex === null) {
    return sdp;
  }
  // If the codec is available, set it as the default in m line.
  const codecIndex = findLine(sdpLines, 'a=rtpmap', codec);
  console.log('codecIndex', codecIndex);
  // Fix: findLine() returns null on a miss and a 0-based index on a hit.
  // The old truthiness check (`if (codecIndex)`) wrongly rejected index 0
  // and only worked by accident because null is falsy.
  if (codecIndex !== null) {
    const payload = getCodecPayloadType(sdpLines[codecIndex]);
    if (payload) {
      sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex], payload);
    }
  }
  sdp = sdpLines.join('\r\n');
  return sdp;
}
// Find the line in sdpLines that starts with |prefix|, and, if specified,
// contains |substr| (case-insensitive search). Convenience wrapper over
// findLineInRange covering the whole array.
function findLine(sdpLines, prefix, substr) {
  const searchWholeArray = -1;
  return findLineInRange(sdpLines, 0, searchWholeArray, prefix, substr);
}
// Find the line in sdpLines[startLine...endLine - 1] that starts with
// |prefix| and, if specified, contains |substr| (case-insensitive search).
// An |endLine| of -1 means "to the end of the array". Returns the index of
// the first matching line, or null when none matches.
function findLineInRange(sdpLines, startLine, endLine, prefix, substr) {
  const limit = endLine === -1 ? sdpLines.length : endLine;
  const needle = substr ? substr.toLowerCase() : null;
  for (let i = startLine; i < limit; ++i) {
    const line = sdpLines[i];
    if (!line.startsWith(prefix)) {
      continue;
    }
    if (needle === null || line.toLowerCase().includes(needle)) {
      return i;
    }
  }
  return null;
}
// Gets the codec payload type from an a=rtpmap:X line.
// Returns the payload type as a string, or null when the line doesn't match.
function getCodecPayloadType(sdpLine) {
  const match = /a=rtpmap:(\d+) \w+\/\d+/.exec(sdpLine);
  return match && match.length === 2 ? match[1] : null;
}
// Returns a new m= line with the specified codec as the first one.
// The first three space-separated fields (media, port, proto) are kept
// verbatim; |payload| is promoted to the head of the payload list and any
// duplicate occurrence of it is dropped.
function setDefaultCodec(mLine, payload) {
  const parts = mLine.split(' ');
  const header = parts.slice(0, 3);
  const otherPayloads = parts.slice(3).filter(p => p !== payload);
  return [...header, payload, ...otherPayloads].join(' ');
}
</script>
</head>
<body>
<center><h1>Starting</h1></center>
<hr>
<center>nginx/1.26.1</center>
<div id="audio">
<div>
<div class="label">Local audio:</div>
<audio id="audio1" autoplay controls muted></audio>
</div>
<div>
<div class="label">Remote audio:</div>
<audio id="audio2" autoplay controls></audio>
</div>
</div>
<div id="buttons">
<select id="codec">
<!-- Codec values are matched with how they appear in the SDP.
For instance, opus matches opus/48000/2 in Chrome, and ISAC/16000
matches 16K iSAC (but not 32K iSAC). -->
<option value="opus">Opus</option>
<option value="ISAC">iSAC 16K</option>
<option value="G722">G722</option>
<option value="PCMU">PCMU</option>
<option value="red">RED</option>
</select>
<select id="codecPreferences" disabled>
<option selected value="">Default</option>
</select>
<button id="callButton">Call</button>
<button id="hangupButton">Hang Up</button>
</div>
<div class="graph-container" id="bitrateGraph">
<div>Bitrate</div>
<canvas id="bitrateCanvas"></canvas>
</div>
<div class="graph-container" id="packetGraph">
<div>Packets sent per second</div>
<canvas id="packetCanvas"></canvas>
</div>
<div class="graph-container" id="audioLevelGraph">
<div>average audio level ([0..1])</div>
<canvas id="audioLevelCanvas"></canvas>
</div>
<a href="https://github.com/webrtc/samples/tree/gh-pages/src/content/peerconnection/audio"
title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
<table>
<caption>Bitrate and Packets sent per second - approximate results in browsers</caption>
<tr>
<th>Opus</th>
<th>iSAC 16K</th>
<th>G722</th>
<th>PCMU</th>
<th>Browsers Tested</th>
</tr>
<tr>
<td>~40 kbps / Muted : Same, ~50 Packets, Muted : Same or slight drop</td>
<td>~30 kbps / Muted : Same, ~33 Packets, Muted : Same or slight drop</td>
<td>~70 kbps / Muted : Same, ~50 Packets, Muted : Same</td>
<td>~70 kbps / Muted : Same, ~55 Packets, Muted : Same</td>
<td>Tested in Chrome, Not tested in Opera, Firefox, Safari, Edge</td>
</tr>
</table>
<hr>
<!-- <canvas></canvas> -->
<button onclick="start()">Do the thing</button>
<!-- <video playsinline autoplay></video> -->
<!-- <button class="shot">Take snapshot</button> -->
</body>
</html>