forked from webrtc/samples
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* Add a video analyzer example This adds a sample that shows how to use an insertable stream for analyzing an incoming video stream. * Fixes * Warning banner on nonsupported browser * Review comments, display video size * Add size control buttons * Disable size buttons before local stream * eslint
- Loading branch information
1 parent
f548737
commit 8d4a66f
Showing
3 changed files
with
396 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,48 @@ | ||
/*
 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree.
 */

/* Default button sizing; buttons sit in a row with a right-hand gap. */
button {
  margin: 0 20px 0 0;
  width: 83px;
}

/* Last button in the row — no trailing gap. */
button#hangupButton {
  margin: 0;
}

/* Videos take 45% of the container width at a fixed 4:3 aspect
   ratio (height = width * 0.75), via a custom property. */
video {
  --width: 45%;
  width: var(--width);
  height: calc(var(--width) * 0.75);
  margin: 0 0 20px 0;
  vertical-align: top;
}

/* Gap between the local video and the remote video to its right. */
video#localVideo {
  margin: 0 20px 20px 0;
}

div.box {
  margin: 1em;
}

/* Narrow-screen (phone) overrides: tighter margins, two videos per row. */
@media screen and (max-width: 400px) {
  button {
    width: 83px;
    margin: 0 11px 10px 0;
  }

  video {
    height: 90px;
    margin: 0 0 10px 0;
    width: calc(50% - 7px);
  }
  video#localVideo {
    margin: 0 10px 20px 0;
  }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,76 @@ | ||
<!DOCTYPE html>
<!--
 * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree.
-->
<html>
<head>

  <meta charset="utf-8">
  <meta name="description" content="WebRTC code samples">
  <meta name="viewport" content="width=device-width, user-scalable=yes, initial-scale=1, maximum-scale=1">
  <meta itemprop="description" content="Client-side WebRTC code samples">
  <meta itemprop="image" content="../../../images/webrtc-icon-192x192.png">
  <meta itemprop="name" content="WebRTC code samples">
  <meta name="mobile-web-app-capable" content="yes">
  <meta id="theme-color" name="theme-color" content="#ffffff">

  <!-- External links (e.g. "View source on GitHub") open in a new tab. -->
  <base target="_blank">

  <title>Insertable Streams Video Analyzer</title>

  <link rel="icon" sizes="192x192" href="../../../images/webrtc-icon-192x192.png">
  <link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
  <link rel="stylesheet" href="../../../css/main.css"/>
  <link rel="stylesheet" href="css/main.css"/>

</head>

<body>

  <div id="container">
    <h1><a href="//webrtc.github.io/samples/" title="WebRTC samples homepage">WebRTC samples</a>
      <span>Insertable Streams Video Analyzer</span></h1>

    <!-- Populated by js/main.js when Insertable Streams are unsupported. -->
    <h3><span id="banner"></span></h3>
    <p>This sample shows how Insertable Streams can be used to analyze
      the encoded form of a video track.
    </p>

    <video id="localVideo" playsinline autoplay muted></video>
    <video id="remoteVideo" playsinline autoplay></video>

    <!-- Call controls; size buttons stay disabled until a local stream exists. -->
    <div class="box">
      <button id="startButton">Start</button>
      <button id="callButton">Call</button>
      <button id="hangupButton">Hang Up</button>
      <br><br>
      <button id="size-small" disabled>Small</button>
      <button id="size-vga" disabled>VGA</button>
      <button id="size-hd" disabled>HD</button>
    </div>

    <p>View the console to see logging.
    </p>
    <!-- Live counters updated by the analyzer in js/main.js. -->
    <div class="analyzer">
      Video size: <span id="video-size"></span><br>
      Keyframe count: <span id="keyframe-count"></span><br>
      Interframe count: <span id="interframe-count"></span><br>
      Last keyframe size: <span id="keyframe-size"></span><br>
      Last interframe size: <span id="interframe-size"></span><br>
    </div>

    <a href="https://github.com/webrtc/samples/tree/gh-pages/src/content/peerconnection/video-analyzer"
       title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>

  </div>

  <!-- adapter.js shims cross-browser WebRTC API differences. -->
  <script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
  <script src="js/main.js" async></script>

  <script src="../../../js/lib/ga.js"></script>
</body>
</html>
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,272 @@ | ||
/* | ||
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. | ||
* | ||
* Use of this source code is governed by a BSD-style license | ||
* that can be found in the LICENSE file in the root of the source | ||
* tree. | ||
*/ | ||
|
||
'use strict';

// Wire up the three call-control buttons. Call/Hang Up stay disabled
// until the preceding step (getUserMedia / call) has completed.
const startButton = document.getElementById('startButton');
const callButton = document.getElementById('callButton');
const hangupButton = document.getElementById('hangupButton');
callButton.disabled = true;
hangupButton.disabled = true;
startButton.addEventListener('click', start);
callButton.addEventListener('click', call);
hangupButton.addEventListener('click', hangup);

// Re-constrain the width of the captured video track; the height
// follows from the track's aspect ratio. Shared by the size buttons
// below (they previously duplicated this call inline).
function applyTrackWidth(width) {
  localStream.getVideoTracks()[0].applyConstraints({width: {exact: width}});
}

const smallButton = document.getElementById('size-small');
smallButton.addEventListener('click', () => applyTrackWidth(180));
const vgaButton = document.getElementById('size-vga');
vgaButton.addEventListener('click', () => applyTrackWidth(640));
// NOTE(review): the "HD" button requests a width of 1024, not 1280
// (720p) — confirm this is intentional.
const hdButton = document.getElementById('size-hd');
hdButton.addEventListener('click', () => applyTrackWidth(1024));
// Element used to warn the user when the API below is unavailable.
const banner = document.querySelector('#banner');

// Feature-detect the experimental encoded-video-streams API on
// RTCRtpSender; the whole sample depends on it.
const supportsInsertableStreams =
    Boolean(RTCRtpSender.prototype.createEncodedVideoStreams);

if (!supportsInsertableStreams) {
  banner.innerText = 'Your browser does not support Insertable Streams. ' +
      'This sample will not work.';
  startButton.disabled = true;
}
// Timestamp taken when the call starts; used once to report setup time.
let startTime;
const localVideo = document.getElementById('localVideo');
const remoteVideo = document.getElementById('remoteVideo');

localVideo.addEventListener('loadedmetadata', () => {
  console.log(`Local video videoWidth: ${localVideo.videoWidth}px, videoHeight: ${localVideo.videoHeight}px`);
});

remoteVideo.addEventListener('loadedmetadata', () => {
  if (!startTime) {
    return;
  }
  // Report call-setup latency exactly once.
  const elapsedTime = window.performance.now() - startTime;
  console.log(`Setup time: ${elapsedTime.toFixed(3)}ms`);
  startTime = null;
});
// Shared call state: the captured camera stream and the two in-page
// peer connections (pc1 = sender, pc2 = receiver/analyzer side).
let localStream;
let pc1;
let pc2;
// Legacy offer options: request both audio and video from the remote side.
const offerOptions = {
  offerToReceiveAudio: 1,
  offerToReceiveVideo: 1
};
// Human-readable label for a peer connection, for log output.
function getName(pc) {
  if (pc === pc1) {
    return 'pc1';
  }
  return 'pc2';
}
// Returns the peer connection at the other end of the in-page call.
function getOtherPc(pc) {
  if (pc === pc1) {
    return pc2;
  }
  return pc1;
}
// Start button handler: captures the camera, shows it in the local
// <video> element, and unlocks the Call and size buttons.
async function start() {
  console.log('Requesting local stream');
  startButton.disabled = true;
  try {
    localStream = await navigator.mediaDevices.getUserMedia({video: true});
    console.log('Received local stream');
    localVideo.srcObject = localStream;
    // The size buttons operate on localStream, so enable them only now.
    for (const button of [callButton, smallButton, vgaButton, hdButton]) {
      button.disabled = false;
    }
  } catch (e) {
    alert(`getUserMedia() error: ${e.name}`);
  }
}
// Call button handler: builds the two in-page peer connections, feeds
// the local tracks into pc1, and kicks off the offer/answer exchange.
// pc2 is configured with forceEncodedVideoInsertableStreams so its
// receiver exposes the encoded frames to the analyzer.
async function call() {
  callButton.disabled = true;
  hangupButton.disabled = false;
  console.log('Starting call');
  startTime = window.performance.now();

  const [videoTrack] = localStream.getVideoTracks();
  if (videoTrack) {
    console.log(`Using video device: ${videoTrack.label}`);
  }

  pc1 = new RTCPeerConnection();
  console.log('Created local peer connection object pc1');
  pc2 = new RTCPeerConnection({forceEncodedVideoInsertableStreams: true});
  console.log('Created remote peer connection object pc2');

  pc1.addEventListener('icecandidate', e => onIceCandidate(pc1, e));
  pc2.addEventListener('icecandidate', e => onIceCandidate(pc2, e));
  pc1.addEventListener('iceconnectionstatechange', e => onIceStateChange(pc1, e));
  pc2.addEventListener('iceconnectionstatechange', e => onIceStateChange(pc2, e));
  pc2.addEventListener('track', gotRemoteTrack);

  for (const track of localStream.getTracks()) {
    pc1.addTrack(track, localStream);
  }
  console.log('Added local stream to pc1');

  try {
    console.log('pc1 createOffer start');
    const offer = await pc1.createOffer(offerOptions);
    await onCreateOfferSuccess(offer);
  } catch (e) {
    onCreateSessionDescriptionError(e);
  }
}
// Logs a failed createOffer()/createAnswer().
function onCreateSessionDescriptionError(error) {
  console.log('Failed to create session description: ' + error.toString());
}
// Applies pc1's offer locally on pc1 and remotely on pc2, then asks
// pc2 to produce the answer.
async function onCreateOfferSuccess(desc) {
  console.log(`Offer from pc1\n${desc.sdp}`);
  console.log('pc1 setLocalDescription start');
  try {
    await pc1.setLocalDescription(desc);
    onSetLocalSuccess(pc1);
  } catch (e) {
    // Fix: forward the error — onSetSessionDescriptionError calls
    // error.toString(), so invoking it without an argument threw a
    // secondary TypeError and hid the real failure.
    onSetSessionDescriptionError(e);
  }

  console.log('pc2 setRemoteDescription start');
  try {
    await pc2.setRemoteDescription(desc);
    onSetRemoteSuccess(pc2);
  } catch (e) {
    // Fix: same missing-argument bug as above.
    onSetSessionDescriptionError(e);
  }

  console.log('pc2 createAnswer start');
  try {
    const answer = await pc2.createAnswer();
    await onCreateAnswerSuccess(answer);
  } catch (e) {
    onCreateSessionDescriptionError(e);
  }
}
// Logs a successful setLocalDescription() for the given connection.
function onSetLocalSuccess(pc) {
  console.log(getName(pc) + ' setLocalDescription complete');
}
// Logs a successful setRemoteDescription() for the given connection.
function onSetRemoteSuccess(pc) {
  console.log(getName(pc) + ' setRemoteDescription complete');
}
// Logs a failed setLocalDescription()/setRemoteDescription().
function onSetSessionDescriptionError(error) {
  console.log('Failed to set session description: ' + error.toString());
}
// Track handler on pc2: routes the receiver's encoded frames through
// the analyzer transform and plays the decoded track in remoteVideo.
function gotRemoteTrack(e) {
  console.log('pc2 received remote stream');
  const frameStreams = e.receiver.createEncodedVideoStreams();
  const analyzer = new TransformStream({transform: videoAnalyzer});
  frameStreams.readableStream
      .pipeThrough(analyzer)
      .pipeTo(frameStreams.writableStream);
  remoteVideo.srcObject = e.streams[0];
}
// Applies pc2's answer locally on pc2 and remotely on pc1, completing
// the offer/answer exchange.
async function onCreateAnswerSuccess(desc) {
  console.log(`Answer from pc2:\n${desc.sdp}`);
  // Shared apply-and-log step; failures go to the session-description
  // error handler, success to the step-specific callback.
  const apply = async (label, action, onOk) => {
    console.log(label);
    try {
      await action();
      onOk();
    } catch (e) {
      onSetSessionDescriptionError(e);
    }
  };
  await apply('pc2 setLocalDescription start',
      () => pc2.setLocalDescription(desc), () => onSetLocalSuccess(pc2));
  await apply('pc1 setRemoteDescription start',
      () => pc1.setRemoteDescription(desc), () => onSetRemoteSuccess(pc1));
}
// Forwards an ICE candidate from one peer connection to the other
// (in-page signaling), then logs the candidate.
async function onIceCandidate(pc, event) {
  const candidate = event.candidate;
  try {
    await getOtherPc(pc).addIceCandidate(candidate);
    onAddIceCandidateSuccess(pc);
  } catch (e) {
    onAddIceCandidateError(pc, e);
  }
  console.log(`${getName(pc)} ICE candidate:\n${candidate ? candidate.candidate : '(null)'}`);
}
// Logs a successfully applied ICE candidate.
function onAddIceCandidateSuccess(pc) {
  console.log(getName(pc) + ' addIceCandidate success');
}
// Logs a rejected ICE candidate.
function onAddIceCandidateError(pc, error) {
  console.log(`${getName(pc)} failed to add ICE Candidate: ${error.toString()}`);
}
// Logs ICE connection-state transitions; ignores events once the
// connection has been torn down.
function onIceStateChange(pc, event) {
  if (!pc) {
    return;
  }
  console.log(`${getName(pc)} ICE state: ${pc.iceConnectionState}`);
  console.log('ICE state change event: ', event);
}
// Hang Up handler: closes both peer connections and re-arms the Call
// button for another round.
function hangup() {
  console.log('Ending call');
  for (const pc of [pc1, pc2]) {
    pc.close();
  }
  pc1 = null;
  pc2 = null;
  hangupButton.disabled = true;
  callButton.disabled = false;
}
// Dashboard <span> elements showing the analyzer's running totals.
const keyFrameCountDisplay = document.querySelector('#keyframe-count');
const keyFrameSizeDisplay = document.querySelector('#keyframe-size');
const interFrameCountDisplay = document.querySelector('#interframe-count');
const interFrameSizeDisplay = document.querySelector('#interframe-size');
const videoSizeDisplay = document.querySelector('#video-size');
// Counters mutated by videoAnalyzer and read by the display timer.
let keyFrameCount = 0;
let interFrameCount = 0;
// Byte length of the most recent key/inter frame, respectively.
let keyFrameLastSize = 0;
let interFrameLastSize = 0;
// TransformStream callback: classifies each encoded frame as key or
// inter frame, updates the counters, and passes the frame through
// unchanged.
function videoAnalyzer(chunk, controller) {
  // We assume the payload is VP8. TODO: check the codec to see that it is.
  // Per RFC 6386 section 9.1, the least-significant bit of the first
  // payload byte is the frame type: 0 = key frame, 1 = interframe.
  const firstByte = new DataView(chunk.data).getUint8(0);
  const isKeyFrame = (firstByte & 0x01) === 0;
  const frameSize = chunk.data.byteLength;
  if (isKeyFrame) {
    keyFrameCount++;
    keyFrameLastSize = frameSize;
  } else {
    interFrameCount++;
    interFrameLastSize = frameSize;
  }
  controller.enqueue(chunk);
}
// Refresh the on-page frame counters every 500 ms.
// (The original comment said "once a second", which contradicted the
// 500 ms interval below.)
setInterval(() => {
  keyFrameCountDisplay.innerText = keyFrameCount;
  keyFrameSizeDisplay.innerText = keyFrameLastSize;
  interFrameCountDisplay.innerText = interFrameCount;
  interFrameSizeDisplay.innerText = interFrameLastSize;
}, 500);
remoteVideo.addEventListener('resize', () => {
  const {videoWidth, videoHeight} = remoteVideo;
  console.log(`Remote video size changed to ${videoWidth}x${videoHeight}`);
  // The first resize event doubles as the signal that remote video has
  // started playing out.
  videoSizeDisplay.innerText = `${videoWidth}x${videoHeight}`;
});