Skip to content

Commit

Permalink
Add synthetic video generation to "bandwidth" sample, and measure ram…
Browse files Browse the repository at this point in the history
…pup (webrtc#1472)

* Add synthetic video generation to "bandwidth" sample, and measure rampup

This allows checking how long it takes for resolution to adapt after
starting the call.

* Make eslint happy

* Smoothen the movements some more
  • Loading branch information
alvestrand authored Sep 7, 2021
1 parent 524e1d8 commit 2d242ac
Show file tree
Hide file tree
Showing 3 changed files with 80 additions and 5 deletions.
2 changes: 1 addition & 1 deletion nightwatch.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
"page_objects_path": "",
"selenium": {
"start_process": true,
"server_path": "./node_modules/selenium-server/lib/runner/selenium-server-standalone-3.14.0.jar",
"server_path": "./node_modules/selenium-server/lib/runner/selenium-server-standalone-3.141.59.jar",
"port": 4444,
"log_path": "",
"cli_args": {
Expand Down
2 changes: 2 additions & 0 deletions src/content/peerconnection/bandwidth/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,12 @@ <h1><a href="//webrtc.github.io/samples/" title="WebRTC samples homepage">WebRTC
<option value="500">500</option>
<option value="250">250</option>
<option value="125">125</option>
<option value="75">75</option>
</select>
kbps
<button id="callButton">Call</button>
<button id="hangupButton">Hang Up</button>
Use synthetic video: <input type="checkbox" id="synthetic">
</div>
<div class="graph-container" id="bitrateGraph">
<div>Bitrate</div>
Expand Down
81 changes: 77 additions & 4 deletions src/content/peerconnection/bandwidth/js/main.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ const localVideo = document.querySelector('video#localVideo');
const callButton = document.querySelector('button#callButton');
const hangupButton = document.querySelector('button#hangupButton');
const bandwidthSelector = document.querySelector('select#bandwidth');
const synthetic = document.querySelector('input#synthetic');
hangupButton.disabled = true;
callButton.onclick = call;
hangupButton.onclick = hangup;
Expand All @@ -36,11 +37,29 @@ let packetSeries;

let lastResult;

let lastRemoteStart = 0;

// lastRemoteFullSizeDelay is designed to be picked up by a test script.
// eslint-disable-next-line no-unused-vars
let lastRemoteFullSizeDelay = 0;

const offerOptions = {
offerToReceiveAudio: 0,
offerToReceiveVideo: 1
};

// Measure how long it takes the remote video to reach the full (local
// capture) resolution after the call starts. The elapsed time is stored in
// lastRemoteFullSizeDelay so a test script can read it.
remoteVideo.addEventListener('resize', () => {
  const elapsed = performance.now() - lastRemoteStart;
  console.log(elapsed, ': Resize event, size ',
      remoteVideo.videoWidth, 'x', remoteVideo.videoHeight);
  // Strict equality: the remote frame size now matches the local capture.
  if (localVideo.videoWidth === remoteVideo.videoWidth &&
      localVideo.videoHeight === remoteVideo.videoHeight) {
    lastRemoteFullSizeDelay = elapsed;
    console.log('Full size achieved');
  }
});


function gotStream(stream) {
hangupButton.disabled = false;
console.log('Received local stream');
Expand Down Expand Up @@ -86,10 +105,15 @@ function call() {
pc2.onicecandidate = onIceCandidate.bind(pc2);
pc2.ontrack = gotRemoteStream;

console.log('Requesting local stream');
navigator.mediaDevices.getUserMedia({video: true})
.then(gotStream)
.catch(e => alert('getUserMedia() error: ' + e.name));
if (synthetic.checked) {
console.log('Requesting synthetic local stream');
gotStream(syntheticVideoStream());
} else {
console.log('Requesting live local stream');
navigator.mediaDevices.getUserMedia({video: true})
.then(gotStream)
.catch(e => alert('getUserMedia() error: ' + e.name));
}
}

function gotDescription1(desc) {
Expand Down Expand Up @@ -138,6 +162,8 @@ function gotRemoteStream(e) {
if (remoteVideo.srcObject !== e.streams[0]) {
remoteVideo.srcObject = e.streams[0];
console.log('Received remote stream');
lastRemoteStart = performance.now();
lastRemoteFullSizeDelay = 0;
}
}

Expand Down Expand Up @@ -288,3 +314,50 @@ window.setInterval(() => {
lastResult = res;
});
}, 1000);

// Return a number between 0 and maxValue (inclusive) based on the input
// number, so that the output changes smoothly up and down (a triangle wave).
// Assumes a non-negative input, as produced by the frame counter.
function triangle(number, maxValue) {
  // Full up-and-down period. Using maxValue * 2 (rather than the previous
  // (maxValue + 1) * 2) keeps the result within [0, maxValue]; the old
  // modulus let the result reach maxValue + 1 once per cycle
  // (e.g. triangle(511, 255) was 256).
  const modulus = maxValue * 2;
  return Math.abs(number % modulus - maxValue);
}

// Build a synthetic video MediaStream by repainting a canvas every 100 ms.
// The background rolls through colors and two contrast-colored boxes bounce
// around, so the encoder always has motion to work with. If `signal` is set
// (0-255), a constant gray box is painted at (20, 20)-(60, 60); it is big
// enough to avoid color bleed from the surrounding video in some codecs,
// which gives tests a stable luminance value to read back.
function syntheticVideoStream({width = 640, height = 480, signal} = {}) {
  const canvas = Object.assign(
      document.createElement('canvas'), {width, height}
  );
  const context = canvas.getContext('2d');
  const boxSize = 80;
  let frameCount = 0;

  setInterval(() => {
    // Relatively-prime multipliers make the background color roll through
    // the palette without repeating quickly.
    const r = triangle(frameCount * 2, 255);
    const g = triangle(frameCount * 3, 255);
    const b = triangle(frameCount * 5, 255);
    context.fillStyle = `rgb(${r}, ${g}, ${b})`;
    // NOTE: the counter is bumped here on purpose — the box positions below
    // use the incremented value while the colors above use the previous one,
    // matching the original frame-to-frame animation exactly.
    frameCount += 1;
    context.fillRect(0, 0, width, height);
    // Two bouncing boxes in the contrast color add a little more noise.
    const rContrast = (r + 128) % 256;
    const gContrast = (g + 128) % 256;
    const bContrast = (b + 128) % 256;
    context.fillStyle = `rgb(${rContrast}, ${gContrast}, ${bContrast})`;
    context.fillRect(
        triangle(frameCount * 5, width - boxSize),
        triangle(frameCount * 7, height - boxSize),
        boxSize, boxSize);
    context.fillRect(
        triangle(frameCount * 11, width - boxSize),
        triangle(frameCount * 13, height - boxSize),
        boxSize, boxSize);
    // Constant-luminance marker box for tests (loose != also matches null).
    if (signal != undefined) {
      context.fillStyle = `rgb(${signal}, ${signal}, ${signal})`;
      context.fillRect(20, 20, 40, 40);
    }
  }, 100);

  return canvas.captureStream();
}

0 comments on commit 2d242ac

Please sign in to comment.