Skip to content

Commit

Permalink
Clean up the debug page
Browse files Browse the repository at this point in the history
Remove some code duplication. Layout and style the page a bit differently so big videos don't break things and it's easier to see the form. Allow the "active" input to be selected without modifying the source of the page.
  • Loading branch information
dmlap committed Sep 10, 2015
1 parent c76ac43 commit 8a2bc3e
Show file tree
Hide file tree
Showing 2 changed files with 67 additions and 177 deletions.
15 changes: 13 additions & 2 deletions debug/css/main.css
Original file line number Diff line number Diff line change
Expand Up @@ -161,8 +161,19 @@ section {
clear: both;
}

form label {
display: block;
#video-place {
margin-top: 20px;
}
#video-place.error video {
background-color: #ddd;
box-shadow: red 0 0 10px 3px;
}

video {
background-color: #000;
width: 100%;
max-width: 800px;
max-height: 450px;
}

.result-wrapper {
Expand Down
229 changes: 54 additions & 175 deletions debug/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -45,22 +45,32 @@ <h1 class="title">Transmux Analyzer</h1>
</pre>
<small>Looking for the <a href="legacy.html">FLV tool</a>?</small>
</header>
<section id="video-place">
</section>
<section>
<h2>Inputs</h2>
<form id="inputs">
<label>
Your original MP2T segment:
<input type="file" id="original">
</label>
<label>
A working, MP4 version of the underlying stream
produced by another tool:
<input type="file" id="working">
</label>
<legend>
The input with the checked radio box will be loaded into
the player on this page.
</legend>
<fieldset>
<input id="original-active" type=radio name=active checked value="original">
<label>
Your original MP2T segment:
<input type="file" id="original">
</label>
</fieldset>
<fieldset>
<input id="working-active" type=radio name=active value="working">
<label>
A working, MP4 version of the underlying stream
produced by another tool:
<input type="file" id="working">
</label>
</fieldset>
</form>
</section>
<section id="video-place">
</section>
<section>
<h2>Comparison</h2>
<div id="comparison">
Expand Down Expand Up @@ -103,120 +113,7 @@ <h3>footer</h3>
<!-- Include QUnit for object diffs -->
<script src="../node_modules/qunitjs/qunit/qunit.js"></script>
<script>
/*
MOSTLY STOLEN FROM https://w3c.github.io/media-source/#examples
*/
/*
 * MOSTLY STOLEN FROM https://w3c.github.io/media-source/#examples
 *
 * Wire a <video> element to a MediaSource fed by two pull-style segment
 * providers. Each provider returns the next appendable segment, or null
 * when it has nothing more to supply; the first value pulled from each
 * provider is treated as that stream's initialization segment.
 *
 * @param {HTMLVideoElement} videoElement - tag to attach the MediaSource to
 * @param {Function} getNextVideoSegment - returns next video segment or null
 * @param {Function} getNextAudioSegment - returns next audio segment or null
 */
function setupMSE (videoElement, getNextVideoSegment, getNextAudioSegment) {
  // Track the source buffers we create so later appends target the right
  // stream even when only one of video/audio exists. (Indexing
  // mediaSource.sourceBuffers[0]/[1] by position, as before, pointed at
  // the wrong — or a missing — buffer in single-stream runs.)
  var videoBuffer, audioBuffer;

  function appendNextMediaSegment(getNextMediaSegment, mediaSource, sourceBuffer) {
    // No buffer was created for this stream, or the source is gone.
    if (!sourceBuffer || mediaSource.readyState === 'closed') {
      return false;
    }

    var mediaSegment = getNextMediaSegment();
    // If we have run out of stream data, there is nothing to append.
    if (mediaSegment == null) {
      // mediaSource.endOfStream("network");
      return false;
    }

    // Make sure the previous append is not still pending.
    if (sourceBuffer.updating) {
      return false;
    }

    // NOTE: If mediaSource.readyState === "ended", this appendBuffer() call
    // will cause mediaSource.readyState to transition to "open". The web
    // application should be prepared to handle multiple "sourceopen" events.
    sourceBuffer.appendBuffer(mediaSegment);
    return true;
  }

  function onProgress(mediaSource, e) {
    // Append the streams independently. Chaining them with && (as the
    // original did) starved the audio stream whenever video had no data.
    appendNextMediaSegment(getNextVideoSegment, mediaSource, videoBuffer);
    appendNextMediaSegment(getNextAudioSegment, mediaSource, audioBuffer);
  }

  function onSourceOpen(videoTag, e) {
    var expectedInits = 0,
        numberInited = 0,
        mediaSource = e.target,
        initVideoSegment, initAudioSegment;

    // "sourceopen" can fire more than once; only initialize the first
    // time. Guard BEFORE pulling from the providers so a repeat event
    // does not silently consume queued segments.
    if (mediaSource.sourceBuffers.length > 0) {
      return;
    }

    initVideoSegment = getNextVideoSegment();
    initAudioSegment = getNextAudioSegment();

    if (initVideoSegment == null && initAudioSegment == null) {
      // Error fetching the initialization segments. Signal end of stream
      // with an error.
      mediaSource.endOfStream('network');
      return;
    }

    if (initVideoSegment) {
      videoBuffer = mediaSource.addSourceBuffer('video/mp4;codecs=avc1.4d401f');
      expectedInits++;
    }
    if (initAudioSegment) {
      audioBuffer = mediaSource.addSourceBuffer('audio/mp4;codecs=mp4a.40.2');
      expectedInits++;
    }

    videoTag.addEventListener('progress', onProgress.bind(videoTag, mediaSource));

    // Once every buffer we actually created has finished appending its
    // initialization segment, start appending media data. (Waiting for
    // exactly two updateend events, as before, deadlocked whenever only
    // one stream was present.)
    var firstAppendHandler = function(e) {
      var sourceBuffer = e.target;
      sourceBuffer.removeEventListener('updateend', firstAppendHandler);

      // Append some initial media data.
      if (++numberInited === expectedInits) {
        onProgress(mediaSource, e);
      }
    };

    if (videoBuffer) {
      videoBuffer.addEventListener('updateend', firstAppendHandler);
      videoBuffer.appendBuffer(initVideoSegment);
    }
    if (audioBuffer) {
      audioBuffer.addEventListener('updateend', firstAppendHandler);
      audioBuffer.appendBuffer(initAudioSegment);
    }
  }

  var mediaSource = new MediaSource();
  mediaSource.addEventListener('sourceopen', onSourceOpen.bind(this, videoElement));
  videoElement.src = window.URL.createObjectURL(mediaSource);
}
/*
 * Dequeue the oldest segment from a FIFO of {data: ...} records and
 * return its payload, or null when the queue is empty. Mutates the
 * array in place (shift).
 *
 * @param {Array} segmentArray - queue of segment objects
 * @return the `data` of the next segment, or null
 */
function getSegment (segmentArray) {
  var next = segmentArray.shift();
  return next ? next.data : null;
}
</script>
<script>
'use strict';
var inputs = document.getElementById('inputs'),
original = document.getElementById('original'),
working = document.getElementById('working'),
Expand All @@ -231,7 +128,8 @@ <h3>footer</h3>
workingBoxes = document.querySelector('.working-boxes'),

video = document.createElement('video'),
mediaSource = new MediaSource();
mediaSource = new MediaSource(),
logevent;

document.querySelector('#video-place').appendChild(video);

Expand All @@ -247,17 +145,8 @@ <h3>footer</h3>
return;
}
comparison = document.querySelector('#comparison');
if (workingParsed[0].type === 'moof') {
diff = '<h3>Media Segment Comparision</h3>';
transmuxed = vjsParsed.slice(2);
} else if (workingParsed.length === 2) {
diff = '<h3>Init Segment Comparision</h3>';
transmuxed = vjsParsed.slice(0, 2);
} else {
diff = '<h3>General Comparision</h3>';
transmuxed = vjsParsed;
}
diff += '<p>A <del>red background</del> indicates ' +
transmuxed = vjsParsed;
diff = '<p>A <del>red background</del> indicates ' +
'properties present in the transmuxed file but missing from the ' +
'working version. A <ins>green background</ins> indicates ' +
'properties present in the working version but missing in the ' +
Expand All @@ -271,73 +160,65 @@ <h3>footer</h3>
};

mediaSource.addEventListener('sourceopen', function() {
var
buffer = mediaSource.addSourceBuffer('video/mp4;codecs=avc1.4d400d');
//buffer = mediaSource.addSourceBuffer('audio/mp4;codecs=mp4a.40.2');
var buffer = mediaSource.addSourceBuffer('video/mp4;codecs="avc1.4d401f,mp4a.40.2"');

buffer.addEventListener('updatestart', logevent);
buffer.addEventListener('updateend', logevent);
buffer.addEventListener('error', logevent);
window.vjsMediaSource = mediaSource;
window.vjsSourceBuffer = buffer;
window.vjsBuffer = buffer;
window.vjsVideo = video;
});
mediaSource.addEventListener('error', logevent);
mediaSource.addEventListener('opened', logevent);
mediaSource.addEventListener('closed', logevent);
mediaSource.addEventListener('sourceended', logevent);
video.src = URL.createObjectURL(mediaSource);
video.addEventListener('error', console.log.bind(console));

video.src = URL.createObjectURL(mediaSource);
video.addEventListener('error', logevent);
video.addEventListener('error', function() {
document.getElementById('video-place').classList.add('error');
});

original.addEventListener('change', function() {
var reader = new FileReader(),
videoBuffer = [],
audioBuffer = [];
var reader = new FileReader();

// do nothing if no file was chosen
if (!this.files[0]) {
return;
}

reader.addEventListener('loadend', function() {
var segment = new Uint8Array(reader.result),
transmuxer = new muxjs.mp2t.Transmuxer(),
videoSegments = [],
audioSegments = [],
videoBytesLength = 0,
audioBytesLength = 0,
decodeMe,
remuxedSegments = [],
remuxedBytesLength = 0,
bytes,
i, j;

// transmux the MPEG-TS data to BMFF segments
transmuxer.on('data', function(segment) {
if (segment.type === 'video') {
videoSegments.push(segment);
videoBytesLength += segment.data.byteLength;
} else {
audioSegments.push(segment);
audioBytesLength += segment.data.byteLength;
}
remuxedSegments.push(segment);
remuxedBytesLength += segment.data.byteLength;
});

transmuxer.push(segment);
transmuxer.flush();
// XXX - switch to select video/audio to show
decodeMe = videoSegments;
bytes = new Uint8Array(videoBytesLength);
bytes = new Uint8Array(remuxedBytesLength);

for (j = 0, i = 0; j < decodeMe.length; j++) {
bytes.set(decodeMe[j].data, i);
i += decodeMe[j].byteLength;
for (j = 0, i = 0; j < remuxedSegments.length; j++) {
bytes.set(remuxedSegments[j].data, i);
i += remuxedSegments[j].byteLength;
}

vjsBytes = bytes;
vjsParsed = muxjs.inspectMp4(bytes);
console.log('transmuxed', vjsParsed);
diffParsed();

// XXX - set one of videoSegments or audioSegments below to an
// empty array to only test one stream

setupMSE(video,
getSegment.bind(null, videoSegments),
getSegment.bind(null, audioSegments));
if (document.querySelector('#original-active').checked) {
window.vjsBuffer.appendBuffer(bytes);
}

// clear old box info
vjsBoxes.innerHTML = muxjs.textifyMp4(vjsParsed, null, ' ');
Expand All @@ -362,11 +243,9 @@ <h3>footer</h3>
// clear old box info
workingBoxes.innerHTML = muxjs.textifyMp4(workingParsed, null, ' ');

// XXX Media Sources Testing
/* setupMSE(video,
getSegment.bind(null, []),
getSegment.bind(null, [{data: bytes}]));*/
//window.vjsSourceBuffer.appendBuffer(bytes);
if (document.querySelector('#working-active').checked) {
window.vjsBuffer.appendBuffer(bytes);
}
});
reader.readAsArrayBuffer(this.files[0]);
}, false);
Expand Down

0 comments on commit 8a2bc3e

Please sign in to comment.