Commit
feat: trigger videoTimingInfo on transmuxer with relevant timing info and prepended content duration (videojs#242)
gesinger authored Jan 9, 2019
1 parent ddcd627 commit 668954f
Showing 2 changed files with 208 additions and 4 deletions.
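
For reference, a minimal consumer sketch of the new event added by this commit (not part of the diff). It uses the in-repo require path seen in the tests, assumes the timing values are on the MPEG-TS 90kHz clock, and elides the feeding of transport stream bytes:

var Transmuxer = require('../lib/mp4/transmuxer').Transmuxer;

// Assumption: dts/pts and prependedContentDuration are in 90kHz clock ticks.
var ONE_SECOND_IN_TS = 90000;

var transmuxer = new Transmuxer({});

transmuxer.on('videoSegmentTimingInfo', function(timingInfo) {
  // Shape produced by generateVideoSegmentTimingInfo:
  // { start: { dts, pts }, end: { dts, pts }, prependedContentDuration }
  console.log(
    'video segment spans',
    timingInfo.start.pts / ONE_SECOND_IN_TS,
    'to',
    timingInfo.end.pts / ONE_SECOND_IN_TS,
    'seconds;',
    timingInfo.prependedContentDuration / ONE_SECOND_IN_TS,
    'seconds were prepended by GOP fusion'
  );
});

// Elsewhere: transmuxer.push(transportStreamBytes); transmuxer.flush();
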
47 changes: 45 additions & 2 deletions lib/mp4/transmuxer.js
@@ -62,6 +62,26 @@ var arrayEquals = function(a, b) {
return true;
};

var generateVideoSegmentTimingInfo = function(
startDts,
startPts,
endDts,
endPts,
prependedContentDuration
) {
return {
start: {
dts: startDts,
pts: startPts
},
end: {
dts: endDts,
pts: endPts
},
prependedContentDuration: prependedContentDuration
};
};

/**
* Constructs a single-track, ISO BMFF media segment from AAC data
* events. The output of this stream can be fed to a SourceBuffer
@@ -225,7 +245,10 @@ VideoSegmentStream = function(track, options) {
gops,
moof,
mdat,
boxes;
boxes,
prependedContentDuration = 0,
firstGop,
lastGop;

// Throw away nalUnits at the start of the byte stream until
// we find the first AUD
@@ -272,6 +295,10 @@ VideoSegmentStream = function(track, options) {
gopForFusion = this.getGopForFusion_(nalUnits[0], track);

if (gopForFusion) {
// in order to provide more accurate timing information about the segment, save
// the number of seconds prepended to the original segment due to GOP fusion
prependedContentDuration = gopForFusion.duration;

gops.unshift(gopForFusion);
// Adjust Gops' metadata to account for the inclusion of the
// new gop at the beginning
@@ -343,6 +370,18 @@ VideoSegmentStream = function(track, options) {
};
}));

firstGop = gops[0];
lastGop = gops[gops.length - 1];

this.trigger(
'segmentTimingInfo',
generateVideoSegmentTimingInfo(
firstGop.dts,
firstGop.pts,
lastGop.dts + lastGop.duration,
lastGop.pts + lastGop.duration,
prependedContentDuration));

// save all the nals in the last GOP into the gop cache
this.gopCache_.unshift({
gop: gops.pop(),
@@ -923,6 +962,8 @@ Transmuxer = function(options) {

pipeline.videoSegmentStream.on('processedGopsInfo',
self.trigger.bind(self, 'gopInfo'));
pipeline.videoSegmentStream.on('segmentTimingInfo',
self.trigger.bind(self, 'videoSegmentTimingInfo'));

pipeline.videoSegmentStream.on('baseMediaDecodeTime', function(baseMediaDecodeTime) {
if (audioTrack) {
@@ -1042,5 +1083,7 @@ module.exports = {
VideoSegmentStream: VideoSegmentStream,
AudioSegmentStream: AudioSegmentStream,
AUDIO_PROPERTIES: AUDIO_PROPERTIES,
VIDEO_PROPERTIES: VIDEO_PROPERTIES
VIDEO_PROPERTIES: VIDEO_PROPERTIES,
// exported for testing
generateVideoSegmentTimingInfo: generateVideoSegmentTimingInfo
};
165 changes: 163 additions & 2 deletions test/transmuxer.test.js
@@ -8,8 +8,10 @@ var mp2t = require('../lib/m2ts'),
QUnit = require('qunit'),
testSegment = require('./utils/test-segment'),
testMiddlePatPMT = require('./utils/test-middle-pat-pmt'),
mp4AudioProperties = require('../lib/mp4/transmuxer').AUDIO_PROPERTIES,
mp4VideoProperties = require('../lib/mp4/transmuxer').VIDEO_PROPERTIES,
mp4Transmuxer = require('../lib/mp4/transmuxer'),
mp4AudioProperties = mp4Transmuxer.AUDIO_PROPERTIES,
mp4VideoProperties = mp4Transmuxer.VIDEO_PROPERTIES,
generateVideoSegmentTimingInfo = mp4Transmuxer.generateVideoSegmentTimingInfo,
clock = require('../lib/utils/clock'),
utils = require('./utils'),
TransportPacketStream = mp2t.TransportPacketStream,
@@ -1936,6 +1938,54 @@ QUnit.test('do not subtract the first frame\'s compositionTimeOffset from baseMe
QUnit.equal(tfdt.baseMediaDecodeTime, 140, 'calculated baseMediaDecodeTime');
});

QUnit.test('video segment stream triggers segmentTimingInfo with timing info',
function() {
var segmentTimingInfoArr = [];

videoSegmentStream.on('segmentTimingInfo', function(segmentTimingInfo) {
segmentTimingInfoArr.push(segmentTimingInfo);
});

videoSegmentStream.push({
data: new Uint8Array([0x09, 0x01]),
nalUnitType: 'access_unit_delimiter_rbsp',
dts: 50,
pts: 60
});
videoSegmentStream.push({
data: new Uint8Array([0x09, 0x01]),
nalUnitType: 'slice_layer_without_partitioning_rbsp_idr',
dts: 50,
pts: 60
});
videoSegmentStream.push({
data: new Uint8Array([0x09, 0x01]),
nalUnitType: 'access_unit_delimiter_rbsp',
dts: 100,
pts: 110
});
videoSegmentStream.push({
data: new Uint8Array([0x09, 0x01]),
nalUnitType: 'access_unit_delimiter_rbsp',
dts: 150,
pts: 160
});
videoSegmentStream.flush();

QUnit.equal(segmentTimingInfoArr.length, 1, 'triggered segmentTimingInfo once');
QUnit.deepEqual(segmentTimingInfoArr[0], {
start: {
dts: 50,
pts: 60
},
end: {
dts: 200,
pts: 210
},
prependedContentDuration: 0
}, 'triggered correct segment timing info');
});

QUnit.test('alignGopsAtStart_ filters gops appropriately', function() {
var gopsToAlignWith, gops, actual, expected;

@@ -2362,6 +2412,76 @@ QUnit.test('alignGopsAtEnd_ filters gops appropriately', function() {
'match with an alignment candidate');
});

QUnit.test('generateVideoSegmentTimingInfo generates correct timing info object',
function() {
var
firstFrame = {
dts: 12,
pts: 14,
duration: 3
},
lastFrame = {
dts: 120,
pts: 140,
duration: 4
},
prependedContentDuration = 0;

QUnit.deepEqual(
generateVideoSegmentTimingInfo(
firstFrame.dts,
firstFrame.pts,
lastFrame.dts + lastFrame.duration,
lastFrame.pts + lastFrame.duration,
prependedContentDuration
), {
start: {
dts: 12,
pts: 14
},
end: {
dts: 124,
pts: 144
},
prependedContentDuration: 0
}, 'generated correct timing info object');
});

QUnit.test('generateVideoSegmentTimingInfo accounts for prepended GOPs', function() {
var
firstFrame = {
dts: 12,
pts: 14,
duration: 3
},
lastFrame = {
dts: 120,
pts: 140,
duration: 4
},
prependedContentDuration = 7;

QUnit.deepEqual(
generateVideoSegmentTimingInfo(
firstFrame.dts,
firstFrame.pts,
lastFrame.dts + lastFrame.duration,
lastFrame.pts + lastFrame.duration,
prependedContentDuration
), {
start: {
dts: 12,
pts: 14
},
end: {
dts: 124,
pts: 144
},
prependedContentDuration: 7
},
'included prepended content duration in timing info');
});

QUnit.module('ADTS Stream', {
setup: function() {
adtsStream = new AdtsStream();
@@ -3223,6 +3343,47 @@ QUnit.test('generates a video init segment', function() {
QUnit.equal('moov', boxes[1].type, 'generated a moov box');
});

QUnit.test('transmuxer triggers video timing info event on flush', function() {
var videoSegmentTimingInfoArr = [];

transmuxer.on('videoSegmentTimingInfo', function(videoSegmentTimingInfo) {
videoSegmentTimingInfoArr.push(videoSegmentTimingInfo);
});

transmuxer.push(packetize(PAT));
transmuxer.push(packetize(generatePMT({
hasVideo: true
})));

transmuxer.push(packetize(videoPes([
0x09, 0x01 // access_unit_delimiter_rbsp
], true)));
transmuxer.push(packetize(videoPes([
0x08, 0x01 // pic_parameter_set_rbsp
], true)));
transmuxer.push(packetize(videoPes([
0x07, // seq_parameter_set_rbsp
0x27, 0x42, 0xe0, 0x0b,
0xa9, 0x18, 0x60, 0x9d,
0x80, 0x53, 0x06, 0x01,
0x06, 0xb6, 0xc2, 0xb5,
0xef, 0x7c, 0x04
], false)));
transmuxer.push(packetize(videoPes([
0x05, 0x01 // slice_layer_without_partitioning_rbsp_idr
], true)));

QUnit.equal(
videoSegmentTimingInfoArr.length,
0,
'has not triggered videoSegmentTimingInfo'
);

transmuxer.flush();

QUnit.equal(videoSegmentTimingInfoArr.length, 1, 'triggered videoSegmentTimingInfo');
});

QUnit.test('generates an audio init segment', function() {
var segments = [], boxes;
transmuxer.on('data', function(segment) {
