fix buffer hole when switching level introduced by dailymotion@2babc39
fix video stuttering issue: use DTS instead of PTS to check for holes between fragments. PTS can be out of order, whereas DTS can't be.
related to https://github.com/dailymotion/hls.js/issues/26
related to https://github.com/dailymotion/hls.js/issues/27
mangui committed Oct 9, 2015
1 parent 9bec6e7 commit 062095d
Showing 2 changed files with 83 additions and 56 deletions.
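
The rationale in the commit message is that decode timestamps (DTS) increase monotonically in decode order, while presentation timestamps (PTS) jump backwards whenever B-frames are present, so a hole/overlap check based on PTS can misfire where the same check on DTS stays reliable. The snippet below is not part of the commit; it is a minimal JavaScript illustration using hypothetical samples on the 90 kHz PES clock.

// Illustrative only: hypothetical AVC samples listed in decode order.
// With B-frames, PTS is reordered, but DTS always moves forward.
const samples = [
  { type: 'I', dts: 0,    pts: 3003 },
  { type: 'P', dts: 3003, pts: 9009 },
  { type: 'B', dts: 6006, pts: 6006 }  // presented between the I and P frames
];

function isMonotonic(values) {
  return values.every((v, i) => i === 0 || v >= values[i - 1]);
}

console.log(isMonotonic(samples.map(s => s.dts))); // true  -> safe basis for hole detection
console.log(isMonotonic(samples.map(s => s.pts))); // false -> would fake a hole/overlap
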
3 changes: 2 additions & 1 deletion src/demux/tsdemuxer.js
@@ -17,14 +17,15 @@
this.remuxerClass = remuxerClass;
this.lastCC = 0;
this.PES_TIMESCALE = 90000;
this.remuxer = new this.remuxerClass(this.observer);
}

switchLevel() {
this.pmtParsed = false;
this._pmtId = -1;
this._avcTrack = {type: 'video', id :-1, sequenceNumber: 0, samples : [], len : 0, nbNalu : 0};
this._aacTrack = {type: 'audio', id :-1, sequenceNumber: 0, samples : [], len : 0};
this.remuxer = new this.remuxerClass(this.observer);
this.remuxer.switchLevel();
}

insertDiscontinuity() {
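
On the demuxer side, the one-line moves above are the heart of the fix: the MP4 remuxer used to be re-instantiated on every switchLevel(), which silently wiped its timestamp continuity state; it is now created once in the constructor, and switchLevel() merely delegates to the remuxer's own lighter reset. A rough sketch of the resulting lifecycle, simplified and not verbatim hls.js code:

// Sketch of the lifecycle implied by the diff (simplified names, relevant parts only).
class DemuxerSketch {
  constructor(observer, RemuxerClass) {
    this.observer = observer;
    // created once per demuxer: nextAvcDts/nextAacPts survive quality switches
    this.remuxer = new RemuxerClass(observer);
  }
  switchLevel() {
    // per-level reset only; timestamp continuity with the previous level is kept
    this.remuxer.switchLevel();
  }
}
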
136 changes: 81 additions & 55 deletions src/remux/mp4-remuxer.js
@@ -24,7 +24,11 @@ class MP4Remuxer {
}

insertDiscontinuity() {
this._initPTS = this._initDTS = undefined;
this._initPTS = this._initDTS = this.nextAacPts = this.nextAvcDts = undefined;
}

switchLevel() {
this.ISGenerated = false;
}

remux(audioTrack,videoTrack,timeOffset) {
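
Taken together, the two reset paths shown above now have distinct jobs (field names taken directly from the diff); a compact sketch of the intended call pattern:

// Sketch of the reset semantics implied by the diff (not verbatim library code).
// On a level switch: regenerate the init segment, keep timestamp continuity.
remuxer.switchLevel();         // -> ISGenerated = false only
// On an #EXT-X-DISCONTINUITY: drop every timing anchor so the next fragment re-anchors.
remuxer.insertDiscontinuity(); // -> _initPTS, _initDTS, nextAacPts, nextAvcDts = undefined
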
@@ -101,7 +105,18 @@
}

remuxVideo(track, timeOffset) {
var view, i = 8, avcSample, mp4Sample, mp4SampleLength, unit, lastSampleDTS, mdat, moof, firstPTS, firstDTS, pts, dts, ptsnorm, dtsnorm, samples = [];
var view,
i = 8,
pesTimeScale = this.PES_TIMESCALE,
pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR,
avcSample,
mp4Sample,
mp4SampleLength,
unit,
mdat, moof,
firstPTS, firstDTS, lastDTS,
pts, dts, ptsnorm, dtsnorm,
samples = [];
/* concatenate the video data and construct the mdat in place
(need 8 more bytes to fill length and mpdat type) */
mdat = new Uint8Array(track.len + (4 * track.nbNalu) + 8);
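
The new locals pesTimeScale and pes2mp4ScaleFactor simply cache this.PES_TIMESCALE (the 90 kHz MPEG-TS/PES clock) and this.PES2MP4SCALEFACTOR (the ratio between that clock and the MP4 track timescale). A small worked example of the conversions used throughout this function, assuming a scale factor of 4 (i.e. an MP4 timescale of 22500), which may differ from the actual constant:

// Assumed values for illustration; the real constants live on MP4Remuxer.
const pesTimeScale = 90000;   // 90 kHz PES clock
const pes2mp4ScaleFactor = 4; // assumption: MP4 track timescale = 90000 / 4 = 22500

const dtsnorm = 900000;       // hypothetical normalized DTS, in 90 kHz ticks
console.log(dtsnorm / pes2mp4ScaleFactor); // 225000 -> value written into the MP4 sample/moof
console.log(dtsnorm / pesTimeScale);       // 10     -> same instant in seconds (startDTS/endDTS)
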
@@ -122,49 +137,53 @@
}
pts = avcSample.pts - this._initDTS;
dts = avcSample.dts - this._initDTS;
//logger.log('Video/PTS/DTS:' + avcSample.pts + '/' + avcSample.dts);
if (lastSampleDTS !== undefined) {
ptsnorm = this._PTSNormalize(pts, lastSampleDTS);
dtsnorm = this._PTSNormalize(dts, lastSampleDTS);
mp4Sample.duration = (dtsnorm - lastSampleDTS) / this.PES2MP4SCALEFACTOR;
//logger.log('Video/PTS/DTS:' + pts + '/' + dts);
// if not first AVC sample of video track, normalize PTS/DTS with previous sample value
// and ensure that sample duration is positive
if (lastDTS !== undefined) {
ptsnorm = this._PTSNormalize(pts, lastDTS);
dtsnorm = this._PTSNormalize(dts, lastDTS);
mp4Sample.duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
if (mp4Sample.duration < 0) {
//logger.log('invalid sample duration at PTS/DTS::' + avcSample.pts + '/' + avcSample.dts + ':' + mp4Sample.duration);
mp4Sample.duration = 0;
}
} else {
ptsnorm = this._PTSNormalize(pts, this.nextAvcPts);
dtsnorm = this._PTSNormalize(dts, this.nextAvcPts);
// check if fragments are contiguous (i.e. no missing frames between fragment)
if (this.nextAvcPts) {
var delta = Math.round((ptsnorm - this.nextAvcPts) / 90), absdelta = Math.abs(delta);
//logger.log('absdelta/avcSample.pts:' + absdelta + '/' + avcSample.pts);
// first AVC sample of video track, normalize PTS/DTS
ptsnorm = this._PTSNormalize(pts, this.nextAvcDts);
dtsnorm = this._PTSNormalize(dts, this.nextAvcDts);
// check if first AVC sample is contiguous with last sample of previous track
// delta between next DTS and dtsnorm should be less than 1
if (this.nextAvcDts) {
var delta = Math.round((dtsnorm - this.nextAvcDts) / 90), absdelta = Math.abs(delta);
//logger.log('absdelta/dts:' + absdelta + '/' + dtsnorm);
// if delta is less than 300 ms, next loaded fragment is assumed to be contiguous with last one
if (absdelta < 300) {
//logger.log('Video next PTS:' + this.nextAvcPts);
if (delta > 1) {
logger.log(`AVC:${delta} ms hole between fragments detected,filling it`);
} else if (delta < -1) {
logger.log(`AVC:${(-delta)} ms overlapping between fragments detected`);
}
// set PTS to next PTS
ptsnorm = this.nextAvcPts;
// offset DTS as well, ensure that DTS is smaller or equal than new PTS
dtsnorm = Math.max(dtsnorm - delta, this.lastAvcDts);
// logger.log('Video/PTS/DTS adjusted:' + avcSample.pts + '/' + avcSample.dts);
}
else {
// not contiguous timestamp, check if PTS is within acceptable range
var expectedPTS = this.PES_TIMESCALE * timeOffset;
if(absdelta) {
// set DTS to next DTS
dtsnorm = this.nextAvcDts;
// offset PTS as well, ensure that PTS is smaller or equal than new DTS
ptsnorm = Math.max(ptsnorm - delta, dtsnorm);
logger.log('Video/PTS/DTS adjusted:' + ptsnorm + '/' + dtsnorm);
}
} else {
// not contiguous timestamp, check if DTS is within acceptable range
var expectedDTS = pesTimeScale * timeOffset;
// check if there is any unexpected drift between expected timestamp and real one
if (Math.abs(expectedPTS - ptsnorm) > (this.PES_TIMESCALE * 3600)) {
if (Math.abs(expectedDTS - dtsnorm) > (pesTimeScale * 3600)) {
//logger.log('PTS looping ??? AVC PTS delta:${expectedPTS-ptsnorm}');
var ptsOffset = expectedPTS - ptsnorm;
var dtsOffset = expectedDTS - dtsnorm;
// set PTS to next expected PTS;
ptsnorm = expectedPTS;
dtsnorm = ptsnorm;
dtsnorm = expectedDTS;
ptsnorm = dtsnorm;
// offset initPTS/initDTS to fix computation for following samples
this._initPTS -= ptsOffset;
this._initDTS -= ptsOffset;
this._initPTS -= dtsOffset;
this._initDTS -= dtsOffset;
}
}
}
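
Condensing the adjusted first-sample logic above: the delta between the fragment's first normalized DTS and nextAvcDts (the expected DTS carried over from the previous fragment) is measured in milliseconds; anything under 300 ms is treated as contiguous and snapped onto nextAvcDts, while a larger gap triggers the PTS-wrapping check. A hedged sketch of that decision, with the remuxer state passed in explicitly rather than read from this:

// Sketch only; condenses the first-sample branch above, names simplified.
function adjustFirstVideoSample(ptsnorm, dtsnorm, nextAvcDts, expectedDTS, pesTimeScale) {
  if (nextAvcDts) {
    var delta = Math.round((dtsnorm - nextAvcDts) / 90); // 90 PES ticks per millisecond
    if (Math.abs(delta) < 300) {
      // close enough: treat the fragment as contiguous, snap DTS onto the expected value
      if (delta) {
        dtsnorm = nextAvcDts;
        ptsnorm = Math.max(ptsnorm - delta, dtsnorm); // keep PTS >= DTS
      }
    } else if (Math.abs(expectedDTS - dtsnorm) > pesTimeScale * 3600) {
      // over an hour away from the playlist position: assume a PTS wrap and re-anchor
      // (the real code also shifts _initPTS/_initDTS so the following samples line up)
      dtsnorm = expectedDTS;
      ptsnorm = dtsnorm;
    }
  }
  return {ptsnorm: ptsnorm, dtsnorm: dtsnorm};
}
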
@@ -176,7 +195,7 @@
mp4Sample = {
size: mp4SampleLength,
duration: 0,
cts: (ptsnorm - dtsnorm) / this.PES2MP4SCALEFACTOR,
cts: (ptsnorm - dtsnorm) / pes2mp4ScaleFactor,
flags: {
isLeading: 0,
isDependedOn: 0,
@@ -193,34 +212,41 @@
mp4Sample.flags.isNonSync = 1;
}
samples.push(mp4Sample);
lastSampleDTS = dtsnorm;
lastDTS = dtsnorm;
}
if (samples.length >= 2) {
mp4Sample.duration = samples[samples.length - 2].duration;
}
this.lastAvcDts = dtsnorm;
// next AVC sample PTS should be equal to last sample PTS + duration
this.nextAvcPts = ptsnorm + mp4Sample.duration * this.PES2MP4SCALEFACTOR;
//logger.log('Video/lastAvcDts/nextAvcPts:' + this.lastAvcDts + '/' + this.nextAvcPts);
// next AVC sample DTS should be equal to last sample DTS + last sample duration
this.nextAvcDts = dtsnorm + mp4Sample.duration * pes2mp4ScaleFactor;
track.len = 0;
track.nbNalu = 0;
track.samples = samples;
moof = MP4.moof(track.sequenceNumber++, firstDTS / this.PES2MP4SCALEFACTOR, track);
moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
track.samples = [];
this.observer.trigger(Event.FRAG_PARSING_DATA, {
moof: moof,
mdat: mdat,
startPTS: firstPTS / this.PES_TIMESCALE,
endPTS: this.nextAvcPts / this.PES_TIMESCALE,
startDTS: firstDTS / this.PES_TIMESCALE,
endDTS: (dtsnorm + this.PES2MP4SCALEFACTOR * mp4Sample.duration) / this.PES_TIMESCALE,
startPTS: firstPTS / pesTimeScale,
endPTS: (ptsnorm + pes2mp4ScaleFactor * mp4Sample.duration) / pesTimeScale,
startDTS: firstDTS / pesTimeScale,
endDTS: (dtsnorm + pes2mp4ScaleFactor * mp4Sample.duration) / pesTimeScale,
type: 'video',
nb: samples.length
});
}

remuxAudio(track,timeOffset) {
var view, i = 8, aacSample, mp4Sample, unit, lastSampleDTS, mdat, moof, firstPTS, firstDTS, pts, dts, ptsnorm, dtsnorm, samples = [];
var view,
i = 8,
pesTimeScale = this.PES_TIMESCALE,
pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR,
aacSample, mp4Sample,
unit,
mdat, moof,
firstPTS, firstDTS, lastDTS,
pts, dts, ptsnorm, dtsnorm,
samples = [];
/* concatenate the audio data and construct the mdat in place
(need 8 more bytes to fill length and mpdat type) */
mdat = new Uint8Array(track.len + 8);
@@ -235,11 +261,11 @@
pts = aacSample.pts - this._initDTS;
dts = aacSample.dts - this._initDTS;
//logger.log('Audio/PTS:' + aacSample.pts.toFixed(0));
if (lastSampleDTS !== undefined) {
ptsnorm = this._PTSNormalize(pts, lastSampleDTS);
dtsnorm = this._PTSNormalize(dts, lastSampleDTS);
if (lastDTS !== undefined) {
ptsnorm = this._PTSNormalize(pts, lastDTS);
dtsnorm = this._PTSNormalize(dts, lastDTS);
// we use DTS to compute sample duration, but we use PTS to compute initPTS which is used to sync audio and video
mp4Sample.duration = (dtsnorm - lastSampleDTS) / this.PES2MP4SCALEFACTOR;
mp4Sample.duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
if (mp4Sample.duration < 0) {
//logger.log('invalid sample duration at PTS/DTS::' + avcSample.pts + '/' + avcSample.dts + ':' + mp4Sample.duration);
mp4Sample.duration = 0;
@@ -250,7 +276,7 @@
// check if fragments are contiguous (i.e. no missing frames between fragment)
if (this.nextAacPts && this.nextAacPts !== ptsnorm) {
//logger.log('Audio next PTS:' + this.nextAacPts);
var delta = Math.round(1000 * (ptsnorm - this.nextAacPts) / this.PES_TIMESCALE), absdelta = Math.abs(delta);
var delta = Math.round(1000 * (ptsnorm - this.nextAacPts) / pesTimeScale), absdelta = Math.abs(delta);
// if delta is less than 300 ms, next loaded fragment is assumed to be contiguous with last one
if (absdelta > 1 && absdelta < 300) {
if (delta > 0) {
Expand All @@ -265,10 +291,10 @@ class MP4Remuxer {
}
else if (absdelta) {
// not contiguous timestamp, check if PTS is within acceptable range
var expectedPTS = this.PES_TIMESCALE * timeOffset;
var expectedPTS = pesTimeScale * timeOffset;
//logger.log('expectedPTS/PTSnorm:${expectedPTS}/${ptsnorm}/${expectedPTS-ptsnorm}');
// check if there is any unexpected drift between expected timestamp and real one
if (Math.abs(expectedPTS - ptsnorm) > this.PES_TIMESCALE * 3600) {
if (Math.abs(expectedPTS - ptsnorm) > pesTimeScale * 3600) {
//logger.log('PTS looping ??? AAC PTS delta:${expectedPTS-ptsnorm}');
var ptsOffset = expectedPTS - ptsnorm;
// set PTS to next expected PTS;
@@ -298,27 +324,27 @@
}
};
samples.push(mp4Sample);
lastSampleDTS = dtsnorm;
lastDTS = dtsnorm;
}
//set last sample duration as being identical to previous sample
if (samples.length >= 2) {
mp4Sample.duration = samples[samples.length - 2].duration;
}
this.lastAacDts = dtsnorm;
// next aac sample PTS should be equal to last sample PTS + duration
this.nextAacPts = ptsnorm + this.PES2MP4SCALEFACTOR * mp4Sample.duration;
this.nextAacPts = ptsnorm + pes2mp4ScaleFactor * mp4Sample.duration;
//logger.log('Audio/PTS/PTSend:' + aacSample.pts.toFixed(0) + '/' + this.nextAacDts.toFixed(0));
track.len = 0;
track.samples = samples;
moof = MP4.moof(track.sequenceNumber++, firstDTS / this.PES2MP4SCALEFACTOR, track);
moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
track.samples = [];
this.observer.trigger(Event.FRAG_PARSING_DATA, {
moof: moof,
mdat: mdat,
startPTS: firstPTS / this.PES_TIMESCALE,
endPTS: this.nextAacPts / this.PES_TIMESCALE,
startDTS: firstDTS / this.PES_TIMESCALE,
endDTS: (dtsnorm + this.PES2MP4SCALEFACTOR * mp4Sample.duration) / this.PES_TIMESCALE,
startPTS: firstPTS / pesTimeScale,
endPTS: this.nextAacPts / pesTimeScale,
startDTS: firstDTS / pesTimeScale,
endDTS: (dtsnorm + pes2mp4ScaleFactor * mp4Sample.duration) / pesTimeScale,
type: 'audio',
nb: samples.length
});
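
For context, each remux pass ends by handing the generated moof/mdat pair to whoever listens for FRAG_PARSING_DATA, with all timing already converted to seconds. A hedged sketch of such a consumer; the handler signature and the MSE SourceBuffer wiring are assumptions, not part of this commit:

// Assumed consumer; the observer/event API details are not shown in this diff.
observer.on(Event.FRAG_PARSING_DATA, function (data) {
  // data.moof / data.mdat are ready-to-append MP4 boxes
  console.log(data.type + ': ' + data.nb + ' samples, PTS ' +
              data.startPTS.toFixed(3) + ' -> ' + data.endPTS.toFixed(3));
  sourceBuffer.appendBuffer(data.moof); // assumption: a matching MSE SourceBuffer exists
  sourceBuffer.appendBuffer(data.mdat);
});
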
