Skip to content

Commit

Permalink
Rework stats recording to be accurate for progressive parsing (breaking change) JW8-9959 (video-dev#239)
Browse files Browse the repository at this point in the history
  • Loading branch information
John Bartos authored Aug 19, 2019
1 parent 30a87a6 commit 8466138
Show file tree
Hide file tree
Showing 33 changed files with 340 additions and 209 deletions.
8 changes: 5 additions & 3 deletions demo/basic-usage.html
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,10 @@ <h1>Hls.js demo - basic usage</h1>
<script>
if(Hls.isSupported()) {
var video = document.getElementById('video');
var hls = new Hls();
hls.loadSource('https://video-dev.github.io/streams/x36xhzz/x36xhzz.m3u8');
var hls = new Hls({
debug: true
});
hls.loadSource('https://test-streams.mux.dev/x36xhzz/x36xhzz.m3u8');
hls.attachMedia(video);
hls.on(Hls.Events.MANIFEST_PARSED,function() {
video.play();
Expand All @@ -26,7 +28,7 @@ <h1>Hls.js demo - basic usage</h1>
// When the browser has built-in HLS support (check using `canPlayType`), we can provide an HLS manifest (i.e. .m3u8 URL) directly to the video element through the `src` property.
// This is using the built-in support of the plain video element, without using hls.js.
else if (video.canPlayType('application/vnd.apple.mpegurl')) {
video.src = 'https://video-dev.github.io/streams/x36xhzz/x36xhzz.m3u8';
video.src = 'https://test-streams.mux.dev/x36xhzz/x36xhzz.m3u8';
video.addEventListener('canplay',function() {
video.play();
});
Expand Down
43 changes: 22 additions & 21 deletions src/controller/abr-controller.ts
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ class AbrController extends EventHandler {
}

const stats: LoaderStats = loader.stats;
const requestDelay = performance.now() - stats.trequest;
const requestDelay = performance.now() - stats.loading.start;
const playbackRate = Math.abs(media.playbackRate);
// In order to work with a stable bandwidth, only begin monitoring bandwidth after half of the fragment has been loaded
if (requestDelay <= (500 * frag.duration / playbackRate)) {
Expand Down Expand Up @@ -180,9 +180,7 @@ class AbrController extends EventHandler {
level.loaded = { bytes: loadedBytes, duration: loadedDuration };
level.realBitrate = Math.round(8 * loadedBytes / loadedDuration);
}
// if fragment has been loaded to perform a bitrate test,
if (frag.bitrateTest) {
stats.tparsed = stats.tbuffered = stats.tload;
this.onFragBuffered(data);
}
}
Expand All @@ -191,24 +189,25 @@ class AbrController extends EventHandler {
onFragBuffered (data: { frag: Fragment }) {
const frag = data.frag;
const stats = frag.stats;
// only update stats on first frag buffering
// if same frag is loaded multiple times, it might be in browser cache, and loaded quickly
// and leading to wrong bw estimation
// on bitrate test, also only update stats once (if tload = tbuffered == on FRAG_LOADED)
if (!stats.aborted && frag.type === 'main' && Number.isFinite(frag.sn as number) && ((!frag.bitrateTest || stats.tload === stats.tbuffered))) {
// use tparsed-trequest instead of tbuffered-trequest to compute fragLoadingProcessing; rationale is that buffer appending only happens once media is attached
// in case we use config.startFragPrefetch while media is not attached yet, fragment might be parsed while media not attached yet, but it will only be buffered on media attached
// as a consequence it could happen really late in the process. meaning that appending duration might appears huge ... leading to underestimated throughput estimation
let fragLoadingProcessingMs = stats.tparsed - stats.trequest;
logger.log(`latency/loading/parsing/append/kbps:${Math.round(stats.tfirst - stats.trequest)}/${Math.round(stats.tload - stats.tfirst)}/${Math.round(stats.tparsed - stats.tload)}/${Math.round(stats.tbuffered - stats.tparsed)}/${Math.round(8 * stats.loaded / (stats.tbuffered - stats.trequest))}`);
this._bwEstimator.sample(fragLoadingProcessingMs, stats.loaded);
stats.bwEstimate = this._bwEstimator.getEstimate();
// if fragment has been loaded to perform a bitrate test, (hls.startLevel = -1), store bitrate test delay duration
if (frag.bitrateTest) {
this.bitrateTestDelay = fragLoadingProcessingMs / 1000;
} else {
this.bitrateTestDelay = 0;
}

if (stats.aborted) {
return;
}
// Only count non-alt-audio frags which were actually buffered in our BW calculations
// TODO: Figure out a heuristical way to see if a frag was loaded from the cache
if (frag.type !== 'main' || frag.sn === 'initSegment' || frag.bitrateTest) {
return;
}
// Use the difference between parsing and request instead of buffering and request to compute fragLoadingProcessing;
// rationale is that buffer appending only happens once media is attached. This can happen when config.startFragPrefetch
// is used. If we used buffering in that case, our BW estimate sample will be very large.
const fragLoadingProcessingMs = stats.parsing.end - stats.loading.start;
this._bwEstimator.sample(fragLoadingProcessingMs, stats.loaded);
stats.bwEstimate = this._bwEstimator.getEstimate();
if (frag.bitrateTest) {
this.bitrateTestDelay = fragLoadingProcessingMs / 1000;
} else {
this.bitrateTestDelay = 0;
}
}

Expand Down Expand Up @@ -342,4 +341,6 @@ class AbrController extends EventHandler {
}
}



export default AbrController;
22 changes: 12 additions & 10 deletions src/controller/audio-stream-controller.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,13 @@ import TimeRanges from '../utils/time-ranges';
import { ErrorDetails, ErrorTypes } from '../errors';
import { logger } from '../utils/logger';
import { FragmentState } from './fragment-tracker';
import { ElementaryStreamTypes } from '../loader/fragment';
import Fragment, { ElementaryStreamTypes } from '../loader/fragment';
import BaseStreamController, { State } from './base-stream-controller';
import FragmentLoader from '../loader/fragment-loader';
import LevelDetails from '../loader/level-details';
import { ChunkMetadata, TransmuxerResult } from '../types/transmuxer';
import { BufferAppendingEventPayload } from '../types/bufferAppendingEventPayload';
import { TrackSet } from '../types/track';

const { performance } = window;

Expand Down Expand Up @@ -343,7 +345,7 @@ class AudioStreamController extends BaseStreamController {
}
}

_handleFragmentLoadProgress (frag, payload) {
_handleFragmentLoadProgress (frag: Fragment, payload: Uint8Array) {
const { config, trackId, levels } = this;
if (!levels) {
this.warn(`Audio tracks were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`);
Expand All @@ -369,7 +371,7 @@ class AudioStreamController extends BaseStreamController {
// this.log(`Transmuxing ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
// time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
let accurateTimeOffset = false; // details.PTSKnown || !details.live;
const chunkMeta = new ChunkMetadata(frag.level, frag.sn);
const chunkMeta = new ChunkMetadata(frag.level, frag.sn, frag.stats.chunkCount, payload.byteLength);
transmuxer.push(payload, initSegmentData, audioCodec, '', frag, details.totalduration, accurateTimeOffset, chunkMeta, initPTS);
}

Expand All @@ -389,7 +391,7 @@ class AudioStreamController extends BaseStreamController {
}
}

onFragBuffered (data) {
onFragBuffered (data: { frag: Fragment }) {
const { frag } = data;
if (frag && frag.type !== 'audio') {
return;
Expand Down Expand Up @@ -528,13 +530,13 @@ class AudioStreamController extends BaseStreamController {
}

if (initSegment && initSegment.tracks) {
this._bufferInitSegment(initSegment.tracks);
this._bufferInitSegment(initSegment.tracks, frag, chunkMeta);
hls.trigger(Event.FRAG_PARSING_INIT_SEGMENT, { frag, id, tracks: initSegment.tracks });
// Only flush audio from old audio tracks when PTS is known on new audio track
}
if (audio) {
frag.setElementaryStreamInfo(ElementaryStreamTypes.AUDIO, audio.startPTS, audio.endPTS, audio.startDTS, audio.endDTS);
this.bufferFragmentData(audio, 'audio');
this.bufferFragmentData(audio, frag, chunkMeta);
}

if (id3) {
Expand All @@ -551,7 +553,7 @@ class AudioStreamController extends BaseStreamController {
}
}

private _bufferInitSegment (tracks) {
private _bufferInitSegment (tracks: TrackSet, frag: Fragment, chunkMeta: ChunkMetadata) {
if (this.state !== State.PARSING) {
return;
}
Expand All @@ -572,14 +574,14 @@ class AudioStreamController extends BaseStreamController {
this.log(`Audio, container:${track.container}, codecs[level/parsed]=[${track.levelCodec}/${track.codec}]`);
let initSegment = track.initSegment;
if (initSegment) {
let appendObj = { type: 'audio', data: initSegment, parent: 'audio', content: 'initSegment' };
this.hls.trigger(Event.BUFFER_APPENDING, appendObj);
let segment: BufferAppendingEventPayload = { type: 'audio', data: initSegment, frag, chunkMeta };
this.hls.trigger(Event.BUFFER_APPENDING, segment);
}
// trigger handler right now
this.tick();
}

private loadFragment (frag) {
private loadFragment (frag: Fragment) {
// only load if fragment is not loaded or if in audio switch
// we force a frag loading in audio switch as fragment tracker might not have evicted previous frags in case of quick audio switch
const fragState = this.fragmentTracker.getState(frag);
Expand Down
37 changes: 22 additions & 15 deletions src/controller/base-stream-controller.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,9 @@ import { appendUint8Array } from '../utils/mp4-tools';
import LevelDetails from '../loader/level-details';
import { alignStream } from '../utils/discontinuities';
import { findFragmentByPDT, findFragmentByPTS, findFragWithCC } from './fragment-finders';
import { BufferAppendingEventPayload } from '../types/bufferAppendingEventPayload';
import { SourceBufferName } from '../types/buffer';
import { HlsChunkPerformanceTiming, HlsProgressivePerformanceTiming, LoaderStats } from '../types/loader';

export const State = {
STOPPED: 'STOPPED',
Expand Down Expand Up @@ -156,13 +159,14 @@ export default class BaseStreamController extends TaskLoop {
super.onHandlerDestroyed();
}

protected _loadFragForPlayback (frag) {
protected _loadFragForPlayback (frag: Fragment) {
const progressCallback: FragmentLoadProgressCallback = ({ payload }) => {
if (this._fragLoadAborted(frag)) {
this.warn(`Fragment ${frag.sn} of level ${frag.level} was aborted during progressive download.`);
this.fragmentTracker.removeFragment(frag);
return;
}
frag.stats.chunkCount++;
this._handleFragmentLoadProgress(frag, payload);
};

Expand All @@ -178,11 +182,11 @@ export default class BaseStreamController extends TaskLoop {
compatibilityEventData.frag = frag;
this.hls.trigger(Event.FRAG_LOADED, compatibilityEventData);
// Pass through the whole payload; controllers not implementing progressive loading receive data from this callback
this._handleFragmentLoadComplete(frag, data.payload);
this._handleFragmentLoadComplete(frag);
});
}

protected _loadInitSegment (frag) {
protected _loadInitSegment (frag: Fragment) {
this._doFragLoad(frag)
.then((data: FragLoadSuccessResult) => {
const { fragCurrent, hls, levels } = this;
Expand All @@ -194,33 +198,35 @@ export default class BaseStreamController extends TaskLoop {
this.state = State.IDLE;
this.fragLoadError = 0;
levels[frag.level].details.initSegment.data = payload;
stats.tparsed = stats.tbuffered = window.performance.now();
stats.parsing.start = stats.buffering.start = window.performance.now();
stats.parsing.end = stats.buffering.end = window.performance.now();
// TODO: set id from calling class
hls.trigger(Event.FRAG_BUFFERED, { stats, frag: fragCurrent, id: frag.type });
this.tick();
});
}

protected _fragLoadAborted (frag) {
protected _fragLoadAborted (frag: Fragment | null) {
const { fragCurrent } = this;
if (!frag || !fragCurrent) {
return true;
}
return frag.level !== fragCurrent.level || frag.sn !== fragCurrent.sn;
}

protected _handleFragmentLoadComplete (frag, payload?: ArrayBuffer) {
protected _handleFragmentLoadComplete (frag: Fragment) {
const { transmuxer } = this;
if (!transmuxer) {
return;
}
const chunkMeta = new ChunkMetadata(frag.level, frag.sn);
const chunkMeta = new ChunkMetadata(frag.level, frag.sn, frag.stats.chunkCount + 1, 0);
chunkMeta.transmuxing.start = performance.now();
transmuxer.flush(chunkMeta);
}

protected _handleFragmentLoadProgress (frag, payload) {}
protected _handleFragmentLoadProgress (frag: Fragment, payload: ArrayBuffer | Uint8Array) {}

protected _doFragLoad (frag, progressCallback?: FragmentLoadProgressCallback) {
protected _doFragLoad (frag: Fragment, progressCallback?: FragmentLoadProgressCallback) {
this.state = State.FRAG_LOADING;
this.hls.trigger(Event.FRAG_LOADING, { frag });

Expand Down Expand Up @@ -248,7 +254,7 @@ export default class BaseStreamController extends TaskLoop {
return;
}
const { frag, level } = context;
frag.stats.tparsed = window.performance.now();
frag.stats.parsing.end = performance.now();

this.updateLevelTiming(frag, level);
this.state = State.PARSED;
Expand Down Expand Up @@ -276,12 +282,12 @@ export default class BaseStreamController extends TaskLoop {
return { frag, level: currentLevel };
}

protected bufferFragmentData (data, parent) {
protected bufferFragmentData (data: { data1: Uint8Array, data2?: Uint8Array, type: SourceBufferName }, frag: Fragment, chunkMeta: ChunkMetadata) {
if (!data || this.state !== State.PARSING) {
return;
}

const { data1, data2 } = data;
let { data1, data2 } = data;
let buffer = data1;
if (data1 && data2) {
// Combine the moof + mdat so that we buffer with a single append
Expand All @@ -292,7 +298,8 @@ export default class BaseStreamController extends TaskLoop {
return;
}

this.hls.trigger(Event.BUFFER_APPENDING, { type: data.type, data: buffer, parent, content: 'data' });
const segment: BufferAppendingEventPayload = { type: data.type, data: buffer, frag, chunkMeta };
this.hls.trigger(Event.BUFFER_APPENDING, segment);
this.tick();
}

Expand Down Expand Up @@ -548,14 +555,14 @@ export default class BaseStreamController extends TaskLoop {
this.nextLoadPosition = this.lastCurrentTime;
}
if (transmuxer && frag.sn !== 'initSegment') {
transmuxer.flush(new ChunkMetadata(frag.level, frag.sn));
transmuxer.flush(new ChunkMetadata(frag.level, frag.sn, frag.stats.chunkCount + 1, 0));
}

Object.keys(frag.elementaryStreams).forEach(type => frag.elementaryStreams[type] = null);
this.log(`Fragment ${frag.sn} of level ${frag.level} was aborted, flushing transmuxer & resetting nextLoadPosition to ${this.nextLoadPosition}`);
}

private updateLevelTiming (frag, currentLevel) {
private updateLevelTiming (frag: Fragment, currentLevel) {
const { details } = currentLevel;
Object.keys(frag.elementaryStreams).forEach(type => {
const info = frag.elementaryStreams[type];
Expand Down
Loading

0 comments on commit 8466138

Please sign in to comment.