remove media channel constants
davidkim9 committed Jan 8, 2018
1 parent 050d030 commit 1421bd3
Showing 7 changed files with 35 additions and 59 deletions.
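All seven files make the same change: the src/media-channels.js constants module is deleted, and every MediaChannels.AUDIO / MediaChannels.VIDEO reference becomes a plain string literal. Since the constants evidently held the strings 'audio' and 'video', the refactor is behavior-preserving. A minimal before/after sketch, abridged from the hunks below:

// before: channel keys come from the shared constants module
import * as MediaChannels from '../media-channels';
let audioTrack = this.tracks[MediaChannels.AUDIO];

// after: the string key is used directly
let audioTrack = this.tracks.audio;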
src/controller/audio-stream-controller.js (3 additions, 4 deletions)
@@ -13,7 +13,6 @@ import {ErrorDetails} from '../errors';
 import {logger} from '../utils/logger';
 import { findFragWithCC } from '../utils/discontinuities';
 import {FragmentState} from '../helper/fragment-tracker';
-import * as MediaChannels from '../media-channels';
 
 const State = {
   STOPPED : 'STOPPED',
@@ -616,12 +615,12 @@ class AudioStreamController extends EventHandler {
     let tracks = data.tracks, track;
 
     // delete any video track found on audio demuxer
-    if (tracks[MediaChannels.VIDEO]) {
-      delete tracks[MediaChannels.VIDEO];
+    if (tracks.video) {
+      delete tracks.video;
     }
 
     // include levelCodec in audio and video tracks
-    track = tracks[MediaChannels.AUDIO];
+    track = tracks.audio;
     if(track) {
       track.levelCodec = track.codec;
       track.id = data.id;
src/controller/buffer-controller.js (3 additions, 4 deletions)
@@ -7,7 +7,6 @@ import EventHandler from '../event-handler';
 import {logger} from '../utils/logger';
 import {ErrorTypes, ErrorDetails} from '../errors';
 import {getMediaSource} from '../helper/mediasource-helper';
-import * as MediaChannels from '../media-channels';
 
 const MediaSource = getMediaSource();
 
@@ -49,7 +48,7 @@ class BufferController extends EventHandler {
 
   onLevelPtsUpdated(data) {
     let type = data.type;
-    let audioTrack = this.tracks[MediaChannels.AUDIO];
+    let audioTrack = this.tracks.audio;
 
     // Adjusting `SourceBuffer.timestampOffset` (desired point in the timeline where the next frames should be appended)
     // in Chrome browser when we detect MPEG audio container and time delta between level PTS and `SourceBuffer.timestampOffset`
@@ -59,7 +58,7 @@
     // More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486
 
     if (type === 'audio' && audioTrack && audioTrack.container === 'audio/mpeg') { // Chrome audio mp3 track
-      let audioBuffer = this.sourceBuffer[MediaChannels.AUDIO];
+      let audioBuffer = this.sourceBuffer.audio;
       let delta = Math.abs(audioBuffer.timestampOffset - data.start);
 
       // adjust timestamp offset if time delta is greater than 100ms
@@ -204,7 +203,7 @@
   onSBUpdateEnd() {
     // update timestampOffset
     if (this.audioTimestampOffset) {
-      let audioBuffer = this.sourceBuffer[MediaChannels.AUDIO];
+      let audioBuffer = this.sourceBuffer.audio;
       logger.warn('change mpeg audio timestamp offset from ' + audioBuffer.timestampOffset + ' to ' + this.audioTimestampOffset);
       audioBuffer.timestampOffset = this.audioTimestampOffset;
       delete this.audioTimestampOffset;
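The buffer-controller hunks are two halves of one mechanism: onLevelPtsUpdated notices that the MPEG audio SourceBuffer's timestampOffset has drifted from the level PTS, and onSBUpdateEnd applies a correction that was deferred while the buffer was busy. A rough sketch of how the pieces fit, assuming (the collapsed lines between the hunks are not shown above) that a busy SourceBuffer causes the new offset to be parked on this.audioTimestampOffset:

// Illustrative sketch, not the verbatim hls.js code.
onLevelPtsUpdated(data) {
  let audioTrack = this.tracks.audio;
  if (data.type === 'audio' && audioTrack && audioTrack.container === 'audio/mpeg') {
    let audioBuffer = this.sourceBuffer.audio;
    let delta = Math.abs(audioBuffer.timestampOffset - data.start);
    if (delta > 0.1) {                            // ignore drift under 100ms
      if (audioBuffer.updating) {
        this.audioTimestampOffset = data.start;   // defer; applied in onSBUpdateEnd
      } else {
        audioBuffer.timestampOffset = data.start; // safe to apply immediately
      }
    }
  }
}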
src/controller/stream-controller.js (3 additions, 3 deletions)
@@ -13,7 +13,6 @@ import TimeRanges from '../utils/timeRanges';
 import {ErrorTypes, ErrorDetails} from '../errors';
 import {logger} from '../utils/logger';
 import { alignDiscontinuities } from '../utils/discontinuities';
-import * as MediaChannels from '../media-channels';
 
 const State = {
   STOPPED : 'STOPPED',
@@ -60,6 +59,7 @@ class StreamController extends EventHandler {
   }
 
   destroy() {
+    console.error('destroy stream controller');
     this.stopLoad();
     if (this.timer) {
       clearInterval(this.timer);
@@ -1092,7 +1092,7 @@
         data.id === 'main' &&
         fragNew.sn === fragCurrent.sn &&
         fragNew.level === fragCurrent.level &&
-        !(data.type === MediaChannels.AUDIO && this.altAudio) && // filter out main audio if audio track is loaded through audio stream controller
+        !(data.type === 'audio' && this.altAudio) && // filter out main audio if audio track is loaded through audio stream controller
         this.state === State.PARSING) {
       var level = this.levels[this.level],
           frag = fragCurrent;
@@ -1110,7 +1110,7 @@
       logger.log(`Parsed ${data.type},PTS:[${data.startPTS.toFixed(3)},${data.endPTS.toFixed(3)}],DTS:[${data.startDTS.toFixed(3)}/${data.endDTS.toFixed(3)}],nb:${data.nb},dropped:${data.dropped || 0}`);
 
       // Detect gaps in a fragment and try to fix it by finding a keyframe in the previous fragment (see _findFragments)
-      if(data.type === MediaChannels.VIDEO) {
+      if(data.type === 'video') {
         frag.dropped = data.dropped;
         if (frag.dropped) {
           if (!frag.backtracked) {
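The altAudio guard is the subtle part of the third hunk: once an alternate audio track is active, audio data parsed from the main stream must be ignored, because the audio stream controller owns the audio SourceBuffer. The same condition restated as a standalone predicate (illustrative only; names mirror the hunk):

// Hypothetical restatement of the guard above, for readability.
function shouldHandleMainParsedData(data, fragCurrent, fragNew, altAudio, state) {
  return data.id === 'main' &&
    fragNew.sn === fragCurrent.sn &&
    fragNew.level === fragCurrent.level &&
    !(data.type === 'audio' && altAudio) && // main audio handled elsewhere
    state === 'PARSING';                    // State.PARSING in the source
}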
src/helper/fragment-tracker.js (3 additions, 4 deletions)
@@ -1,6 +1,5 @@
 import EventHandler from '../event-handler';
 import Event from '../events';
-import * as MediaChannels from '../media-channels';
 
 export const FragmentState = {
   NOT_LOADED: 'NOT_LOADED',
@@ -38,7 +37,7 @@ export class FragmentTracker extends EventHandler {
   * Partial fragments affected by coded frame eviction will be removed
   * The browser will unload parts of the buffer to free up memory for new buffer data
   * Fragments will need to be reloaded when the buffer is freed up; removing partial fragments will allow them to reload (since there might be parts that are still playable)
-  * @param {String} channel The channel of media this is (eg. MediaChannels.VIDEO, MediaChannels.AUDIO)
+  * @param {String} channel The channel of media this is (eg. video/audio)
   * @param {Object} timeRange TimeRange object from a sourceBuffer
   */
  detectEvictedFragments(channel, timeRange) {
@@ -167,8 +166,8 @@
 
  isPartial(fragmentEntity) {
    return fragmentEntity.buffered === true &&
-      ((fragmentEntity.range[MediaChannels.VIDEO] !== undefined && fragmentEntity.range[MediaChannels.VIDEO].partial === true) ||
-      (fragmentEntity.range[MediaChannels.AUDIO] !== undefined && fragmentEntity.range[MediaChannels.AUDIO].partial === true));
+      ((fragmentEntity.range.video !== undefined && fragmentEntity.range.video.partial === true) ||
+      (fragmentEntity.range.audio !== undefined && fragmentEntity.range.audio.partial === true));
  }
 
  isTimeBuffered(startPTS, endPTS, timeRange) {
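The doc comment above explains what detectEvictedFragments does, but its body is collapsed. Based on that comment and the isTimeBuffered helper visible in the last hunk, the logic is plausibly along these lines; the this.fragments store and the startPTS/endPTS fields on each per-channel range are assumptions for illustration:

// Hypothetical sketch of detectEvictedFragments, not the hls.js source.
detectEvictedFragments(channel, timeRange) {
  Object.keys(this.fragments).forEach((key) => {
    const fragmentEntity = this.fragments[key];  // assumed keyed store
    const range = fragmentEntity.range[channel]; // per-channel buffered range
    if (!range) {
      return;
    }
    // If the fragment's span no longer survives in the SourceBuffer, the
    // browser evicted it; drop the entry so the fragment can be reloaded.
    if (!this.isTimeBuffered(range.startPTS, range.endPTS, timeRange)) {
      delete this.fragments[key];
    }
  });
}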
src/media-channels.js (0 additions, 19 deletions)

This file was deleted.
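GitHub does not render the deleted file's contents. Judging from the call sites above, where MediaChannels.VIDEO and MediaChannels.AUDIO are replaced 1:1 by the literals 'video' and 'audio', the module's core was almost certainly the following two exports; whatever filled the other deleted lines (comments, perhaps other channel names) is not recoverable:

// src/media-channels.js, reconstructed from usage; not the verbatim file.
export const VIDEO = 'video';
export const AUDIO = 'audio';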

src/remux/mp4-remuxer.js (2 additions, 3 deletions)
@@ -6,7 +6,6 @@
 import AAC from '../helper/aac';
 import Event from '../events';
 import {logger} from '../utils/logger';
-import * as MediaChannels from '../media-channels';
 import MP4 from '../remux/mp4-generator';
 import {ErrorTypes, ErrorDetails} from '../errors';
 
@@ -129,7 +128,7 @@ class MP4Remuxer {
         audioTrack.codec = 'mp3';
       }
     }
-    tracks[MediaChannels.AUDIO] = {
+    tracks.audio = {
       container : container,
       codec : audioTrack.codec,
       initSegment : !audioTrack.isAAC && typeSupported.mpeg ? new Uint8Array() : MP4.initSegment([audioTrack]),
@@ -148,7 +147,7 @@
       // we use input time scale straight away to avoid rounding issues on frame duration / cts computation
       const inputTimeScale = videoTrack.inputTimeScale;
       videoTrack.timescale = inputTimeScale;
-    tracks[MediaChannels.VIDEO] = {
+    tracks.video = {
       container : 'video/mp4',
       codec : videoTrack.codec,
       initSegment : MP4.initSegment([videoTrack]),
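For context, these two assignments build the per-channel track descriptors consumed elsewhere in this commit (tracks.audio is read in audio-stream-controller, and tracks.video is deleted there when found on the audio demuxer). An example of the resulting shape, with hypothetical codec strings; the real objects carry more fields than the truncated hunks reveal:

// Example shape only; codec values are placeholders.
const tracks = {
  audio: {
    container: 'audio/mp4',   // 'audio/mpeg' when MP3 is passed through
    codec: 'mp4a.40.2',       // hypothetical AAC-LC codec string
    initSegment: new Uint8Array()
  },
  video: {
    container: 'video/mp4',
    codec: 'avc1.42e01e',     // hypothetical H.264 codec string
    initSegment: new Uint8Array()
  }
};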
tests/unit/helper/fragment-tracker.js (21 additions, 22 deletions)
@@ -3,7 +3,6 @@ import Event from "../../../src/events";
 const assert = require('assert');
 
 import {FragmentTracker, FragmentState} from '../../../src/helper/fragment-tracker';
-import * as MediaChannels from '../../../src/media-channels';
 import Hls from '../../../src/hls';
 
 function createMockBuffer(buffered) {
@@ -26,7 +25,7 @@ describe('FragmentTracker', () => {
       endPTS: 1,
       sn: 1,
       level: 1,
-      mediaChannels: new Set([MediaChannels.AUDIO, MediaChannels.VIDEO]),
+      mediaChannels: new Set(['audio', 'video']),
       type: 'main'
     };
     hls.trigger(Event.FRAG_LOADED, { frag: fragment });
@@ -39,8 +38,8 @@
     ]);
 
     timeRanges = new Map();
-    timeRanges.set(MediaChannels.VIDEO, buffered);
-    timeRanges.set(MediaChannels.AUDIO, buffered);
+    timeRanges.set('video', buffered);
+    timeRanges.set('audio', buffered);
     hls.trigger(Event.BUFFER_APPENDED, { timeRanges });
 
     hls.trigger(Event.FRAG_BUFFERED, { stats: { aborted: true }, id : 'main', frag: fragment });
@@ -73,7 +72,7 @@
       endPTS: 1,
       sn: 1,
       level: 0,
-      mediaChannels: new Set([MediaChannels.AUDIO, MediaChannels.VIDEO]),
+      mediaChannels: new Set(['audio', 'video']),
       type: 'main'
     };
     hls.trigger(Event.FRAG_LOADED, { frag: fragment });
@@ -94,8 +93,8 @@
     ]);
 
     timeRanges = new Map();
-    timeRanges.set(MediaChannels.VIDEO, buffered);
-    timeRanges.set(MediaChannels.AUDIO, buffered);
+    timeRanges.set('video', buffered);
+    timeRanges.set('audio', buffered);
     hls.trigger(Event.BUFFER_APPENDED, { timeRanges });
 
     hls.trigger(Event.FRAG_BUFFERED, { stats: { aborted: true }, id : 'main', frag: fragment });
@@ -112,8 +111,8 @@
       },
     ]);
     timeRanges = new Map();
-    timeRanges.set(MediaChannels.VIDEO, buffered);
-    timeRanges.set(MediaChannels.AUDIO, buffered);
+    timeRanges.set('video', buffered);
+    timeRanges.set('audio', buffered);
    hls.trigger(Event.BUFFER_APPENDED, { timeRanges });
 
     hls.trigger(Event.FRAG_BUFFERED, { stats: { aborted: true }, id : 'main', frag: fragment });
@@ -130,8 +129,8 @@
       },
     ]);
     timeRanges = new Map();
-    timeRanges.set(MediaChannels.VIDEO, buffered);
-    timeRanges.set(MediaChannels.AUDIO, buffered);
+    timeRanges.set('video', buffered);
+    timeRanges.set('audio', buffered);
     hls.trigger(Event.BUFFER_APPENDED, { timeRanges });
 
     hls.trigger(Event.FRAG_BUFFERED, { stats: { aborted: true }, id : 'main', frag: fragment });
@@ -146,8 +145,8 @@
       },
     ]);
     timeRanges = new Map();
-    timeRanges.set(MediaChannels.VIDEO, buffered);
-    timeRanges.set(MediaChannels.AUDIO, buffered);
+    timeRanges.set('video', buffered);
+    timeRanges.set('audio', buffered);
     hls.trigger(Event.BUFFER_APPENDED, { timeRanges });
 
     assert.strictEqual(fragmentTracker.getState(fragment), FragmentState.NOT_LOADED);
@@ -166,19 +165,19 @@
       endPTS: 1,
       sn: 1,
       level: 1,
-      mediaChannels: new Set([MediaChannels.AUDIO, MediaChannels.VIDEO]),
+      mediaChannels: new Set(['audio', 'video']),
       type: 'main'
     };
     hls.trigger(Event.FRAG_LOADED, { frag: fragment });
 
     timeRanges = new Map();
-    timeRanges.set(MediaChannels.VIDEO, createMockBuffer([
+    timeRanges.set('video', createMockBuffer([
       {
         startPTS: 0,
        endPTS: 2
       },
     ]));
-    timeRanges.set(MediaChannels.AUDIO, createMockBuffer([
+    timeRanges.set('audio', createMockBuffer([
       {
         startPTS: 0.5,
         endPTS: 2
@@ -197,19 +196,19 @@
       endPTS: 1,
       sn: 1,
       level: 1,
-      mediaChannels: new Set([MediaChannels.AUDIO, MediaChannels.VIDEO]),
+      mediaChannels: new Set(['audio', 'video']),
       type: 'main'
     };
     hls.trigger(Event.FRAG_LOADED, { frag: fragment });
 
     timeRanges = new Map();
-    timeRanges.set(MediaChannels.VIDEO, createMockBuffer([
+    timeRanges.set('video', createMockBuffer([
       {
         startPTS: 0.5,
         endPTS: 2
       },
     ]));
-    timeRanges.set(MediaChannels.AUDIO, createMockBuffer([
+    timeRanges.set('audio', createMockBuffer([
       {
         startPTS: 0,
         endPTS: 2
@@ -228,19 +227,19 @@
       endPTS: 1,
       sn: 1,
       level: 1,
-      mediaChannels: new Set([MediaChannels.AUDIO]),
+      mediaChannels: new Set(['audio']),
       type: 'audio'
     };
     hls.trigger(Event.FRAG_LOADED, { frag: fragment });
 
     timeRanges = new Map();
-    timeRanges.set(MediaChannels.VIDEO, createMockBuffer([
+    timeRanges.set('video', createMockBuffer([
       {
         startPTS: 0.5,
         endPTS: 2
       },
     ]));
-    timeRanges.set(MediaChannels.AUDIO, createMockBuffer([
+    timeRanges.set('audio', createMockBuffer([
       {
         startPTS: 0,
         endPTS: 2
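The tests depend on a createMockBuffer helper whose body is collapsed in the first hunk. A plausible minimal implementation, assuming it fakes just the TimeRanges surface (start, end, length) that FragmentTracker reads:

// Hypothetical helper; the real implementation is collapsed above.
function createMockBuffer(buffered) {
  return {
    start: (i) => buffered[i].startPTS,
    end: (i) => buffered[i].endPTS,
    length: buffered ? buffered.length : 0
  };
}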
