forked from jchomaz/Vulture

Tracking of the VApp application (the game's UI)

commit 89e9db9b62
2025-05-11 18:04:12 +02:00
17763 changed files with 3718499 additions and 0 deletions


@@ -0,0 +1,101 @@
/**
* @file ad-cue-tags.js
*/
import window from 'global/window';
/**
* Searches for an ad cue that overlaps with the given mediaTime
*
* @param {Object} track
* the track to find the cue for
*
* @param {number} mediaTime
* the time to find the cue at
*
* @return {Object|null}
* the found cue or null
*/
export const findAdCue = function(track, mediaTime) {
const cues = track.cues;
for (let i = 0; i < cues.length; i++) {
const cue = cues[i];
if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
return cue;
}
}
return null;
};
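// A usage sketch (hypothetical cue values): cues created by updateAdCues below
// carry adStartTime/adEndTime, so a lookup inside a 10s-30s ad break returns
// that cue, and a lookup outside any break returns null.
//
//   const cue = findAdCue(track, 12); // cue with adStartTime 10, adEndTime 30
//   findAdCue(track, 35); // => null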
export const updateAdCues = function(media, track, offset = 0) {
if (!media.segments) {
return;
}
let mediaTime = offset;
let cue;
for (let i = 0; i < media.segments.length; i++) {
const segment = media.segments[i];
if (!cue) {
// Since the cues will span for at least the segment duration, adding a fudge
// factor of half segment duration will prevent duplicate cues from being
// created when timing info is not exact (e.g. cue start time initialized
// at 10.006677, but next call mediaTime is 10.003332)
cue = findAdCue(track, mediaTime + (segment.duration / 2));
}
if (cue) {
if ('cueIn' in segment) {
// Found a CUE-IN so end the cue
cue.endTime = mediaTime;
cue.adEndTime = mediaTime;
mediaTime += segment.duration;
cue = null;
continue;
}
if (mediaTime < cue.endTime) {
// Already processed this mediaTime for this cue
mediaTime += segment.duration;
continue;
}
// otherwise extend cue until a CUE-IN is found
cue.endTime += segment.duration;
} else {
if ('cueOut' in segment) {
cue = new window.VTTCue(
mediaTime,
mediaTime + segment.duration,
segment.cueOut
);
cue.adStartTime = mediaTime;
// Assumes tag format to be
// #EXT-X-CUE-OUT:30
cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
track.addCue(cue);
}
if ('cueOutCont' in segment) {
// Entered into the middle of an ad cue
// Assumes tag format to be
// #EXT-X-CUE-OUT-CONT:10/30
const [adOffset, adTotal] = segment.cueOutCont.split('/').map(parseFloat);
cue = new window.VTTCue(
mediaTime,
mediaTime + segment.duration,
''
);
cue.adStartTime = mediaTime - adOffset;
cue.adEndTime = cue.adStartTime + adTotal;
track.addCue(cue);
}
}
mediaTime += segment.duration;
}
};
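// Worked example of the #EXT-X-CUE-OUT-CONT:10/30 branch above (hypothetical
// timing): joining an ad break 10s in, with a 30s total duration, at
// mediaTime = 100 gives
//
//   adOffset = 10, adTotal = 30
//   cue.adStartTime = 100 - 10 = 90
//   cue.adEndTime = 90 + 30 = 120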


@@ -0,0 +1,131 @@
import { isArrayBufferView } from '@videojs/vhs-utils/es/byte-helpers';
/**
* @file bin-utils.js
*/
/**
* convert a TimeRange to text
*
* @param {TimeRange} range the timerange to use for conversion
* @param {number} i the iterator on the range to convert
* @return {string} the range in string format
*/
const textRange = function(range, i) {
return range.start(i) + '-' + range.end(i);
};
/**
* format a number as hex string
*
* @param {number} e The number
* @param {number} i the iterator
* @return {string} the hex formatted number as a string
*/
const formatHexString = function(e, i) {
const value = e.toString(16);
return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
};
const formatAsciiString = function(e) {
if (e >= 0x20 && e < 0x7e) {
return String.fromCharCode(e);
}
return '.';
};
/**
* Creates an object for sending to a web worker, modifying properties that are TypedArrays
* into a new object with separated properties for the buffer, byteOffset, and byteLength.
*
* @param {Object} message
* Object of properties and values to send to the web worker
* @return {Object}
* Modified message with TypedArray values expanded
* @function createTransferableMessage
*/
export const createTransferableMessage = function(message) {
const transferable = {};
Object.keys(message).forEach((key) => {
const value = message[key];
if (isArrayBufferView(value)) {
transferable[key] = {
bytes: value.buffer,
byteOffset: value.byteOffset,
byteLength: value.byteLength
};
} else {
transferable[key] = value;
}
});
return transferable;
};
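// A sketch of the intended use (worker wiring is illustrative): expanding each
// TypedArray into { bytes, byteOffset, byteLength } exposes the underlying
// ArrayBuffers so postMessage can transfer them instead of copying.
//
//   const message = createTransferableMessage({ id: 1, encrypted: segmentBytes });
//   worker.postMessage(message, [message.encrypted.bytes]);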
/**
* Returns a unique string identifier for a media initialization
* segment.
*
* @param {Object} initSegment
* the init segment object.
*
* @return {string} the generated init segment id
*/
export const initSegmentId = function(initSegment) {
const byterange = initSegment.byterange || {
length: Infinity,
offset: 0
};
return [
byterange.length, byterange.offset, initSegment.resolvedUri
].join(',');
};
/**
* Returns a unique string identifier for a media segment key.
*
* @param {Object} key the encryption key
* @return {string} the unique id for the media segment key.
*/
export const segmentKeyId = function(key) {
return key.resolvedUri;
};
/**
* utils to help dump binary data to the console
*
* @param {Array|TypedArray} data
* data to dump to a string
*
* @return {string} the data as a hex string.
*/
export const hexDump = (data) => {
const bytes = Array.prototype.slice.call(data);
const step = 16;
let result = '';
let hex;
let ascii;
for (let j = 0; j < bytes.length / step; j++) {
hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
result += hex + ' ' + ascii + '\n';
}
return result;
};
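// Example (3 bytes in, one 16-byte row out): hex pairs are grouped two bytes
// at a time, followed by the ASCII view, with unprintable bytes shown as '.'
//
//   hexDump(new Uint8Array([0x48, 0x49, 0x21])); // => '4849 21 HI!\n'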
export const tagDump = ({ bytes }) => hexDump(bytes);
export const textRanges = (ranges) => {
let result = '';
let i;
for (i = 0; i < ranges.length; i++) {
result += textRange(ranges, i) + ' ';
}
return result;
};


@@ -0,0 +1,21 @@
export default {
GOAL_BUFFER_LENGTH: 30,
MAX_GOAL_BUFFER_LENGTH: 60,
BACK_BUFFER_LENGTH: 30,
GOAL_BUFFER_LENGTH_RATE: 1,
// 0.5 MB/s
INITIAL_BANDWIDTH: 4194304,
// A fudge factor to apply to advertised playlist bitrates to account for
// temporary fluctuations in client bandwidth
BANDWIDTH_VARIANCE: 1.2,
// How much of the buffer must be filled before we consider upswitching
BUFFER_LOW_WATER_LINE: 0,
MAX_BUFFER_LOW_WATER_LINE: 30,
// TODO: Remove this when experimentalBufferBasedABR is removed
EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
BUFFER_LOW_WATER_LINE_RATE: 1,
// If the buffer is greater than the high water line, we won't switch down
BUFFER_HIGH_WATER_LINE: 30
};
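// How these defaults are consumed (a sketch; the actual ABR logic lives in the
// playlist controller): the goal buffer length grows with playback time at
// GOAL_BUFFER_LENGTH_RATE and is capped at MAX_GOAL_BUFFER_LENGTH, e.g.
//
//   const goalBufferLength = Math.min(
//     Config.GOAL_BUFFER_LENGTH + currentTime * Config.GOAL_BUFFER_LENGTH_RATE,
//     Config.MAX_GOAL_BUFFER_LENGTH
//   );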


@@ -0,0 +1,489 @@
import resolveUrl from './resolve-url';
import window from 'global/window';
import logger from './util/logger';
import videojs from 'video.js';
/**
* A utility class for setting properties and maintaining the state of the content steering manifest.
*
* Content Steering manifest format:
* VERSION: number (required) currently only version 1 is supported.
* TTL: number in seconds (optional) until the next content steering manifest reload.
* RELOAD-URI: string (optional) uri to fetch the next content steering manifest.
* SERVICE-LOCATION-PRIORITY or PATHWAY-PRIORITY: a non-empty array of unique string values.
* PATHWAY-CLONES: array (optional) (HLS only) pathway clone objects to copy from other playlists.
*/
class SteeringManifest {
constructor() {
this.priority_ = [];
this.pathwayClones_ = new Map();
}
set version(number) {
// Only version 1 is currently supported for both DASH and HLS.
if (number === 1) {
this.version_ = number;
}
}
set ttl(seconds) {
// TTL = time-to-live, default = 300 seconds.
this.ttl_ = seconds || 300;
}
set reloadUri(uri) {
if (uri) {
// reload URI can be relative to the previous reloadUri.
this.reloadUri_ = resolveUrl(this.reloadUri_, uri);
}
}
set priority(array) {
// priority must be a non-empty array of unique values.
if (array && array.length) {
this.priority_ = array;
}
}
set pathwayClones(array) {
// pathwayClones must be non-empty.
if (array && array.length) {
this.pathwayClones_ = new Map(array.map((clone) => [clone.ID, clone]));
}
}
get version() {
return this.version_;
}
get ttl() {
return this.ttl_;
}
get reloadUri() {
return this.reloadUri_;
}
get priority() {
return this.priority_;
}
get pathwayClones() {
return this.pathwayClones_;
}
}
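// A minimal HLS steering manifest matching the format notes above (values are
// illustrative):
//
//   {
//     "VERSION": 1,
//     "TTL": 300,
//     "RELOAD-URI": "https://steering.example.com/manifest",
//     "PATHWAY-PRIORITY": ["CDN-A", "CDN-B"]
//   }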
/**
* This class represents a content steering manifest and associated state. See both HLS and DASH specifications.
* HLS: https://developer.apple.com/streaming/HLSContentSteeringSpecification.pdf and
* https://datatracker.ietf.org/doc/draft-pantos-hls-rfc8216bis/ section 4.4.6.6.
* DASH: https://dashif.org/docs/DASH-IF-CTS-00XX-Content-Steering-Community-Review.pdf
*
* @param {function} xhr for making a network request from the browser.
* @param {function} bandwidth for fetching the current bandwidth from the main segment loader.
*/
export default class ContentSteeringController extends videojs.EventTarget {
constructor(xhr, bandwidth) {
super();
this.currentPathway = null;
this.defaultPathway = null;
this.queryBeforeStart = false;
this.availablePathways_ = new Set();
this.steeringManifest = new SteeringManifest();
this.proxyServerUrl_ = null;
this.manifestType_ = null;
this.ttlTimeout_ = null;
this.request_ = null;
this.currentPathwayClones = new Map();
this.nextPathwayClones = new Map();
this.excludedSteeringManifestURLs = new Set();
this.logger_ = logger('Content Steering');
this.xhr_ = xhr;
this.getBandwidth_ = bandwidth;
}
/**
* Assigns the content steering tag properties to the steering controller
*
* @param {string} baseUrl the baseURL from the main manifest for resolving the steering manifest url
* @param {Object} steeringTag the content steering tag from the main manifest
*/
assignTagProperties(baseUrl, steeringTag) {
this.manifestType_ = steeringTag.serverUri ? 'HLS' : 'DASH';
// serverUri is HLS, serverURL is DASH
const steeringUri = steeringTag.serverUri || steeringTag.serverURL;
if (!steeringUri) {
this.logger_(`steering manifest URL is ${steeringUri}, cannot request steering manifest.`);
this.trigger('error');
return;
}
// Content steering manifests can be encoded as a data URI. We can decode, parse and return early if that's the case.
if (steeringUri.startsWith('data:')) {
this.decodeDataUriManifest_(steeringUri.substring(steeringUri.indexOf(',') + 1));
return;
}
// reloadUri is the resolution of the main manifest URL and steering URL.
this.steeringManifest.reloadUri = resolveUrl(baseUrl, steeringUri);
// pathwayId is HLS, defaultServiceLocation is DASH
this.defaultPathway = steeringTag.pathwayId || steeringTag.defaultServiceLocation;
// currently only DASH supports the following properties on <ContentSteering> tags.
this.queryBeforeStart = steeringTag.queryBeforeStart;
this.proxyServerUrl_ = steeringTag.proxyServerURL;
// trigger a steering event if we have a pathway from the content steering tag.
// this tells VHS which segment pathway to start with.
// If queryBeforeStart is true we need to wait for the steering manifest response.
if (this.defaultPathway && !this.queryBeforeStart) {
this.trigger('content-steering');
}
}
/**
* Requests the content steering manifest and parses the response. This should only be called after
* assignTagProperties was called with a content steering tag.
*
* @param {boolean} initial Whether this is the initial request.
* If true, the request is made with exactly the reload URI as-is.
* This scenario should only happen once on initialization.
*/
requestSteeringManifest(initial) {
const reloadUri = this.steeringManifest.reloadUri;
if (!reloadUri) {
return;
}
// We currently don't support passing MPD query parameters directly to the content steering URL as this requires
// ExtUrlQueryInfo tag support. See the DASH content steering spec section 8.1.
// This request URI accounts for manifest URIs that have been excluded.
const uri = initial ? reloadUri : this.getRequestURI(reloadUri);
// If there are no valid manifest URIs, we should stop content steering.
if (!uri) {
this.logger_('No valid content steering manifest URIs. Stopping content steering.');
this.trigger('error');
this.dispose();
return;
}
const metadata = {
contentSteeringInfo: {
uri
}
};
this.trigger({ type: 'contentsteeringloadstart', metadata });
this.request_ = this.xhr_({
uri,
requestType: 'content-steering-manifest'
}, (error, errorInfo) => {
if (error) {
// If the client receives HTTP 410 Gone in response to a manifest request,
// it MUST NOT issue another request for that URI for the remainder of the
// playback session. It MAY continue to use the most-recently obtained set
// of Pathways.
if (errorInfo.status === 410) {
this.logger_(`manifest request 410 ${error}.`);
this.logger_(`There will be no more content steering requests to ${uri} this session.`);
this.excludedSteeringManifestURLs.add(uri);
return;
}
// If the client receives HTTP 429 Too Many Requests with a Retry-After
// header in response to a manifest request, it SHOULD wait until the time
// specified by the Retry-After header to reissue the request.
if (errorInfo.status === 429) {
const retrySeconds = errorInfo.responseHeaders['retry-after'];
this.logger_(`manifest request 429 ${error}.`);
this.logger_(`content steering will retry in ${retrySeconds} seconds.`);
this.startTTLTimeout_(parseInt(retrySeconds, 10));
return;
}
// If the Steering Manifest cannot be loaded and parsed correctly, the
// client SHOULD continue to use the previous values and attempt to reload
// it after waiting for the previously-specified TTL (or 5 minutes if
// none).
this.logger_(`manifest failed to load ${error}.`);
this.startTTLTimeout_();
return;
}
this.trigger({ type: 'contentsteeringloadcomplete', metadata });
let steeringManifestJson;
try {
steeringManifestJson = JSON.parse(this.request_.responseText);
} catch (parseError) {
const errorMetadata = {
errorType: videojs.Error.StreamingContentSteeringParserError,
error: parseError
};
this.trigger({ type: 'error', metadata: errorMetadata });
// Don't apply properties from an unparsable manifest. Keep the previous
// values and retry after the TTL, per the spec guidance above.
this.startTTLTimeout_();
return;
}
this.assignSteeringProperties_(steeringManifestJson);
const parsedMetadata = {
contentSteeringInfo: metadata.contentSteeringInfo,
contentSteeringManifest: {
version: this.steeringManifest.version,
reloadUri: this.steeringManifest.reloadUri,
priority: this.steeringManifest.priority
}
};
this.trigger({ type: 'contentsteeringparsed', metadata: parsedMetadata });
this.startTTLTimeout_();
});
}
/**
* Set the proxy server URL and add the steering manifest url as a URI encoded parameter.
*
* @param {string} steeringUrl the steering manifest url
* @return {string} the steering manifest url routed through the proxy server with all parameters set
*/
setProxyServerUrl_(steeringUrl) {
const steeringUrlObject = new window.URL(steeringUrl);
const proxyServerUrlObject = new window.URL(this.proxyServerUrl_);
proxyServerUrlObject.searchParams.set('url', encodeURI(steeringUrlObject.toString()));
return this.setSteeringParams_(proxyServerUrlObject.toString());
}
/**
* Decodes and parses the data uri encoded steering manifest
*
* @param {string} dataUri the data uri to be decoded and parsed.
*/
decodeDataUriManifest_(dataUri) {
const steeringManifestJson = JSON.parse(window.atob(dataUri));
this.assignSteeringProperties_(steeringManifestJson);
}
/**
* Set the HLS or DASH content steering manifest request query parameters. For example:
* _HLS_pathway="<CURRENT-PATHWAY-ID>" and _HLS_throughput=<THROUGHPUT>
* _DASH_pathway and _DASH_throughput
*
* @param {string} url the URL to add content steering query parameters to.
* @return {string} a new URL string with the added steering query parameters.
*/
setSteeringParams_(url) {
const urlObject = new window.URL(url);
const path = this.getPathway();
const networkThroughput = this.getBandwidth_();
if (path) {
const pathwayKey = `_${this.manifestType_}_pathway`;
urlObject.searchParams.set(pathwayKey, path);
}
if (networkThroughput) {
const throughputKey = `_${this.manifestType_}_throughput`;
urlObject.searchParams.set(throughputKey, networkThroughput);
}
return urlObject.toString();
}
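// Example result (hypothetical pathway and throughput): for an HLS stream with
// a current pathway of 'CDN-A' and a measured bandwidth of 500000,
//
//   this.setSteeringParams_('https://steering.example.com/manifest')
//   // => 'https://steering.example.com/manifest?_HLS_pathway=CDN-A&_HLS_throughput=500000'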
/**
* Assigns the current steering manifest properties to the SteeringManifest object
*
* @param {Object} steeringJson the raw JSON steering manifest
*/
assignSteeringProperties_(steeringJson) {
this.steeringManifest.version = steeringJson.VERSION;
if (!this.steeringManifest.version) {
this.logger_(`manifest version is ${steeringJson.VERSION}, which is not supported.`);
this.trigger('error');
return;
}
this.steeringManifest.ttl = steeringJson.TTL;
this.steeringManifest.reloadUri = steeringJson['RELOAD-URI'];
// HLS = PATHWAY-PRIORITY required. DASH = SERVICE-LOCATION-PRIORITY optional
this.steeringManifest.priority = steeringJson['PATHWAY-PRIORITY'] || steeringJson['SERVICE-LOCATION-PRIORITY'];
// Pathway clones to be created/updated in HLS.
// See section 7.2 https://datatracker.ietf.org/doc/draft-pantos-hls-rfc8216bis/
this.steeringManifest.pathwayClones = steeringJson['PATHWAY-CLONES'];
this.nextPathwayClones = this.steeringManifest.pathwayClones;
// 1. apply first pathway from the array.
// 2. if first pathway doesn't exist in manifest, try next pathway.
// a. if all pathways are exhausted, ignore the steering manifest priority.
// 3. if segments fail from an established pathway, try all variants/renditions, then exclude the failed pathway.
// a. exclude a pathway for a minimum of the last TTL duration. Meaning, from the next steering response,
// the excluded pathway will be ignored.
// See excludePathway usage in excludePlaylist().
// If there are no available pathways, we need to stop content steering.
if (!this.availablePathways_.size) {
this.logger_('There are no available pathways for content steering. Ending content steering.');
this.trigger('error');
this.dispose();
}
const chooseNextPathway = (pathwaysByPriority) => {
for (const path of pathwaysByPriority) {
if (this.availablePathways_.has(path)) {
return path;
}
}
// If no pathway matches, ignore the manifest and choose the first available.
return [...this.availablePathways_][0];
};
const nextPathway = chooseNextPathway(this.steeringManifest.priority);
if (this.currentPathway !== nextPathway) {
this.currentPathway = nextPathway;
this.trigger('content-steering');
}
}
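// Selection sketch (hypothetical pathways):
//
//   // with availablePathways_ = new Set(['CDN-A'])
//   chooseNextPathway(['CDN-B', 'CDN-A']); // => 'CDN-A'
//   chooseNextPathway(['CDN-C']); // => 'CDN-A', fallback to first available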
/**
* Returns the pathway to use for steering decisions
*
* @return {string} returns the current pathway or the default
*/
getPathway() {
return this.currentPathway || this.defaultPathway;
}
/**
* Chooses the manifest request URI based on proxy URIs and server URLs.
* Also accounts for exclusion on certain manifest URIs.
*
* @param {string} reloadUri the base uri before parameters
*
* @return {string} the final URI for the request to the manifest server.
*/
getRequestURI(reloadUri) {
if (!reloadUri) {
return null;
}
const isExcluded = (uri) => this.excludedSteeringManifestURLs.has(uri);
if (this.proxyServerUrl_) {
const proxyURI = this.setProxyServerUrl_(reloadUri);
if (!isExcluded(proxyURI)) {
return proxyURI;
}
}
const steeringURI = this.setSteeringParams_(reloadUri);
if (!isExcluded(steeringURI)) {
return steeringURI;
}
// Return nothing if all valid manifest URIs are excluded.
return null;
}
/**
* Start the timeout for re-requesting the steering manifest at the TTL interval.
*
* @param {number} ttl time in seconds of the timeout. Defaults to the
* ttl interval in the steering manifest
*/
startTTLTimeout_(ttl = this.steeringManifest.ttl) {
// 300 (5 minutes) is the default value.
const ttlMS = ttl * 1000;
this.ttlTimeout_ = window.setTimeout(() => {
this.requestSteeringManifest();
}, ttlMS);
}
/**
* Clear the TTL timeout if necessary.
*/
clearTTLTimeout_() {
window.clearTimeout(this.ttlTimeout_);
this.ttlTimeout_ = null;
}
/**
* aborts any current steering xhr and sets the current request object to null
*/
abort() {
if (this.request_) {
this.request_.abort();
}
this.request_ = null;
}
/**
* aborts steering requests, clears the ttl timeout and resets all properties.
*/
dispose() {
this.off('content-steering');
this.off('error');
this.abort();
this.clearTTLTimeout_();
this.currentPathway = null;
this.defaultPathway = null;
this.queryBeforeStart = null;
this.proxyServerUrl_ = null;
this.manifestType_ = null;
this.ttlTimeout_ = null;
this.request_ = null;
this.excludedSteeringManifestURLs = new Set();
this.availablePathways_ = new Set();
this.steeringManifest = new SteeringManifest();
}
/**
* adds a pathway to the available pathways set
*
* @param {string} pathway the pathway string to add
*/
addAvailablePathway(pathway) {
if (pathway) {
this.availablePathways_.add(pathway);
}
}
/**
* Clears all pathways from the available pathways set
*/
clearAvailablePathways() {
this.availablePathways_.clear();
}
/**
* Removes a pathway from the available pathways set.
*
* @param {string} pathway the pathway string to remove
*/
excludePathway(pathway) {
return this.availablePathways_.delete(pathway);
}
/**
* Checks the refreshed DASH manifest content steering tag for changes.
*
* @param {string} baseURL the base URL of the refreshed DASH manifest
* @param {Object} newTag the new tag to check for changes
* @return {boolean} true if the new tag has different values
*/
didDASHTagChange(baseURL, newTag) {
return !newTag && this.steeringManifest.reloadUri ||
newTag && (resolveUrl(baseURL, newTag.serverURL) !== this.steeringManifest.reloadUri ||
newTag.defaultServiceLocation !== this.defaultPathway ||
newTag.queryBeforeStart !== this.queryBeforeStart ||
newTag.proxyServerURL !== this.proxyServerUrl_);
}
getAvailablePathways() {
return this.availablePathways_;
}
}

File diff suppressed because it is too large


@@ -0,0 +1,41 @@
/* global self */
import { Decrypter } from 'aes-decrypter';
import { createTransferableMessage } from './bin-utils';
/**
* Our web worker interface so that things can talk to aes-decrypter
* that will be running in a web worker. the scope is passed to this by
* webworkify.
*/
self.onmessage = function(event) {
const data = event.data;
const encrypted = new Uint8Array(
data.encrypted.bytes,
data.encrypted.byteOffset,
data.encrypted.byteLength
);
const key = new Uint32Array(
data.key.bytes,
data.key.byteOffset,
data.key.byteLength / 4
);
const iv = new Uint32Array(
data.iv.bytes,
data.iv.byteOffset,
data.iv.byteLength / 4
);
/* eslint-disable no-new, handle-callback-err */
new Decrypter(
encrypted,
key,
iv,
function(err, bytes) {
self.postMessage(createTransferableMessage({
source: data.source,
decrypted: bytes
}), [bytes.buffer]);
}
);
/* eslint-enable */
};
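// A sketch of driving this worker from the main thread (identifiers are
// illustrative): the key and IV must arrive as 16-byte buffers, since they
// are re-read here as Uint32Arrays.
//
//   const message = createTransferableMessage({
//     source: segmentId,
//     encrypted: encryptedBytes, // Uint8Array of segment data
//     key: keyBytes, // Uint8Array over the 16-byte AES-128 key
//     iv: ivBytes // Uint8Array over the 16-byte IV
//   });
//   worker.postMessage(message, [message.encrypted.bytes, message.key.bytes, message.iv.bytes]);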


@@ -0,0 +1,32 @@
import videojs from 'video.js';
// https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
export const QUOTA_EXCEEDED_ERR = 22;
export const getStreamingNetworkErrorMetadata = ({ requestType, request, error, parseFailure }) => {
const isBadStatus = request.status < 200 || request.status > 299;
const isFailure = request.status >= 400 && request.status <= 499;
const errorMetadata = {
uri: request.uri,
requestType
};
const isBadStatusOrParseFailure = (isBadStatus && !isFailure) || parseFailure;
if (error && isFailure) {
// copy original error and add to the metadata.
errorMetadata.error = {...error};
errorMetadata.errorType = videojs.Error.NetworkRequestFailed;
} else if (request.aborted) {
errorMetadata.errorType = videojs.Error.NetworkRequestAborted;
} else if (request.timedout) {
errorMetadata.errorType = videojs.Error.NetworkRequestTimeout;
} else if (isBadStatusOrParseFailure) {
const errorType = parseFailure ? videojs.Error.NetworkBodyParserFailed : videojs.Error.NetworkBadStatus;
errorMetadata.errorType = errorType;
errorMetadata.status = request.status;
errorMetadata.headers = request.headers;
}
return errorMetadata;
};
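// Example shape (hypothetical request): a 5xx response has no error object and
// is not a 4xx failure, so it takes the bad-status branch above.
//
//   getStreamingNetworkErrorMetadata({
//     requestType: 'segment',
//     request: { uri: 'https://example.com/seg1.ts', status: 500, headers: {} }
//   });
//   // => { uri, requestType: 'segment', errorType: videojs.Error.NetworkBadStatus,
//   //      status: 500, headers: {} }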


@@ -0,0 +1,343 @@
import videojs from 'video.js';
import window from 'global/window';
import { Parser as M3u8Parser } from 'm3u8-parser';
import { resolveUrl } from './resolve-url';
import { getLastParts, isAudioOnly } from './playlist.js';
const { log } = videojs;
export const createPlaylistID = (index, uri) => {
return `${index}-${uri}`;
};
// default function for creating a group id
export const groupID = (type, group, label) => {
return `placeholder-uri-${type}-${group}-${label}`;
};
/**
* Parses a given m3u8 playlist
*
* @param {Function} [onwarn]
* a function to call when the parser triggers a warning event.
* @param {Function} [oninfo]
* a function to call when the parser triggers an info event.
* @param {string} manifestString
* The downloaded manifest string
* @param {Object[]} [customTagParsers]
* An array of custom tag parsers for the m3u8-parser instance
* @param {Object[]} [customTagMappers]
* An array of custom tag mappers for the m3u8-parser instance
* @param {boolean} [llhls]
* Whether to keep ll-hls features in the manifest after parsing.
* @return {Object}
* The manifest object
*/
export const parseManifest = ({
onwarn,
oninfo,
manifestString,
customTagParsers = [],
customTagMappers = [],
llhls
}) => {
const parser = new M3u8Parser();
if (onwarn) {
parser.on('warn', onwarn);
}
if (oninfo) {
parser.on('info', oninfo);
}
customTagParsers.forEach(customParser => parser.addParser(customParser));
customTagMappers.forEach(mapper => parser.addTagMapper(mapper));
parser.push(manifestString);
parser.end();
const manifest = parser.manifest;
// remove llhls features from the parsed manifest
// if we don't want llhls support.
if (!llhls) {
[
'preloadSegment',
'skip',
'serverControl',
'renditionReports',
'partInf',
'partTargetDuration'
].forEach(function(k) {
if (manifest.hasOwnProperty(k)) {
delete manifest[k];
}
});
if (manifest.segments) {
manifest.segments.forEach(function(segment) {
['parts', 'preloadHints'].forEach(function(k) {
if (segment.hasOwnProperty(k)) {
delete segment[k];
}
});
});
}
}
if (!manifest.targetDuration) {
let targetDuration = 10;
if (manifest.segments && manifest.segments.length) {
targetDuration = manifest
.segments.reduce((acc, s) => Math.max(acc, s.duration), 0);
}
if (onwarn) {
onwarn({ message: `manifest has no targetDuration defaulting to ${targetDuration}` });
}
manifest.targetDuration = targetDuration;
}
const parts = getLastParts(manifest);
if (parts.length && !manifest.partTargetDuration) {
const partTargetDuration = parts.reduce((acc, p) => Math.max(acc, p.duration), 0);
if (onwarn) {
onwarn({ message: `manifest has no partTargetDuration defaulting to ${partTargetDuration}` });
log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
}
manifest.partTargetDuration = partTargetDuration;
}
return manifest;
};
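// Minimal usage sketch (inline manifest for illustration):
//
//   const manifest = parseManifest({
//     manifestString: [
//       '#EXTM3U',
//       '#EXT-X-TARGETDURATION:10',
//       '#EXTINF:10,',
//       'segment0.ts',
//       '#EXT-X-ENDLIST'
//     ].join('\n')
//   });
//   // manifest.segments.length === 1 and manifest.targetDuration === 10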
/**
* Loops through all supported media groups in main and calls the provided
* callback for each group
*
* @param {Object} main
* The parsed main manifest object
* @param {Function} callback
* Callback to call for each media group
*/
export const forEachMediaGroup = (main, callback) => {
if (!main.mediaGroups) {
return;
}
['AUDIO', 'SUBTITLES'].forEach((mediaType) => {
if (!main.mediaGroups[mediaType]) {
return;
}
for (const groupKey in main.mediaGroups[mediaType]) {
for (const labelKey in main.mediaGroups[mediaType][groupKey]) {
const mediaProperties = main.mediaGroups[mediaType][groupKey][labelKey];
callback(mediaProperties, mediaType, groupKey, labelKey);
}
}
});
};
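// Example callback (illustrative): list every alternate audio rendition.
//
//   forEachMediaGroup(main, (properties, mediaType, groupKey, labelKey) => {
//     if (mediaType === 'AUDIO') {
//       console.log(`${groupKey}/${labelKey}`, properties.resolvedUri);
//     }
//   });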
/**
* Adds properties and attributes to the playlist to keep consistent functionality for
* playlists throughout VHS.
*
* @param {Object} config
* Arguments object
* @param {Object} config.playlist
* The media playlist
* @param {string} [config.uri]
* The uri to the media playlist (if media playlist is not from within a main
* playlist)
* @param {string} config.id
* ID to use for the playlist
*/
export const setupMediaPlaylist = ({ playlist, uri, id }) => {
playlist.id = id;
playlist.playlistErrors_ = 0;
if (uri) {
// For media playlists, m3u8-parser does not have access to a URI, as HLS media
// playlists do not contain their own source URI, but one is needed for consistency in
// VHS.
playlist.uri = uri;
}
// For HLS main playlists, even though certain attributes MUST be defined, the
// stream may still be played without them.
// For HLS media playlists, m3u8-parser does not attach an attributes object to the
// manifest.
//
// To avoid undefined reference errors through the project, and make the code easier
// to write/read, add an empty attributes object for these cases.
playlist.attributes = playlist.attributes || {};
};
/**
* Adds ID, resolvedUri, and attributes properties to each playlist of the main, where
* necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
* playlist references to the playlists array.
*
* @param {Object} main
* The main playlist
*/
export const setupMediaPlaylists = (main) => {
let i = main.playlists.length;
while (i--) {
const playlist = main.playlists[i];
setupMediaPlaylist({
playlist,
id: createPlaylistID(i, playlist.uri)
});
playlist.resolvedUri = resolveUrl(main.uri, playlist.uri);
main.playlists[playlist.id] = playlist;
// URI reference added for backwards compatibility
main.playlists[playlist.uri] = playlist;
// Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
// the stream can be played without it. Although an attributes property may have been
// added to the playlist to prevent undefined references, issue a warning to fix the
// manifest.
if (!playlist.attributes.BANDWIDTH) {
log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
}
}
};
/**
* Adds resolvedUri properties to each media group.
*
* @param {Object} main
* The main playlist
*/
export const resolveMediaGroupUris = (main) => {
forEachMediaGroup(main, (properties) => {
if (properties.uri) {
properties.resolvedUri = resolveUrl(main.uri, properties.uri);
}
});
};
/**
* Creates a main playlist wrapper to insert a sole media playlist into.
*
* @param {Object} media
* Media playlist
* @param {string} uri
* The media URI
*
* @return {Object}
* main playlist
*/
export const mainForMedia = (media, uri) => {
const id = createPlaylistID(0, uri);
const main = {
mediaGroups: {
'AUDIO': {},
'VIDEO': {},
'CLOSED-CAPTIONS': {},
'SUBTITLES': {}
},
uri: window.location.href,
resolvedUri: window.location.href,
playlists: [{
uri,
id,
resolvedUri: uri,
// m3u8-parser does not attach an attributes property to media playlists so make
// sure that the property is attached to avoid undefined reference errors
attributes: {}
}]
};
// set up ID reference
main.playlists[id] = main.playlists[0];
// URI reference added for backwards compatibility
main.playlists[uri] = main.playlists[0];
return main;
};
/**
* Does an in-place update of the main manifest to add updated playlist URI references
* as well as other properties needed by VHS that aren't included by the parser.
*
* @param {Object} main
* main manifest object
* @param {string} uri
* The source URI
* @param {function} createGroupID
* A function to determine how to create the groupID for mediaGroups
*/
export const addPropertiesToMain = (main, uri, createGroupID = groupID) => {
main.uri = uri;
for (let i = 0; i < main.playlists.length; i++) {
if (!main.playlists[i].uri) {
// Set up phony URIs for the playlists since playlists are referenced by their URIs
// throughout VHS, but some formats (e.g., DASH) don't have external URIs
// TODO: consider adding dummy URIs in mpd-parser
const phonyUri = `placeholder-uri-${i}`;
main.playlists[i].uri = phonyUri;
}
}
const audioOnlyMain = isAudioOnly(main);
forEachMediaGroup(main, (properties, mediaType, groupKey, labelKey) => {
// add a playlist array under properties
if (!properties.playlists || !properties.playlists.length) {
// If the manifest is audio only and this media group does not have a uri, check
// if the media group is located in the main list of playlists. If it is, don't add
// placeholder properties as it shouldn't be considered an alternate audio track.
if (audioOnlyMain && mediaType === 'AUDIO' && !properties.uri) {
for (let i = 0; i < main.playlists.length; i++) {
const p = main.playlists[i];
if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
return;
}
}
}
properties.playlists = [Object.assign({}, properties)];
}
properties.playlists.forEach(function(p, i) {
const groupId = createGroupID(mediaType, groupKey, labelKey, p);
const id = createPlaylistID(i, groupId);
if (p.uri) {
p.resolvedUri = p.resolvedUri || resolveUrl(main.uri, p.uri);
} else {
// DEPRECATED, this has been added to prevent a breaking change.
// previously we only ever had a single media group playlist, so
// we mark the first playlist uri without prepending the index as we used to
// ideally we would do all of the playlists the same way.
p.uri = i === 0 ? groupId : id;
// don't resolve a placeholder uri to an absolute url, just use
// the placeholder again
p.resolvedUri = p.uri;
}
p.id = p.id || id;
// add an empty attributes object, all playlists are
// expected to have this.
p.attributes = p.attributes || {};
// setup ID and URI references (URI for backwards compatibility)
main.playlists[p.id] = p;
main.playlists[p.uri] = p;
});
});
setupMediaPlaylists(main);
resolveMediaGroupUris(main);
};


@@ -0,0 +1,961 @@
import videojs from 'video.js';
import PlaylistLoader from './playlist-loader';
import DashPlaylistLoader from './dash-playlist-loader';
import noop from './util/noop';
import {isAudioOnly, playlistMatch} from './playlist.js';
import logger from './util/logger';
import {merge} from './util/vjs-compat';
/**
* Convert the properties of an HLS track into an audioTrackKind.
*
* @private
*/
const audioTrackKind_ = (properties) => {
let kind = properties.default ? 'main' : 'alternative';
if (properties.characteristics &&
properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
kind = 'main-desc';
}
return kind;
};
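// Mapping examples (per the checks above):
//
//   audioTrackKind_({ default: true }); // => 'main'
//   audioTrackKind_({ default: false }); // => 'alternative'
//   audioTrackKind_({
//     default: true,
//     characteristics: 'public.accessibility.describes-video'
//   }); // => 'main-desc'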
/**
* Pause provided segment loader and playlist loader if active
*
* @param {SegmentLoader} segmentLoader
* SegmentLoader to pause
* @param {Object} mediaType
* Active media type
* @function stopLoaders
*/
export const stopLoaders = (segmentLoader, mediaType) => {
segmentLoader.abort();
segmentLoader.pause();
if (mediaType && mediaType.activePlaylistLoader) {
mediaType.activePlaylistLoader.pause();
mediaType.activePlaylistLoader = null;
}
};
/**
* Start loading provided segment loader and playlist loader
*
* @param {PlaylistLoader} playlistLoader
* PlaylistLoader to start loading
* @param {Object} mediaType
* Active media type
* @function startLoaders
*/
export const startLoaders = (playlistLoader, mediaType) => {
// Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
// playlist loader
mediaType.activePlaylistLoader = playlistLoader;
playlistLoader.load();
};
/**
* Returns a function to be called when the media group changes. It performs a
* non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
* change of group is merely a rendition switch of the same content at another encoding,
* rather than a change of content, such as switching audio from English to Spanish.
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Handler for a non-destructive resync of SegmentLoader when the active media
* group changes.
* @function onGroupChanged
*/
export const onGroupChanged = (type, settings) => () => {
const {
segmentLoaders: {
[type]: segmentLoader,
main: mainSegmentLoader
},
mediaTypes: { [type]: mediaType }
} = settings;
const activeTrack = mediaType.activeTrack();
const activeGroup = mediaType.getActiveGroup();
const previousActiveLoader = mediaType.activePlaylistLoader;
const lastGroup = mediaType.lastGroup_;
// the group did not change, do nothing
if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
return;
}
mediaType.lastGroup_ = activeGroup;
mediaType.lastTrack_ = activeTrack;
stopLoaders(segmentLoader, mediaType);
if (!activeGroup || activeGroup.isMainPlaylist) {
// there is no group active or active group is a main playlist and won't change
return;
}
if (!activeGroup.playlistLoader) {
if (previousActiveLoader) {
// The previous group had a playlist loader but the new active group does not
// this means we are switching from demuxed to muxed audio. In this case we want to
// do a destructive reset of the main segment loader and not restart the audio
// loaders.
mainSegmentLoader.resetEverything();
}
return;
}
// Non-destructive resync
segmentLoader.resyncLoader();
startLoaders(activeGroup.playlistLoader, mediaType);
};
export const onGroupChanging = (type, settings) => () => {
const {
segmentLoaders: {
[type]: segmentLoader
},
mediaTypes: { [type]: mediaType }
} = settings;
mediaType.lastGroup_ = null;
segmentLoader.abort();
segmentLoader.pause();
};
/**
* Returns a function to be called when the media track changes. It performs a
* destructive reset of the SegmentLoader to ensure we start loading as close to
* currentTime as possible.
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Handler for a destructive reset of SegmentLoader when the active media
* track changes.
* @function onTrackChanged
*/
export const onTrackChanged = (type, settings) => () => {
const {
mainPlaylistLoader,
segmentLoaders: {
[type]: segmentLoader,
main: mainSegmentLoader
},
mediaTypes: { [type]: mediaType }
} = settings;
const activeTrack = mediaType.activeTrack();
const activeGroup = mediaType.getActiveGroup();
const previousActiveLoader = mediaType.activePlaylistLoader;
const lastTrack = mediaType.lastTrack_;
// track did not change, do nothing
if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
return;
}
mediaType.lastGroup_ = activeGroup;
mediaType.lastTrack_ = activeTrack;
stopLoaders(segmentLoader, mediaType);
if (!activeGroup) {
// there is no group active so we do not want to restart loaders
return;
}
if (activeGroup.isMainPlaylist) {
// track did not change, do nothing
if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
return;
}
const pc = settings.vhs.playlistController_;
const newPlaylist = pc.selectPlaylist();
// media will not change, do nothing
if (pc.media() === newPlaylist) {
return;
}
mediaType.logger_(`track change. Switching main audio from ${lastTrack.id} to ${activeTrack.id}`);
mainPlaylistLoader.pause();
mainSegmentLoader.resetEverything();
pc.fastQualityChange_(newPlaylist);
return;
}
if (type === 'AUDIO') {
if (!activeGroup.playlistLoader) {
// when switching from demuxed audio/video to muxed audio/video (noted by no
// playlist loader for the audio group), we want to do a destructive reset of the
// main segment loader and not restart the audio loaders
mainSegmentLoader.setAudio(true);
// don't have to worry about disabling the audio of the audio segment loader since
// it should be stopped
mainSegmentLoader.resetEverything();
return;
}
// although the segment loader is an audio segment loader, call the setAudio
// function to ensure it is prepared to re-append the init segment (or handle other
// config changes)
segmentLoader.setAudio(true);
mainSegmentLoader.setAudio(false);
}
if (previousActiveLoader === activeGroup.playlistLoader) {
// Nothing has actually changed. This can happen because track change events can fire
// multiple times for a "single" change. One for enabling the new active track, and
// one for disabling the track that was active
startLoaders(activeGroup.playlistLoader, mediaType);
return;
}
if (segmentLoader.track) {
// For WebVTT, set the new text track in the segmentloader
segmentLoader.track(activeTrack);
}
// destructive reset
segmentLoader.resetEverything();
startLoaders(activeGroup.playlistLoader, mediaType);
};
export const onError = {
/**
* Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
* an error.
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Error handler. Logs warning (or error if the playlist is excluded) to
* console and switches back to default audio track.
* @function onError.AUDIO
*/
AUDIO: (type, settings) => () => {
const {
mediaTypes: { [type]: mediaType },
excludePlaylist
} = settings;
// switch back to default audio track
const activeTrack = mediaType.activeTrack();
const activeGroup = mediaType.activeGroup();
const id = (activeGroup.filter(group => group.default)[0] || activeGroup[0]).id;
const defaultTrack = mediaType.tracks[id];
if (activeTrack === defaultTrack) {
// Default track encountered an error. All we can do now is exclude the current
// rendition and hope another will switch audio groups
excludePlaylist({
error: { message: 'Problem encountered loading the default audio track.' }
});
return;
}
videojs.log.warn('Problem encountered loading the alternate audio track. ' +
'Switching back to default.');
for (const trackId in mediaType.tracks) {
mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
}
mediaType.onTrackChanged();
},
/**
* Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
* an error.
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Error handler. Logs warning to console and disables the active subtitle track
* @function onError.SUBTITLES
*/
SUBTITLES: (type, settings) => () => {
const {
mediaTypes: { [type]: mediaType }
} = settings;
videojs.log.warn('Problem encountered loading the subtitle track. ' +
'Disabling subtitle track.');
const track = mediaType.activeTrack();
if (track) {
track.mode = 'disabled';
}
mediaType.onTrackChanged();
}
};
export const setupListeners = {
/**
* Setup event listeners for audio playlist loader
*
* @param {string} type
* MediaGroup type
* @param {PlaylistLoader|null} playlistLoader
* PlaylistLoader to register listeners on
* @param {Object} settings
* Object containing required information for media groups
* @function setupListeners.AUDIO
*/
AUDIO: (type, playlistLoader, settings) => {
if (!playlistLoader) {
// no playlist loader means audio will be muxed with the video
return;
}
const {
tech,
requestOptions,
segmentLoaders: { [type]: segmentLoader }
} = settings;
playlistLoader.on('loadedmetadata', () => {
const media = playlistLoader.media();
segmentLoader.playlist(media, requestOptions);
// if the video is already playing, or if this isn't a live video and preload
// permits, start downloading segments
if (!tech.paused() || (media.endList && tech.preload() !== 'none')) {
segmentLoader.load();
}
});
playlistLoader.on('loadedplaylist', () => {
segmentLoader.playlist(playlistLoader.media(), requestOptions);
// If the player isn't paused, ensure that the segment loader is running
if (!tech.paused()) {
segmentLoader.load();
}
});
playlistLoader.on('error', onError[type](type, settings));
},
/**
* Setup event listeners for subtitle playlist loader
*
* @param {string} type
* MediaGroup type
* @param {PlaylistLoader|null} playlistLoader
* PlaylistLoader to register listeners on
* @param {Object} settings
* Object containing required information for media groups
* @function setupListeners.SUBTITLES
*/
SUBTITLES: (type, playlistLoader, settings) => {
const {
tech,
requestOptions,
segmentLoaders: { [type]: segmentLoader },
mediaTypes: { [type]: mediaType }
} = settings;
playlistLoader.on('loadedmetadata', () => {
const media = playlistLoader.media();
segmentLoader.playlist(media, requestOptions);
segmentLoader.track(mediaType.activeTrack());
// if the video is already playing, or if this isn't a live video and preload
// permits, start downloading segments
if (!tech.paused() || (media.endList && tech.preload() !== 'none')) {
segmentLoader.load();
}
});
playlistLoader.on('loadedplaylist', () => {
segmentLoader.playlist(playlistLoader.media(), requestOptions);
// If the player isn't paused, ensure that the segment loader is running
if (!tech.paused()) {
segmentLoader.load();
}
});
playlistLoader.on('error', onError[type](type, settings));
}
};
export const initialize = {
/**
* Setup PlaylistLoaders and AudioTracks for the audio groups
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @function initialize.AUDIO
*/
'AUDIO': (type, settings) => {
const {
vhs,
sourceType,
segmentLoaders: { [type]: segmentLoader },
requestOptions,
main: {mediaGroups},
mediaTypes: {
[type]: {
groups,
tracks,
logger_
}
},
mainPlaylistLoader
} = settings;
const audioOnlyMain = isAudioOnly(mainPlaylistLoader.main);
// force a default if we have none
if (!mediaGroups[type] ||
Object.keys(mediaGroups[type]).length === 0) {
mediaGroups[type] = { main: { default: { default: true } } };
if (audioOnlyMain) {
mediaGroups[type].main.default.playlists = mainPlaylistLoader.main.playlists;
}
}
for (const groupId in mediaGroups[type]) {
if (!groups[groupId]) {
groups[groupId] = [];
}
for (const variantLabel in mediaGroups[type][groupId]) {
let properties = mediaGroups[type][groupId][variantLabel];
let playlistLoader;
if (audioOnlyMain) {
logger_(`AUDIO group '${groupId}' label '${variantLabel}' is a main playlist`);
properties.isMainPlaylist = true;
playlistLoader = null;
// if vhs-json was provided as the source, and the media playlist was resolved,
// use the resolved media playlist object
} else if (sourceType === 'vhs-json' && properties.playlists) {
playlistLoader = new PlaylistLoader(
properties.playlists[0],
vhs,
requestOptions
);
} else if (properties.resolvedUri) {
playlistLoader = new PlaylistLoader(
properties.resolvedUri,
vhs,
requestOptions
);
// TODO: dash isn't the only type with properties.playlists
// should we even have properties.playlists in this check.
} else if (properties.playlists && sourceType === 'dash') {
playlistLoader = new DashPlaylistLoader(
properties.playlists[0],
vhs,
requestOptions,
mainPlaylistLoader
);
} else {
// no resolvedUri means the audio is muxed with the video when using this
// audio track
playlistLoader = null;
}
properties = merge(
{ id: variantLabel, playlistLoader },
properties
);
setupListeners[type](type, properties.playlistLoader, settings);
groups[groupId].push(properties);
if (typeof tracks[variantLabel] === 'undefined') {
const track = new videojs.AudioTrack({
id: variantLabel,
kind: audioTrackKind_(properties),
enabled: false,
language: properties.language,
default: properties.default,
label: variantLabel
});
tracks[variantLabel] = track;
}
}
}
// setup single error event handler for the segment loader
segmentLoader.on('error', onError[type](type, settings));
},
/**
* Setup PlaylistLoaders and TextTracks for the subtitle groups
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @function initialize.SUBTITLES
*/
'SUBTITLES': (type, settings) => {
const {
tech,
vhs,
sourceType,
segmentLoaders: { [type]: segmentLoader },
requestOptions,
main: { mediaGroups },
mediaTypes: {
[type]: {
groups,
tracks
}
},
mainPlaylistLoader
} = settings;
for (const groupId in mediaGroups[type]) {
if (!groups[groupId]) {
groups[groupId] = [];
}
for (const variantLabel in mediaGroups[type][groupId]) {
if (!vhs.options_.useForcedSubtitles && mediaGroups[type][groupId][variantLabel].forced) {
// Subtitle playlists with the forced attribute are not selectable in Safari.
// According to Apple's HLS Authoring Specification:
// If content has forced subtitles and regular subtitles in a given language,
// the regular subtitles track in that language MUST contain both the forced
// subtitles and the regular subtitles for that language.
// Because of this requirement and that Safari does not add forced subtitles,
// forced subtitles are skipped here to maintain consistent experience across
// all platforms
continue;
}
let properties = mediaGroups[type][groupId][variantLabel];
let playlistLoader;
if (sourceType === 'hls') {
playlistLoader =
new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
} else if (sourceType === 'dash') {
const playlists = properties.playlists.filter((p) => p.excludeUntil !== Infinity);
if (!playlists.length) {
return;
}
playlistLoader = new DashPlaylistLoader(
properties.playlists[0],
vhs,
requestOptions,
mainPlaylistLoader
);
} else if (sourceType === 'vhs-json') {
playlistLoader = new PlaylistLoader(
// if the vhs-json object included the media playlist, use the media playlist
// as provided, otherwise use the resolved URI to load the playlist
properties.playlists ? properties.playlists[0] : properties.resolvedUri,
vhs,
requestOptions
);
}
properties = merge({
id: variantLabel,
playlistLoader
}, properties);
setupListeners[type](type, properties.playlistLoader, settings);
groups[groupId].push(properties);
if (typeof tracks[variantLabel] === 'undefined') {
const track = tech.addRemoteTextTrack({
id: variantLabel,
kind: 'subtitles',
default: properties.default && properties.autoselect,
language: properties.language,
label: variantLabel
}, false).track;
tracks[variantLabel] = track;
}
}
}
// setup single error event handler for the segment loader
segmentLoader.on('error', onError[type](type, settings));
},
/**
* Setup TextTracks for the closed-caption groups
*
* @param {String} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @function initialize['CLOSED-CAPTIONS']
*/
'CLOSED-CAPTIONS': (type, settings) => {
const {
tech,
main: { mediaGroups },
mediaTypes: {
[type]: {
groups,
tracks
}
}
} = settings;
for (const groupId in mediaGroups[type]) {
if (!groups[groupId]) {
groups[groupId] = [];
}
for (const variantLabel in mediaGroups[type][groupId]) {
const properties = mediaGroups[type][groupId][variantLabel];
// Look for either 608 (CCn) or 708 (SERVICEn) caption services
if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
continue;
}
const captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
let newProps = {
label: variantLabel,
language: properties.language,
instreamId: properties.instreamId,
default: properties.default && properties.autoselect
};
if (captionServices[newProps.instreamId]) {
newProps = merge(newProps, captionServices[newProps.instreamId]);
}
if (newProps.default === undefined) {
delete newProps.default;
}
// No PlaylistLoader is required for Closed-Captions because the captions are
// embedded within the video stream
groups[groupId].push(merge({ id: variantLabel }, properties));
if (typeof tracks[variantLabel] === 'undefined') {
const track = tech.addRemoteTextTrack({
id: newProps.instreamId,
kind: 'captions',
default: newProps.default,
language: newProps.language,
label: newProps.label
}, false).track;
tracks[variantLabel] = track;
}
}
}
}
};
const groupMatch = (list, media) => {
for (let i = 0; i < list.length; i++) {
if (playlistMatch(media, list[i])) {
return true;
}
if (list[i].playlists && groupMatch(list[i].playlists, media)) {
return true;
}
}
return false;
};
/**
* Returns a function used to get the active group of the provided type
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Function that returns the active media group for the provided type. Takes an
* optional parameter {TextTrack} track. If no track is provided, a list of all
* variants in the group, otherwise the variant corresponding to the provided
* track is returned.
* @function activeGroup
*/
export const activeGroup = (type, settings) => (track) => {
const {
mainPlaylistLoader,
mediaTypes: { [type]: { groups } }
} = settings;
const media = mainPlaylistLoader.media();
if (!media) {
return null;
}
let variants = null;
// set variants to the main media's active group
if (media.attributes[type]) {
variants = groups[media.attributes[type]];
}
const groupKeys = Object.keys(groups);
if (!variants) {
// find the mainPlaylistLoader media
// that is in a media group if we are dealing
// with audio only
if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.main)) {
for (let i = 0; i < groupKeys.length; i++) {
const groupPropertyList = groups[groupKeys[i]];
if (groupMatch(groupPropertyList, media)) {
variants = groupPropertyList;
break;
}
}
// use the main group if it exists
} else if (groups.main) {
variants = groups.main;
// only one group, use that one
} else if (groupKeys.length === 1) {
variants = groups[groupKeys[0]];
}
}
if (typeof track === 'undefined') {
return variants;
}
if (track === null || !variants) {
// An active track was specified so a corresponding group is expected. track === null
// means no track is currently active so there is no corresponding group
return null;
}
return variants.filter((props) => props.id === track.id)[0] || null;
};
export const activeTrack = {
/**
* Returns a function used to get the active track of type provided
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Function that returns the active media track for the provided type. Returns
* null if no track is active
* @function activeTrack.AUDIO
*/
AUDIO: (type, settings) => () => {
const { mediaTypes: { [type]: { tracks } } } = settings;
for (const id in tracks) {
if (tracks[id].enabled) {
return tracks[id];
}
}
return null;
},
/**
* Returns a function used to get the active track of type provided
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Function that returns the active media track for the provided type. Returns
* null if no track is active
* @function activeTrack.SUBTITLES
*/
SUBTITLES: (type, settings) => () => {
const { mediaTypes: { [type]: { tracks } } } = settings;
for (const id in tracks) {
if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
return tracks[id];
}
}
return null;
}
};
export const getActiveGroup = (type, {mediaTypes}) => () => {
const activeTrack_ = mediaTypes[type].activeTrack();
if (!activeTrack_) {
return null;
}
return mediaTypes[type].activeGroup(activeTrack_);
};
/**
* Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
* Closed-Captions) specified in the main manifest.
*
* @param {Object} settings
* Object containing required information for setting up the media groups
* @param {Tech} settings.tech
* The tech of the player
* @param {Object} settings.requestOptions
* XHR request options used by the segment loaders
* @param {PlaylistLoader} settings.mainPlaylistLoader
* PlaylistLoader for the main source
* @param {VhsHandler} settings.vhs
* VHS SourceHandler
* @param {Object} settings.main
* The parsed main manifest
* @param {Object} settings.mediaTypes
* Object to store the loaders, tracks, and utility methods for each media type
* @param {Function} settings.excludePlaylist
* Excludes the current rendition and forces a rendition switch.
* @function setupMediaGroups
*/
export const setupMediaGroups = (settings) => {
['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach((type) => {
initialize[type](type, settings);
});
const {
mediaTypes,
mainPlaylistLoader,
tech,
vhs,
segmentLoaders: {
['AUDIO']: audioSegmentLoader,
main: mainSegmentLoader
}
} = settings;
// setup active group and track getters and change event handlers
['AUDIO', 'SUBTITLES'].forEach((type) => {
mediaTypes[type].activeGroup = activeGroup(type, settings);
mediaTypes[type].activeTrack = activeTrack[type](type, settings);
mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
});
// DO NOT enable the default subtitle or caption track.
// DO enable the default audio track
const audioGroup = mediaTypes.AUDIO.activeGroup();
if (audioGroup) {
const groupId = (audioGroup.filter(group => group.default)[0] || audioGroup[0]).id;
mediaTypes.AUDIO.tracks[groupId].enabled = true;
mediaTypes.AUDIO.onGroupChanged();
mediaTypes.AUDIO.onTrackChanged();
const activeAudioGroup = mediaTypes.AUDIO.getActiveGroup();
// a similar check for handling setAudio on each loader is run again each time the
// track is changed, but needs to be handled here since the track may not be considered
// changed on the first call to onTrackChanged
if (!activeAudioGroup.playlistLoader) {
// either audio is muxed with video or the stream is audio only
mainSegmentLoader.setAudio(true);
} else {
// audio is demuxed
mainSegmentLoader.setAudio(false);
audioSegmentLoader.setAudio(true);
}
}
mainPlaylistLoader.on('mediachange', () => {
['AUDIO', 'SUBTITLES'].forEach(type => mediaTypes[type].onGroupChanged());
});
mainPlaylistLoader.on('mediachanging', () => {
['AUDIO', 'SUBTITLES'].forEach(type => mediaTypes[type].onGroupChanging());
});
// custom audio track change event handler for usage event
const onAudioTrackChanged = () => {
mediaTypes.AUDIO.onTrackChanged();
tech.trigger({ type: 'usage', name: 'vhs-audio-change' });
};
tech.audioTracks().addEventListener('change', onAudioTrackChanged);
tech.remoteTextTracks().addEventListener(
'change',
mediaTypes.SUBTITLES.onTrackChanged
);
vhs.on('dispose', () => {
tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
tech.remoteTextTracks().removeEventListener(
'change',
mediaTypes.SUBTITLES.onTrackChanged
);
});
// clear existing audio tracks and add the ones we just created
tech.clearTracks('audio');
for (const id in mediaTypes.AUDIO.tracks) {
tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
}
};
/**
* Creates skeleton object used to store the loaders, tracks, and utility methods for each
* media type
*
* @return {Object}
* Object to store the loaders, tracks, and utility methods for each media type
* @function createMediaTypes
*/
export const createMediaTypes = () => {
const mediaTypes = {};
['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach((type) => {
mediaTypes[type] = {
groups: {},
tracks: {},
activePlaylistLoader: null,
activeGroup: noop,
activeTrack: noop,
getActiveGroup: noop,
onGroupChanged: noop,
onTrackChanged: noop,
lastTrack_: null,
logger_: logger(`MediaGroups[${type}]`)
};
});
return mediaTypes;
};
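// A minimal sketch (illustrative, not exported) of the skeleton this factory
// returns. The assertions just document the initial state of each media type.
const exampleCreateMediaTypesUsage = function() {
  const mediaTypes = createMediaTypes();
  ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach((type) => {
    // no loader is active yet and every hook starts as a no-op
    console.assert(mediaTypes[type].activePlaylistLoader === null);
    console.assert(mediaTypes[type].activeGroup() === undefined);
    console.assert(Object.keys(mediaTypes[type].groups).length === 0);
  });
  return mediaTypes;
};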

File diff suppressed because it is too large


@@ -0,0 +1,658 @@
/**
* @file playback-watcher.js
*
* Playback starts, and now my watch begins. It shall not end until my death. I shall
* take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
* and win no glory. I shall live and die at my post. I am the corrector of the underflow.
* I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
* my life and honor to the Playback Watch, for this Player and all the Players to come.
*/
import window from 'global/window';
import * as Ranges from './ranges';
import logger from './util/logger';
import { createTimeRanges } from './util/vjs-compat';
import videojs from 'video.js';
// Set of events that reset the playback-watcher time check logic and clear the timeout
const timerCancelEvents = [
'seeking',
'seeked',
'pause',
'playing',
'error'
];
/**
* @class PlaybackWatcher
*/
export default class PlaybackWatcher extends videojs.EventTarget {
/**
* Represents a PlaybackWatcher object.
*
* @class
* @param {Object} options an object that includes the tech and settings
*/
constructor(options) {
super();
this.playlistController_ = options.playlistController;
this.tech_ = options.tech;
this.seekable = options.seekable;
this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
this.media = options.media;
this.playedRanges_ = [];
this.consecutiveUpdates = 0;
this.lastRecordedTime = null;
this.checkCurrentTimeTimeout_ = null;
this.logger_ = logger('PlaybackWatcher');
this.logger_('initialize');
const playHandler = () => this.monitorCurrentTime_();
const canPlayHandler = () => this.monitorCurrentTime_();
const waitingHandler = () => this.techWaiting_();
const cancelTimerHandler = () => this.resetTimeUpdate_();
const pc = this.playlistController_;
const loaderTypes = ['main', 'subtitle', 'audio'];
const loaderChecks = {};
loaderTypes.forEach((type) => {
loaderChecks[type] = {
reset: () => this.resetSegmentDownloads_(type),
updateend: () => this.checkSegmentDownloads_(type)
};
pc[`${type}SegmentLoader_`].on('appendsdone', loaderChecks[type].updateend);
// If a rendition switch happens during a playback stall where the buffer
// isn't changing we want to reset. We cannot assume that the new rendition
// will also be stalled, until after new appends.
pc[`${type}SegmentLoader_`].on('playlistupdate', loaderChecks[type].reset);
// Playback stalls should not be detected right after seeking.
// This prevents one segment playlists (single vtt or single segment content)
// from being detected as stalling. As the buffer will not change in those cases, since
// the buffer is the entire video duration.
this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
});
/**
* We check if a seek was into a gap through the following steps:
* 1. We get a seeking event and we do not get a seeked event. This means that
* a seek was attempted but not completed.
* 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
* removed everything from our buffer and appended a segment, and should be ready
* to check for gaps.
*/
const setSeekingHandlers = (fn) => {
['main', 'audio'].forEach((type) => {
pc[`${type}SegmentLoader_`][fn]('appended', this.seekingAppendCheck_);
});
};
this.seekingAppendCheck_ = () => {
if (this.fixesBadSeeks_()) {
this.consecutiveUpdates = 0;
this.lastRecordedTime = this.tech_.currentTime();
setSeekingHandlers('off');
}
};
this.clearSeekingAppendCheck_ = () => setSeekingHandlers('off');
this.watchForBadSeeking_ = () => {
this.clearSeekingAppendCheck_();
setSeekingHandlers('on');
};
this.tech_.on('seeked', this.clearSeekingAppendCheck_);
this.tech_.on('seeking', this.watchForBadSeeking_);
this.tech_.on('waiting', waitingHandler);
this.tech_.on(timerCancelEvents, cancelTimerHandler);
this.tech_.on('canplay', canPlayHandler);
/*
An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
is surfaced in one of two ways:
1) The `waiting` event is fired before the player has buffered content, making it impossible
to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
we can check if playback is stalled due to a gap, and skip the gap if necessary.
2) A source with a gap at the beginning of the stream is loaded programmatically while the player
is in a playing state. To catch this case, it's important that our one-time play listener is set up
even if the player is in a playing state.
*/
this.tech_.one('play', playHandler);
// Define the dispose function to clean up our events
this.dispose = () => {
this.clearSeekingAppendCheck_();
this.logger_('dispose');
this.tech_.off('waiting', waitingHandler);
this.tech_.off(timerCancelEvents, cancelTimerHandler);
this.tech_.off('canplay', canPlayHandler);
this.tech_.off('play', playHandler);
this.tech_.off('seeking', this.watchForBadSeeking_);
this.tech_.off('seeked', this.clearSeekingAppendCheck_);
loaderTypes.forEach((type) => {
pc[`${type}SegmentLoader_`].off('appendsdone', loaderChecks[type].updateend);
pc[`${type}SegmentLoader_`].off('playlistupdate', loaderChecks[type].reset);
this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
});
if (this.checkCurrentTimeTimeout_) {
window.clearTimeout(this.checkCurrentTimeTimeout_);
}
this.resetTimeUpdate_();
};
}
/**
* Periodically check current time to see if playback stopped
*
* @private
*/
monitorCurrentTime_() {
this.checkCurrentTime_();
if (this.checkCurrentTimeTimeout_) {
window.clearTimeout(this.checkCurrentTimeTimeout_);
}
// poll every 250ms, which is what WebKit uses; Firefox uses 15ms (42ms would be 24 fps)
this.checkCurrentTimeTimeout_ =
window.setTimeout(this.monitorCurrentTime_.bind(this), 250);
}
/**
* Reset stalled download stats for a specific type of loader
*
* @param {string} type
* The segment loader type to check.
*
* @listens SegmentLoader#playlistupdate
* @listens Tech#seeking
* @listens Tech#seeked
*/
resetSegmentDownloads_(type) {
const loader = this.playlistController_[`${type}SegmentLoader_`];
if (this[`${type}StalledDownloads_`] > 0) {
this.logger_(`resetting possible stalled download count for ${type} loader`);
}
this[`${type}StalledDownloads_`] = 0;
this[`${type}Buffered_`] = loader.buffered_();
}
/**
* Checks on every segment `appendsdone` event to see if segment appends are
* making progress. If they are not, and we are still downloading bytes, we
* exclude the playlist.
*
* @param {string} type
* The segment loader type to check.
*
* @listens SegmentLoader#appendsdone
*/
checkSegmentDownloads_(type) {
const pc = this.playlistController_;
const loader = pc[`${type}SegmentLoader_`];
const buffered = loader.buffered_();
const isBufferedDifferent = Ranges.isRangeDifferent(this[`${type}Buffered_`], buffered);
this[`${type}Buffered_`] = buffered;
// if another watcher is going to fix the issue or
// the buffered value for this loader changed
// appends are working
if (isBufferedDifferent) {
const metadata = {
bufferedRanges: buffered
};
pc.trigger({ type: 'bufferedrangeschanged', metadata });
this.resetSegmentDownloads_(type);
return;
}
this[`${type}StalledDownloads_`]++;
this.logger_(`found #${this[`${type}StalledDownloads_`]} ${type} appends that did not increase buffer (possible stalled download)`, {
playlistId: loader.playlist_ && loader.playlist_.id,
buffered: Ranges.timeRangesToArray(buffered)
});
// after 10 possibly stalled appends with no reset, exclude
if (this[`${type}StalledDownloads_`] < 10) {
return;
}
this.logger_(`${type} loader stalled download exclusion`);
this.resetSegmentDownloads_(type);
this.tech_.trigger({type: 'usage', name: `vhs-${type}-download-exclusion`});
if (type === 'subtitle') {
return;
}
// TODO: should we exclude audio tracks rather than main tracks
// when type is audio?
pc.excludePlaylist({
error: { message: `Excessive ${type} segment downloading detected.` },
playlistExclusionDuration: Infinity
});
}
/**
* The purpose of this function is to emulate the "waiting" event on
* browsers that do not emit it when they are waiting for more
* data to continue playback
*
* @private
*/
checkCurrentTime_() {
if (this.tech_.paused() || this.tech_.seeking()) {
return;
}
const currentTime = this.tech_.currentTime();
const buffered = this.tech_.buffered();
if (this.lastRecordedTime === currentTime &&
(!buffered.length ||
currentTime + Ranges.SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
// If current time is at the end of the final buffered region, then any playback
// stall is most likely caused by buffering in a low bandwidth environment. The tech
// should fire a `waiting` event in this scenario, but browsers and techs are
// inconsistent about doing so. Calling `techWaiting_` here allows us to simulate
// responding to a native `waiting` event when the tech fails to emit one.
return this.techWaiting_();
}
if (this.consecutiveUpdates >= 5 &&
currentTime === this.lastRecordedTime) {
this.consecutiveUpdates++;
this.waiting_();
} else if (currentTime === this.lastRecordedTime) {
this.consecutiveUpdates++;
} else {
this.playedRanges_.push(createTimeRanges([this.lastRecordedTime, currentTime]));
const metadata = {
playedRanges: this.playedRanges_
};
this.playlistController_.trigger({ type: 'playedrangeschanged', metadata });
this.consecutiveUpdates = 0;
this.lastRecordedTime = currentTime;
}
}
/**
* Resets the 'timeupdate' mechanism designed to detect that we are stalled
*
* @private
*/
resetTimeUpdate_() {
this.consecutiveUpdates = 0;
}
/**
* Fixes situations where there's a bad seek
*
* @return {boolean} whether an action was taken to fix the seek
* @private
*/
fixesBadSeeks_() {
const seeking = this.tech_.seeking();
if (!seeking) {
return false;
}
// TODO: It's possible that these seekable checks should be moved out of this function
// and into a function that runs on seekablechange. It's also possible that we only need
// afterSeekableWindow as the buffered check at the bottom is good enough to handle before
// seekable range.
const seekable = this.seekable();
const currentTime = this.tech_.currentTime();
const isAfterSeekableRange = this.afterSeekableWindow_(
seekable,
currentTime,
this.media(),
this.allowSeeksWithinUnsafeLiveWindow
);
let seekTo;
if (isAfterSeekableRange) {
const seekableEnd = seekable.end(seekable.length - 1);
// sync to live point (if VOD, our seekable was updated and we're simply adjusting)
seekTo = seekableEnd;
}
if (this.beforeSeekableWindow_(seekable, currentTime)) {
const seekableStart = seekable.start(0);
// sync to the beginning of the live window
// provide a buffer of .1 seconds to handle rounding/imprecise numbers
seekTo = seekableStart +
// if the playlist is too short and the seekable range is an exact time (can
// happen in live with a 3 segment playlist), then don't use a time delta
(seekableStart === seekable.end(0) ? 0 : Ranges.SAFE_TIME_DELTA);
}
if (typeof seekTo !== 'undefined') {
this.logger_(`Trying to seek outside of seekable at time ${currentTime} with ` +
`seekable range ${Ranges.printableRange(seekable)}. Seeking to ` +
`${seekTo}.`);
this.tech_.setCurrentTime(seekTo);
return true;
}
const sourceUpdater = this.playlistController_.sourceUpdater_;
const buffered = this.tech_.buffered();
const audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
const videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null;
const media = this.media();
// verify that at least two segment durations or one part duration have been
// appended before checking for a gap.
const minAppendedDuration = media.partTargetDuration ? media.partTargetDuration :
(media.targetDuration - Ranges.TIME_FUDGE_FACTOR) * 2;
const bufferedToCheck = [audioBuffered, videoBuffered];
for (let i = 0; i < bufferedToCheck.length; i++) {
// skip null buffered
if (!bufferedToCheck[i]) {
continue;
}
const timeAhead = Ranges.timeAheadOf(bufferedToCheck[i], currentTime);
// if we are less than two video/audio segment durations or one part
// duration behind we haven't appended enough to call this a bad seek.
if (timeAhead < minAppendedDuration) {
return false;
}
}
const nextRange = Ranges.findNextRange(buffered, currentTime);
// we have appended enough content, but we don't have anything buffered
// to seek over the gap
if (nextRange.length === 0) {
return false;
}
seekTo = nextRange.start(0) + Ranges.SAFE_TIME_DELTA;
this.logger_(`Buffered region starts (${nextRange.start(0)}) ` +
`just beyond seek point (${currentTime}). Seeking to ${seekTo}.`);
this.tech_.setCurrentTime(seekTo);
return true;
}
/**
* Handler for situations when we determine the player is waiting.
*
* @private
*/
waiting_() {
if (this.techWaiting_()) {
return;
}
// All tech waiting checks failed. Use last resort correction
const currentTime = this.tech_.currentTime();
const buffered = this.tech_.buffered();
const currentRange = Ranges.findRange(buffered, currentTime);
// Sometimes the player can stall for unknown reasons within a contiguous buffered
// region with no indication that anything is amiss (seen in Firefox). Seeking to
// currentTime is usually enough to kickstart the player. This checks that the player
// is currently within a buffered region before attempting a corrective seek.
// Chrome does not appear to continue `timeupdate` events after a `waiting` event
// until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
// make sure there is ~3 seconds of forward buffer before taking any corrective action
// to avoid triggering an `unknownwaiting` event when the network is slow.
if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
this.resetTimeUpdate_();
this.tech_.setCurrentTime(currentTime);
this.logger_(`Stopped at ${currentTime} while inside a buffered region ` +
`[${currentRange.start(0)} -> ${currentRange.end(0)}]. Attempting to resume ` +
'playback by seeking to the current time.');
// unknown waiting corrections may be useful for monitoring QoS
this.tech_.trigger({type: 'usage', name: 'vhs-unknown-waiting'});
return;
}
}
/**
* Handler for situations when the tech fires a `waiting` event
*
* @return {boolean}
* True if an action was taken (or none was needed) to correct the waiting.
* False if no checks passed
* @private
*/
techWaiting_() {
const seekable = this.seekable();
const currentTime = this.tech_.currentTime();
if (this.tech_.seeking()) {
// Tech is seeking or already waiting on another action, no action needed
return true;
}
if (this.beforeSeekableWindow_(seekable, currentTime)) {
const livePoint = seekable.end(seekable.length - 1);
this.logger_(`Fell out of live window at time ${currentTime}. Seeking to ` +
`live point (seekable end) ${livePoint}`);
this.resetTimeUpdate_();
this.tech_.setCurrentTime(livePoint);
// live window resyncs may be useful for monitoring QoS
this.tech_.trigger({type: 'usage', name: 'vhs-live-resync'});
return true;
}
const sourceUpdater = this.tech_.vhs.playlistController_.sourceUpdater_;
const buffered = this.tech_.buffered();
const videoUnderflow = this.videoUnderflow_({
audioBuffered: sourceUpdater.audioBuffered(),
videoBuffered: sourceUpdater.videoBuffered(),
currentTime
});
if (videoUnderflow) {
// Even though the video underflowed and was stuck in a gap, the audio overplayed
// the gap, leading currentTime into a buffered range. Seeking to currentTime
// allows the video to catch up to the audio position without losing any audio
// (only suffering ~3 seconds of frozen video and a pause in audio playback).
this.resetTimeUpdate_();
this.tech_.setCurrentTime(currentTime);
// video underflow may be useful for monitoring QoS
this.tech_.trigger({type: 'usage', name: 'vhs-video-underflow'});
return true;
}
const nextRange = Ranges.findNextRange(buffered, currentTime);
// check for gap
if (nextRange.length > 0) {
this.logger_(`Stopped at ${currentTime} and seeking to ${nextRange.start(0)}`);
this.resetTimeUpdate_();
this.skipTheGap_(currentTime);
return true;
}
// All checks failed. Returning false to indicate failure to correct waiting
return false;
}
afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow = false) {
if (!seekable.length) {
// we can't make a solid case if there's no seekable, default to false
return false;
}
let allowedEnd = seekable.end(seekable.length - 1) + Ranges.SAFE_TIME_DELTA;
const isLive = !playlist.endList;
const isLLHLS = typeof playlist.partTargetDuration === 'number';
if (isLive && (isLLHLS || allowSeeksWithinUnsafeLiveWindow)) {
allowedEnd = seekable.end(seekable.length - 1) + (playlist.targetDuration * 3);
}
if (currentTime > allowedEnd) {
return true;
}
return false;
}
beforeSeekableWindow_(seekable, currentTime) {
if (seekable.length &&
// can't fall before 0 and 0 seekable start identifies VOD stream
seekable.start(0) > 0 &&
currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
return true;
}
return false;
}
videoUnderflow_({videoBuffered, audioBuffered, currentTime}) {
// audio only content will not have video underflow :)
if (!videoBuffered) {
return;
}
let gap;
// find a gap in demuxed content.
if (videoBuffered.length && audioBuffered.length) {
// in Chrome audio will continue to play for ~3s when we run out of video
// so we have to check that the video buffer did have some buffer in the
// past.
const lastVideoRange = Ranges.findRange(videoBuffered, currentTime - 3);
const videoRange = Ranges.findRange(videoBuffered, currentTime);
const audioRange = Ranges.findRange(audioBuffered, currentTime);
if (audioRange.length && !videoRange.length && lastVideoRange.length) {
gap = {start: lastVideoRange.end(0), end: audioRange.end(0)};
}
// find a gap in muxed content.
} else {
const nextRange = Ranges.findNextRange(videoBuffered, currentTime);
// Even if there is no available next range, there is still a possibility we are
// stuck in a gap due to video underflow.
if (!nextRange.length) {
gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
}
}
if (gap) {
this.logger_(`Encountered a gap in video from ${gap.start} to ${gap.end}. ` +
`Seeking to current time ${currentTime}`);
return true;
}
return false;
}
/**
* Timer callback. If playback still has not proceeded, then we seek
* to the start of the next buffered region.
*
* @private
*/
skipTheGap_(scheduledCurrentTime) {
const buffered = this.tech_.buffered();
const currentTime = this.tech_.currentTime();
const nextRange = Ranges.findNextRange(buffered, currentTime);
this.resetTimeUpdate_();
if (nextRange.length === 0 ||
currentTime !== scheduledCurrentTime) {
return;
}
this.logger_(
'skipTheGap_:',
'currentTime:', currentTime,
'scheduled currentTime:', scheduledCurrentTime,
'nextRange start:', nextRange.start(0)
);
// only seek if we still have not played
this.tech_.setCurrentTime(nextRange.start(0) + Ranges.TIME_FUDGE_FACTOR);
const metadata = {
gapInfo: {
from: currentTime,
to: nextRange.start(0)
}
};
this.playlistController_.trigger({type: 'gapjumped', metadata});
this.tech_.trigger({type: 'usage', name: 'vhs-gap-skip'});
}
gapFromVideoUnderflow_(buffered, currentTime) {
// At least in Chrome, if there is a gap in the video buffer, the audio will continue
// playing for ~3 seconds after the video gap starts. This is done to account for
// video buffer underflow/underrun (note that this is not done when there is audio
// buffer underflow/underrun -- in that case the video will stop as soon as it
// encounters the gap, as audio stalls are more noticeable/jarring to a user than
// video stalls). The player's time will reflect the playthrough of audio, so the
// time will appear as if we are in a buffered region, even if we are stuck in a
// "gap."
//
// Example:
// video buffer: 0 => 10.1, 10.2 => 20
// audio buffer: 0 => 20
// overall buffer: 0 => 10.1, 10.2 => 20
// current time: 13
//
// Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
// however, the audio continued playing until it reached ~3 seconds past the gap
// (13 seconds), at which point it stops as well. Since current time is past the
// gap, findNextRange will return no ranges.
//
// To check for this issue, we see if there is a gap that starts somewhere within
// a 3 second range (3 seconds +/- 1 second) back from our current time.
const gaps = Ranges.findGaps(buffered);
for (let i = 0; i < gaps.length; i++) {
const start = gaps.start(i);
const end = gaps.end(i);
// the gap starts between 2 and 4 seconds back from the current time
if (currentTime - start < 4 && currentTime - start > 2) {
return {
start,
end
};
}
}
return null;
}
}
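// A hedged construction sketch. The `tech` and `playlistController` values
// come from a real VhsHandler at runtime; this illustrative helper only
// documents the shape of the options object the constructor expects.
const examplePlaybackWatcherOptions = (tech, playlistController) => ({
  tech,
  playlistController,
  seekable: () => tech.seekable(),
  media: () => playlistController.media(),
  allowSeeksWithinUnsafeLiveWindow: false,
  liveRangeSafeTimeDelta: Ranges.SAFE_TIME_DELTA
});
// usage (sketch): const watcher = new PlaybackWatcher(examplePlaybackWatcherOptions(tech, pc));
// watcher.dispose() detaches every listener and clears the poll timeout.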

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,588 @@
import window from 'global/window';
import Config from './config';
import Playlist from './playlist';
import { codecsForPlaylist } from './util/codecs.js';
import logger from './util/logger';
const logFn = logger('PlaylistSelector');
const representationToString = function(representation) {
if (!representation || !representation.playlist) {
return;
}
const playlist = representation.playlist;
return JSON.stringify({
id: playlist.id,
bandwidth: representation.bandwidth,
width: representation.width,
height: representation.height,
codecs: playlist.attributes && playlist.attributes.CODECS || ''
});
};
// Utilities
/**
* Returns the CSS value for the specified property on an element
* using `getComputedStyle`. Firefox has a long-standing issue where
* getComputedStyle() may return null when running in an iframe with
* `display: none`.
*
* @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
* @param {HTMLElement} el the HTMLElement to work on
* @param {string} property the property to get the style for
*/
const safeGetComputedStyle = function(el, property) {
if (!el) {
return '';
}
const result = window.getComputedStyle(el);
if (!result) {
return '';
}
return result[property];
};
/**
* Reusable stable sort function
*
* @param {Playlists} array
* @param {Function} sortFn Different comparators
* @function stableSort
*/
const stableSort = function(array, sortFn) {
const newArray = array.slice();
array.sort(function(left, right) {
const cmp = sortFn(left, right);
if (cmp === 0) {
return newArray.indexOf(left) - newArray.indexOf(right);
}
return cmp;
});
};
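// Why the copy above matters: Array.prototype.sort was not guaranteed stable
// in older engines, so ties are broken by each element's original index. A
// hedged sketch with illustrative data:
const exampleStableSortTies = function() {
  const reps = [
    { id: 'a', bandwidth: 100 },
    { id: 'b', bandwidth: 100 },
    { id: 'c', bandwidth: 50 }
  ];
  stableSort(reps, (left, right) => left.bandwidth - right.bandwidth);
  // 'c' sorts first; 'a' keeps its original position ahead of 'b'
  return reps.map((rep) => rep.id); // ['c', 'a', 'b']
};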
/**
* A comparator function to sort two playlist object by bandwidth.
*
* @param {Object} left a media playlist object
* @param {Object} right a media playlist object
* @return {number} Greater than zero if the bandwidth attribute of
* left is greater than the corresponding attribute of right. Less
* than zero if the bandwidth of right is greater than left and
* exactly zero if the two are equal.
*/
export const comparePlaylistBandwidth = function(left, right) {
let leftBandwidth;
let rightBandwidth;
if (left.attributes.BANDWIDTH) {
leftBandwidth = left.attributes.BANDWIDTH;
}
leftBandwidth = leftBandwidth || window.Number.MAX_VALUE;
if (right.attributes.BANDWIDTH) {
rightBandwidth = right.attributes.BANDWIDTH;
}
rightBandwidth = rightBandwidth || window.Number.MAX_VALUE;
return leftBandwidth - rightBandwidth;
};
/**
* A comparator function to sort two playlist object by resolution (width).
*
* @param {Object} left a media playlist object
* @param {Object} right a media playlist object
* @return {number} Greater than zero if the resolution.width attribute of
* left is greater than the corresponding attribute of right. Less
* than zero if the resolution.width of right is greater than left and
* exactly zero if the two are equal.
*/
export const comparePlaylistResolution = function(left, right) {
let leftWidth;
let rightWidth;
if (left.attributes.RESOLUTION &&
left.attributes.RESOLUTION.width) {
leftWidth = left.attributes.RESOLUTION.width;
}
leftWidth = leftWidth || window.Number.MAX_VALUE;
if (right.attributes.RESOLUTION &&
right.attributes.RESOLUTION.width) {
rightWidth = right.attributes.RESOLUTION.width;
}
rightWidth = rightWidth || window.Number.MAX_VALUE;
// NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
// have the same media dimensions/resolution
if (leftWidth === rightWidth &&
left.attributes.BANDWIDTH &&
right.attributes.BANDWIDTH) {
return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
}
return leftWidth - rightWidth;
};
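// A hedged sketch of both comparators on stub playlists. Only the attributes
// read above are stubbed; real playlist objects carry much more state.
const exampleComparatorBehavior = function() {
  const hd = { attributes: { BANDWIDTH: 2e6, RESOLUTION: { width: 1280 } } };
  const sd = { attributes: { BANDWIDTH: 1e6, RESOLUTION: { width: 1280 } } };
  // equal widths fall back to the bandwidth comparison, so hd sorts after sd
  console.assert(comparePlaylistResolution(hd, sd) > 0);
  console.assert(comparePlaylistBandwidth(hd, sd) > 0);
};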
/**
* Chooses the appropriate media playlist based on bandwidth and player size
*
* @param {Object} settings
* Object of information required to use this selector
* @param {Object} settings.main
* Object representation of the main manifest
* @param {number} settings.bandwidth
* Current calculated bandwidth of the player
* @param {number} settings.playerWidth
* Current width of the player element (should account for the device pixel ratio)
* @param {number} settings.playerHeight
* Current height of the player element (should account for the device pixel ratio)
* @param {number} settings.playerObjectFit
* Current value of the video element's object-fit CSS property. Allows taking into
* account that the video might be scaled up to cover the media element when selecting
* media playlists based on player size.
* @param {boolean} settings.limitRenditionByPlayerDimensions
* True if the player width and height should be used during the selection, false otherwise
* @param {Object} settings.playlistController
* the current playlistController object
* @return {Playlist} the highest bitrate playlist less than the
* currently detected bandwidth, accounting for some amount of
* bandwidth variance
*/
export let simpleSelector = function(settings) {
const {
main,
bandwidth: playerBandwidth,
playerWidth,
playerHeight,
playerObjectFit,
limitRenditionByPlayerDimensions,
playlistController
} = settings;
// If we end up getting called before `main` is available, exit early
if (!main) {
return;
}
const options = {
bandwidth: playerBandwidth,
width: playerWidth,
height: playerHeight,
limitRenditionByPlayerDimensions
};
let playlists = main.playlists;
// if playlist is audio only, select between currently active audio group playlists.
if (Playlist.isAudioOnly(main)) {
playlists = playlistController.getAudioTrackPlaylists_();
// add audioOnly to options so that we log audioOnly: true
// at the bottom of this function for debugging.
options.audioOnly = true;
}
// convert the playlists to an intermediary representation to make comparisons easier
let sortedPlaylistReps = playlists.map((playlist) => {
let bandwidth;
const width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
const height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
bandwidth = bandwidth || window.Number.MAX_VALUE;
return {
bandwidth,
width,
height,
playlist
};
});
stableSort(sortedPlaylistReps, (left, right) => left.bandwidth - right.bandwidth);
// filter out any playlists that have been excluded due to
// incompatible configurations
sortedPlaylistReps = sortedPlaylistReps.filter((rep) => !Playlist.isIncompatible(rep.playlist));
// filter out any playlists that have been disabled manually through the representations
// api or excluded temporarily due to playback errors.
let enabledPlaylistReps = sortedPlaylistReps.filter((rep) => Playlist.isEnabled(rep.playlist));
if (!enabledPlaylistReps.length) {
// if there are no enabled playlists, then they have all been excluded or disabled
// by the user through the representations api. In this case, ignore exclusion and
// fallback to what the user wants by using playlists the user has not disabled.
enabledPlaylistReps = sortedPlaylistReps.filter((rep) => !Playlist.isDisabled(rep.playlist));
}
// filter out any variant that has greater effective bitrate
// than the current estimated bandwidth
const bandwidthPlaylistReps = enabledPlaylistReps.filter((rep) => rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth);
let highestRemainingBandwidthRep =
bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1];
// get all of the renditions with the same (highest) bandwidth
// and then take the very first element
const bandwidthBestRep = bandwidthPlaylistReps.filter((rep) => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0];
// if we're not going to limit renditions by player size, make an early decision.
if (limitRenditionByPlayerDimensions === false) {
const chosenRep = (
bandwidthBestRep ||
enabledPlaylistReps[0] ||
sortedPlaylistReps[0]
);
if (chosenRep && chosenRep.playlist) {
let type = 'sortedPlaylistReps';
if (bandwidthBestRep) {
type = 'bandwidthBestRep';
}
if (enabledPlaylistReps[0]) {
type = 'enabledPlaylistReps';
}
logFn(`choosing ${representationToString(chosenRep)} using ${type} with options`, options);
return chosenRep.playlist;
}
logFn('could not choose a playlist with options', options);
return null;
}
// filter out playlists without resolution information
const haveResolution = bandwidthPlaylistReps.filter((rep) => rep.width && rep.height);
// sort variants by resolution
stableSort(haveResolution, (left, right) => left.width - right.width);
// if we have the exact resolution as the player use it
const resolutionBestRepList = haveResolution.filter((rep) => rep.width === playerWidth && rep.height === playerHeight);
highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1];
// ensure that we pick the highest bandwidth variant that has the exact resolution
const resolutionBestRep = resolutionBestRepList.filter((rep) => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0];
let resolutionPlusOneList;
let resolutionPlusOneSmallest;
let resolutionPlusOneRep;
// find the smallest variant that is larger than the player
// if there is no match of exact resolution
if (!resolutionBestRep) {
resolutionPlusOneList = haveResolution.filter((rep) => {
if (playerObjectFit === 'cover') {
// video will be scaled up to cover the player. We need to
// make sure rendition is at least as wide and as high as the
// player.
return rep.width > playerWidth && rep.height > playerHeight;
}
// video will be scaled down to fit inside the player as soon as
// its resolution exceeds player size in at least one dimension.
return rep.width > playerWidth || rep.height > playerHeight;
});
// find all the variants that have the same smallest resolution
resolutionPlusOneSmallest = resolutionPlusOneList.filter((rep) => rep.width === resolutionPlusOneList[0].width &&
rep.height === resolutionPlusOneList[0].height);
// ensure that we also pick the highest bandwidth variant that
// is just-larger-than the video player
highestRemainingBandwidthRep =
resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
resolutionPlusOneRep = resolutionPlusOneSmallest.filter((rep) => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0];
}
let leastPixelDiffRep;
// If this selector proves to be better than others,
// resolutionPlusOneRep and resolutionBestRep and all
// the code involving them should be removed.
if (playlistController.leastPixelDiffSelector) {
// find the variant that is closest to the player's pixel size
const leastPixelDiffList = haveResolution.map((rep) => {
rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
return rep;
});
// get the highest bandwidth, closest resolution playlist
stableSort(leastPixelDiffList, (left, right) => {
// sort by highest bandwidth if pixelDiff is the same
if (left.pixelDiff === right.pixelDiff) {
return right.bandwidth - left.bandwidth;
}
return left.pixelDiff - right.pixelDiff;
});
leastPixelDiffRep = leastPixelDiffList[0];
}
// fallback chain of variants
const chosenRep = (
leastPixelDiffRep ||
resolutionPlusOneRep ||
resolutionBestRep ||
bandwidthBestRep ||
enabledPlaylistReps[0] ||
sortedPlaylistReps[0]
);
if (chosenRep && chosenRep.playlist) {
let type = 'sortedPlaylistReps';
if (leastPixelDiffRep) {
type = 'leastPixelDiffRep';
} else if (resolutionPlusOneRep) {
type = 'resolutionPlusOneRep';
} else if (resolutionBestRep) {
type = 'resolutionBestRep';
} else if (bandwidthBestRep) {
type = 'bandwidthBestRep';
} else if (enabledPlaylistReps[0]) {
type = 'enabledPlaylistReps';
}
logFn(`choosing ${representationToString(chosenRep)} using ${type} with options`, options);
return chosenRep.playlist;
}
logFn('could not choose a playlist with options', options);
return null;
};
export const TEST_ONLY_SIMPLE_SELECTOR = (newSimpleSelector) => {
const oldSimpleSelector = simpleSelector;
simpleSelector = newSimpleSelector;
return function resetSimpleSelector() {
simpleSelector = oldSimpleSelector;
};
};
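// A hedged test sketch for the hook above: swap in a deterministic selector,
// then restore the original. `fakePlaylist` is an illustrative stand-in.
const exampleSelectorOverride = function(fakePlaylist) {
  const restore = TEST_ONLY_SIMPLE_SELECTOR(() => fakePlaylist);
  try {
    // the module-level binding now points at the stub
    return simpleSelector({ main: { playlists: [] } });
  } finally {
    restore(); // always put the real selector back
  }
};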
// Playlist Selectors
/**
* Chooses the appropriate media playlist based on the most recent
* bandwidth estimate and the player size.
*
* Expects to be called within the context of an instance of VhsHandler
*
* @return {Playlist} the highest bitrate playlist less than the
* currently detected bandwidth, accounting for some amount of
* bandwidth variance
*/
export const lastBandwidthSelector = function() {
let pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
if (!isNaN(this.customPixelRatio)) {
pixelRatio = this.customPixelRatio;
}
return simpleSelector({
main: this.playlists.main,
bandwidth: this.systemBandwidth,
playerWidth: parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio,
playerHeight: parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio,
playerObjectFit: this.usePlayerObjectFit ? safeGetComputedStyle(this.tech_.el(), 'objectFit') : '',
limitRenditionByPlayerDimensions: this.limitRenditionByPlayerDimensions,
playlistController: this.playlistController_
});
};
/**
* Chooses the appropriate media playlist based on an
* exponential-weighted moving average of the bandwidth after
* filtering for player size.
*
* Expects to be called within the context of an instance of VhsHandler
*
* @param {number} decay - a number between 0 and 1. Higher values of
* this parameter will cause previous bandwidth estimates to lose
* significance more quickly.
* @return {Function} a function which can be invoked to create a new
* playlist selector function.
* @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
*/
export const movingAverageBandwidthSelector = function(decay) {
let average = -1;
let lastSystemBandwidth = -1;
if (decay < 0 || decay > 1) {
throw new Error('Moving average bandwidth decay must be between 0 and 1.');
}
return function() {
let pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
if (!isNaN(this.customPixelRatio)) {
pixelRatio = this.customPixelRatio;
}
if (average < 0) {
average = this.systemBandwidth;
lastSystemBandwidth = this.systemBandwidth;
}
// stop the average from decaying every 250ms when the systemBandwidth is
// constant, and stop it from being dragged down to a very low value when the
// systemBandwidth briefly becomes 0 due to chunk cancellation
if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
average = decay * this.systemBandwidth + (1 - decay) * average;
lastSystemBandwidth = this.systemBandwidth;
}
return simpleSelector({
main: this.playlists.main,
bandwidth: average,
playerWidth: parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio,
playerHeight: parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio,
playerObjectFit: this.usePlayerObjectFit ? safeGetComputedStyle(this.tech_.el(), 'objectFit') : '',
limitRenditionByPlayerDimensions: this.limitRenditionByPlayerDimensions,
playlistController: this.playlistController_
});
};
};
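// The decay step above, worked by hand (numbers are illustrative): with
// decay = 0.5, a previous average of 4 Mbps and a new systemBandwidth sample
// of 8 Mbps give 0.5 * 8e6 + (1 - 0.5) * 4e6 = 6e6, i.e. each fresh sample
// moves the estimate halfway toward the measurement.
const exampleEwmaStep = (decay, previousAverage, sample) =>
  decay * sample + (1 - decay) * previousAverage;
// exampleEwmaStep(0.5, 4e6, 8e6) === 6e6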
/**
* Chooses the appropriate media playlist based on the potential to rebuffer
*
* @param {Object} settings
* Object of information required to use this selector
* @param {Object} settings.main
* Object representation of the main manifest
* @param {number} settings.currentTime
* The current time of the player
* @param {number} settings.bandwidth
* Current measured bandwidth
* @param {number} settings.duration
* Duration of the media
* @param {number} settings.segmentDuration
* Segment duration to be used in round trip time calculations
* @param {number} settings.timeUntilRebuffer
* Time left in seconds until the player has to rebuffer
* @param {number} settings.currentTimeline
* The current timeline segments are being loaded from
* @param {SyncController} settings.syncController
* SyncController for determining if we have a sync point for a given playlist
* @return {Object|null}
* {Object} return.playlist
* The highest bandwidth playlist with the least amount of rebuffering
* {Number} return.rebufferingImpact
* The amount of time in seconds switching to this playlist will rebuffer. A
* negative value means that switching will cause zero rebuffering.
*/
export const minRebufferMaxBandwidthSelector = function(settings) {
const {
main,
currentTime,
bandwidth,
duration,
segmentDuration,
timeUntilRebuffer,
currentTimeline,
syncController
} = settings;
// filter out any playlists that have been excluded due to
// incompatible configurations
const compatiblePlaylists = main.playlists.filter(playlist => !Playlist.isIncompatible(playlist));
// filter out any playlists that have been disabled manually through the representations
// api or excluded temporarily due to playback errors.
let enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
if (!enabledPlaylists.length) {
// if there are no enabled playlists, then they have all been excluded or disabled
// by the user through the representations api. In this case, ignore exclusion and
// fallback to what the user wants by using playlists the user has not disabled.
enabledPlaylists = compatiblePlaylists.filter(playlist => !Playlist.isDisabled(playlist));
}
const bandwidthPlaylists =
enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
const rebufferingEstimates = bandwidthPlaylists.map((playlist) => {
const syncPoint = syncController.getSyncPoint(
playlist,
duration,
currentTimeline,
currentTime
);
// If there is no sync point for this playlist, switching to it will require a
// sync request first. This will double the request time
const numRequests = syncPoint ? 1 : 2;
const requestTimeEstimate = Playlist.estimateSegmentRequestTime(
segmentDuration,
bandwidth,
playlist
);
const rebufferingImpact = (requestTimeEstimate * numRequests) - timeUntilRebuffer;
return {
playlist,
rebufferingImpact
};
});
const noRebufferingPlaylists = rebufferingEstimates.filter((estimate) => estimate.rebufferingImpact <= 0);
// Sort by bandwidth DESC
stableSort(
noRebufferingPlaylists,
(a, b) => comparePlaylistBandwidth(b.playlist, a.playlist)
);
if (noRebufferingPlaylists.length) {
return noRebufferingPlaylists[0];
}
stableSort(rebufferingEstimates, (a, b) => a.rebufferingImpact - b.rebufferingImpact);
return rebufferingEstimates[0] || null;
};
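// The rebuffering arithmetic above, on illustrative numbers: a 10 second
// segment from a 4 Mbps rendition over an 8 Mbps connection takes about
// 5 seconds per request. With no sync point two requests are assumed, so
// with only 8 seconds of buffer left the switch would rebuffer ~2 seconds.
const exampleRebufferingImpact = function() {
  const requestTimeEstimate = (10 * 4e6) / 8e6; // 5 seconds per request
  const numRequests = 2; // no sync point for this playlist
  const timeUntilRebuffer = 8;
  return (requestTimeEstimate * numRequests) - timeUntilRebuffer; // 2 seconds
};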
/**
* Chooses the appropriate media playlist, which in this case is the lowest bitrate
* one with video. If no renditions with video exist, return the lowest audio rendition.
*
* Expects to be called within the context of an instance of VhsHandler
*
* @return {Object|null}
* {Object} return.playlist
* The lowest bitrate playlist that contains a video codec. If no such rendition
* exists pick the lowest audio rendition.
*/
export const lowestBitrateCompatibleVariantSelector = function() {
// filter out any playlists that have been excluded due to
// incompatible configurations or playback errors
const playlists = this.playlists.main.playlists.filter(Playlist.isEnabled);
// Sort ascending by bitrate
stableSort(
playlists,
(a, b) => comparePlaylistBandwidth(a, b)
);
// Parse and assume that playlists with no video codec have no video
// (this is not necessarily true, although it is generally true).
//
// If an entire manifest has no valid videos everything will get filtered
// out.
const playlistsWithVideo = playlists.filter(playlist => !!codecsForPlaylist(this.playlists.main, playlist).video);
return playlistsWithVideo[0] || null;
};


@@ -0,0 +1,806 @@
/**
* @file playlist.js
*
* Playlist related utilities.
*/
import window from 'global/window';
import {isAudioCodec} from '@videojs/vhs-utils/es/codecs.js';
import {TIME_FUDGE_FACTOR} from './ranges.js';
import {createTimeRanges} from './util/vjs-compat';
/**
* Get the duration of a segment, with special cases for
* llhls segments that do not have a duration yet.
*
* @param {Object} playlist
* the playlist that the segment belongs to.
* @param {Object} segment
* the segment to get a duration for.
*
* @return {number}
* the segment duration
*/
export const segmentDurationWithParts = (playlist, segment) => {
// if this isn't a preload segment
// then we will have a segment duration that is accurate.
if (!segment.preload) {
return segment.duration;
}
// otherwise we have to add up parts and preload hints
// to get an up to date duration.
let result = 0;
(segment.parts || []).forEach(function(p) {
result += p.duration;
});
// for preload hints we have to use partTargetDuration
// as they won't even have a duration yet.
(segment.preloadHints || []).forEach(function(p) {
if (p.type === 'PART') {
result += playlist.partTargetDuration;
}
});
return result;
};
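// A hedged sketch of the preload case: two 1-second parts are already known
// and one more part is hinted, so the duration is 1 + 1 + partTargetDuration.
const examplePreloadSegmentDuration = function() {
  const playlist = { partTargetDuration: 1 };
  const segment = {
    preload: true,
    parts: [{ duration: 1 }, { duration: 1 }],
    preloadHints: [{ type: 'PART' }, { type: 'MAP' }] // MAP hints add no time
  };
  return segmentDurationWithParts(playlist, segment); // 3
};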
/**
* A function to get a combined list of parts and segments with durations
* and indexes.
*
* @param {Playlist} playlist the playlist to get the list for.
*
* @return {Array} The part/segment list.
*/
export const getPartsAndSegments = (playlist) => (playlist.segments || []).reduce((acc, segment, si) => {
if (segment.parts) {
segment.parts.forEach(function(part, pi) {
acc.push({duration: part.duration, segmentIndex: si, partIndex: pi, part, segment});
});
} else {
acc.push({duration: segment.duration, segmentIndex: si, partIndex: null, segment, part: null});
}
return acc;
}, []);
export const getLastParts = (media) => {
const lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
return lastSegment && lastSegment.parts || [];
};
export const getKnownPartCount = ({preloadSegment}) => {
if (!preloadSegment) {
return;
}
const {parts, preloadHints} = preloadSegment;
let partCount = (preloadHints || [])
.reduce((count, hint) => count + (hint.type === 'PART' ? 1 : 0), 0);
partCount += (parts && parts.length) ? parts.length : 0;
return partCount;
};
/**
* Get the number of seconds to delay from the end of a
* live playlist.
*
* @param {Playlist} main the main playlist
* @param {Playlist} media the media playlist
* @return {number} the hold back in seconds.
*/
export const liveEdgeDelay = (main, media) => {
if (media.endList) {
return 0;
}
// dash suggestedPresentationDelay trumps everything
if (main && main.suggestedPresentationDelay) {
return main.suggestedPresentationDelay;
}
const hasParts = getLastParts(media).length > 0;
// look for "part" delays from ll-hls first
if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
return media.serverControl.partHoldBack;
} else if (hasParts && media.partTargetDuration) {
return media.partTargetDuration * 3;
// finally look for full segment delays
} else if (media.serverControl && media.serverControl.holdBack) {
return media.serverControl.holdBack;
} else if (media.targetDuration) {
return media.targetDuration * 3;
}
return 0;
};
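// A hedged sketch of the precedence above on an LL-HLS style playlist:
// partHoldBack wins over the part/segment duration fallbacks. Values are
// illustrative.
const exampleLiveEdgeDelay = function() {
  const media = {
    endList: false,
    targetDuration: 6,
    partTargetDuration: 1,
    segments: [{ parts: [{ duration: 1 }] }],
    serverControl: { partHoldBack: 1.5 }
  };
  return liveEdgeDelay(null, media); // 1.5, from serverControl.partHoldBack
};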
/**
* walk backward until we find a duration we can use
* or return a failure
*
* @param {Playlist} playlist the playlist to walk through
* @param {Number} endSequence the mediaSequence to stop walking on
*/
const backwardDuration = function(playlist, endSequence) {
let result = 0;
let i = endSequence - playlist.mediaSequence;
// if a start time is available for segment immediately following
// the interval, use it
let segment = playlist.segments[i];
// Walk backward until we find the latest segment with timeline
// information that is earlier than endSequence
if (segment) {
if (typeof segment.start !== 'undefined') {
return { result: segment.start, precise: true };
}
if (typeof segment.end !== 'undefined') {
return {
result: segment.end - segment.duration,
precise: true
};
}
}
while (i--) {
segment = playlist.segments[i];
if (typeof segment.end !== 'undefined') {
return { result: result + segment.end, precise: true };
}
result += segmentDurationWithParts(playlist, segment);
if (typeof segment.start !== 'undefined') {
return { result: result + segment.start, precise: true };
}
}
return { result, precise: false };
};
/**
* walk forward until we find a duration we can use
* or return a failure
*
* @param {Playlist} playlist the playlist to walk through
* @param {number} endSequence the mediaSequence to stop walking on
*/
const forwardDuration = function(playlist, endSequence) {
let result = 0;
let segment;
let i = endSequence - playlist.mediaSequence;
// Walk forward until we find the earliest segment with timeline
// information
for (; i < playlist.segments.length; i++) {
segment = playlist.segments[i];
if (typeof segment.start !== 'undefined') {
return {
result: segment.start - result,
precise: true
};
}
result += segmentDurationWithParts(playlist, segment);
if (typeof segment.end !== 'undefined') {
return {
result: segment.end - result,
precise: true
};
}
}
// indicate we didn't find a useful duration estimate
return { result: -1, precise: false };
};
/**
* Calculate the media duration from the segments associated with a
* playlist. The duration of a subinterval of the available segments
* may be calculated by specifying an end index.
*
* @param {Object} playlist a media playlist object
* @param {number=} endSequence an exclusive upper boundary
* for the playlist. Defaults to playlist length.
* @param {number} expired the amount of time that has dropped
* off the front of the playlist in a live scenario
* @return {number} the duration between the first available segment
* and end index.
*/
const intervalDuration = function(playlist, endSequence, expired) {
if (typeof endSequence === 'undefined') {
endSequence = playlist.mediaSequence + playlist.segments.length;
}
if (endSequence < playlist.mediaSequence) {
return 0;
}
// do a backward walk to estimate the duration
const backward = backwardDuration(playlist, endSequence);
if (backward.precise) {
// if we were able to base our duration estimate on timing
// information provided directly from the Media Source, return
// it
return backward.result;
}
// walk forward to see if a precise duration estimate can be made
// that way
const forward = forwardDuration(playlist, endSequence);
if (forward.precise) {
// we found a segment that has been buffered and so its
// position is known precisely
return forward.result;
}
// return the less-precise, playlist-based duration estimate
return backward.result + expired;
};
/**
* Calculates the duration of a playlist. If a start and end index
* are specified, the duration will be for the subset of the media
* timeline between those two indices. The total duration for live
* playlists is always Infinity.
*
* @param {Object} playlist a media playlist object
* @param {number=} endSequence an exclusive upper
* boundary for the playlist. Defaults to the playlist media
* sequence number plus its length.
* @param {number=} expired the amount of time that has
* dropped off the front of the playlist in a live scenario
* @return {number} the duration between the start index and end
* index.
*/
export const duration = function(playlist, endSequence, expired) {
if (!playlist) {
return 0;
}
if (typeof expired !== 'number') {
expired = 0;
}
// if a slice of the total duration is not requested, use
// playlist-level duration indicators when they're present
if (typeof endSequence === 'undefined') {
// if present, use the duration specified in the playlist
if (playlist.totalDuration) {
return playlist.totalDuration;
}
// duration should be Infinity for live playlists
if (!playlist.endList) {
return window.Infinity;
}
}
// calculate the total duration based on the segment durations
return intervalDuration(
playlist,
endSequence,
expired
);
};
/**
* Calculate the time between two indexes in the current playlist.
* Neither the start nor the end index needs to be within the current
* playlist; in that case, the targetDuration of the playlist is used
* to approximate the durations of the segments.
*
* @param {Array} options.durationList list to iterate over for durations.
* @param {number} options.defaultDuration duration to use for elements before or after the durationList
* @param {number} options.startIndex partsAndSegments index to start
* @param {number} options.endIndex partsAndSegments index to end.
* @return {number} the number of seconds between startIndex and endIndex
*/
export const sumDurations = function({defaultDuration, durationList, startIndex, endIndex}) {
let durations = 0;
if (startIndex > endIndex) {
[startIndex, endIndex] = [endIndex, startIndex];
}
if (startIndex < 0) {
for (let i = startIndex; i < Math.min(0, endIndex); i++) {
durations += defaultDuration;
}
startIndex = 0;
}
for (let i = startIndex; i < endIndex; i++) {
durations += durationList[i].duration;
}
return durations;
};
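// A hedged sketch: indexes before the start of the list are priced at
// defaultDuration, while in-range entries use their real durations.
const exampleSumDurations = function() {
  return sumDurations({
    defaultDuration: 10,
    durationList: [{ duration: 4 }, { duration: 5 }],
    startIndex: -1, // one "virtual" entry before the list: 10 seconds
    endIndex: 2 // plus both real entries: 4 + 5 seconds
  }); // 19
};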
/**
* Calculates the playlist end time
*
* @param {Object} playlist a media playlist object
* @param {number=} expired the amount of time that has
* dropped off the front of the playlist in a live scenario
* @param {boolean|false} useSafeLiveEnd a boolean value indicating whether or not the
* playlist end calculation should consider the safe live end
* (truncate the playlist end by three segments). This is normally
* used for calculating the end of the playlist's seekable range.
* This takes into account the value of liveEdgePadding.
* Setting liveEdgePadding to 0 is equivalent to setting this to false.
* @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
* If this is provided, it is used in the safe live end calculation.
* Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
* Corresponds to suggestedPresentationDelay in DASH manifests.
* @return {number} the end time of playlist
* @function playlistEnd
*/
export const playlistEnd = function(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
if (!playlist || !playlist.segments) {
return null;
}
if (playlist.endList) {
return duration(playlist);
}
if (expired === null) {
return null;
}
expired = expired || 0;
let lastSegmentEndTime = intervalDuration(
playlist,
playlist.mediaSequence + playlist.segments.length,
expired
);
if (useSafeLiveEnd) {
liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
lastSegmentEndTime -= liveEdgePadding;
}
// don't return a time less than zero
return Math.max(0, lastSegmentEndTime);
};
/**
* Calculates the interval of time that is currently seekable in a
* playlist. The returned time ranges are relative to the earliest
* moment in the specified playlist that is still available. A full
* seekable implementation for live streams would need to offset
* these values by the duration of content that has expired from the
* stream.
*
* @param {Object} playlist a media playlist object
* @param {number=} expired the amount of time that has
* dropped off the front of the playlist in a live scenario
* @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
* Corresponds to suggestedPresentationDelay in DASH manifests.
* @return {TimeRanges} the periods of time that are valid targets
* for seeking
*/
export const seekable = function(playlist, expired, liveEdgePadding) {
const useSafeLiveEnd = true;
const seekableStart = expired || 0;
let seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
if (seekableEnd === null) {
return createTimeRanges();
}
// Clamp seekable end since it can not be less than the seekable start
if (seekableEnd < seekableStart) {
seekableEnd = seekableStart;
}
return createTimeRanges(seekableStart, seekableEnd);
};
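// A hedged sketch on a live playlist of five 6-second segments with 12
// seconds already expired: the duration walk yields a 42 second end, and the
// safe live end then backs off liveEdgeDelay (3 * targetDuration = 18 here).
const exampleSeekableWindow = function() {
  const playlist = {
    mediaSequence: 2,
    targetDuration: 6,
    segments: [
      { duration: 6 }, { duration: 6 }, { duration: 6 },
      { duration: 6 }, { duration: 6 }
    ]
  };
  const ranges = seekable(playlist, 12);
  return [ranges.start(0), ranges.end(0)]; // [12, 24]
};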
/**
* Determine the index and estimated starting time of the segment that
* contains a specified playback position in a media playlist.
*
* @param {Object} options.playlist the media playlist to query
* @param {number} options.currentTime The number of seconds since the earliest
* possible position to determine the containing segment for
* @param {number} options.startTime the time when the segment/part starts
* @param {number} options.startingSegmentIndex the segment index to start looking at.
* @param {number?} [options.startingPartIndex] the part index to look at within the segment.
*
* @return {Object} an object with partIndex, segmentIndex, and startTime.
*/
export const getMediaInfoForTime = function({
playlist,
currentTime,
startingSegmentIndex,
startingPartIndex,
startTime,
exactManifestTimings
}) {
let time = currentTime - startTime;
const partsAndSegments = getPartsAndSegments(playlist);
let startIndex = 0;
for (let i = 0; i < partsAndSegments.length; i++) {
const partAndSegment = partsAndSegments[i];
if (startingSegmentIndex !== partAndSegment.segmentIndex) {
continue;
}
// skip this if part index does not match.
if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
continue;
}
startIndex = i;
break;
}
if (time < 0) {
// Walk backward from startIndex in the playlist, adding durations
// until we find a segment that contains `time` and return it
if (startIndex > 0) {
for (let i = startIndex - 1; i >= 0; i--) {
const partAndSegment = partsAndSegments[i];
time += partAndSegment.duration;
if (exactManifestTimings) {
if (time < 0) {
continue;
}
} else if ((time + TIME_FUDGE_FACTOR) <= 0) {
continue;
}
return {
partIndex: partAndSegment.partIndex,
segmentIndex: partAndSegment.segmentIndex,
startTime: startTime - sumDurations({
defaultDuration: playlist.targetDuration,
durationList: partsAndSegments,
startIndex,
endIndex: i
})
};
}
}
// We were unable to find a good segment within the playlist
// so select the first segment
return {
partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
startTime: currentTime
};
}
// When startIndex is negative, we first walk forward to first segment
// adding target durations. If we "run out of time" before getting to
// the first segment, return the first segment
if (startIndex < 0) {
for (let i = startIndex; i < 0; i++) {
time -= playlist.targetDuration;
if (time < 0) {
return {
partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
startTime: currentTime
};
}
}
startIndex = 0;
}
// Walk forward from startIndex in the playlist, subtracting durations
// until we find a segment that contains `time` and return it
for (let i = startIndex; i < partsAndSegments.length; i++) {
const partAndSegment = partsAndSegments[i];
time -= partAndSegment.duration;
const canUseFudgeFactor = partAndSegment.duration > TIME_FUDGE_FACTOR;
const isExactlyAtTheEnd = time === 0;
const isExtremelyCloseToTheEnd = canUseFudgeFactor && (time + TIME_FUDGE_FACTOR >= 0);
if (isExactlyAtTheEnd || isExtremelyCloseToTheEnd) {
// 1) We are exactly at the end of the current segment.
// 2) We are extremely close to the end of the current segment (The difference is less than 1 / 30).
// We may encounter this situation when
// we don't have exact match between segment duration info in the manifest and the actual duration of the segment
// For example:
// We appended 3 segments 10 seconds each, meaning we should have 30 sec buffered,
// but the actual buffered end is 29.99999
//
// In both cases:
// if we passed current time -> it means that we already played current segment
// if we passed buffered.end -> it means that this segment is already loaded and buffered
// we should select the next segment if we have one:
if (i !== partsAndSegments.length - 1) {
continue;
}
}
if (exactManifestTimings) {
if (time > 0) {
continue;
}
} else if ((time - TIME_FUDGE_FACTOR) >= 0) {
continue;
}
return {
partIndex: partAndSegment.partIndex,
segmentIndex: partAndSegment.segmentIndex,
startTime: startTime + sumDurations({
defaultDuration: playlist.targetDuration,
durationList: partsAndSegments,
startIndex,
endIndex: i
})
};
}
// We are out of possible candidates so load the last one...
return {
segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
startTime: currentTime
};
};
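// Illustrative sketch (hypothetical numbers, not part of the module): given a
// playlist of three 10 second segments and a sync point at segment 0 with
// startTime 0, a currentTime of 25 walks forward past two whole segments and
// lands in the third:
//
//   const info = getMediaInfoForTime({
//     playlist,                 // assumed: a parsed playlist of 10s segments
//     currentTime: 25,
//     startingSegmentIndex: 0,
//     startingPartIndex: null,
//     startTime: 0,
//     exactManifestTimings: false
//   });
//   // info.segmentIndex === 2, info.partIndex === null, info.startTime === 20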
/**
* Check whether the playlist is excluded or not.
*
* @param {Object} playlist the media playlist object
* @return {boolean} whether the playlist is excluded or not
* @function isExcluded
*/
export const isExcluded = function(playlist) {
return playlist.excludeUntil && playlist.excludeUntil > Date.now();
};
/**
* Check whether the playlist is compatible with current playback configuration or has
* been excluded permanently for being incompatible.
*
* @param {Object} playlist the media playlist object
* @return {boolean} whether the playlist is incompatible or not
* @function isIncompatible
*/
export const isIncompatible = function(playlist) {
return playlist.excludeUntil && playlist.excludeUntil === Infinity;
};
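// Illustrative example: a temporary exclusion is reported until its deadline
// passes, while Infinity marks a permanent (incompatible) exclusion:
//
//   isExcluded({ excludeUntil: Date.now() + 5000 }); // => true for ~5 seconds
//   isIncompatible({ excludeUntil: Infinity });      // => true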
/**
* Check whether the playlist is enabled or not.
*
* @param {Object} playlist the media playlist object
* @return {boolean} whether the playlist is enabled or not
* @function isEnabled
*/
export const isEnabled = function(playlist) {
const excluded = isExcluded(playlist);
return (!playlist.disabled && !excluded);
};
/**
* Check whether the playlist has been manually disabled through the representations api.
*
* @param {Object} playlist the media playlist object
* @return {boolean} whether the playlist is disabled manually or not
* @function isDisabled
*/
export const isDisabled = function(playlist) {
return playlist.disabled;
};
/**
* Returns whether the current playlist is an AES encrypted HLS stream
*
* @return {boolean} true if it's an AES encrypted HLS stream
*/
export const isAes = function(media) {
for (let i = 0; i < media.segments.length; i++) {
if (media.segments[i].key) {
return true;
}
}
return false;
};
/**
* Checks if the playlist has a value for the specified attribute
*
* @param {string} attr
* Attribute to check for
* @param {Object} playlist
* The media playlist object
* @return {boolean}
* Whether the playlist contains a value for the attribute or not
* @function hasAttribute
*/
export const hasAttribute = function(attr, playlist) {
return playlist.attributes && playlist.attributes[attr];
};
/**
* Estimates the time required to complete a segment download from the specified playlist
*
* @param {number} segmentDuration
* Duration of requested segment
* @param {number} bandwidth
* Current measured bandwidth of the player
* @param {Object} playlist
* The media playlist object
* @param {number=} bytesReceived
* Number of bytes already received for the request. Defaults to 0
* @return {number|NaN}
* The estimated time to request the segment. NaN if bandwidth information for
* the given playlist is unavailable
* @function estimateSegmentRequestTime
*/
export const estimateSegmentRequestTime = function(
segmentDuration,
bandwidth,
playlist,
bytesReceived = 0
) {
if (!hasAttribute('BANDWIDTH', playlist)) {
return NaN;
}
const size = segmentDuration * playlist.attributes.BANDWIDTH;
return (size - (bytesReceived * 8)) / bandwidth;
};
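// Worked example (hypothetical numbers): a 6 second segment from a playlist
// whose BANDWIDTH attribute is 2,000,000 bits/s weighs roughly
// 6 * 2,000,000 = 12,000,000 bits. At a measured bandwidth of 4,000,000 bits/s:
//
//   estimateSegmentRequestTime(6, 4e6, playlist);         // => 3 (seconds)
//   estimateSegmentRequestTime(6, 4e6, playlist, 250000); // 250,000 bytes
//   // (2,000,000 bits) already received => (12e6 - 2e6) / 4e6 => 2.5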
/*
* Returns whether the current playlist is the lowest rendition
*
* @return {Boolean} true if on lowest rendition
*/
export const isLowestEnabledRendition = (main, media) => {
if (main.playlists.length === 1) {
return true;
}
const currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
return (main.playlists.filter((playlist) => {
if (!isEnabled(playlist)) {
return false;
}
return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
}).length === 0);
};
export const playlistMatch = (a, b) => {
// both playlists are null
// or only one playlist is non-null
// no match
if (!a && !b || (!a && b) || (a && !b)) {
return false;
}
// playlist objects are the same, match
if (a === b) {
return true;
}
// first try to use id as it should be the most
// accurate
if (a.id && b.id && a.id === b.id) {
return true;
}
// next try to use resolvedUri as it should be the
// second most accurate.
if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
return true;
}
// finally try to use uri as it should be accurate
// but might miss a few cases for relative uris
if (a.uri && b.uri && a.uri === b.uri) {
return true;
}
return false;
};
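// Illustrative example: two loader-side copies of the same rendition match by
// id even though the object references differ:
//
//   playlistMatch({ id: '0-media.m3u8' }, { id: '0-media.m3u8' }); // => true
//   playlistMatch(null, { id: '0-media.m3u8' });                   // => false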
const someAudioVariant = function(main, callback) {
const AUDIO = main && main.mediaGroups && main.mediaGroups.AUDIO || {};
let found = false;
for (const groupName in AUDIO) {
for (const label in AUDIO[groupName]) {
found = callback(AUDIO[groupName][label]);
if (found) {
break;
}
}
if (found) {
break;
}
}
return !!found;
};
export const isAudioOnly = (main) => {
// we are audio only if we have no main playlists but do
// have media group playlists.
if (!main || !main.playlists || !main.playlists.length) {
// with no regular playlists this is an audio only main
// if at least one audio group variant has playlists or a uri.
const found = someAudioVariant(main, (variant) =>
(variant.playlists && variant.playlists.length) || variant.uri);
return found;
}
// if every playlist has only an audio codec it is audio only
for (let i = 0; i < main.playlists.length; i++) {
const playlist = main.playlists[i];
const CODECS = playlist.attributes && playlist.attributes.CODECS;
// all codecs are audio, this is an audio playlist.
if (CODECS && CODECS.split(',').every((c) => isAudioCodec(c))) {
continue;
}
// if the playlist is in an audio group it is audio only
const found = someAudioVariant(main, (variant) => playlistMatch(playlist, variant));
if (found) {
continue;
}
// if we make it here this playlist isn't audio and we
// are not audio only
return false;
}
// if we make it past every playlist without returning, then
// this is an audio only main.
return true;
};
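// Illustrative example: a main playlist whose only rendition advertises an
// audio codec is treated as audio only:
//
//   isAudioOnly({
//     playlists: [{ attributes: { CODECS: 'mp4a.40.2' } }],
//     mediaGroups: { AUDIO: {} }
//   }); // => true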
// exports
export default {
liveEdgeDelay,
duration,
seekable,
getMediaInfoForTime,
isEnabled,
isDisabled,
isExcluded,
isIncompatible,
playlistEnd,
isAes,
hasAttribute,
estimateSegmentRequestTime,
isLowestEnabledRendition,
isAudioOnly,
playlistMatch,
segmentDurationWithParts
};

VApp/node_modules/@videojs/http-streaming/src/ranges.js generated vendored Normal file

@@ -0,0 +1,489 @@
/**
* ranges
*
* Utilities for working with TimeRanges.
*
*/
import {createTimeRanges} from './util/vjs-compat';
// Fudge factor to account for TimeRanges rounding
export const TIME_FUDGE_FACTOR = 1 / 30;
// Comparisons between time values such as current time and the end of the buffered range
// can be misleading because of precision differences or when the current media has poorly
// aligned audio and video, which can cause values to be slightly off from what you would
// expect. This value is what we consider to be safe to use in such comparisons to account
// for these scenarios.
export const SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
/**
* Clamps a value to within a range
*
* @param {number} num - the value to clamp
* @param {number[]} range - the inclusive [start, end] range to clamp within
* @return {number}
*/
const clamp = function(num, [start, end]) {
return Math.min(Math.max(start, num), end);
};
const filterRanges = function(timeRanges, predicate) {
const results = [];
let i;
if (timeRanges && timeRanges.length) {
// Search for ranges that match the predicate
for (i = 0; i < timeRanges.length; i++) {
if (predicate(timeRanges.start(i), timeRanges.end(i))) {
results.push([timeRanges.start(i), timeRanges.end(i)]);
}
}
}
return createTimeRanges(results);
};
/**
* Attempts to find the buffered TimeRange that contains the specified
* time.
*
* @param {TimeRanges} buffered - the TimeRanges object to query
* @param {number} time - the time to filter on.
* @return {TimeRanges} a new TimeRanges object
*/
export const findRange = function(buffered, time) {
return filterRanges(buffered, function(start, end) {
return start - SAFE_TIME_DELTA <= time &&
end + SAFE_TIME_DELTA >= time;
});
};
/**
* Returns the TimeRanges that begin later than the specified time.
*
* @param {TimeRanges} timeRanges - the TimeRanges object to query
* @param {number} time - the time to filter on.
* @return {TimeRanges} a new TimeRanges object.
*/
export const findNextRange = function(timeRanges, time) {
return filterRanges(timeRanges, function(start) {
return start - TIME_FUDGE_FACTOR >= time;
});
};
/**
* Returns gaps within a list of TimeRanges
*
* @param {TimeRanges} buffered - the TimeRanges object
* @return {TimeRanges} a TimeRanges object of gaps
*/
export const findGaps = function(buffered) {
if (buffered.length < 2) {
return createTimeRanges();
}
const ranges = [];
for (let i = 1; i < buffered.length; i++) {
const start = buffered.end(i - 1);
const end = buffered.start(i);
ranges.push([start, end]);
}
return createTimeRanges(ranges);
};
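// Illustrative example: buffered ranges [0, 10] and [15, 20] contain a single
// 5 second gap:
//
//   const gaps = findGaps(createTimeRanges([[0, 10], [15, 20]]));
//   // gaps.length === 1, gaps.start(0) === 10, gaps.end(0) === 15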
/**
* Search for a likely end time for the segment that was just appended
* based on the state of the `buffered` property before and after the
* append. If we find only one such uncommon end-point, return it.
*
* @param {TimeRanges} original - the buffered time ranges before the update
* @param {TimeRanges} update - the buffered time ranges after the update
* @return {number|null} the end time added between `original` and `update`,
* or null if one cannot be unambiguously determined.
*/
export const findSoleUncommonTimeRangesEnd = function(original, update) {
let i;
let start;
let end;
const result = [];
const edges = [];
// In order to qualify as a possible candidate, the end point must:
// 1) Not have already existed in the `original` ranges
// 2) Not result from the shrinking of a range that already existed
// in the `original` ranges
// 3) Not be contained inside of a range that existed in `original`
const overlapsCurrentEnd = function(span) {
return (span[0] <= end && span[1] >= end);
};
if (original) {
// Save all the edges in the `original` TimeRanges object
for (i = 0; i < original.length; i++) {
start = original.start(i);
end = original.end(i);
edges.push([start, end]);
}
}
if (update) {
// Save any end-points in `update` that are not in the `original`
// TimeRanges object
for (i = 0; i < update.length; i++) {
start = update.start(i);
end = update.end(i);
if (edges.some(overlapsCurrentEnd)) {
continue;
}
// at this point it must be a unique non-shrinking end edge
result.push(end);
}
}
// we err on the side of caution and return null if we didn't find
// exactly *one* differing end edge in the search above
if (result.length !== 1) {
return null;
}
return result[0];
};
/**
* Calculate the intersection of two TimeRanges
*
* @param {TimeRanges} bufferA
* @param {TimeRanges} bufferB
* @return {TimeRanges} The intersection of `bufferA` with `bufferB`
*/
export const bufferIntersection = function(bufferA, bufferB) {
let start = null;
let end = null;
let arity = 0;
const extents = [];
const ranges = [];
if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
return createTimeRanges();
}
// Handle the case where we have both buffers and create an
// intersection of the two
let count = bufferA.length;
// A) Gather up all start and end times
while (count--) {
extents.push({time: bufferA.start(count), type: 'start'});
extents.push({time: bufferA.end(count), type: 'end'});
}
count = bufferB.length;
while (count--) {
extents.push({time: bufferB.start(count), type: 'start'});
extents.push({time: bufferB.end(count), type: 'end'});
}
// B) Sort them by time
extents.sort(function(a, b) {
return a.time - b.time;
});
// C) Go along one by one incrementing arity for start and decrementing
// arity for ends
for (count = 0; count < extents.length; count++) {
if (extents[count].type === 'start') {
arity++;
// D) If arity is ever incremented to 2 we are entering an
// overlapping range
if (arity === 2) {
start = extents[count].time;
}
} else if (extents[count].type === 'end') {
arity--;
// E) If arity is ever decremented to 1 we are leaving an
// overlapping range
if (arity === 1) {
end = extents[count].time;
}
}
// F) Record overlapping ranges
if (start !== null && end !== null) {
ranges.push([start, end]);
start = null;
end = null;
}
}
return createTimeRanges(ranges);
};
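// Illustrative example: the ranges [0, 10] and [5, 15] overlap on [5, 10]:
//
//   const both = bufferIntersection(
//     createTimeRanges([[0, 10]]),
//     createTimeRanges([[5, 15]])
//   );
//   // both.start(0) === 5, both.end(0) === 10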
/**
* Calculates the percentage of `segmentRange` that overlaps the
* `buffered` time ranges.
*
* @param {TimeRanges} adjustedRange - the time range that the segment
* covers adjusted according to currentTime
* @param {TimeRanges} referenceRange - the original time range that the
* segment covers
* @param {number} currentTime - time in seconds where the current playback
* is at
* @param {TimeRanges} buffered - the currently buffered time ranges
* @return {number} percent of the segment currently buffered
*/
const calculateBufferedPercent = function(
adjustedRange,
referenceRange,
currentTime,
buffered
) {
const referenceDuration = referenceRange.end(0) - referenceRange.start(0);
const adjustedDuration = adjustedRange.end(0) - adjustedRange.start(0);
const bufferMissingFromAdjusted = referenceDuration - adjustedDuration;
const adjustedIntersection = bufferIntersection(adjustedRange, buffered);
const referenceIntersection = bufferIntersection(referenceRange, buffered);
let adjustedOverlap = 0;
let referenceOverlap = 0;
let count = adjustedIntersection.length;
while (count--) {
adjustedOverlap += adjustedIntersection.end(count) -
adjustedIntersection.start(count);
// If the current overlap segment starts at currentTime, then increase the
// overlap duration so that it actually starts at the beginning of referenceRange
// by including the difference between the two Range's durations
// This is a workaround for the way Flash has no buffer before currentTime
// TODO: see if this is still necessary since Flash isn't included
if (adjustedIntersection.start(count) === currentTime) {
adjustedOverlap += bufferMissingFromAdjusted;
}
}
count = referenceIntersection.length;
while (count--) {
referenceOverlap += referenceIntersection.end(count) -
referenceIntersection.start(count);
}
// Use whichever value is larger for the percentage-buffered since that value
// is likely more accurate
return Math.max(adjustedOverlap, referenceOverlap) / referenceDuration * 100;
};
/**
* Return the amount of a range specified by the startOfSegment and segmentDuration
* overlaps the current buffered content.
*
* @param {number} startOfSegment - the time where the segment begins
* @param {number} segmentDuration - the duration of the segment in seconds
* @param {number} currentTime - time in seconds where the current playback
* is at
* @param {TimeRanges} buffered - the state of the buffer
* @return {number} percentage of the segment's time range that is
* already in `buffered`
*/
export const getSegmentBufferedPercent = function(
startOfSegment,
segmentDuration,
currentTime,
buffered
) {
const endOfSegment = startOfSegment + segmentDuration;
// The entire time range of the segment
const originalSegmentRange = createTimeRanges([[
startOfSegment,
endOfSegment
]]);
// The adjusted segment time range that is setup such that it starts
// no earlier than currentTime
// Flash has no notion of a back-buffer so adjustedSegmentRange adjusts
// for that and the function will still return 100% if only half of a
// segment is actually in the buffer as long as the currentTime is also
// half-way through the segment
const adjustedSegmentRange = createTimeRanges([[
clamp(startOfSegment, [currentTime, endOfSegment]),
endOfSegment
]]);
// This condition happens when the currentTime is beyond the segment's
// end time
if (adjustedSegmentRange.start(0) === adjustedSegmentRange.end(0)) {
return 0;
}
const percent = calculateBufferedPercent(
adjustedSegmentRange,
originalSegmentRange,
currentTime,
buffered
);
// If the segment is reported as having a zero duration, return 0%
// since it is likely that we will need to fetch the segment
if (isNaN(percent) || percent === Infinity || percent === -Infinity) {
return 0;
}
return percent;
};
/**
* Gets a human readable string for a TimeRange
*
* @param {TimeRange} range
* @return {string} a human readable string
*/
export const printableRange = (range) => {
const strArr = [];
if (!range || !range.length) {
return '';
}
for (let i = 0; i < range.length; i++) {
strArr.push(range.start(i) + ' => ' + range.end(i));
}
return strArr.join(', ');
};
/**
* Calculates the amount of time left in seconds until the player hits the end of the
* buffer and causes a rebuffer
*
* @param {TimeRange} buffered
* The state of the buffer
* @param {number} currentTime
* The current time of the player
* @param {number} playbackRate
* The current playback rate of the player. Defaults to 1.
* @return {number}
* Time until the player has to start rebuffering in seconds.
* @function timeUntilRebuffer
*/
export const timeUntilRebuffer = function(buffered, currentTime, playbackRate = 1) {
const bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
return (bufferedEnd - currentTime) / playbackRate;
};
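// Worked example (hypothetical numbers): with buffer through 18s, playback at
// 10s, and a 2x playback rate, (18 - 10) / 2 leaves 4 seconds of wall-clock
// time before a rebuffer:
//
//   timeUntilRebuffer(createTimeRanges([[0, 18]]), 10, 2); // => 4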
/**
* Converts a TimeRanges object into an array representation
*
* @param {TimeRanges} timeRanges
* @return {Array}
*/
export const timeRangesToArray = (timeRanges) => {
const timeRangesList = [];
for (let i = 0; i < timeRanges.length; i++) {
timeRangesList.push({
start: timeRanges.start(i),
end: timeRanges.end(i)
});
}
return timeRangesList;
};
/**
* Determines if two time range objects are different.
*
* @param {TimeRange} a
* the first time range object to check
*
* @param {TimeRange} b
* the second time range object to check
*
* @return {Boolean}
* Whether the time range objects differ
*/
export const isRangeDifferent = function(a, b) {
// same object
if (a === b) {
return false;
}
// one or the other is undefined
if (!a && b || (!b && a)) {
return true;
}
// length is different
if (a.length !== b.length) {
return true;
}
// see if any start/end pair is different
for (let i = 0; i < a.length; i++) {
if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
return true;
}
}
// if the length and every pair is the same
// this is the same time range
return false;
};
export const lastBufferedEnd = function(a) {
if (!a || !a.length || !a.end) {
return;
}
return a.end(a.length - 1);
};
/**
* A utility function to add up the amount of time in a timeRange
* after a specified startTime.
* e.g. [[0, 10], [20, 40], [50, 60]] with a startTime of 0
* would return 40 as there are 40 seconds after 0 in the timeRange
*
* @param {TimeRange} range
* The range to check against
* @param {number} startTime
* The time in the time range that you should start counting from
*
* @return {number}
* The number of seconds in the buffer past the specified time.
*/
export const timeAheadOf = function(range, startTime) {
let time = 0;
if (!range || !range.length) {
return time;
}
for (let i = 0; i < range.length; i++) {
const start = range.start(i);
const end = range.end(i);
// startTime is after this range entirely
if (startTime > end) {
continue;
}
// startTime is within this range
if (startTime > start && startTime <= end) {
time += end - startTime;
continue;
}
// startTime is before this range.
time += end - start;
}
return time;
};

VApp/node_modules/@videojs/http-streaming/src/reload-source-on-error.js generated vendored Normal file

@@ -0,0 +1,125 @@
import videojs from 'video.js';
import {merge} from './util/vjs-compat';
const defaultOptions = {
errorInterval: 30,
getSource(next) {
const tech = this.tech({ IWillNotUseThisInPlugins: true });
const sourceObj = tech.currentSource_ || this.currentSource();
return next(sourceObj);
}
};
/**
* Main entry point for the plugin
*
* @param {Player} player a reference to a videojs Player instance
* @param {Object} [options] an object with plugin options
* @private
*/
const initPlugin = function(player, options) {
let lastCalled = 0;
let seekTo = 0;
const localOptions = merge(defaultOptions, options);
player.ready(() => {
player.trigger({type: 'usage', name: 'vhs-error-reload-initialized'});
});
/**
* Player modifications to perform that must wait until `loadedmetadata`
* has been triggered
*
* @private
*/
const loadedMetadataHandler = function() {
if (seekTo) {
player.currentTime(seekTo);
}
};
/**
* Set the source on the player element, play, and seek if necessary
*
* @param {Object} sourceObj An object specifying the source url and mime-type to play
* @private
*/
const setSource = function(sourceObj) {
if (sourceObj === null || sourceObj === undefined) {
return;
}
seekTo = (player.duration() !== Infinity && player.currentTime()) || 0;
player.one('loadedmetadata', loadedMetadataHandler);
player.src(sourceObj);
player.trigger({type: 'usage', name: 'vhs-error-reload'});
player.play();
};
/**
* Attempt to get a source from either the built-in getSource function
* or a custom function provided via the options
*
* @private
*/
const errorHandler = function() {
// Do not attempt to reload the source if a source-reload occurred before
// 'errorInterval' time has elapsed since the last source-reload
if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
player.trigger({type: 'usage', name: 'vhs-error-reload-canceled'});
return;
}
if (!localOptions.getSource ||
typeof localOptions.getSource !== 'function') {
videojs.log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
return;
}
lastCalled = Date.now();
return localOptions.getSource.call(player, setSource);
};
/**
* Unbind any event handlers that were bound by the plugin
*
* @private
*/
const cleanupEvents = function() {
player.off('loadedmetadata', loadedMetadataHandler);
player.off('error', errorHandler);
player.off('dispose', cleanupEvents);
};
/**
* Cleanup before re-initializing the plugin
*
* @param {Object} [newOptions] an object with plugin options
* @private
*/
const reinitPlugin = function(newOptions) {
cleanupEvents();
initPlugin(player, newOptions);
};
player.on('error', errorHandler);
player.on('dispose', cleanupEvents);
// Overwrite the plugin function so that we can correctly cleanup before
// initializing the plugin
player.reloadSourceOnError = reinitPlugin;
};
/**
* Reload the source when an error is detected as long as there
* wasn't an error previously within the last 30 seconds
*
* @param {Object} [options] an object with plugin options
*/
const reloadSourceOnError = function(options) {
initPlugin(this, options);
};
export default reloadSourceOnError;
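// Illustrative usage (assuming the plugin has been registered with Video.js,
// as the main VHS module does):
//
//   player.reloadSourceOnError({
//     errorInterval: 10, // retry at most once every 10 seconds
//     getSource(next) {
//       // `this` is the player; hand the current source back to the plugin
//       next({ src: this.currentSource().src, type: this.currentType() });
//     }
//   });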

VApp/node_modules/@videojs/http-streaming/src/rendition-mixin.js generated vendored Normal file

@@ -0,0 +1,121 @@
import { isIncompatible, isEnabled, isAudioOnly } from './playlist.js';
import { codecsForPlaylist } from './util/codecs.js';
/**
* Returns a function that acts as the Enable/disable playlist function.
*
* @param {PlaylistLoader} loader - The main playlist loader
* @param {string} playlistID - id of the playlist
* @param {Function} changePlaylistFn - A function to be called after a
* playlist's enabled-state has been changed. Will NOT be called if a
* playlist's enabled-state is unchanged
* @param {boolean=} enable - Value to set the playlist enabled-state to
* or if undefined returns the current enabled-state for the playlist
* @return {Function} Function for setting/getting enabled
*/
const enableFunction = (loader, playlistID, changePlaylistFn) => (enable) => {
const playlist = loader.main.playlists[playlistID];
const incompatible = isIncompatible(playlist);
const currentlyEnabled = isEnabled(playlist);
if (typeof enable === 'undefined') {
return currentlyEnabled;
}
if (enable) {
delete playlist.disabled;
} else {
playlist.disabled = true;
}
const metadata = {
renditionInfo: {
id: playlistID,
bandwidth: playlist.attributes.BANDWIDTH,
resolution: playlist.attributes.RESOLUTION,
codecs: playlist.attributes.CODECS
},
cause: 'fast-quality'
};
if (enable !== currentlyEnabled && !incompatible) {
// Ensure the outside world knows about our changes
if (enable) {
// call fast quality change only when the playlist is enabled
changePlaylistFn(playlist);
loader.trigger({ type: 'renditionenabled', metadata});
} else {
loader.trigger({ type: 'renditiondisabled', metadata});
}
}
return enable;
};
/**
* The representation object encapsulates the publicly visible information
* in a media playlist along with a setter/getter-type function (enabled)
* for changing the enabled-state of a particular playlist entry
*
* @class Representation
*/
class Representation {
constructor(vhsHandler, playlist, id) {
const {
playlistController_: pc
} = vhsHandler;
const qualityChangeFunction = pc.fastQualityChange_.bind(pc);
// some playlist attributes are optional
if (playlist.attributes) {
const resolution = playlist.attributes.RESOLUTION;
this.width = resolution && resolution.width;
this.height = resolution && resolution.height;
this.bandwidth = playlist.attributes.BANDWIDTH;
this.frameRate = playlist.attributes['FRAME-RATE'];
}
this.codecs = codecsForPlaylist(pc.main(), playlist);
this.playlist = playlist;
// The id is simply the ordinality of the media playlist
// within the main playlist
this.id = id;
// Partially-apply the enableFunction to create a playlist-
// specific variant
this.enabled = enableFunction(
vhsHandler.playlists,
playlist.id,
qualityChangeFunction
);
}
}
/**
* A mixin function that adds the `representations` api to an instance
* of the VhsHandler class
*
* @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
* representation API into
*/
const renditionSelectionMixin = function(vhsHandler) {
// Add a single API-specific function to the VhsHandler instance
vhsHandler.representations = () => {
const main = vhsHandler.playlistController_.main();
const playlists = isAudioOnly(main) ?
vhsHandler.playlistController_.getAudioTrackPlaylists_() :
main.playlists;
if (!playlists) {
return [];
}
return playlists
.filter((media) => !isIncompatible(media))
.map((e, i) => new Representation(vhsHandler, e, e.id));
};
};
export default renditionSelectionMixin;
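// Illustrative usage: once mixed in, renditions can be listed and toggled
// through the public API (the 720p cutoff here is a hypothetical policy):
//
//   renditionSelectionMixin(vhsHandler);
//   vhsHandler.representations().forEach((rendition) => {
//     rendition.enabled(typeof rendition.height === 'number' ?
//       rendition.height <= 720 : true);
//   });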

VApp/node_modules/@videojs/http-streaming/src/resolve-url.js generated vendored Normal file

@@ -0,0 +1,35 @@
/**
* @file resolve-url.js - Handling how URLs are resolved and manipulated
*/
import _resolveUrl from '@videojs/vhs-utils/es/resolve-url.js';
export const resolveUrl = _resolveUrl;
/**
* If the xhr request was redirected, return the responseURL, otherwise,
* return the original url.
*
* @api private
*
* @param {string} url - a url being requested
* @param {XMLHttpRequest} req - xhr request result
*
* @return {string}
*/
export const resolveManifestRedirect = (url, req) => {
// To understand how the responseURL below is set and generated:
// - https://fetch.spec.whatwg.org/#concept-response-url
// - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
if (
req &&
req.responseURL &&
url !== req.responseURL
) {
return req.responseURL;
}
return url;
};
export default resolveUrl;
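// Illustrative example: relative segment URIs resolve against the manifest
// URL, and a redirected manifest request keeps the final URL:
//
//   resolveUrl('https://example.com/hls/main.m3u8', 'segment-0.ts');
//   // => 'https://example.com/hls/segment-0.ts'
//   resolveManifestRedirect('https://example.com/main.m3u8', {
//     responseURL: 'https://cdn.example.com/main.m3u8'
//   }); // => 'https://cdn.example.com/main.m3u8'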

File diff suppressed because it is too large

VApp/node_modules/@videojs/http-streaming/src/segment-transmuxer.js generated vendored Normal file

@@ -0,0 +1,292 @@
import TransmuxWorker from 'worker!./transmuxer-worker.js';
import videojs from 'video.js';
import { segmentInfoPayload } from './segment-loader';
export const handleData_ = (event, transmuxedData, callback) => {
const {
type,
initSegment,
captions,
captionStreams,
metadata,
videoFrameDtsTime,
videoFramePtsTime
} = event.data.segment;
transmuxedData.buffer.push({
captions,
captionStreams,
metadata
});
const boxes = event.data.segment.boxes || {
data: event.data.segment.data
};
const result = {
type,
// cast ArrayBuffer to TypedArray
data: new Uint8Array(
boxes.data,
boxes.data.byteOffset,
boxes.data.byteLength
),
initSegment: new Uint8Array(
initSegment.data,
initSegment.byteOffset,
initSegment.byteLength
)
};
if (typeof videoFrameDtsTime !== 'undefined') {
result.videoFrameDtsTime = videoFrameDtsTime;
}
if (typeof videoFramePtsTime !== 'undefined') {
result.videoFramePtsTime = videoFramePtsTime;
}
callback(result);
};
export const handleDone_ = ({
transmuxedData,
callback
}) => {
// Previously we only returned data on data events,
// not on done events. Clear out the buffer to keep that consistent.
transmuxedData.buffer = [];
// all buffers should have been flushed from the muxer, so start processing anything we
// have received
callback(transmuxedData);
};
export const handleGopInfo_ = (event, transmuxedData) => {
transmuxedData.gopInfo = event.data.gopInfo;
};
export const processTransmux = (options) => {
const {
transmuxer,
bytes,
audioAppendStart,
gopsToAlignWith,
remux,
onData,
onTrackInfo,
onAudioTimingInfo,
onVideoTimingInfo,
onVideoSegmentTimingInfo,
onAudioSegmentTimingInfo,
onId3,
onCaptions,
onDone,
onEndedTimeline,
onTransmuxerLog,
isEndOfTimeline,
segment,
triggerSegmentEventFn
} = options;
const transmuxedData = {
buffer: []
};
let waitForEndedTimelineEvent = isEndOfTimeline;
const handleMessage = (event) => {
if (transmuxer.currentTransmux !== options) {
// disposed
return;
}
if (event.data.action === 'data') {
handleData_(event, transmuxedData, onData);
}
if (event.data.action === 'trackinfo') {
onTrackInfo(event.data.trackInfo);
}
if (event.data.action === 'gopInfo') {
handleGopInfo_(event, transmuxedData);
}
if (event.data.action === 'audioTimingInfo') {
onAudioTimingInfo(event.data.audioTimingInfo);
}
if (event.data.action === 'videoTimingInfo') {
onVideoTimingInfo(event.data.videoTimingInfo);
}
if (event.data.action === 'videoSegmentTimingInfo') {
onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
}
if (event.data.action === 'audioSegmentTimingInfo') {
onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
}
if (event.data.action === 'id3Frame') {
onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
}
if (event.data.action === 'caption') {
onCaptions(event.data.caption);
}
if (event.data.action === 'endedtimeline') {
waitForEndedTimelineEvent = false;
onEndedTimeline();
}
if (event.data.action === 'log') {
onTransmuxerLog(event.data.log);
}
// wait for the transmuxed event since we may have audio and video
if (event.data.type !== 'transmuxed') {
return;
}
// If the "endedtimeline" event has not yet fired, and this segment represents the end
// of a timeline, that means there may still be data events before the segment
// processing can be considered complete. In that case, the final event should be
// an "endedtimeline" event with the type "transmuxed."
if (waitForEndedTimelineEvent) {
return;
}
transmuxer.onmessage = null;
handleDone_({
transmuxedData,
callback: onDone
});
/* eslint-disable no-use-before-define */
dequeue(transmuxer);
/* eslint-enable */
};
const handleError = () => {
const error = {
message: 'Received an error message from the transmuxer worker',
metadata: {
errorType: videojs.Error.StreamingFailedToTransmuxSegment,
segmentInfo: segmentInfoPayload({segment})
}
};
onDone(null, error);
};
transmuxer.onmessage = handleMessage;
transmuxer.onerror = handleError;
if (audioAppendStart) {
transmuxer.postMessage({
action: 'setAudioAppendStart',
appendStart: audioAppendStart
});
}
// allow empty arrays to be passed to clear out GOPs
if (Array.isArray(gopsToAlignWith)) {
transmuxer.postMessage({
action: 'alignGopsWith',
gopsToAlignWith
});
}
if (typeof remux !== 'undefined') {
transmuxer.postMessage({
action: 'setRemux',
remux
});
}
if (bytes.byteLength) {
const buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
const byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
triggerSegmentEventFn({ type: 'segmenttransmuxingstart', segment });
transmuxer.postMessage(
{
action: 'push',
// Send the typed-array of data as an ArrayBuffer so that
// it can be sent as a "Transferable" and avoid the costly
// memory copy
data: buffer,
// To recreate the original typed-array, we need information
// about what portion of the ArrayBuffer it was a view into
byteOffset,
byteLength: bytes.byteLength
},
[ buffer ]
);
}
if (isEndOfTimeline) {
transmuxer.postMessage({ action: 'endTimeline' });
}
// even if we didn't push any bytes, we have to make sure we flush in case we reached
// the end of the segment
transmuxer.postMessage({ action: 'flush' });
};
export const dequeue = (transmuxer) => {
transmuxer.currentTransmux = null;
if (transmuxer.transmuxQueue.length) {
transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
if (typeof transmuxer.currentTransmux === 'function') {
transmuxer.currentTransmux();
} else {
processTransmux(transmuxer.currentTransmux);
}
}
};
export const processAction = (transmuxer, action) => {
transmuxer.postMessage({ action });
dequeue(transmuxer);
};
export const enqueueAction = (action, transmuxer) => {
if (!transmuxer.currentTransmux) {
transmuxer.currentTransmux = action;
processAction(transmuxer, action);
return;
}
transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
};
export const reset = (transmuxer) => {
enqueueAction('reset', transmuxer);
};
export const endTimeline = (transmuxer) => {
enqueueAction('endTimeline', transmuxer);
};
export const transmux = (options) => {
if (!options.transmuxer.currentTransmux) {
options.transmuxer.currentTransmux = options;
processTransmux(options);
return;
}
options.transmuxer.transmuxQueue.push(options);
};
export const createTransmuxer = (options) => {
const transmuxer = new TransmuxWorker();
transmuxer.currentTransmux = null;
transmuxer.transmuxQueue = [];
const term = transmuxer.terminate;
transmuxer.terminate = () => {
transmuxer.currentTransmux = null;
transmuxer.transmuxQueue.length = 0;
return term.call(transmuxer);
};
transmuxer.postMessage({action: 'init', options});
return transmuxer;
};
export default {
reset,
endTimeline,
transmux,
createTransmuxer
};
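// Illustrative usage sketch (callback bodies are hypothetical stubs; a real
// caller, such as the segment loader, wires all of the event callbacks):
//
//   const transmuxer = createTransmuxer({}); // worker init options (assumed)
//   transmux({
//     transmuxer,
//     bytes,                          // assumed: Uint8Array of MPEG-TS data
//     isEndOfTimeline: false,
//     remux: true,
//     onData: (result) => { /* append result.data (fMP4) to a buffer */ },
//     onDone: () => { /* segment fully transmuxed */ },
//     onTrackInfo: () => {}, onAudioTimingInfo: () => {},
//     onVideoTimingInfo: () => {}, onVideoSegmentTimingInfo: () => {},
//     onAudioSegmentTimingInfo: () => {}, onId3: () => {},
//     onCaptions: () => {}, onTransmuxerLog: () => {},
//     triggerSegmentEventFn: () => {} // required whenever bytes are pushed
//   });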

VApp/node_modules/@videojs/http-streaming/src/source-updater.js generated vendored Normal file

@@ -0,0 +1,894 @@
/**
* @file source-updater.js
*/
import videojs from 'video.js';
import logger from './util/logger';
import noop from './util/noop';
import { bufferIntersection } from './ranges.js';
import {getMimeForCodec} from '@videojs/vhs-utils/es/codecs.js';
import window from 'global/window';
import toTitleCase from './util/to-title-case.js';
import { QUOTA_EXCEEDED_ERR } from './error-codes';
import {createTimeRanges, bufferedRangesToString} from './util/vjs-compat';
const bufferTypes = [
'video',
'audio'
];
const updating = (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
return (sourceBuffer && sourceBuffer.updating) || sourceUpdater.queuePending[type];
};
const nextQueueIndexOfType = (type, queue) => {
for (let i = 0; i < queue.length; i++) {
const queueEntry = queue[i];
if (queueEntry.type === 'mediaSource') {
// If the next entry is a media source entry (uses multiple source buffers), block
// processing to allow it to go through first.
return null;
}
if (queueEntry.type === type) {
return i;
}
}
return null;
};
const shiftQueue = (type, sourceUpdater) => {
if (sourceUpdater.queue.length === 0) {
return;
}
let queueIndex = 0;
let queueEntry = sourceUpdater.queue[queueIndex];
if (queueEntry.type === 'mediaSource') {
if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
sourceUpdater.queue.shift();
queueEntry.action(sourceUpdater);
if (queueEntry.doneFn) {
queueEntry.doneFn();
}
// Only specific source buffer actions must wait for async updateend events. Media
// Source actions process synchronously. Therefore, both audio and video source
// buffers are now clear to process the next queue entries.
shiftQueue('audio', sourceUpdater);
shiftQueue('video', sourceUpdater);
}
// Media Source actions require both source buffers, so if the media source action
// couldn't process yet (because one or both source buffers are busy), block other
// queue actions until both are available and the media source action can process.
return;
}
if (type === 'mediaSource') {
// If the queue was shifted by a media source action (this happens when pushing a
// media source action onto the queue), then it wasn't from an updateend event from an
// audio or video source buffer, so there's no change from previous state, and no
// processing should be done.
return;
}
// Media source queue entries don't need to consider whether the source updater is
// started (i.e., source buffers are created) as they don't need the source buffers, but
// source buffer queue entries do.
if (
!sourceUpdater.ready() ||
sourceUpdater.mediaSource.readyState === 'closed' ||
updating(type, sourceUpdater)
) {
return;
}
if (queueEntry.type !== type) {
queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
if (queueIndex === null) {
// Either there's no queue entry that uses this source buffer type in the queue, or
// there's a media source queue entry before the next entry of this type, in which
// case wait for that action to process first.
return;
}
queueEntry = sourceUpdater.queue[queueIndex];
}
sourceUpdater.queue.splice(queueIndex, 1);
// Keep a record that this source buffer type is in use.
//
// The queue pending operation must be set before the action is performed in the event
// that the action results in a synchronous event that is acted upon. For instance, if
// an exception is thrown that can be handled, it's possible that new actions will be
// appended to an empty queue and immediately executed, but would not have the correct
// pending information if this property was set after the action was performed.
sourceUpdater.queuePending[type] = queueEntry;
queueEntry.action(type, sourceUpdater);
if (!queueEntry.doneFn) {
// synchronous operation, process next entry
sourceUpdater.queuePending[type] = null;
shiftQueue(type, sourceUpdater);
return;
}
};
const cleanupBuffer = (type, sourceUpdater) => {
const buffer = sourceUpdater[`${type}Buffer`];
const titleType = toTitleCase(type);
if (!buffer) {
return;
}
buffer.removeEventListener('updateend', sourceUpdater[`on${titleType}UpdateEnd_`]);
buffer.removeEventListener('error', sourceUpdater[`on${titleType}Error_`]);
sourceUpdater.codecs[type] = null;
sourceUpdater[`${type}Buffer`] = null;
};
const inSourceBuffers = (mediaSource, sourceBuffer) => mediaSource && sourceBuffer &&
Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
const actions = {
appendBuffer: (bytes, segmentInfo, onError) => (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
// can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`Appending segment ${segmentInfo.mediaIndex}'s ${bytes.length} bytes to ${type}Buffer`);
try {
sourceBuffer.appendBuffer(bytes);
} catch (e) {
sourceUpdater.logger_(`Error with code ${e.code} ` +
(e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') +
`when appending segment ${segmentInfo.mediaIndex} to ${type}Buffer`);
sourceUpdater.queuePending[type] = null;
onError(e);
}
},
remove: (start, end) => (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
// can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`Removing ${start} to ${end} from ${type}Buffer`);
try {
sourceBuffer.remove(start, end);
} catch (e) {
sourceUpdater.logger_(`Remove ${start} to ${end} from ${type}Buffer failed`);
}
},
timestampOffset: (offset) => (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
// can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`Setting ${type}timestampOffset to ${offset}`);
sourceBuffer.timestampOffset = offset;
},
callback: (callback) => (type, sourceUpdater) => {
callback();
},
endOfStream: (error) => (sourceUpdater) => {
if (sourceUpdater.mediaSource.readyState !== 'open') {
return;
}
sourceUpdater.logger_(`Calling mediaSource endOfStream(${error || ''})`);
try {
sourceUpdater.mediaSource.endOfStream(error);
} catch (e) {
videojs.log.warn('Failed to call media source endOfStream', e);
}
},
duration: (duration) => (sourceUpdater) => {
sourceUpdater.logger_(`Setting mediaSource duration to ${duration}`);
try {
sourceUpdater.mediaSource.duration = duration;
} catch (e) {
videojs.log.warn('Failed to set media source duration', e);
}
},
abort: () => (type, sourceUpdater) => {
if (sourceUpdater.mediaSource.readyState !== 'open') {
return;
}
const sourceBuffer = sourceUpdater[`${type}Buffer`];
// can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`calling abort on ${type}Buffer`);
try {
sourceBuffer.abort();
} catch (e) {
videojs.log.warn(`Failed to abort on ${type}Buffer`, e);
}
},
addSourceBuffer: (type, codec) => (sourceUpdater) => {
const titleType = toTitleCase(type);
const mime = getMimeForCodec(codec);
sourceUpdater.logger_(`Adding ${type}Buffer with codec ${codec} to mediaSource`);
const sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
sourceBuffer.addEventListener('updateend', sourceUpdater[`on${titleType}UpdateEnd_`]);
sourceBuffer.addEventListener('error', sourceUpdater[`on${titleType}Error_`]);
sourceUpdater.codecs[type] = codec;
sourceUpdater[`${type}Buffer`] = sourceBuffer;
},
removeSourceBuffer: (type) => (sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
cleanupBuffer(type, sourceUpdater);
// can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`Removing ${type}Buffer with codec ${sourceUpdater.codecs[type]} from mediaSource`);
try {
sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
} catch (e) {
videojs.log.warn(`Failed to removeSourceBuffer ${type}Buffer`, e);
}
},
changeType: (codec) => (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
const mime = getMimeForCodec(codec);
// can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
// do not update codec if we don't need to.
// Only update if we change the codec base.
// For example, going from avc1.640028 to avc1.64001f does not require a changeType call.
const newCodecBase = codec.substring(0, codec.indexOf('.'));
const oldCodec = sourceUpdater.codecs[type];
const oldCodecBase = oldCodec.substring(0, oldCodec.indexOf('.'));
if (oldCodecBase === newCodecBase) {
return;
}
const metadata = {
codecsChangeInfo: {
from: oldCodec,
to: codec
}
};
sourceUpdater.trigger({ type: 'codecschange', metadata });
sourceUpdater.logger_(`changing ${type}Buffer codec from ${oldCodec} to ${codec}`);
// check if change to the provided type is supported
try {
sourceBuffer.changeType(mime);
sourceUpdater.codecs[type] = codec;
} catch (e) {
metadata.errorType = videojs.Error.StreamingCodecsChangeError;
metadata.error = e;
e.metadata = metadata;
sourceUpdater.error_ = e;
sourceUpdater.trigger('error');
videojs.log.warn(`Failed to changeType on ${type}Buffer`, e);
}
}
};
const pushQueue = ({type, sourceUpdater, action, doneFn, name}) => {
sourceUpdater.queue.push({
type,
action,
doneFn,
name
});
shiftQueue(type, sourceUpdater);
};
const onUpdateend = (type, sourceUpdater) => (e) => {
// Although there should, in theory, be a pending action for any updateend received,
// there are some actions that may trigger updateend events without set definitions in
// the w3c spec. For instance, setting the duration on the media source may trigger
// updateend events on source buffers. This does not appear to be in the spec. As such,
// if we encounter an updateend without a corresponding pending action from our queue
// for that source buffer type, process the next action.
const bufferedRangesForType = sourceUpdater[`${type}Buffered`]();
const descriptiveString = bufferedRangesToString(bufferedRangesForType);
sourceUpdater.logger_(`received "updateend" event for ${type} Source Buffer: `, descriptiveString);
if (sourceUpdater.queuePending[type]) {
const doneFn = sourceUpdater.queuePending[type].doneFn;
sourceUpdater.queuePending[type] = null;
if (doneFn) {
// if there's an error, report it
doneFn(sourceUpdater[`${type}Error_`]);
}
}
shiftQueue(type, sourceUpdater);
};
/**
* A queue of callbacks to be serialized and applied when a
* MediaSource and its associated SourceBuffers are not in the
* updating state. It is used by the segment loader to update the
* underlying SourceBuffers when new data is loaded, for instance.
*
* @class SourceUpdater
* @param {MediaSource} mediaSource the MediaSource to create SourceBuffers on
*/
export default class SourceUpdater extends videojs.EventTarget {
constructor(mediaSource) {
super();
this.mediaSource = mediaSource;
this.sourceopenListener_ = () => shiftQueue('mediaSource', this);
this.mediaSource.addEventListener('sourceopen', this.sourceopenListener_);
this.logger_ = logger('SourceUpdater');
// initial timestamp offset is 0
this.audioTimestampOffset_ = 0;
this.videoTimestampOffset_ = 0;
this.queue = [];
this.queuePending = {
audio: null,
video: null
};
this.delayedAudioAppendQueue_ = [];
this.videoAppendQueued_ = false;
this.codecs = {};
this.onVideoUpdateEnd_ = onUpdateend('video', this);
this.onAudioUpdateEnd_ = onUpdateend('audio', this);
this.onVideoError_ = (e) => {
// used for debugging
this.videoError_ = e;
};
this.onAudioError_ = (e) => {
// used for debugging
this.audioError_ = e;
};
this.createdSourceBuffers_ = false;
this.initializedEme_ = false;
this.triggeredReady_ = false;
}
initializedEme() {
this.initializedEme_ = true;
this.triggerReady();
}
hasCreatedSourceBuffers() {
// if false, likely waiting on one of the segment loaders to get enough data to create
// source buffers
return this.createdSourceBuffers_;
}
hasInitializedAnyEme() {
return this.initializedEme_;
}
ready() {
return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
}
createSourceBuffers(codecs) {
if (this.hasCreatedSourceBuffers()) {
// already created them before
return;
}
// the initial addOrChangeSourceBuffers will always
// add two source buffers.
this.addOrChangeSourceBuffers(codecs);
this.createdSourceBuffers_ = true;
this.trigger('createdsourcebuffers');
this.triggerReady();
}
triggerReady() {
// only allow ready to be triggered once, this prevents the case
// where:
// 1. we trigger createdsourcebuffers
// 2. IE 11 synchronously initializes eme
// 3. the synchronous initialization causes us to trigger ready
// 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
if (this.ready() && !this.triggeredReady_) {
this.triggeredReady_ = true;
this.trigger('ready');
}
}
/**
* Add a type of source buffer to the media source.
*
* @param {string} type
* The type of source buffer to add.
*
* @param {string} codec
* The codec to add the source buffer with.
*/
addSourceBuffer(type, codec) {
pushQueue({
type: 'mediaSource',
sourceUpdater: this,
action: actions.addSourceBuffer(type, codec),
name: 'addSourceBuffer'
});
}
/**
* call abort on a source buffer.
*
* @param {string} type
* The type of source buffer to call abort on.
*/
abort(type) {
pushQueue({
type,
sourceUpdater: this,
action: actions.abort(type),
name: 'abort'
});
}
/**
* Call removeSourceBuffer and remove a specific type
* of source buffer on the mediaSource.
*
* @param {string} type
* The type of source buffer to remove.
*/
removeSourceBuffer(type) {
if (!this.canRemoveSourceBuffer()) {
videojs.log.error('removeSourceBuffer is not supported!');
return;
}
pushQueue({
type: 'mediaSource',
sourceUpdater: this,
action: actions.removeSourceBuffer(type),
name: 'removeSourceBuffer'
});
}
/**
* Whether or not the removeSourceBuffer function is supported
* on the mediaSource.
*
* @return {boolean}
* if removeSourceBuffer can be called.
*/
canRemoveSourceBuffer() {
// As of Firefox 83 removeSourceBuffer
// throws errors, so we report that it does not support this.
return !videojs.browser.IS_FIREFOX && window.MediaSource &&
window.MediaSource.prototype &&
typeof window.MediaSource.prototype.removeSourceBuffer === 'function';
}
/**
* Whether or not the changeType function is supported
* on our SourceBuffers.
*
* @return {boolean}
* if changeType can be called.
*/
static canChangeType() {
return window.SourceBuffer &&
window.SourceBuffer.prototype &&
typeof window.SourceBuffer.prototype.changeType === 'function';
}
/**
* Whether or not the changeType function is supported
* on our SourceBuffers.
*
* @return {boolean}
* if changeType can be called.
*/
canChangeType() {
return this.constructor.canChangeType();
}
/**
* Call the changeType function on a source buffer, given the code and type.
*
* @param {string} type
* The type of source buffer to call changeType on.
*
* @param {string} codec
* The codec string to change type with on the source buffer.
*/
changeType(type, codec) {
if (!this.canChangeType()) {
videojs.log.error('changeType is not supported!');
return;
}
pushQueue({
type,
sourceUpdater: this,
action: actions.changeType(codec),
name: 'changeType'
});
}
/**
* Add source buffers with a codec or, if they are already created,
* call changeType on the existing source buffers.
*
* @param {Object} codecs
* Codecs to switch to
*/
addOrChangeSourceBuffers(codecs) {
if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
}
Object.keys(codecs).forEach((type) => {
const codec = codecs[type];
if (!this.hasCreatedSourceBuffers()) {
return this.addSourceBuffer(type, codec);
}
if (this.canChangeType()) {
this.changeType(type, codec);
}
});
}
/**
* Queue an update to append an ArrayBuffer.
*
* @param {Object} options object containing the segment type and bytes to append
* @param {Function} doneFn the function to call when done
* @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
*/
appendBuffer(options, doneFn) {
const {segmentInfo, type, bytes} = options;
this.processedAppend_ = true;
if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
this.delayedAudioAppendQueue_.push([options, doneFn]);
this.logger_(`delayed audio append of ${bytes.length} until video append`);
return;
}
// In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
// not be fired. This means that the queue will be blocked until the next action
// taken by the segment-loader. Provide a mechanism for segment-loader to handle
// these errors by calling the doneFn with the specific error.
const onError = doneFn;
pushQueue({
type,
sourceUpdater: this,
action: actions.appendBuffer(bytes, segmentInfo || {mediaIndex: -1}, onError),
doneFn,
name: 'appendBuffer'
});
if (type === 'video') {
this.videoAppendQueued_ = true;
if (!this.delayedAudioAppendQueue_.length) {
return;
}
const queue = this.delayedAudioAppendQueue_.slice();
this.logger_(`queuing delayed audio ${queue.length} appendBuffers`);
this.delayedAudioAppendQueue_.length = 0;
queue.forEach((que) => {
this.appendBuffer.apply(this, que);
});
}
}
/**
* Get the audio buffer's buffered timerange.
*
* @return {TimeRange}
* The audio buffer's buffered time range
*/
audioBuffered() {
// no media source/source buffer or it isn't in the media sources
// source buffer list
if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
return createTimeRanges();
}
return this.audioBuffer.buffered ? this.audioBuffer.buffered :
createTimeRanges();
}
/**
* Get the video buffer's buffered timerange.
*
* @return {TimeRange}
* The video buffer's buffered time range
*/
videoBuffered() {
// no media source/source buffer or it isn't in the media sources
// source buffer list
if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
return createTimeRanges();
}
return this.videoBuffer.buffered ? this.videoBuffer.buffered :
createTimeRanges();
}
/**
* Get a combined video/audio buffer's buffered timerange.
*
* @return {TimeRange}
* the combined time range
*/
buffered() {
const video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
const audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
if (audio && !video) {
return this.audioBuffered();
}
if (video && !audio) {
return this.videoBuffered();
}
return bufferIntersection(this.audioBuffered(), this.videoBuffered());
}
/**
* Add a callback to the queue that will set duration on the mediaSource.
*
* @param {number} duration
* The duration to set
*
* @param {Function} [doneFn]
* function to run after duration has been set.
*/
setDuration(duration, doneFn = noop) {
// In order to set the duration on the media source, it's necessary to wait for all
// source buffers to no longer be updating. "If the updating attribute equals true on
// any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
// abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
pushQueue({
type: 'mediaSource',
sourceUpdater: this,
action: actions.duration(duration),
name: 'duration',
doneFn
});
}
/**
* Add a mediaSource endOfStream call to the queue
*
* @param {Error} [error]
* Call endOfStream with an error
*
* @param {Function} [doneFn]
* A function that should be called when the
* endOfStream call has finished.
*/
endOfStream(error = null, doneFn = noop) {
if (typeof error !== 'string') {
error = undefined;
}
// In order to set the duration on the media source, it's necessary to wait for all
// source buffers to no longer be updating. "If the updating attribute equals true on
// any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
// abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
pushQueue({
type: 'mediaSource',
sourceUpdater: this,
action: actions.endOfStream(error),
name: 'endOfStream',
doneFn
});
}
/**
* Queue an update to remove a time range from the buffer.
*
* @param {number} start where to start the removal
* @param {number} end where to end the removal
* @param {Function} [done=noop] optional callback to be executed when the remove
* operation is complete
* @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
*/
removeAudio(start, end, done = noop) {
if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
done();
return;
}
pushQueue({
type: 'audio',
sourceUpdater: this,
action: actions.remove(start, end),
doneFn: done,
name: 'remove'
});
}
/**
* Queue an update to remove a time range from the buffer.
*
* @param {number} start where to start the removal
* @param {number} end where to end the removal
* @param {Function} [done=noop] optional callback to be executed when the remove
* operation is complete
* @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
*/
removeVideo(start, end, done = noop) {
if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
done();
return;
}
pushQueue({
type: 'video',
sourceUpdater: this,
action: actions.remove(start, end),
doneFn: done,
name: 'remove'
});
}
/**
* Whether the underlying sourceBuffer is updating or not
*
* @return {boolean} the updating status of the SourceBuffer
*/
updating() {
// the audio/video source buffer is updating
if (updating('audio', this) || updating('video', this)) {
return true;
}
return false;
}
/**
* Set/get the timestampoffset on the audio SourceBuffer
*
* @return {number} the timestamp offset
*/
audioTimestampOffset(offset) {
if (typeof offset !== 'undefined' &&
this.audioBuffer &&
// no point in updating if it's the same
this.audioTimestampOffset_ !== offset) {
pushQueue({
type: 'audio',
sourceUpdater: this,
action: actions.timestampOffset(offset),
name: 'timestampOffset'
});
this.audioTimestampOffset_ = offset;
}
return this.audioTimestampOffset_;
}
/**
* Set/get the timestampoffset on the video SourceBuffer
*
* @return {number} the timestamp offset
*/
videoTimestampOffset(offset) {
if (typeof offset !== 'undefined' &&
this.videoBuffer &&
// no point in updating if it's the same
this.videoTimestampOffset_ !== offset) {
pushQueue({
type: 'video',
sourceUpdater: this,
action: actions.timestampOffset(offset),
name: 'timestampOffset'
});
this.videoTimestampOffset_ = offset;
}
return this.videoTimestampOffset_;
}
/**
* Add a function to the queue that will be called
* when it is its turn to run in the audio queue.
*
* @param {Function} callback
* The callback to queue.
*/
audioQueueCallback(callback) {
if (!this.audioBuffer) {
return;
}
pushQueue({
type: 'audio',
sourceUpdater: this,
action: actions.callback(callback),
name: 'callback'
});
}
/**
* Add a function to the queue that will be called
* when it is its turn to run in the video queue.
*
* @param {Function} callback
* The callback to queue.
*/
videoQueueCallback(callback) {
if (!this.videoBuffer) {
return;
}
pushQueue({
type: 'video',
sourceUpdater: this,
action: actions.callback(callback),
name: 'callback'
});
}
/**
* dispose of the source updater and the underlying sourceBuffer
*/
dispose() {
this.trigger('dispose');
bufferTypes.forEach((type) => {
this.abort(type);
if (this.canRemoveSourceBuffer()) {
this.removeSourceBuffer(type);
} else {
this[`${type}QueueCallback`](() => cleanupBuffer(type, this));
}
});
this.videoAppendQueued_ = false;
this.delayedAudioAppendQueue_.length = 0;
if (this.sourceopenListener_) {
this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
}
this.off();
}
}
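// Usage sketch for the queueing API above (illustrative; `sourceUpdater`
// stands in for an instance of the class above, with an audio SourceBuffer
// already created):
//
//   sourceUpdater.audioTimestampOffset(10); // queues a timestampOffset update
//   sourceUpdater.audioTimestampOffset();   // => 10 (the cached value)
//   sourceUpdater.removeAudio(0, 5, () => console.log('audio range removed'));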

View File

@@ -0,0 +1,692 @@
/**
* @file sync-controller.js
*/
import {sumDurations, getPartsAndSegments} from './playlist';
import videojs from 'video.js';
import logger from './util/logger';
import {MediaSequenceSync, DependantMediaSequenceSync} from './util/media-sequence-sync';
// The maximum gap allowed between two media sequence tags when trying to
// synchronize expired playlist segments.
// The max media sequence diff is 48 hours of live stream
// content with two second segments. Anything larger than that
// will likely be invalid.
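// Worked check: 86400 media sequences * 2 seconds per segment = 172800 seconds = 48 hours.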
const MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;
export const syncPointStrategies = [
// Stategy "VOD": Handle the VOD-case where the sync-point is *always*
// the equivalence display-time 0 === segment-index 0
{
name: 'VOD',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
if (duration !== Infinity) {
const syncPoint = {
time: 0,
segmentIndex: 0,
partIndex: null
};
return syncPoint;
}
return null;
}
},
{
name: 'MediaSequence',
/**
* run media sequence strategy
*
* @param {SyncController} syncController
* @param {Object} playlist
* @param {number} duration
* @param {number} currentTimeline
* @param {number} currentTime
* @param {string} type
*/
run: (syncController, playlist, duration, currentTimeline, currentTime, type) => {
const mediaSequenceSync = syncController.getMediaSequenceSync(type);
if (!mediaSequenceSync) {
return null;
}
if (!mediaSequenceSync.isReliable) {
return null;
}
const syncInfo = mediaSequenceSync.getSyncInfoForTime(currentTime);
if (!syncInfo) {
return null;
}
return {
time: syncInfo.start,
partIndex: syncInfo.partIndex,
segmentIndex: syncInfo.segmentIndex
};
}
},
// Stategy "ProgramDateTime": We have a program-date-time tag in this playlist
{
name: 'ProgramDateTime',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
return null;
}
let syncPoint = null;
let lastDistance = null;
const partsAndSegments = getPartsAndSegments(playlist);
currentTime = currentTime || 0;
for (let i = 0; i < partsAndSegments.length; i++) {
// start from the end and loop backwards for live
// or start from the front and loop forwards for non-live
const index = (playlist.endList || currentTime === 0) ? i : partsAndSegments.length - (i + 1);
const partAndSegment = partsAndSegments[index];
const segment = partAndSegment.segment;
const datetimeMapping =
syncController.timelineToDatetimeMappings[segment.timeline];
if (!datetimeMapping || !segment.dateTimeObject) {
continue;
}
const segmentTime = segment.dateTimeObject.getTime() / 1000;
let start = segmentTime + datetimeMapping;
// take part duration into account.
if (segment.parts && typeof partAndSegment.partIndex === 'number') {
for (let z = 0; z < partAndSegment.partIndex; z++) {
start += segment.parts[z].duration;
}
}
const distance = Math.abs(currentTime - start);
// Once the distance begins to increase, or if distance is 0, we have passed
// currentTime and can stop looking for better candidates
if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
break;
}
lastDistance = distance;
syncPoint = {
time: start,
segmentIndex: partAndSegment.segmentIndex,
partIndex: partAndSegment.partIndex
};
}
return syncPoint;
}
},
// Stategy "Segment": We have a known time mapping for a timeline and a
// segment in the current timeline with timing data
{
name: 'Segment',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
let syncPoint = null;
let lastDistance = null;
currentTime = currentTime || 0;
const partsAndSegments = getPartsAndSegments(playlist);
for (let i = 0; i < partsAndSegments.length; i++) {
// start from the end and loop backwards for live
// or start from the front and loop forwards for non-live
const index = (playlist.endList || currentTime === 0) ? i : partsAndSegments.length - (i + 1);
const partAndSegment = partsAndSegments[index];
const segment = partAndSegment.segment;
const start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;
if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
const distance = Math.abs(currentTime - start);
// Once the distance begins to increase, we have passed
// currentTime and can stop looking for better candidates
if (lastDistance !== null && lastDistance < distance) {
break;
}
if (!syncPoint || lastDistance === null || lastDistance >= distance) {
lastDistance = distance;
syncPoint = {
time: start,
segmentIndex: partAndSegment.segmentIndex,
partIndex: partAndSegment.partIndex
};
}
}
}
return syncPoint;
}
},
// Stategy "Discontinuity": We have a discontinuity with a known
// display-time
{
name: 'Discontinuity',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
let syncPoint = null;
currentTime = currentTime || 0;
if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
let lastDistance = null;
for (let i = 0; i < playlist.discontinuityStarts.length; i++) {
const segmentIndex = playlist.discontinuityStarts[i];
const discontinuity = playlist.discontinuitySequence + i + 1;
const discontinuitySync = syncController.discontinuities[discontinuity];
if (discontinuitySync) {
const distance = Math.abs(currentTime - discontinuitySync.time);
// Once the distance begins to increase, we have passed
// currentTime and can stop looking for better candidates
if (lastDistance !== null && lastDistance < distance) {
break;
}
if (!syncPoint || lastDistance === null || lastDistance >= distance) {
lastDistance = distance;
syncPoint = {
time: discontinuitySync.time,
segmentIndex,
partIndex: null
};
}
}
}
}
return syncPoint;
}
},
// Stategy "Playlist": We have a playlist with a known mapping of
// segment index to display time
{
name: 'Playlist',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
if (playlist.syncInfo) {
const syncPoint = {
time: playlist.syncInfo.time,
segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
partIndex: null
};
return syncPoint;
}
return null;
}
}
];
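// A minimal sketch of how a single strategy is exercised (illustrative only;
// SyncController.runStrategies_ below drives all of them). Each strategy returns
// either null or a sync point of the shape { time, segmentIndex, partIndex }:
//
//   const vod = syncPointStrategies.find(({ name }) => name === 'VOD');
//   vod.run(syncController, playlist, 60, 0, 0);
//   // => { time: 0, segmentIndex: 0, partIndex: null }, since duration !== Infinity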
export default class SyncController extends videojs.EventTarget {
constructor(options = {}) {
super();
    // ...for syncing across variants
this.timelines = [];
this.discontinuities = [];
this.timelineToDatetimeMappings = {};
    // TODO: this map should only be available for HLS, since only HLS has MediaSequence.
    // For some reason this map helps with syncing between quality switches for MPEG-DASH as well.
    // Moreover, if we disable this map for MPEG-DASH, quality switching breaks.
    // MPEG-DASH should have its own separate sync strategy.
const main = new MediaSequenceSync();
const audio = new DependantMediaSequenceSync(main);
const vtt = new DependantMediaSequenceSync(main);
this.mediaSequenceStorage_ = {main, audio, vtt};
this.logger_ = logger('SyncController');
}
/**
*
* @param {string} loaderType
* @return {MediaSequenceSync|null}
*/
getMediaSequenceSync(loaderType) {
return this.mediaSequenceStorage_[loaderType] || null;
}
/**
* Find a sync-point for the playlist specified
*
* A sync-point is defined as a known mapping from display-time to
* a segment-index in the current playlist.
*
* @param {Playlist} playlist
* The playlist that needs a sync-point
* @param {number} duration
* Duration of the MediaSource (Infinite if playing a live source)
* @param {number} currentTimeline
* The last timeline from which a segment was loaded
* @param {number} currentTime
* Current player's time
* @param {string} type
* Segment loader type
* @return {Object}
* A sync-point object
*/
getSyncPoint(playlist, duration, currentTimeline, currentTime, type) {
// Always use VOD sync point for VOD
if (duration !== Infinity) {
const vodSyncPointStrategy = syncPointStrategies.find(({ name }) => name === 'VOD');
return vodSyncPointStrategy.run(this, playlist, duration);
}
const syncPoints = this.runStrategies_(
playlist,
duration,
currentTimeline,
currentTime,
type
);
if (!syncPoints.length) {
// Signal that we need to attempt to get a sync-point manually
// by fetching a segment in the playlist and constructing
// a sync-point from that information
return null;
}
    // If we have an exact match, just return it instead of finding the nearest distance
for (const syncPointInfo of syncPoints) {
const { syncPoint, strategy } = syncPointInfo;
const { segmentIndex, time } = syncPoint;
if (segmentIndex < 0) {
continue;
}
const selectedSegment = playlist.segments[segmentIndex];
const start = time;
const end = start + selectedSegment.duration;
      this.logger_(`Strategy: ${strategy}. Current time: ${currentTime}. Selected segment: ${segmentIndex}. Time: [${start} -> ${end}]`);
if (currentTime >= start && currentTime < end) {
this.logger_('Found sync point with exact match: ', syncPoint);
return syncPoint;
}
}
// Now find the sync-point that is closest to the currentTime because
// that should result in the most accurate guess about which segment
// to fetch
return this.selectSyncPoint_(syncPoints, { key: 'time', value: currentTime });
}
/**
* Calculate the amount of time that has expired off the playlist during playback
*
* @param {Playlist} playlist
* Playlist object to calculate expired from
* @param {number} duration
   * Duration of the MediaSource (Infinity if playing a live source)
* @return {number|null}
* The amount of time that has expired off the playlist during playback. Null
* if no sync-points for the playlist can be found.
*/
getExpiredTime(playlist, duration) {
if (!playlist || !playlist.segments) {
return null;
}
const syncPoints = this.runStrategies_(
playlist,
duration,
playlist.discontinuitySequence,
0
);
// Without sync-points, there is not enough information to determine the expired time
if (!syncPoints.length) {
return null;
}
const syncPoint = this.selectSyncPoint_(syncPoints, {
key: 'segmentIndex',
value: 0
});
// If the sync-point is beyond the start of the playlist, we want to subtract the
// duration from index 0 to syncPoint.segmentIndex instead of adding.
if (syncPoint.segmentIndex > 0) {
syncPoint.time *= -1;
}
return Math.abs(syncPoint.time + sumDurations({
defaultDuration: playlist.targetDuration,
durationList: playlist.segments,
startIndex: syncPoint.segmentIndex,
endIndex: 0
}));
}
/**
* Runs each sync-point strategy and returns a list of sync-points returned by the
* strategies
*
* @private
* @param {Playlist} playlist
* The playlist that needs a sync-point
* @param {number} duration
* Duration of the MediaSource (Infinity if playing a live source)
* @param {number} currentTimeline
* The last timeline from which a segment was loaded
* @param {number} currentTime
* Current player's time
* @param {string} type
* Segment loader type
* @return {Array}
* A list of sync-point objects
*/
runStrategies_(playlist, duration, currentTimeline, currentTime, type) {
const syncPoints = [];
    // Try to find a sync-point by utilizing various strategies...
for (let i = 0; i < syncPointStrategies.length; i++) {
const strategy = syncPointStrategies[i];
const syncPoint = strategy.run(
this,
playlist,
duration,
currentTimeline,
currentTime,
type
);
if (syncPoint) {
syncPoint.strategy = strategy.name;
syncPoints.push({
strategy: strategy.name,
syncPoint
});
}
}
return syncPoints;
}
/**
* Selects the sync-point nearest the specified target
*
* @private
* @param {Array} syncPoints
* List of sync-points to select from
* @param {Object} target
* Object specifying the property and value we are targeting
* @param {string} target.key
* Specifies the property to target. Must be either 'time' or 'segmentIndex'
* @param {number} target.value
* The value to target for the specified key.
* @return {Object}
* The sync-point nearest the target
*/
selectSyncPoint_(syncPoints, target) {
let bestSyncPoint = syncPoints[0].syncPoint;
let bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
let bestStrategy = syncPoints[0].strategy;
for (let i = 1; i < syncPoints.length; i++) {
const newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
if (newDistance < bestDistance) {
bestDistance = newDistance;
bestSyncPoint = syncPoints[i].syncPoint;
bestStrategy = syncPoints[i].strategy;
}
}
this.logger_(`syncPoint for [${target.key}: ${target.value}] chosen with strategy` +
` [${bestStrategy}]: [time:${bestSyncPoint.time},` +
` segmentIndex:${bestSyncPoint.segmentIndex}` +
(typeof bestSyncPoint.partIndex === 'number' ? `,partIndex:${bestSyncPoint.partIndex}` : '') +
']');
return bestSyncPoint;
}
/**
* Save any meta-data present on the segments when segments leave
* the live window to the playlist to allow for synchronization at the
* playlist level later.
*
* @param {Playlist} oldPlaylist - The previous active playlist
* @param {Playlist} newPlaylist - The updated and most current playlist
*/
saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
const mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
// Ignore large media sequence gaps
if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
videojs.log.warn(`Not saving expired segment info. Media sequence gap ${mediaSequenceDiff} is too large.`);
return;
}
    // When a segment expires from the playlist and it has a start time,
    // save that information as a possible sync-point reference for the future
for (let i = mediaSequenceDiff - 1; i >= 0; i--) {
const lastRemovedSegment = oldPlaylist.segments[i];
if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
newPlaylist.syncInfo = {
mediaSequence: oldPlaylist.mediaSequence + i,
time: lastRemovedSegment.start
};
this.logger_(`playlist refresh sync: [time:${newPlaylist.syncInfo.time},` +
` mediaSequence: ${newPlaylist.syncInfo.mediaSequence}]`);
this.trigger('syncinfoupdate');
break;
}
}
}
/**
   * Save the mapping from the playlist's ProgramDateTime to display time. This should
   * only happen before segments start to load.
*
* @param {Playlist} playlist - The currently active playlist
*/
setDateTimeMappingForStart(playlist) {
// It's possible for the playlist to be updated before playback starts, meaning time
// zero is not yet set. If, during these playlist refreshes, a discontinuity is
// crossed, then the old time zero mapping (for the prior timeline) would be retained
// unless the mappings are cleared.
this.timelineToDatetimeMappings = {};
if (playlist.segments &&
playlist.segments.length &&
playlist.segments[0].dateTimeObject) {
const firstSegment = playlist.segments[0];
const playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
}
}
/**
* Calculates and saves timeline mappings, playlist sync info, and segment timing values
* based on the latest timing information.
*
* @param {Object} options
* Options object
* @param {SegmentInfo} options.segmentInfo
* The current active request information
* @param {boolean} options.shouldSaveTimelineMapping
* If there's a timeline change, determines if the timeline mapping should be
* saved for timeline mapping and program date time mappings.
*/
saveSegmentTimingInfo({ segmentInfo, shouldSaveTimelineMapping }) {
const didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(
segmentInfo,
segmentInfo.timingInfo,
shouldSaveTimelineMapping
);
const segment = segmentInfo.segment;
if (didCalculateSegmentTimeMapping) {
this.saveDiscontinuitySyncInfo_(segmentInfo);
// If the playlist does not have sync information yet, record that information
// now with segment timing information
if (!segmentInfo.playlist.syncInfo) {
segmentInfo.playlist.syncInfo = {
mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
time: segment.start
};
}
}
const dateTime = segment.dateTimeObject;
if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
}
}
timestampOffsetForTimeline(timeline) {
if (typeof this.timelines[timeline] === 'undefined') {
return null;
}
return this.timelines[timeline].time;
}
mappingForTimeline(timeline) {
if (typeof this.timelines[timeline] === 'undefined') {
return null;
}
return this.timelines[timeline].mapping;
}
/**
* Use the "media time" for a segment to generate a mapping to "display time" and
* save that display time to the segment.
*
* @private
* @param {SegmentInfo} segmentInfo
* The current active request information
* @param {Object} timingInfo
* The start and end time of the current segment in "media time"
* @param {boolean} shouldSaveTimelineMapping
* If there's a timeline change, determines if the timeline mapping should be
* saved in timelines.
* @return {boolean}
* Returns false if segment time mapping could not be calculated
*/
calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
// TODO: remove side effects
const segment = segmentInfo.segment;
const part = segmentInfo.part;
let mappingObj = this.timelines[segmentInfo.timeline];
let start;
let end;
if (typeof segmentInfo.timestampOffset === 'number') {
mappingObj = {
time: segmentInfo.startOfSegment,
mapping: segmentInfo.startOfSegment - timingInfo.start
};
if (shouldSaveTimelineMapping) {
this.timelines[segmentInfo.timeline] = mappingObj;
this.trigger('timestampoffset');
this.logger_(`time mapping for timeline ${segmentInfo.timeline}: ` +
`[time: ${mappingObj.time}] [mapping: ${mappingObj.mapping}]`);
}
start = segmentInfo.startOfSegment;
end = timingInfo.end + mappingObj.mapping;
} else if (mappingObj) {
start = timingInfo.start + mappingObj.mapping;
end = timingInfo.end + mappingObj.mapping;
} else {
return false;
}
if (part) {
part.start = start;
part.end = end;
}
// If we don't have a segment start yet or the start value we got
// is less than our current segment.start value, save a new start value.
// We have to do this because parts will have segment timing info saved
// multiple times and we want segment start to be the earliest part start
// value for that segment.
if (!segment.start || start < segment.start) {
segment.start = start;
}
segment.end = end;
return true;
}
/**
   * Each time we have a discontinuity in the playlist, attempt to calculate the location
   * in display time of the start of the discontinuity and save that. We also save an
   * accuracy value so that we keep the values with the most accuracy (closest to 0).
*
* @private
* @param {SegmentInfo} segmentInfo - The current active request information
*/
saveDiscontinuitySyncInfo_(segmentInfo) {
const playlist = segmentInfo.playlist;
const segment = segmentInfo.segment;
    // If the current segment is a discontinuity then we know exactly where
    // the range starts, and its accuracy is 0 (greater accuracy values
    // mean more approximation)
if (segment.discontinuity) {
this.discontinuities[segment.timeline] = {
time: segment.start,
accuracy: 0
};
} else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
// Search for future discontinuities that we can provide better timing
// information for and save that information for sync purposes
for (let i = 0; i < playlist.discontinuityStarts.length; i++) {
const segmentIndex = playlist.discontinuityStarts[i];
const discontinuity = playlist.discontinuitySequence + i + 1;
const mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
const accuracy = Math.abs(mediaIndexDiff);
if (!this.discontinuities[discontinuity] ||
this.discontinuities[discontinuity].accuracy > accuracy) {
let time;
if (mediaIndexDiff < 0) {
time = segment.start - sumDurations({
defaultDuration: playlist.targetDuration,
durationList: playlist.segments,
startIndex: segmentInfo.mediaIndex,
endIndex: segmentIndex
});
} else {
time = segment.end + sumDurations({
defaultDuration: playlist.targetDuration,
durationList: playlist.segments,
startIndex: segmentInfo.mediaIndex + 1,
endIndex: segmentIndex
});
}
this.discontinuities[discontinuity] = {
time,
accuracy
};
}
}
}
}
dispose() {
this.trigger('dispose');
this.off();
}
}

View File

@@ -0,0 +1,55 @@
import videojs from 'video.js';
/**
* The TimelineChangeController acts as a source for segment loaders to listen for and
* keep track of latest and pending timeline changes. This is useful to ensure proper
 * sync, as each loader may need to consider what timeline the other loader is
 * on before making changes which could impact the other loader's media.
*
* @class TimelineChangeController
* @extends videojs.EventTarget
*/
export default class TimelineChangeController extends videojs.EventTarget {
constructor() {
super();
this.pendingTimelineChanges_ = {};
this.lastTimelineChanges_ = {};
}
clearPendingTimelineChange(type) {
this.pendingTimelineChanges_[type] = null;
this.trigger('pendingtimelinechange');
}
pendingTimelineChange({ type, from, to }) {
if (typeof from === 'number' && typeof to === 'number') {
this.pendingTimelineChanges_[type] = { type, from, to };
this.trigger('pendingtimelinechange');
}
return this.pendingTimelineChanges_[type];
}
lastTimelineChange({ type, from, to }) {
if (typeof from === 'number' && typeof to === 'number') {
this.lastTimelineChanges_[type] = { type, from, to };
delete this.pendingTimelineChanges_[type];
const metadata = {
timelineChangeInfo: {
from,
to
}
};
this.trigger({ type: 'timelinechange', metadata });
}
return this.lastTimelineChanges_[type];
}
dispose() {
this.trigger('dispose');
this.pendingTimelineChanges_ = {};
this.lastTimelineChanges_ = {};
this.off();
}
}
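// Usage sketch (illustrative; the loader type and timeline values are assumptions):
//
//   const controller = new TimelineChangeController();
//   controller.on('timelinechange', (event) => console.log(event.metadata.timelineChangeInfo));
//   controller.pendingTimelineChange({ type: 'main', from: 0, to: 1 });
//   controller.lastTimelineChange({ type: 'main', from: 0, to: 1 });
//   // the pending change for 'main' is now cleared and 'timelinechange' has fired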

View File

@@ -0,0 +1,435 @@
/* global self */
/**
* @file transmuxer-worker.js
*/
/**
* videojs-contrib-media-sources
*
* Copyright (c) 2015 Brightcove
* All rights reserved.
*
* Handles communication between the browser-world and the mux.js
* transmuxer running inside of a WebWorker by exposing a simple
* message-based interface to a Transmuxer object.
*/
import {Transmuxer} from 'mux.js/lib/mp4/transmuxer';
import CaptionParser from 'mux.js/lib/mp4/caption-parser';
import WebVttParser from 'mux.js/lib/mp4/webvtt-parser';
import mp4probe from 'mux.js/lib/mp4/probe';
import tsInspector from 'mux.js/lib/tools/ts-inspector.js';
import {
ONE_SECOND_IN_TS,
secondsToVideoTs,
videoTsToSeconds
} from 'mux.js/lib/utils/clock';
/**
* Re-emits transmuxer events by converting them into messages to the
* world outside the worker.
*
 * @param {Object} self the worker scope to post messages on
 * @param {Object} transmuxer the transmuxer to wire events on
* @private
*/
const wireTransmuxerEvents = function(self, transmuxer) {
transmuxer.on('data', function(segment) {
// transfer ownership of the underlying ArrayBuffer
// instead of doing a copy to save memory
// ArrayBuffers are transferable but generic TypedArrays are not
// @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
const initArray = segment.initSegment;
segment.initSegment = {
data: initArray.buffer,
byteOffset: initArray.byteOffset,
byteLength: initArray.byteLength
};
const typedArray = segment.data;
segment.data = typedArray.buffer;
self.postMessage({
action: 'data',
segment,
byteOffset: typedArray.byteOffset,
byteLength: typedArray.byteLength
}, [segment.data]);
});
transmuxer.on('done', function(data) {
self.postMessage({ action: 'done' });
});
transmuxer.on('gopInfo', function(gopInfo) {
self.postMessage({
action: 'gopInfo',
gopInfo
});
});
transmuxer.on('videoSegmentTimingInfo', function(timingInfo) {
const videoSegmentTimingInfo = {
start: {
decode: videoTsToSeconds(timingInfo.start.dts),
presentation: videoTsToSeconds(timingInfo.start.pts)
},
end: {
decode: videoTsToSeconds(timingInfo.end.dts),
presentation: videoTsToSeconds(timingInfo.end.pts)
},
baseMediaDecodeTime: videoTsToSeconds(timingInfo.baseMediaDecodeTime)
};
if (timingInfo.prependedContentDuration) {
videoSegmentTimingInfo.prependedContentDuration = videoTsToSeconds(timingInfo.prependedContentDuration);
}
self.postMessage({
action: 'videoSegmentTimingInfo',
videoSegmentTimingInfo
});
});
transmuxer.on('audioSegmentTimingInfo', function(timingInfo) {
// Note that all times for [audio/video]SegmentTimingInfo events are in video clock
const audioSegmentTimingInfo = {
start: {
decode: videoTsToSeconds(timingInfo.start.dts),
presentation: videoTsToSeconds(timingInfo.start.pts)
},
end: {
decode: videoTsToSeconds(timingInfo.end.dts),
presentation: videoTsToSeconds(timingInfo.end.pts)
},
baseMediaDecodeTime: videoTsToSeconds(timingInfo.baseMediaDecodeTime)
};
if (timingInfo.prependedContentDuration) {
audioSegmentTimingInfo.prependedContentDuration =
videoTsToSeconds(timingInfo.prependedContentDuration);
}
self.postMessage({
action: 'audioSegmentTimingInfo',
audioSegmentTimingInfo
});
});
transmuxer.on('id3Frame', function(id3Frame) {
self.postMessage({
action: 'id3Frame',
id3Frame
});
});
transmuxer.on('caption', function(caption) {
self.postMessage({
action: 'caption',
caption
});
});
transmuxer.on('trackinfo', function(trackInfo) {
self.postMessage({
action: 'trackinfo',
trackInfo
});
});
transmuxer.on('audioTimingInfo', function(audioTimingInfo) {
// convert to video TS since we prioritize video time over audio
self.postMessage({
action: 'audioTimingInfo',
audioTimingInfo: {
start: videoTsToSeconds(audioTimingInfo.start),
end: videoTsToSeconds(audioTimingInfo.end)
}
});
});
transmuxer.on('videoTimingInfo', function(videoTimingInfo) {
self.postMessage({
action: 'videoTimingInfo',
videoTimingInfo: {
start: videoTsToSeconds(videoTimingInfo.start),
end: videoTsToSeconds(videoTimingInfo.end)
}
});
});
transmuxer.on('log', function(log) {
self.postMessage({action: 'log', log});
});
};
/**
* All incoming messages route through this hash. If no function exists
* to handle an incoming message, then we ignore the message.
*
* @class MessageHandlers
* @param {Object} options the options to initialize with
*/
class MessageHandlers {
constructor(self, options) {
this.options = options || {};
this.self = self;
this.init();
}
/**
* initialize our web worker and wire all the events.
*/
init() {
if (this.transmuxer) {
this.transmuxer.dispose();
}
this.transmuxer = new Transmuxer(this.options);
wireTransmuxerEvents(this.self, this.transmuxer);
}
pushMp4Captions(data) {
if (!this.captionParser) {
this.captionParser = new CaptionParser();
this.captionParser.init();
}
const segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
const parsed = this.captionParser.parse(
segment,
data.trackIds,
data.timescales
);
this.self.postMessage({
action: 'mp4Captions',
captions: parsed && parsed.captions || [],
logs: parsed && parsed.logs || [],
data: segment.buffer
}, [segment.buffer]);
}
/**
* Initializes the WebVttParser and passes the init segment.
*
* @param {Uint8Array} data mp4 boxed WebVTT init segment data
*/
initMp4WebVttParser(data) {
if (!this.webVttParser) {
this.webVttParser = new WebVttParser();
}
const segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
// Set the timescale for the parser.
// This can be called repeatedly in order to set and re-set the timescale.
this.webVttParser.init(segment);
}
/**
* Parse an mp4 encapsulated WebVTT segment and return an array of cues.
*
* @param {Uint8Array} data a text/webvtt segment
* @return {Object[]} an array of parsed cue objects
*/
getMp4WebVttText(data) {
if (!this.webVttParser) {
// timescale might not be set yet if the parser is created before an init segment is passed.
// default timescale is 90k.
this.webVttParser = new WebVttParser();
}
const segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
const parsed = this.webVttParser.parseSegment(segment);
this.self.postMessage({
action: 'getMp4WebVttText',
mp4VttCues: parsed || [],
data: segment.buffer
}, [segment.buffer]);
}
probeMp4StartTime({timescales, data}) {
const startTime = mp4probe.startTime(timescales, data);
this.self.postMessage({
action: 'probeMp4StartTime',
startTime,
data
}, [data.buffer]);
}
probeMp4Tracks({data}) {
const tracks = mp4probe.tracks(data);
this.self.postMessage({
action: 'probeMp4Tracks',
tracks,
data
}, [data.buffer]);
}
/**
* Probes an mp4 segment for EMSG boxes containing ID3 data.
* https://aomediacodec.github.io/id3-emsg/
*
* @param {Uint8Array} data segment data
* @param {number} offset segment start time
* @return {Object[]} an array of ID3 frames
*/
probeEmsgID3({data, offset}) {
const id3Frames = mp4probe.getEmsgID3(data, offset);
this.self.postMessage({
action: 'probeEmsgID3',
id3Frames,
emsgData: data
}, [data.buffer]);
}
/**
   * Probe an mpeg2-ts segment to determine the start time of the segment in its
   * internal "media time," as well as whether it contains video and/or audio.
*
* @private
   * @param {Uint8Array} bytes - segment bytes
   * @param {number} baseStartTime
   *        Relative reference timestamp used when adjusting frame timestamps for rollover.
   *        This value should be in seconds, as it's converted to a 90kHz clock within the
   *        function body.
* @return {Object} The start time of the current segment in "media time" as well as
* whether it contains video and/or audio
*/
probeTs({data, baseStartTime}) {
const tsStartTime = (typeof baseStartTime === 'number' && !isNaN(baseStartTime)) ?
(baseStartTime * ONE_SECOND_IN_TS) :
void 0;
const timeInfo = tsInspector.inspect(data, tsStartTime);
let result = null;
if (timeInfo) {
result = {
// each type's time info comes back as an array of 2 times, start and end
hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
};
if (result.hasVideo) {
result.videoStart = timeInfo.video[0].ptsTime;
}
if (result.hasAudio) {
result.audioStart = timeInfo.audio[0].ptsTime;
}
}
this.self.postMessage({
action: 'probeTs',
result,
data
}, [data.buffer]);
}
clearAllMp4Captions() {
if (this.captionParser) {
this.captionParser.clearAllCaptions();
}
}
clearParsedMp4Captions() {
if (this.captionParser) {
this.captionParser.clearParsedCaptions();
}
}
/**
* Adds data (a ts segment) to the start of the transmuxer pipeline for
* processing.
*
* @param {ArrayBuffer} data data to push into the muxer
*/
push(data) {
// Cast array buffer to correct type for transmuxer
const segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
this.transmuxer.push(segment);
}
/**
* Recreate the transmuxer so that the next segment added via `push`
   * starts with a fresh transmuxer.
*/
reset() {
this.transmuxer.reset();
}
/**
* Set the value that will be used as the `baseMediaDecodeTime` time for the
* next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
* set relative to the first based on the PTS values.
*
* @param {Object} data used to set the timestamp offset in the muxer
*/
setTimestampOffset(data) {
const timestampOffset = data.timestampOffset || 0;
this.transmuxer.setBaseMediaDecodeTime(Math.round(secondsToVideoTs(timestampOffset)));
}
setAudioAppendStart(data) {
this.transmuxer.setAudioAppendStart(Math.ceil(secondsToVideoTs(data.appendStart)));
}
setRemux(data) {
this.transmuxer.setRemux(data.remux);
}
/**
   * Forces the pipeline to finish processing the last segment and emit its
* results.
*
* @param {Object} data event data, not really used
*/
flush(data) {
this.transmuxer.flush();
// transmuxed done action is fired after both audio/video pipelines are flushed
self.postMessage({
action: 'done',
type: 'transmuxed'
});
}
endTimeline() {
this.transmuxer.endTimeline();
// transmuxed endedtimeline action is fired after both audio/video pipelines end their
// timelines
self.postMessage({
action: 'endedtimeline',
type: 'transmuxed'
});
}
alignGopsWith(data) {
this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
}
}
/**
* Our web worker interface so that things can talk to mux.js
 * that will be running in a web worker. The scope is passed to this by
* webworkify.
*
* @param {Object} self the scope for the web worker
*/
self.onmessage = function(event) {
if (event.data.action === 'init' && event.data.options) {
this.messageHandlers = new MessageHandlers(self, event.data.options);
return;
}
if (!this.messageHandlers) {
this.messageHandlers = new MessageHandlers(self);
}
if (event.data && event.data.action && event.data.action !== 'init') {
if (this.messageHandlers[event.data.action]) {
this.messageHandlers[event.data.action](event.data);
}
}
};
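// A sketch of the main-thread side of this message protocol (illustrative;
// the actual wiring lives elsewhere in this repository). Actions map to
// MessageHandlers methods by name:
//
//   worker.postMessage({ action: 'init', options: { remux: true } });
//   worker.postMessage({ action: 'setTimestampOffset', timestampOffset: 10 });
//   worker.postMessage(
//     { action: 'push', data: buffer, byteOffset: 0, byteLength: buffer.byteLength },
//     [buffer]
//   );
//   worker.postMessage({ action: 'flush' });
//   worker.onmessage = ({ data }) => {
//     if (data.action === 'data') { /* append data.segment to a SourceBuffer */ }
//   };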

View File

@@ -0,0 +1,128 @@
/**
 * @file - codecs.js - Handles tasks regarding codec strings, such as translating
 * legacy codec strings or parsing codec strings into objects that can be examined.
*/
import {
translateLegacyCodec,
parseCodecs,
codecsFromDefault
} from '@videojs/vhs-utils/es/codecs.js';
import logger from './logger.js';
const logFn = logger('CodecUtils');
/**
* Returns a set of codec strings parsed from the playlist or the default
* codec strings if no codecs were specified in the playlist
*
* @param {Playlist} media the current media playlist
* @return {Object} an object with the video and audio codecs
*/
export const getCodecs = function(media) {
// if the codecs were explicitly specified, use them instead of the
// defaults
const mediaAttributes = media.attributes || {};
if (mediaAttributes.CODECS) {
return parseCodecs(mediaAttributes.CODECS);
}
};
export const isMaat = (main, media) => {
const mediaAttributes = media.attributes || {};
return main && main.mediaGroups && main.mediaGroups.AUDIO &&
mediaAttributes.AUDIO &&
main.mediaGroups.AUDIO[mediaAttributes.AUDIO];
};
export const isMuxed = (main, media) => {
if (!isMaat(main, media)) {
return true;
}
const mediaAttributes = media.attributes || {};
const audioGroup = main.mediaGroups.AUDIO[mediaAttributes.AUDIO];
for (const groupId in audioGroup) {
// If an audio group has a URI (the case for HLS, as HLS will use external playlists),
// or there are listed playlists (the case for DASH, as the manifest will have already
// provided all of the details necessary to generate the audio playlist, as opposed to
// HLS' externally requested playlists), then the content is demuxed.
if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
return true;
}
}
return false;
};
export const unwrapCodecList = function(codecList) {
const codecs = {};
codecList.forEach(({mediaType, type, details}) => {
codecs[mediaType] = codecs[mediaType] || [];
codecs[mediaType].push(translateLegacyCodec(`${type}${details}`));
});
Object.keys(codecs).forEach(function(mediaType) {
if (codecs[mediaType].length > 1) {
logFn(`multiple ${mediaType} codecs found as attributes: ${codecs[mediaType].join(', ')}. Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.`);
codecs[mediaType] = null;
return;
}
codecs[mediaType] = codecs[mediaType][0];
});
return codecs;
};
export const codecCount = function(codecObj) {
let count = 0;
if (codecObj.audio) {
count++;
}
if (codecObj.video) {
count++;
}
return count;
};
/**
* Calculates the codec strings for a working configuration of
* SourceBuffers to play variant streams in a main playlist. If
* there is no possible working configuration, an empty object will be
* returned.
*
 * @param {Object} main the m3u8 object for the main playlist
 * @param {Object} media the m3u8 object for the variant playlist
* @return {Object} the codec strings.
*
* @private
*/
export const codecsForPlaylist = function(main, media) {
const mediaAttributes = media.attributes || {};
const codecInfo = unwrapCodecList(getCodecs(media) || []);
// HLS with multiple-audio tracks must always get an audio codec.
// Put another way, there is no way to have a video-only multiple-audio HLS!
if (isMaat(main, media) && !codecInfo.audio) {
if (!isMuxed(main, media)) {
// It is possible for codecs to be specified on the audio media group playlist but
// not on the rendition playlist. This is mostly the case for DASH, where audio and
// video are always separate (and separately specified).
const defaultCodecs = unwrapCodecList(codecsFromDefault(main, mediaAttributes.AUDIO) || []);
if (defaultCodecs.audio) {
codecInfo.audio = defaultCodecs.audio;
}
}
}
return codecInfo;
};
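// Usage sketch (hypothetical manifest objects, for illustration only):
//
//   const main = { mediaGroups: { AUDIO: {} } };
//   const media = { attributes: { CODECS: 'avc1.4d400d,mp4a.40.2' } };
//   codecsForPlaylist(main, media);
//   // => { video: 'avc1.4d400d', audio: 'mp4a.40.2' }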

View File

@@ -0,0 +1,88 @@
import {getId3Offset} from '@videojs/vhs-utils/es/id3-helpers';
import {detectContainerForBytes} from '@videojs/vhs-utils/es/containers';
import {stringToBytes, concatTypedArrays} from '@videojs/vhs-utils/es/byte-helpers';
import {callbackWrapper} from '../xhr';
import { getStreamingNetworkErrorMetadata } from '../error-codes';
// calls back if the request is readyState DONE
// which will only happen if the request is complete.
const callbackOnCompleted = (request, cb) => {
if (request.readyState === 4) {
return cb();
}
return;
};
const containerRequest = (uri, xhr, cb, requestType) => {
let bytes = [];
let id3Offset;
let finished = false;
const endRequestAndCallback = function(err, req, type, _bytes) {
req.abort();
finished = true;
return cb(err, req, type, _bytes);
};
const progressListener = function(error, request) {
if (finished) {
return;
}
if (error) {
error.metadata = getStreamingNetworkErrorMetadata({ requestType, request, error });
return endRequestAndCallback(error, request, '', bytes);
}
    // grab the new part of content that was just downloaded
const newPart = request.responseText.substring(
bytes && bytes.byteLength || 0,
request.responseText.length
);
// add that onto bytes
bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
id3Offset = id3Offset || getId3Offset(bytes);
// we need at least 10 bytes to determine a type
// or we need at least two bytes after an id3Offset
if (bytes.length < 10 || (id3Offset && bytes.length < id3Offset + 2)) {
return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));
}
const type = detectContainerForBytes(bytes);
// if this looks like a ts segment but we don't have enough data
// to see the second sync byte, wait until we have enough data
// before declaring it ts
if (type === 'ts' && bytes.length < 188) {
return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));
}
// this may be an unsynced ts segment
// wait for 376 bytes before detecting no container
if (!type && bytes.length < 376) {
return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));
}
return endRequestAndCallback(null, request, type, bytes);
};
const options = {
uri,
beforeSend(request) {
// this forces the browser to pass the bytes to us unprocessed
request.overrideMimeType('text/plain; charset=x-user-defined');
request.addEventListener('progress', function({total, loaded}) {
return callbackWrapper(request, null, {statusCode: request.status}, progressListener);
});
}
};
const request = xhr(options, function(error, response) {
return callbackWrapper(request, error, response, progressListener);
});
return request;
};
export default containerRequest;
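// Usage sketch (illustrative; `xhr` is the xhr factory used elsewhere in this
// repository and the URI is hypothetical):
//
//   containerRequest('https://example.com/segment.bin', xhr, (err, request, type, bytes) => {
//     // `type` is e.g. 'ts' or 'mp4', or '' when no container could be detected
//     console.log('detected container:', type);
//   }, 'segment');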

View File

@@ -0,0 +1,108 @@
export default class DateRangesStorage {
constructor() {
this.offset_ = null;
this.pendingDateRanges_ = new Map();
this.processedDateRanges_ = new Map();
}
setOffset(segments = []) {
// already set
if (this.offset_ !== null) {
return;
}
// no segment to process
if (!segments.length) {
return;
}
const [firstSegment] = segments;
// no program date time
if (firstSegment.programDateTime === undefined) {
return;
}
// Set offset as ProgramDateTime for the very first segment of the very first playlist load:
this.offset_ = firstSegment.programDateTime / 1000;
}
setPendingDateRanges(dateRanges = []) {
if (!dateRanges.length) {
return;
}
const [dateRange] = dateRanges;
const startTime = dateRange.startDate.getTime();
this.trimProcessedDateRanges_(startTime);
this.pendingDateRanges_ = dateRanges.reduce((map, pendingDateRange) => {
map.set(pendingDateRange.id, pendingDateRange);
return map;
}, new Map());
}
processDateRange(dateRange) {
this.pendingDateRanges_.delete(dateRange.id);
this.processedDateRanges_.set(dateRange.id, dateRange);
}
getDateRangesToProcess() {
if (this.offset_ === null) {
return [];
}
const dateRangeClasses = {};
const dateRangesToProcess = [];
this.pendingDateRanges_.forEach((dateRange, id) => {
if (this.processedDateRanges_.has(id)) {
return;
}
dateRange.startTime = (dateRange.startDate.getTime() / 1000) - this.offset_;
dateRange.processDateRange = () => this.processDateRange(dateRange);
dateRangesToProcess.push(dateRange);
if (!dateRange.class) {
return;
}
if (dateRangeClasses[dateRange.class]) {
const length = dateRangeClasses[dateRange.class].push(dateRange);
dateRange.classListIndex = length - 1;
} else {
dateRangeClasses[dateRange.class] = [dateRange];
dateRange.classListIndex = 0;
}
});
for (const dateRange of dateRangesToProcess) {
const classList = dateRangeClasses[dateRange.class] || [];
if (dateRange.endDate) {
dateRange.endTime = (dateRange.endDate.getTime() / 1000) - this.offset_;
} else if (dateRange.endOnNext && classList[dateRange.classListIndex + 1]) {
dateRange.endTime = classList[dateRange.classListIndex + 1].startTime;
} else if (dateRange.duration) {
dateRange.endTime = dateRange.startTime + dateRange.duration;
} else if (dateRange.plannedDuration) {
dateRange.endTime = dateRange.startTime + dateRange.plannedDuration;
} else {
dateRange.endTime = dateRange.startTime;
}
}
return dateRangesToProcess;
}
trimProcessedDateRanges_(startTime) {
const copy = new Map(this.processedDateRanges_);
copy.forEach((dateRange, id) => {
if (dateRange.startDate.getTime() < startTime) {
this.processedDateRanges_.delete(id);
}
});
}
}
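// Usage sketch (hypothetical parsed-playlist fields, for illustration only):
//
//   const storage = new DateRangesStorage();
//   storage.setOffset(playlist.segments); // anchors on the first segment's programDateTime
//   storage.setPendingDateRanges(playlist.dateRanges);
//   storage.getDateRangesToProcess().forEach((dateRange) => {
//     // startTime/endTime are now expressed in player seconds
//     dateRange.processDateRange();
//   });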

View File

@@ -0,0 +1,11 @@
export const debounce = (callback, wait) => {
let timeoutId = null;
return (...args) => {
clearTimeout(timeoutId);
timeoutId = setTimeout(() => {
callback.apply(null, args);
}, wait);
};
};
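// Usage sketch (`recalculateLayout` is a hypothetical callback):
//
//   const onResize = debounce(() => recalculateLayout(), 250);
//   window.addEventListener('resize', onResize);
//   // recalculateLayout runs once, 250ms after the last resize event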

View File

@@ -0,0 +1,119 @@
import { ONE_SECOND_IN_TS } from 'mux.js/lib/utils/clock';
/**
* Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
* front of current time.
*
* @param {Array} buffer
* The current buffer of gop information
* @param {number} currentTime
* The current time
* @param {Double} mapping
* Offset to map display time to stream presentation time
* @return {Array}
* List of gops considered safe to append over
*/
export const gopsSafeToAlignWith = (buffer, currentTime, mapping) => {
if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
return [];
}
// pts value for current time + 3 seconds to give a bit more wiggle room
const currentTimePts = Math.ceil((currentTime - mapping + 3) * ONE_SECOND_IN_TS);
let i;
for (i = 0; i < buffer.length; i++) {
if (buffer[i].pts > currentTimePts) {
break;
}
}
return buffer.slice(i);
};
/**
* Appends gop information (timing and byteLength) received by the transmuxer for the
* gops appended in the last call to appendBuffer
*
* @param {Array} buffer
* The current buffer of gop information
* @param {Array} gops
* List of new gop information
* @param {boolean} replace
* If true, replace the buffer with the new gop information. If false, append the
* new gop information to the buffer in the right location of time.
* @return {Array}
* Updated list of gop information
*/
export const updateGopBuffer = (buffer, gops, replace) => {
if (!gops.length) {
return buffer;
}
if (replace) {
// If we are in safe append mode, then completely overwrite the gop buffer
    // with the most recent appended data. This will make sure that when appending
// future segments, we only try to align with gops that are both ahead of current
// time and in the last segment appended.
return gops.slice();
}
const start = gops[0].pts;
let i = 0;
for (i; i < buffer.length; i++) {
if (buffer[i].pts >= start) {
break;
}
}
return buffer.slice(0, i).concat(gops);
};
/**
* Removes gop information in buffer that overlaps with provided start and end
*
* @param {Array} buffer
* The current buffer of gop information
* @param {Double} start
* position to start the remove at
* @param {Double} end
* position to end the remove at
* @param {Double} mapping
* Offset to map display time to stream presentation time
*/
export const removeGopBuffer = (buffer, start, end, mapping) => {
const startPts = Math.ceil((start - mapping) * ONE_SECOND_IN_TS);
const endPts = Math.ceil((end - mapping) * ONE_SECOND_IN_TS);
const updatedBuffer = buffer.slice();
let i = buffer.length;
while (i--) {
if (buffer[i].pts <= endPts) {
break;
}
}
if (i === -1) {
// no removal because end of remove range is before start of buffer
return updatedBuffer;
}
let j = i + 1;
while (j--) {
if (buffer[j].pts <= startPts) {
break;
}
}
// clamp remove range start to 0 index
j = Math.max(j, 0);
updatedBuffer.splice(j, i - j + 1);
return updatedBuffer;
};
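// A worked sketch of the pts math in gopsSafeToAlignWith (illustrative values):
// with mapping 0 and currentTime 10, only gops with pts above
// (10 + 3) * ONE_SECOND_IN_TS = 13 * 90000 = 1170000 are kept:
//
//   gopsSafeToAlignWith([{ pts: 900000 }, { pts: 1200000 }], 10, 0);
//   // => [{ pts: 1200000 }]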

View File

@@ -0,0 +1,11 @@
import videojs from 'video.js';
const logger = (source) => {
if (videojs.log.debug) {
return videojs.log.debug.bind(videojs, 'VHS:', `${source} >`);
}
return function() {};
};
export default logger;

View File

@@ -0,0 +1,289 @@
import {compactSegmentUrlDescription} from './segment';
class SyncInfo {
/**
* @param {number} start - media sequence start
* @param {number} end - media sequence end
* @param {number} segmentIndex - index for associated segment
* @param {number|null} [partIndex] - index for associated part
* @param {boolean} [appended] - appended indicator
*
*/
constructor({start, end, segmentIndex, partIndex = null, appended = false}) {
this.start_ = start;
this.end_ = end;
this.segmentIndex_ = segmentIndex;
this.partIndex_ = partIndex;
this.appended_ = appended;
}
isInRange(targetTime) {
return targetTime >= this.start && targetTime < this.end;
}
markAppended() {
this.appended_ = true;
}
resetAppendedStatus() {
this.appended_ = false;
}
get isAppended() {
return this.appended_;
}
get start() {
return this.start_;
}
get end() {
return this.end_;
}
get segmentIndex() {
return this.segmentIndex_;
}
get partIndex() {
return this.partIndex_;
}
}
class SyncInfoData {
/**
*
* @param {SyncInfo} segmentSyncInfo - sync info for a given segment
* @param {Array<SyncInfo>} [partsSyncInfo] - sync infos for a list of parts for a given segment
*/
constructor(segmentSyncInfo, partsSyncInfo = []) {
this.segmentSyncInfo_ = segmentSyncInfo;
this.partsSyncInfo_ = partsSyncInfo;
}
get segmentSyncInfo() {
return this.segmentSyncInfo_;
}
get partsSyncInfo() {
return this.partsSyncInfo_;
}
get hasPartsSyncInfo() {
return this.partsSyncInfo_.length > 0;
}
resetAppendStatus() {
this.segmentSyncInfo_.resetAppendedStatus();
this.partsSyncInfo_.forEach((partSyncInfo) => partSyncInfo.resetAppendedStatus());
}
}
export class MediaSequenceSync {
constructor() {
/**
* @type {Map<number, SyncInfoData>}
* @protected
*/
this.storage_ = new Map();
this.diagnostics_ = '';
this.isReliable_ = false;
this.start_ = -Infinity;
this.end_ = Infinity;
}
get start() {
return this.start_;
}
get end() {
return this.end_;
}
get diagnostics() {
return this.diagnostics_;
}
get isReliable() {
return this.isReliable_;
}
resetAppendedStatus() {
this.storage_.forEach((syncInfoData) => syncInfoData.resetAppendStatus());
}
/**
* update sync storage
*
* @param {Object} playlist
* @param {number} currentTime
*
* @return {void}
*/
update(playlist, currentTime) {
const { mediaSequence, segments } = playlist;
this.isReliable_ = this.isReliablePlaylist_(mediaSequence, segments);
if (!this.isReliable_) {
return;
}
return this.updateStorage_(
segments,
mediaSequence,
this.calculateBaseTime_(mediaSequence, segments, currentTime)
);
}
/**
* @param {number} targetTime
* @return {SyncInfo|null}
*/
getSyncInfoForTime(targetTime) {
for (const { segmentSyncInfo, partsSyncInfo } of this.storage_.values()) {
// Normal segment flow:
if (!partsSyncInfo.length) {
if (segmentSyncInfo.isInRange(targetTime)) {
return segmentSyncInfo;
}
} else {
// Low latency flow:
for (const partSyncInfo of partsSyncInfo) {
if (partSyncInfo.isInRange(targetTime)) {
return partSyncInfo;
}
}
}
}
return null;
}
getSyncInfoForMediaSequence(mediaSequence) {
return this.storage_.get(mediaSequence);
}
updateStorage_(segments, startingMediaSequence, startingTime) {
const newStorage = new Map();
let newDiagnostics = '\n';
let currentStart = startingTime;
let currentMediaSequence = startingMediaSequence;
this.start_ = currentStart;
segments.forEach((segment, segmentIndex) => {
const prevSyncInfoData = this.storage_.get(currentMediaSequence);
const segmentStart = currentStart;
const segmentEnd = segmentStart + segment.duration;
const segmentIsAppended = Boolean(prevSyncInfoData &&
prevSyncInfoData.segmentSyncInfo &&
prevSyncInfoData.segmentSyncInfo.isAppended);
const segmentSyncInfo = new SyncInfo({
start: segmentStart,
end: segmentEnd,
appended: segmentIsAppended,
segmentIndex
});
segment.syncInfo = segmentSyncInfo;
let currentPartStart = currentStart;
const partsSyncInfo = (segment.parts || []).map((part, partIndex) => {
const partStart = currentPartStart;
const partEnd = currentPartStart + part.duration;
const partIsAppended = Boolean(prevSyncInfoData &&
prevSyncInfoData.partsSyncInfo &&
prevSyncInfoData.partsSyncInfo[partIndex] &&
prevSyncInfoData.partsSyncInfo[partIndex].isAppended);
const partSyncInfo = new SyncInfo({
start: partStart,
end: partEnd,
appended: partIsAppended,
segmentIndex,
partIndex
});
currentPartStart = partEnd;
newDiagnostics += `Media Sequence: ${currentMediaSequence}.${partIndex} | Range: ${partStart} --> ${partEnd} | Appended: ${partIsAppended}\n`;
part.syncInfo = partSyncInfo;
return partSyncInfo;
});
newStorage.set(currentMediaSequence, new SyncInfoData(segmentSyncInfo, partsSyncInfo));
newDiagnostics += `${compactSegmentUrlDescription(segment.resolvedUri)} | Media Sequence: ${currentMediaSequence} | Range: ${segmentStart} --> ${segmentEnd} | Appended: ${segmentIsAppended}\n`;
currentMediaSequence++;
currentStart = segmentEnd;
});
this.end_ = currentStart;
this.storage_ = newStorage;
this.diagnostics_ = newDiagnostics;
}
calculateBaseTime_(mediaSequence, segments, fallback) {
if (!this.storage_.size) {
// Initial setup flow.
return 0;
}
if (this.storage_.has(mediaSequence)) {
// Normal flow.
return this.storage_.get(mediaSequence).segmentSyncInfo.start;
}
const minMediaSequenceFromStorage = Math.min(...this.storage_.keys());
// This case captures a race condition that can occur if we switch to a new media playlist that is out of date
// and still has an older Media Sequence. If this occurs, we extrapolate backwards to get the base time.
if (mediaSequence < minMediaSequenceFromStorage) {
const mediaSequenceDiff = minMediaSequenceFromStorage - mediaSequence;
let baseTime = this.storage_.get(minMediaSequenceFromStorage).segmentSyncInfo.start;
for (let i = 0; i < mediaSequenceDiff; i++) {
const segment = segments[i];
baseTime -= segment.duration;
}
return baseTime;
}
// Fallback flow.
// There is a gap between last recorded playlist and a new one received.
return fallback;
}
isReliablePlaylist_(mediaSequence, segments) {
return mediaSequence !== undefined && mediaSequence !== null && Array.isArray(segments) && segments.length;
}
}
export class DependantMediaSequenceSync extends MediaSequenceSync {
constructor(parent) {
super();
this.parent_ = parent;
}
calculateBaseTime_(mediaSequence, segments, fallback) {
if (!this.storage_.size) {
const info = this.parent_.getSyncInfoForMediaSequence(mediaSequence);
if (info) {
return info.segmentSyncInfo.start;
}
return 0;
}
return super.calculateBaseTime_(mediaSequence, segments, fallback);
}
}
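// Usage sketch (hypothetical playlist, for illustration only):
//
//   const sync = new MediaSequenceSync();
//   sync.update({ mediaSequence: 100, segments: [{ duration: 2 }, { duration: 2 }] }, 0);
//   sync.getSyncInfoForTime(3);
//   // => the SyncInfo for media sequence 101: range [2, 4), segmentIndex 1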

View File

@@ -0,0 +1 @@
export default function noop() {}

View File

@@ -0,0 +1,44 @@
/**
* Combine all segments into a single Uint8Array
*
* @param {Object} segmentObj
* @return {Uint8Array} concatenated bytes
* @private
*/
export const concatSegments = (segmentObj) => {
let offset = 0;
let tempBuffer;
if (segmentObj.bytes) {
tempBuffer = new Uint8Array(segmentObj.bytes);
// combine the individual segments into one large typed-array
segmentObj.segments.forEach((segment) => {
tempBuffer.set(segment, offset);
offset += segment.byteLength;
});
}
return tempBuffer;
};
/**
* Example:
* https://host.com/path1/path2/path3/segment.ts?arg1=val1
* -->
* path3/segment.ts
*
 * @param {string} resolvedUri the resolved segment URL
* @return {string}
*/
export function compactSegmentUrlDescription(resolvedUri) {
try {
return new URL(resolvedUri)
.pathname
.split('/')
.slice(-2)
.join('/');
} catch (e) {
return '';
}
}

View File

@@ -0,0 +1,41 @@
const shallowEqual = function(a, b) {
// if both are undefined
// or one or the other is undefined
// they are not equal
if ((!a && !b) || (!a && b) || (a && !b)) {
return false;
}
// they are the same object and thus, equal
if (a === b) {
return true;
}
// sort keys so we can make sure they have
// all the same keys later.
const akeys = Object.keys(a).sort();
const bkeys = Object.keys(b).sort();
// different number of keys, not equal
if (akeys.length !== bkeys.length) {
return false;
}
for (let i = 0; i < akeys.length; i++) {
const key = akeys[i];
// different sorted keys, not equal
if (key !== bkeys[i]) {
return false;
}
// different values, not equal
if (a[key] !== b[key]) {
return false;
}
}
return true;
};
export default shallowEqual;
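// Usage sketch:
//
//   shallowEqual({ a: 1, b: 2 }, { b: 2, a: 1 }); // => true (same keys and values)
//   shallowEqual({ a: 1 }, { a: '1' });           // => false (strict comparison)
//   shallowEqual(undefined, undefined);           // => false (by design, see above)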

View File

@@ -0,0 +1,9 @@
export const stringToArrayBuffer = (string) => {
const view = new Uint8Array(new ArrayBuffer(string.length));
for (let i = 0; i < string.length; i++) {
view[i] = string.charCodeAt(i);
}
return view.buffer;
};
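// Usage sketch:
//
//   new Uint8Array(stringToArrayBuffer('abc')); // => Uint8Array [97, 98, 99]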

View File

@@ -0,0 +1,8 @@
export const uint8ToUtf8 = (uintArray) =>
decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
export const bufferToHexString = (buffer) => {
const uInt8Buffer = new Uint8Array(buffer);
return Array.from(uInt8Buffer).map((byte) => byte.toString(16).padStart(2, '0')).join('');
};
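// Usage sketch:
//
//   uint8ToUtf8(new Uint8Array([104, 105]));            // => 'hi'
//   bufferToHexString(new Uint8Array([255, 0]).buffer); // => 'ff00'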

View File

@@ -0,0 +1,395 @@
/**
* @file text-tracks.js
*/
import window from 'global/window';
import videojs from 'video.js';
/**
* Create captions text tracks on video.js if they do not exist
*
* @param {Object} inbandTextTracks a reference to current inbandTextTracks
* @param {Object} tech the video.js tech
* @param {Object} captionStream the caption stream to create
* @private
*/
export const createCaptionsTrackIfNotExists = function(inbandTextTracks, tech, captionStream) {
if (!inbandTextTracks[captionStream]) {
tech.trigger({type: 'usage', name: 'vhs-608'});
let instreamId = captionStream;
// we need to translate SERVICEn for 708 to how mux.js currently labels them
if (/^cc708_/.test(captionStream)) {
instreamId = 'SERVICE' + captionStream.split('_')[1];
}
const track = tech.textTracks().getTrackById(instreamId);
if (track) {
      // Reuse an existing track with a CC# id because this was
// very likely created by videojs-contrib-hls from information
// in the m3u8 for us to use
inbandTextTracks[captionStream] = track;
} else {
// This section gets called when we have caption services that aren't specified in the manifest.
// Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
const captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
let label = captionStream;
let language = captionStream;
let def = false;
const captionService = captionServices[instreamId];
if (captionService) {
label = captionService.label;
language = captionService.language;
def = captionService.default;
}
// Otherwise, create a track with the default `CC#` label and
// without a language
inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
kind: 'captions',
id: instreamId,
// TODO: investigate why this doesn't seem to turn the caption on by default
default: def,
label,
language
}, false).track;
}
}
};
/**
* Add caption text track data to a source handler given an array of captions
*
 * @param {Object} options
 * @param {Object} options.inbandTextTracks the inband text tracks
 * @param {number} options.timestampOffset the timestamp offset of the source buffer
 * @param {Array} options.captionArray an array of caption data
* @private
*/
export const addCaptionData = function({
inbandTextTracks,
captionArray,
timestampOffset
}) {
if (!captionArray) {
return;
}
const Cue = window.WebKitDataCue || window.VTTCue;
captionArray.forEach((caption) => {
const track = caption.stream;
// in CEA 608 captions, video.js/mux.js sends a content array
// with positioning data
if (caption.content) {
caption.content.forEach((value) => {
const cue = new Cue(
caption.startTime + timestampOffset,
caption.endTime + timestampOffset,
value.text
);
cue.line = value.line;
cue.align = 'left';
cue.position = value.position;
cue.positionAlign = 'line-left';
inbandTextTracks[track].addCue(cue);
});
} else {
// otherwise, a text value with combined captions is sent
inbandTextTracks[track].addCue(new Cue(
caption.startTime + timestampOffset,
caption.endTime + timestampOffset,
caption.text
));
}
});
};
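// Editor's note: a minimal usage sketch (not part of the original source); it
// assumes a browser environment (window.VTTCue) and a hypothetical tech. The
// caption shape mirrors what mux.js emits for CEA-608 without positioning data.
const addCaptionDataExample = (tech) => {
  const inbandTextTracks = {};
  createCaptionsTrackIfNotExists(inbandTextTracks, tech, 'CC1');
  addCaptionData({
    inbandTextTracks,
    timestampOffset: 10,
    captionArray: [{ stream: 'CC1', startTime: 0, endTime: 2, text: 'Hello' }]
  });
  // The resulting cue spans 10s-12s of player time on the CC1 track.
};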
/**
 * Define properties on a cue for backwards compatibility,
 * but warn the user that the way they are using it
 * is deprecated and will be removed at a later date.
*
* @param {Cue} cue the cue to add the properties on
* @private
*/
const deprecateOldCue = function(cue) {
Object.defineProperties(cue.frame, {
id: {
get() {
videojs.log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
return cue.value.key;
}
},
value: {
get() {
videojs.log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
return cue.value.data;
}
},
privateData: {
get() {
videojs.log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
return cue.value.data;
}
}
});
};
/**
* Add metadata text track data to a source handler given an array of metadata
*
 * @param {Object} options
 * @param {Object} options.inbandTextTracks the inband text tracks
 * @param {Array} options.metadataArray an array of metadata
 * @param {number} options.timestampOffset the timestamp offset of the source buffer
 * @param {number} options.videoDuration the duration of the video
* @private
*/
export const addMetadata = ({
inbandTextTracks,
metadataArray,
timestampOffset,
videoDuration
}) => {
if (!metadataArray) {
return;
}
const Cue = window.WebKitDataCue || window.VTTCue;
const metadataTrack = inbandTextTracks.metadataTrack_;
if (!metadataTrack) {
return;
}
metadataArray.forEach((metadata) => {
const time = metadata.cueTime + timestampOffset;
// if time isn't a finite number between 0 and Infinity, like NaN,
// ignore this bit of metadata.
// This likely occurs when you have a non-timed ID3 tag like TIT2,
// which is the "Title/Songname/Content description" frame
if (typeof time !== 'number' || window.isNaN(time) || time < 0 || !(time < Infinity)) {
return;
}
// If we have no frames, we can't create a cue.
if (!metadata.frames || !metadata.frames.length) {
return;
}
metadata.frames.forEach((frame) => {
const cue = new Cue(
time,
time,
frame.value || frame.url || frame.data || ''
);
cue.frame = frame;
cue.value = frame;
deprecateOldCue(cue);
metadataTrack.addCue(cue);
});
});
if (!metadataTrack.cues || !metadataTrack.cues.length) {
return;
}
// Updating the metadata cues so that
// the endTime of each cue is the startTime of the next cue
// the endTime of the last cue is the duration of the video
const cues = metadataTrack.cues;
const cuesArray = [];
// Create a copy of the TextTrackCueList...
// ...disregarding cues with a falsy value
for (let i = 0; i < cues.length; i++) {
if (cues[i]) {
cuesArray.push(cues[i]);
}
}
// Group cues by their startTime value
const cuesGroupedByStartTime = cuesArray.reduce((obj, cue) => {
const timeSlot = obj[cue.startTime] || [];
timeSlot.push(cue);
obj[cue.startTime] = timeSlot;
return obj;
}, {});
// Sort startTimes by ascending order
const sortedStartTimes = Object.keys(cuesGroupedByStartTime)
.sort((a, b) => Number(a) - Number(b));
// Map each cue group's endTime to the next group's startTime
sortedStartTimes.forEach((startTime, idx) => {
const cueGroup = cuesGroupedByStartTime[startTime];
const finiteDuration = isFinite(videoDuration) ? videoDuration : Number(startTime);
const nextTime = Number(sortedStartTimes[idx + 1]) || finiteDuration;
// Map each cue's endTime to the next group's startTime
cueGroup.forEach((cue) => {
cue.endTime = nextTime;
});
});
};
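// Editor's note: a hedged illustration (not part of the original source) of the
// endTime chaining performed above, using hypothetical start times in a 30s video.
const metadataEndTimeExample = () => {
  const starts = [0, 5, 9];
  const videoDuration = 30;
  return starts.map((startTime, idx) => ({
    startTime,
    endTime: idx + 1 < starts.length ? starts[idx + 1] : videoDuration
  }));
  // [{startTime: 0, endTime: 5}, {startTime: 5, endTime: 9}, {startTime: 9, endTime: 30}]
};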
// object for mapping daterange attributes
const dateRangeAttr = {
id: 'ID',
class: 'CLASS',
startDate: 'START-DATE',
duration: 'DURATION',
endDate: 'END-DATE',
endOnNext: 'END-ON-NEXT',
plannedDuration: 'PLANNED-DURATION',
scte35Out: 'SCTE35-OUT',
scte35In: 'SCTE35-IN'
};
const dateRangeKeysToOmit = new Set([
'id',
'class',
'startDate',
'duration',
'endDate',
'endOnNext',
'startTime',
'endTime',
'processDateRange'
]);
/**
* Add DateRange metadata text track to a source handler given an array of metadata
*
 * @param {Object} options
 * @param {Object} options.inbandTextTracks the inband text tracks
 * @param {Array} options.dateRanges the parsed DATERANGE objects from the media playlist
* @private
*/
export const addDateRangeMetadata = ({ inbandTextTracks, dateRanges }) => {
const metadataTrack = inbandTextTracks.metadataTrack_;
if (!metadataTrack) {
return;
}
const Cue = window.WebKitDataCue || window.VTTCue;
dateRanges.forEach((dateRange) => {
// we generate multiple cues for each date range with different attributes
for (const key of Object.keys(dateRange)) {
if (dateRangeKeysToOmit.has(key)) {
continue;
}
const cue = new Cue(dateRange.startTime, dateRange.endTime, '');
cue.id = dateRange.id;
cue.type = 'com.apple.quicktime.HLS';
cue.value = { key: dateRangeAttr[key], data: dateRange[key] };
if (key === 'scte35Out' || key === 'scte35In') {
// parse the hex attribute string (e.g. "0xFC30...") into raw bytes
cue.value.data = new Uint8Array(cue.value.data.match(/[\da-f]{2}/gi).map((byte) => parseInt(byte, 16))).buffer;
}
metadataTrack.addCue(cue);
}
dateRange.processDateRange();
});
};
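// Editor's note: a hypothetical sketch (not part of the original source) of the
// parsed date-range shape this function expects. Here only plannedDuration and
// scte35Out survive dateRangeKeysToOmit, so two cues would be added.
const dateRangeExample = {
  id: 'ad-break-1',
  startTime: 30,
  endTime: 60,
  startDate: new Date('2024-01-01T00:00:30Z'),
  plannedDuration: 30,
  scte35Out: '0xFC302000',
  processDateRange: () => {}
};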
/**
* Create metadata text track on video.js if it does not exist
*
* @param {Object} inbandTextTracks a reference to current inbandTextTracks
* @param {string} dispatchType the inband metadata track dispatch type
* @param {Object} tech the video.js tech
* @private
*/
export const createMetadataTrackIfNotExists = (inbandTextTracks, dispatchType, tech) => {
if (inbandTextTracks.metadataTrack_) {
return;
}
inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
kind: 'metadata',
label: 'Timed Metadata'
}, false).track;
if (!videojs.browser.IS_ANY_SAFARI) {
inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
}
};
/**
* Remove cues from a track on video.js.
*
 * @param {number} start start of where we should remove the cue
 * @param {number} end end of where we should remove the cue
* @param {Object} track the text track to remove the cues from
* @private
*/
export const removeCuesFromTrack = function(start, end, track) {
let i;
let cue;
if (!track) {
return;
}
if (!track.cues) {
return;
}
i = track.cues.length;
while (i--) {
cue = track.cues[i];
// Remove any cue within the provided start and end time
if (cue.startTime >= start && cue.endTime <= end) {
track.removeCue(cue);
}
}
};
/**
* Remove duplicate cues from a track on video.js (a cue is considered a
* duplicate if it has the same time interval and text as another)
*
* @param {Object} track the text track to remove the duplicate cues from
* @private
*/
export const removeDuplicateCuesFromTrack = function(track) {
const cues = track.cues;
if (!cues) {
return;
}
const uniqueCues = {};
for (let i = cues.length - 1; i >= 0; i--) {
const cue = cues[i];
const cueKey = `${cue.startTime}-${cue.endTime}-${cue.text}`;
if (uniqueCues[cueKey]) {
track.removeCue(cue);
} else {
uniqueCues[cueKey] = cue;
}
}
};
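// Editor's note: a minimal usage sketch (not part of the original source); the
// track stub is hypothetical and mimics the small TextTrack surface used above.
const removeDuplicateCuesExample = () => {
  const cues = [
    { startTime: 0, endTime: 2, text: 'Hello' },
    { startTime: 0, endTime: 2, text: 'Hello' } // same interval and text: duplicate
  ];
  const track = {
    cues,
    removeCue(cue) {
      cues.splice(cues.indexOf(cue), 1);
    }
  };
  removeDuplicateCuesFromTrack(track);
  return track.cues.length; // 1
};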

View File

@@ -0,0 +1,402 @@
// TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
/**
* @file time.js
*/
import Playlist from '../playlist';
// Add 25% to the segment duration to account for small discrepancies in segment timing.
// 25% was arbitrarily chosen, and may need to be refined over time.
const SEGMENT_END_FUDGE_PERCENT = 0.25;
/**
* Converts a player time (any time that can be gotten/set from player.currentTime(),
* e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
* program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
*
* The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
* point" (a point where we have a mapping from program time to player time, with player
* time being the post transmux start of the segment).
*
* For more details, see [this doc](../../docs/program-time-from-player-time.md).
*
* @param {number} playerTime the player time
* @param {Object} segment the segment which contains the player time
* @return {Date} program time
*/
export const playerTimeToProgramTime = (playerTime, segment) => {
if (!segment.dateTimeObject) {
// Can't convert without an "anchor point" for the program time (i.e., a time that can
// be used to map the start of a segment with a real world time).
return null;
}
const transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
const transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart;
// get the start of the content from before old content is prepended
const startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
const offsetFromSegmentStart = playerTime - startOfSegment;
return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
};
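// Editor's note: a minimal worked example (not part of the original source) with
// hypothetical timing values: a segment whose post-transmux start is 10s and whose
// EXT-X-PROGRAM-DATE-TIME anchor is midnight UTC.
const playerTimeToProgramTimeExample = () => {
  const segment = {
    dateTimeObject: new Date('2024-01-01T00:00:00Z'),
    videoTimingInfo: {
      transmuxerPrependedSeconds: 0,
      transmuxedPresentationStart: 10
    }
  };
  return playerTimeToProgramTime(12.5, segment); // 2024-01-01T00:00:02.500Z
};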
export const originalSegmentVideoDuration = (videoTimingInfo) => {
return videoTimingInfo.transmuxedPresentationEnd -
videoTimingInfo.transmuxedPresentationStart -
videoTimingInfo.transmuxerPrependedSeconds;
};
/**
* Finds a segment that contains the time requested given as an ISO-8601 string. The
* returned segment might be an estimate or an accurate match.
*
* @param {string} programTime The ISO-8601 programTime to find a match for
* @param {Object} playlist A playlist object to search within
*/
export const findSegmentForProgramTime = (programTime, playlist) => {
// Assumptions:
// - verifyProgramDateTimeTags has already been run
// - live streams have been started
let dateTimeObject;
try {
dateTimeObject = new Date(programTime);
} catch (e) {
return null;
}
if (!playlist || !playlist.segments || playlist.segments.length === 0) {
return null;
}
let segment = playlist.segments[0];
if (dateTimeObject < new Date(segment.dateTimeObject)) {
// Requested time is before stream start.
return null;
}
for (let i = 0; i < playlist.segments.length - 1; i++) {
segment = playlist.segments[i];
const nextSegmentStart = new Date(playlist.segments[i + 1].dateTimeObject);
if (dateTimeObject < nextSegmentStart) {
break;
}
}
const lastSegment = playlist.segments[playlist.segments.length - 1];
const lastSegmentStart = lastSegment.dateTimeObject;
const lastSegmentDuration = lastSegment.videoTimingInfo ?
originalSegmentVideoDuration(lastSegment.videoTimingInfo) :
lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
const lastSegmentEnd =
new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
if (dateTimeObject > lastSegmentEnd) {
// Beyond the end of the stream, or our best guess of the end of the stream.
return null;
}
if (dateTimeObject > new Date(lastSegmentStart)) {
segment = lastSegment;
}
return {
segment,
estimatedStart: segment.videoTimingInfo ?
segment.videoTimingInfo.transmuxedPresentationStart :
Playlist.duration(
playlist,
playlist.mediaSequence + playlist.segments.indexOf(segment)
),
// Although, given that all segments have accurate date time objects, the segment
// selected should be accurate, unless the video has been transmuxed at some point
// (determined by the presence of the videoTimingInfo object), the segment's "player
// time" (the start time in the player) can't be considered accurate.
type: segment.videoTimingInfo ? 'accurate' : 'estimate'
};
};
/**
 * Finds a segment that contains the given player time (in seconds).
*
* @param {number} time The player time to find a match for
* @param {Object} playlist A playlist object to search within
*/
export const findSegmentForPlayerTime = (time, playlist) => {
// Assumptions:
// - there will always be a segment.duration
// - we can start from zero
// - segments are in time order
if (!playlist || !playlist.segments || playlist.segments.length === 0) {
return null;
}
let segmentEnd = 0;
let segment;
for (let i = 0; i < playlist.segments.length; i++) {
segment = playlist.segments[i];
// videoTimingInfo is set after the segment is downloaded and transmuxed, and
// should contain the most accurate values we have for the segment's player times.
//
// Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
// back to an estimate based on the manifest derived (inaccurate) segment.duration, to
// calculate an end value.
segmentEnd = segment.videoTimingInfo ?
segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
if (time <= segmentEnd) {
break;
}
}
const lastSegment = playlist.segments[playlist.segments.length - 1];
if (lastSegment.videoTimingInfo &&
lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
// The time requested is beyond the stream end.
return null;
}
if (time > segmentEnd) {
// The time is within or beyond the last segment.
//
// Check to see if the time is beyond a reasonable guess of the end of the stream.
if (time > segmentEnd + (lastSegment.duration * SEGMENT_END_FUDGE_PERCENT)) {
// Technically, because the duration value is only an estimate, the time may still
// exist in the last segment, however, there isn't enough information to make even
// a reasonable estimate.
return null;
}
segment = lastSegment;
}
return {
segment,
estimatedStart: segment.videoTimingInfo ?
segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
// Because videoTimingInfo is only set after transmux, it is the only way to get
// accurate timing values.
type: segment.videoTimingInfo ? 'accurate' : 'estimate'
};
};
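// Editor's note: a minimal usage sketch (not part of the original source) with a
// hypothetical two-segment playlist that has not been transmuxed yet, so only
// manifest durations are available.
const findSegmentForPlayerTimeExample = () => {
  const playlist = {
    segments: [{ duration: 10 }, { duration: 10 }]
  };
  return findSegmentForPlayerTime(15, playlist);
  // { segment: playlist.segments[1], estimatedStart: 10, type: 'estimate' }
};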
/**
* Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
* If the offset returned is positive, the programTime occurs after the
* comparisonTimestamp.
* If the offset is negative, the programTime occurs before the comparisonTimestamp.
*
* @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
* @param {string} programTime The programTime as an ISO-8601 string
* @return {number} offset
*/
export const getOffsetFromTimestamp = (comparisonTimeStamp, programTime) => {
let segmentDateTime;
let programDateTime;
try {
segmentDateTime = new Date(comparisonTimeStamp);
programDateTime = new Date(programTime);
} catch (e) {
// TODO handle error
}
const segmentTimeEpoch = segmentDateTime.getTime();
const programTimeEpoch = programDateTime.getTime();
return (programTimeEpoch - segmentTimeEpoch) / 1000;
};
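// Editor's note: a minimal usage sketch (not part of the original source).
const getOffsetFromTimestampExample = () =>
  getOffsetFromTimestamp('2024-01-01T00:00:00Z', '2024-01-01T00:00:30Z'); // 30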
/**
* Checks that all segments in this playlist have programDateTime tags.
*
* @param {Object} playlist A playlist object
*/
export const verifyProgramDateTimeTags = (playlist) => {
if (!playlist.segments || playlist.segments.length === 0) {
return false;
}
for (let i = 0; i < playlist.segments.length; i++) {
const segment = playlist.segments[i];
if (!segment.dateTimeObject) {
return false;
}
}
return true;
};
/**
* Returns the programTime of the media given a playlist and a playerTime.
* The playlist must have programDateTime tags for a programDateTime tag to be returned.
* If the segments containing the time requested have not been buffered yet, an estimate
* may be returned to the callback.
*
* @param {Object} args
* @param {Object} args.playlist A playlist object to search within
* @param {number} time A playerTime in seconds
* @param {Function} callback(err, programTime)
* @return {string} err.message A detailed error message
* @return {Object} programTime
* @return {number} programTime.mediaSeconds The streamTime in seconds
* @return {string} programTime.programDateTime The programTime as an ISO-8601 String
*/
export const getProgramTime = ({
playlist,
time = undefined,
callback
}) => {
if (!callback) {
throw new Error('getProgramTime: callback must be provided');
}
if (!playlist || time === undefined) {
return callback({
message: 'getProgramTime: playlist and time must be provided'
});
}
const matchedSegment = findSegmentForPlayerTime(time, playlist);
if (!matchedSegment) {
return callback({
message: 'valid programTime was not found'
});
}
if (matchedSegment.type === 'estimate') {
return callback({
message:
'Accurate programTime could not be determined.' +
' Please seek to e.seekTime and try again',
seekTime: matchedSegment.estimatedStart
});
}
const programTimeObject = {
mediaSeconds: time
};
const programTime = playerTimeToProgramTime(time, matchedSegment.segment);
if (programTime) {
programTimeObject.programDateTime = programTime.toISOString();
}
return callback(null, programTimeObject);
};
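// Editor's note: a minimal usage sketch (not part of the original source); the
// playlist argument is a hypothetical parsed media playlist.
const getProgramTimeExample = (playlist) => {
  getProgramTime({
    playlist,
    time: 20,
    callback: (err, programTime) => {
      if (err) {
        // err.seekTime is set when only an estimate was available
        return;
      }
      // programTime.mediaSeconds === 20
      // programTime.programDateTime is an ISO-8601 string when PDT tags exist
    }
  });
};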
/**
* Seeks in the player to a time that matches the given programTime ISO-8601 string.
*
* @param {Object} args
* @param {string} args.programTime A programTime to seek to as an ISO-8601 String
* @param {Object} args.playlist A playlist to look within
* @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
* @param {Function} args.seekTo A method to perform a seek
* @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
* @param {Object} args.tech The tech to seek on
* @param {Function} args.callback(err, newTime) A callback to return the new time to
* @return {string} err.message A detailed error message
* @return {number} newTime The exact time that was seeked to in seconds
*/
export const seekToProgramTime = ({
programTime,
playlist,
retryCount = 2,
seekTo,
pauseAfterSeek = true,
tech,
callback
}) => {
if (!callback) {
throw new Error('seekToProgramTime: callback must be provided');
}
if (typeof programTime === 'undefined' || !playlist || !seekTo) {
return callback({
message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
});
}
if (!playlist.endList && !tech.hasStarted_) {
return callback({
message: 'player must be playing a live stream to start buffering'
});
}
if (!verifyProgramDateTimeTags(playlist)) {
return callback({
message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
});
}
const matchedSegment = findSegmentForProgramTime(programTime, playlist);
// no match
if (!matchedSegment) {
return callback({
message: `${programTime} was not found in the stream`
});
}
const segment = matchedSegment.segment;
const mediaOffset = getOffsetFromTimestamp(
segment.dateTimeObject,
programTime
);
if (matchedSegment.type === 'estimate') {
// we've run out of retries
if (retryCount === 0) {
return callback({
message: `${programTime} is not buffered yet. Try again`
});
}
seekTo(matchedSegment.estimatedStart + mediaOffset);
tech.one('seeked', () => {
seekToProgramTime({
programTime,
playlist,
retryCount: retryCount - 1,
seekTo,
pauseAfterSeek,
tech,
callback
});
});
return;
}
// Since the segment.start value is determined from the buffered end or ending time
// of the prior segment, the seekToTime doesn't need to account for any transmuxer
// modifications.
const seekToTime = segment.start + mediaOffset;
const seekedCallback = () => {
return callback(null, tech.currentTime());
};
// listen for seeked event
tech.one('seeked', seekedCallback);
// pause before seeking as video.js will restore this state
if (pauseAfterSeek) {
tech.pause();
}
seekTo(seekToTime);
};
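// Editor's note: a minimal usage sketch (not part of the original source); the
// player wiring is hypothetical and simplified.
const seekToProgramTimeExample = (player, playlist) => {
  seekToProgramTime({
    programTime: '2024-01-01T00:00:30Z',
    playlist,
    seekTo: player.currentTime.bind(player),
    tech: player.tech(true),
    callback: (err, newTime) => {
      if (!err) {
        // newTime is the exact player time that was seeked to
      }
    }
  });
};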

View File

@@ -0,0 +1,9 @@
const toTitleCase = function(string) {
if (typeof string !== 'string') {
return string;
}
return string.replace(/./, (w) => w.toUpperCase());
};
export default toTitleCase;
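// Editor's note: a minimal usage sketch (not part of the original source). Only
// the first character is uppercased, since /./ matches once without the g flag.
const toTitleCaseExample = () => toTitleCase('audio'); // 'Audio'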

View File

@@ -0,0 +1,50 @@
/**
* Provides a compatibility layer between Video.js 7 and 8 API changes for VHS.
*/
import videojs from 'video.js';
/**
* Delegates to videojs.obj.merge (Video.js 8) or
* videojs.mergeOptions (Video.js 7).
*/
export function merge(...args) {
const context = videojs.obj || videojs;
const fn = context.merge || context.mergeOptions;
return fn.apply(context, args);
}
/**
* Delegates to videojs.time.createTimeRanges (Video.js 8) or
* videojs.createTimeRanges (Video.js 7).
*/
export function createTimeRanges(...args) {
const context = videojs.time || videojs;
const fn = context.createTimeRanges;
return fn.apply(context, args);
}
/**
* Converts provided buffered ranges to a descriptive string
*
* @param {TimeRanges} buffered - received buffered time ranges
*
* @return {string} - descriptive string
*/
export function bufferedRangesToString(buffered) {
if (buffered.length === 0) {
return 'Buffered Ranges are empty';
}
let bufferedRangesStr = 'Buffered Ranges: \n';
for (let i = 0; i < buffered.length; i++) {
const start = buffered.start(i);
const end = buffered.end(i);
bufferedRangesStr += `${start} --> ${end}. Duration (${end - start})\n`;
}
return bufferedRangesStr;
}
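// Editor's note: a minimal usage sketch (not part of the original source), using
// a hypothetical TimeRanges-like stub in place of a real buffered object.
const bufferedRangesToStringExample = () => {
  const buffered = {
    length: 1,
    start: () => 0,
    end: () => 9.5
  };
  return bufferedRangesToString(buffered); // 'Buffered Ranges: \n0 --> 9.5. Duration (9.5)\n'
};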

View File

@@ -0,0 +1,42 @@
export const workerCallback = function(options) {
const transmuxer = options.transmuxer;
const endAction = options.endAction || options.action;
const callback = options.callback;
const message = Object.assign({}, options, {endAction: null, transmuxer: null, callback: null});
const listenForEndEvent = (event) => {
if (event.data.action !== endAction) {
return;
}
transmuxer.removeEventListener('message', listenForEndEvent);
// transfer ownership of bytes back to us.
if (event.data.data) {
event.data.data = new Uint8Array(
event.data.data,
options.byteOffset || 0,
options.byteLength || event.data.data.byteLength
);
if (options.data) {
options.data = event.data.data;
}
}
callback(event.data);
};
transmuxer.addEventListener('message', listenForEndEvent);
if (options.data) {
const isArrayBuffer = options.data instanceof ArrayBuffer;
message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
message.byteLength = options.data.byteLength;
const transfers = [isArrayBuffer ? options.data : options.data.buffer];
transmuxer.postMessage(message, transfers);
} else {
transmuxer.postMessage(message);
}
};
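// Editor's note: a minimal usage sketch (not part of the original source). The
// worker and action name are illustrative; any transmuxer-style worker that
// echoes `action` back on its response message fits this shape.
const workerCallbackExample = (transmuxer, bytes) => {
  workerCallback({
    transmuxer,
    action: 'probe', // the worker is expected to respond with the same action
    data: bytes, // a Uint8Array; its buffer is transferred to the worker and back
    callback: (data) => {
      // data.data, when present, is re-wrapped as a Uint8Array over the returned buffer
    }
  });
};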

File diff suppressed because it is too large

View File

@@ -0,0 +1,599 @@
/**
* @file vtt-segment-loader.js
*/
import SegmentLoader from './segment-loader';
import videojs from 'video.js';
import window from 'global/window';
import { removeCuesFromTrack, removeDuplicateCuesFromTrack } from './util/text-tracks';
import { initSegmentId } from './bin-utils';
import { uint8ToUtf8 } from './util/string';
import { REQUEST_ERRORS } from './media-segment-request';
import { ONE_SECOND_IN_TS } from 'mux.js/lib/utils/clock';
import {createTimeRanges} from './util/vjs-compat';
const VTT_LINE_TERMINATORS =
new Uint8Array('\n\n'.split('').map(char => char.charCodeAt(0)));
class NoVttJsError extends Error {
constructor() {
super('Trying to parse received VTT cues, but there is no WebVTT. Make sure vtt.js is loaded.');
}
}
/**
* An object that manages segment loading and appending.
*
* @class VTTSegmentLoader
* @param {Object} options required and optional options
* @extends videojs.EventTarget
*/
export default class VTTSegmentLoader extends SegmentLoader {
constructor(settings, options = {}) {
super(settings, options);
// SegmentLoader requires a MediaSource be specified or it will throw an error;
// however, VTTSegmentLoader has no need of a media source, so delete the reference
this.mediaSource_ = null;
this.subtitlesTrack_ = null;
this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks;
this.loadVttJs = settings.loadVttJs;
// The VTT segment will have its own time mappings. Saving VTT segment timing info in
// the sync controller leads to improper behavior.
this.shouldSaveSegmentTimingInfo_ = false;
}
/**
* Indicates which time ranges are buffered
*
* @return {TimeRange}
* TimeRange object representing the current buffered ranges
*/
buffered_() {
if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
return createTimeRanges();
}
const cues = this.subtitlesTrack_.cues;
const start = cues[0].startTime;
const end = cues[cues.length - 1].startTime;
return createTimeRanges([[start, end]]);
}
/**
* Gets and sets init segment for the provided map
*
* @param {Object} map
* The map object representing the init segment to get or set
* @param {boolean=} set
* If true, the init segment for the provided map should be saved
* @return {Object}
* map object for desired init segment
*/
initSegmentForMap(map, set = false) {
if (!map) {
return null;
}
const id = initSegmentId(map);
let storedMap = this.initSegments_[id];
if (set && !storedMap && map.bytes) {
// append WebVTT line terminators to the media initialization segment if it exists
// to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
// requires two or more WebVTT line terminators between the WebVTT header and the
// rest of the file
const combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
const combinedSegment = new Uint8Array(combinedByteLength);
combinedSegment.set(map.bytes);
combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
this.initSegments_[id] = storedMap = {
resolvedUri: map.resolvedUri,
byterange: map.byterange,
bytes: combinedSegment
};
}
return storedMap || map;
}
/**
* Returns true if all configuration required for loading is present, otherwise false.
*
 * @return {boolean} True if all the configuration is ready for loading
* @private
*/
couldBeginLoading_() {
return this.playlist_ &&
this.subtitlesTrack_ &&
!this.paused();
}
/**
* Once all the starting parameters have been specified, begin
* operation. This method should only be invoked from the INIT
* state.
*
* @private
*/
init_() {
this.state = 'READY';
this.resetEverything();
return this.monitorBuffer_();
}
/**
* Set a subtitle track on the segment loader to add subtitles to
*
* @param {TextTrack=} track
* The text track to add loaded subtitles to
* @return {TextTrack}
* Returns the subtitles track
*/
track(track) {
if (typeof track === 'undefined') {
return this.subtitlesTrack_;
}
this.subtitlesTrack_ = track;
// if we were unpaused but waiting for a sourceUpdater, start
// buffering now
if (this.state === 'INIT' && this.couldBeginLoading_()) {
this.init_();
}
return this.subtitlesTrack_;
}
/**
* Remove any data in the source buffer between start and end times
*
* @param {number} start - the start time of the region to remove from the buffer
* @param {number} end - the end time of the region to remove from the buffer
*/
remove(start, end) {
removeCuesFromTrack(start, end, this.subtitlesTrack_);
}
/**
 * fill the buffer with segments unless the sourceBuffers are
* currently updating
*
* Note: this function should only ever be called by monitorBuffer_
* and never directly
*
* @private
*/
fillBuffer_() {
// see if we need to begin loading immediately
const segmentInfo = this.chooseNextRequest_();
if (!segmentInfo) {
return;
}
if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
// We don't have the timestamp offset that we need to sync subtitles.
// Rerun on a timestamp offset or user interaction.
const checkTimestampOffset = () => {
this.state = 'READY';
if (!this.paused()) {
// if not paused, queue a buffer check as soon as possible
this.monitorBuffer_();
}
};
this.syncController_.one('timestampoffset', checkTimestampOffset);
this.state = 'WAITING_ON_TIMELINE';
return;
}
this.loadSegment_(segmentInfo);
}
// never set a timestamp offset for vtt segments.
timestampOffsetForSegment_() {
return null;
}
chooseNextRequest_() {
return this.skipEmptySegments_(super.chooseNextRequest_());
}
/**
 * Prevents the segment loader from requesting segments we know contain no subtitles
 * by walking forward until we find the next segment that is not already known to be
 * empty.
*
* @param {Object} segmentInfo
* a segment info object that describes the current segment
* @return {Object}
* a segment info object that describes the current segment
*/
skipEmptySegments_(segmentInfo) {
while (segmentInfo && segmentInfo.segment.empty) {
// stop at the last possible segmentInfo
if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
segmentInfo = null;
break;
}
segmentInfo = this.generateSegmentInfo_({
playlist: segmentInfo.playlist,
mediaIndex: segmentInfo.mediaIndex + 1,
startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
isSyncRequest: segmentInfo.isSyncRequest
});
}
return segmentInfo;
}
stopForError(error) {
this.error(error);
this.state = 'READY';
this.pause();
this.trigger('error');
}
/**
 * append a decrypted segment to the SourceBuffer through a SourceUpdater
*
* @private
*/
segmentRequestFinished_(error, simpleSegment, result) {
if (!this.subtitlesTrack_) {
this.state = 'READY';
return;
}
this.saveTransferStats_(simpleSegment.stats);
// the request was aborted
if (!this.pendingSegment_) {
this.state = 'READY';
this.mediaRequestsAborted += 1;
return;
}
if (error) {
if (error.code === REQUEST_ERRORS.TIMEOUT) {
this.handleTimeout_();
}
if (error.code === REQUEST_ERRORS.ABORTED) {
this.mediaRequestsAborted += 1;
} else {
this.mediaRequestsErrored += 1;
}
this.stopForError(error);
return;
}
const segmentInfo = this.pendingSegment_;
const isMp4WebVttSegmentWithCues = result.mp4VttCues && result.mp4VttCues.length;
if (isMp4WebVttSegmentWithCues) {
segmentInfo.mp4VttCues = result.mp4VttCues;
}
// although the VTT segment loader bandwidth isn't really used, it's good to
// maintain functionality between segment loaders
this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
// if this request included a segment key, save that data in the cache
if (simpleSegment.key) {
this.segmentKey(simpleSegment.key, true);
}
this.state = 'APPENDING';
// used for tests
this.trigger('appending');
const segment = segmentInfo.segment;
if (segment.map) {
segment.map.bytes = simpleSegment.map.bytes;
}
segmentInfo.bytes = simpleSegment.bytes;
// Make sure that vttjs has loaded, otherwise load it and wait until it has finished loading
if (typeof window.WebVTT !== 'function' && typeof this.loadVttJs === 'function') {
this.state = 'WAITING_ON_VTTJS';
// should be fine to call multiple times
// script will be loaded once but multiple listeners will be added to the queue, which is expected.
this.loadVttJs()
.then(
() => this.segmentRequestFinished_(error, simpleSegment, result),
() => this.stopForError({
message: 'Error loading vtt.js'
})
);
return;
}
segment.requested = true;
try {
this.parseVTTCues_(segmentInfo);
} catch (e) {
this.stopForError({
message: e.message,
metadata: {
errorType: videojs.Error.StreamingVttParserError,
error: e
}
});
return;
}
if (!isMp4WebVttSegmentWithCues) {
this.updateTimeMapping_(
segmentInfo,
this.syncController_.timelines[segmentInfo.timeline],
this.playlist_
);
}
if (segmentInfo.cues.length) {
segmentInfo.timingInfo = {
start: segmentInfo.cues[0].startTime,
end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
};
} else {
segmentInfo.timingInfo = {
start: segmentInfo.startOfSegment,
end: segmentInfo.startOfSegment + segmentInfo.duration
};
}
if (segmentInfo.isSyncRequest) {
this.trigger('syncinfoupdate');
this.pendingSegment_ = null;
this.state = 'READY';
return;
}
segmentInfo.byteLength = segmentInfo.bytes.byteLength;
this.mediaSecondsLoaded += segment.duration;
// Create VTTCue instances for each cue in the new segment and add them to
// the subtitle track
segmentInfo.cues.forEach((cue) => {
this.subtitlesTrack_.addCue(this.featuresNativeTextTracks_ ?
new window.VTTCue(cue.startTime, cue.endTime, cue.text) :
cue);
});
// Remove any duplicate cues from the subtitle track. The WebVTT spec allows
// cues to have identical time-intervals, but if the text is also identical
// we can safely assume it is a duplicate that can be removed (ex. when a cue
// "overlaps" VTT segments)
removeDuplicateCuesFromTrack(this.subtitlesTrack_);
this.handleAppendsDone_();
}
handleData_(simpleSegment, result) {
const isVttType = simpleSegment && simpleSegment.type === 'vtt';
const isTextResult = result && result.type === 'text';
const isFmp4VttSegment = isVttType && isTextResult;
// handle segment data for fmp4 encapsulated webvtt
if (isFmp4VttSegment) {
super.handleData_(simpleSegment, result);
}
}
updateTimingInfoEnd_() {
// noop
}
/**
* Utility function for converting mp4 webvtt cue objects into VTTCues.
*
 * @param {Object} segmentInfo a segment info object with mp4 webvtt cues to parse into VTTCue objects
*/
parseMp4VttCues_(segmentInfo) {
const timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ?
this.sourceUpdater_.audioTimestampOffset() :
this.sourceUpdater_.videoTimestampOffset();
segmentInfo.mp4VttCues.forEach((cue) => {
const start = cue.start + timestampOffset;
const end = cue.end + timestampOffset;
const vttCue = new window.VTTCue(start, end, cue.cueText);
if (cue.settings) {
cue.settings.split(' ').forEach((cueSetting) => {
const keyValString = cueSetting.split(':');
const key = keyValString[0];
const value = keyValString[1];
vttCue[key] = isNaN(value) ? value : Number(value);
});
}
segmentInfo.cues.push(vttCue);
});
}
/**
* Uses the WebVTT parser to parse the segment response
*
* @throws NoVttJsError
*
* @param {Object} segmentInfo
* a segment info object that describes the current segment
* @private
*/
parseVTTCues_(segmentInfo) {
let decoder;
let decodeBytesToString = false;
if (typeof window.WebVTT !== 'function') {
// caller is responsible for exception handling.
throw new NoVttJsError();
}
segmentInfo.cues = [];
segmentInfo.timestampmap = { MPEGTS: 0, LOCAL: 0 };
if (segmentInfo.mp4VttCues) {
this.parseMp4VttCues_(segmentInfo);
return;
}
if (typeof window.TextDecoder === 'function') {
decoder = new window.TextDecoder('utf8');
} else {
decoder = window.WebVTT.StringDecoder();
decodeBytesToString = true;
}
const parser = new window.WebVTT.Parser(
window,
window.vttjs,
decoder
);
parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
parser.ontimestampmap = (map) => {
segmentInfo.timestampmap = map;
};
parser.onparsingerror = (error) => {
videojs.log.warn('Error encountered when parsing cues: ' + error.message);
};
if (segmentInfo.segment.map) {
let mapData = segmentInfo.segment.map.bytes;
if (decodeBytesToString) {
mapData = uint8ToUtf8(mapData);
}
parser.parse(mapData);
}
let segmentData = segmentInfo.bytes;
if (decodeBytesToString) {
segmentData = uint8ToUtf8(segmentData);
}
parser.parse(segmentData);
parser.flush();
}
/**
* Updates the start and end times of any cues parsed by the WebVTT parser using
* the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
* from the SyncController
*
* @param {Object} segmentInfo
* a segment info object that describes the current segment
* @param {Object} mappingObj
* object containing a mapping from TS to media time
* @param {Object} playlist
* the playlist object containing the segment
* @private
*/
updateTimeMapping_(segmentInfo, mappingObj, playlist) {
const segment = segmentInfo.segment;
if (!mappingObj) {
// If the sync controller does not have a mapping of TS to Media Time for the
// timeline, then we don't have enough information to update the cue
// start/end times
return;
}
if (!segmentInfo.cues.length) {
// If there are no cues, we also do not have enough information to figure out
// segment timing. Mark that the segment contains no cues so we don't re-request
// an empty segment.
segment.empty = true;
return;
}
const { MPEGTS, LOCAL } = segmentInfo.timestampmap;
/**
* From the spec:
* The MPEGTS media timestamp MUST use a 90KHz timescale,
* even when non-WebVTT Media Segments use a different timescale.
*/
const mpegTsInSeconds = MPEGTS / ONE_SECOND_IN_TS;
const diff = mpegTsInSeconds - LOCAL + mappingObj.mapping;
segmentInfo.cues.forEach((cue) => {
const duration = cue.endTime - cue.startTime;
const startTime = this.handleRollover_(cue.startTime + diff, mappingObj.time);
cue.startTime = Math.max(startTime, 0);
cue.endTime = Math.max(startTime + duration, 0);
});
if (!playlist.syncInfo) {
const firstStart = segmentInfo.cues[0].startTime;
const lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
playlist.syncInfo = {
mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
time: Math.min(firstStart, lastStart - segment.duration)
};
}
}
/**
* MPEG-TS PES timestamps are limited to 2^33.
* Once they reach 2^33, they roll over to 0.
* mux.js handles PES timestamp rollover for the following scenarios:
* [forward rollover(right)] ->
* PES timestamps monotonically increase, and once they reach 2^33, they roll over to 0
* [backward rollover(left)] -->
 * we seek back to a position before the rollover.
*
* According to the HLS SPEC:
* When synchronizing WebVTT with PES timestamps, clients SHOULD account
* for cases where the 33-bit PES timestamps have wrapped and the WebVTT
* cue times have not. When the PES timestamp wraps, the WebVTT Segment
* SHOULD have a X-TIMESTAMP-MAP header that maps the current WebVTT
* time to the new (low valued) PES timestamp.
*
* So we want to handle rollover here and align VTT Cue start/end time to the player's time.
*/
handleRollover_(value, reference) {
if (reference === null) {
return value;
}
let valueIn90khz = value * ONE_SECOND_IN_TS;
const referenceIn90khz = reference * ONE_SECOND_IN_TS;
let offset;
if (referenceIn90khz < valueIn90khz) {
// - 2^33
offset = -8589934592;
} else {
// + 2^33
offset = 8589934592;
}
// distance(value - reference) > 2^32
while (Math.abs(valueIn90khz - referenceIn90khz) > 4294967296) {
valueIn90khz += offset;
}
return valueIn90khz / ONE_SECOND_IN_TS;
}
}
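// Editor's note: a hedged worked example (not part of the original source) of the
// X-TIMESTAMP-MAP adjustment made in updateTimeMapping_ above, with hypothetical values.
const timestampMapExample = () => {
  const MPEGTS = 900000; // X-TIMESTAMP-MAP MPEGTS value: 10s in 90kHz ticks
  const LOCAL = 0; // cue times in the VTT file are authored starting at 0
  const mapping = -10; // the sync controller's TS-to-player-time offset, in seconds
  const diff = MPEGTS / ONE_SECOND_IN_TS - LOCAL + mapping; // 0
  return 2.0 + diff; // a cue authored at 2.0s displays at 2.0s of player time
};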

184
VApp/node_modules/@videojs/http-streaming/src/xhr.js generated vendored Normal file
View File

@@ -0,0 +1,184 @@
/**
* @file xhr.js
*/
/**
* A wrapper for videojs.xhr that tracks bandwidth.
*
* @param {Object} options options for the XHR
* @param {Function} callback the callback to call when done
* @return {Request} the xhr request that is going to be made
*/
import videojs from 'video.js';
import window from 'global/window';
import {merge} from './util/vjs-compat';
const callbackWrapper = function(request, error, response, callback) {
const reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
if (!error && reqResponse) {
request.responseTime = Date.now();
request.roundTripTime = request.responseTime - request.requestTime;
request.bytesReceived = reqResponse.byteLength || reqResponse.length;
if (!request.bandwidth) {
request.bandwidth =
Math.floor((request.bytesReceived / request.roundTripTime) * 8 * 1000);
}
}
if (response.headers) {
request.responseHeaders = response.headers;
}
// videojs.xhr now uses a specific code on the error
// object to signal that a request has timed out instead
// of setting a boolean on the request object
if (error && error.code === 'ETIMEDOUT') {
request.timedout = true;
}
// videojs.xhr no longer considers status codes outside of 200 and 0
// (for file uris) to be errors, but the old XHR did, so emulate that
// behavior. Status 206 may be used in response to byterange requests.
if (!error &&
!request.aborted &&
response.statusCode !== 200 &&
response.statusCode !== 206 &&
response.statusCode !== 0) {
error = new Error('XHR Failed with a response of: ' +
(request && (reqResponse || request.responseText)));
}
callback(error, request);
};
/**
* Iterates over the request hooks Set and calls them in order
*
 * @param {Set} requestSet the request hook Set to iterate over
 * @param {Object} options the request options to pass to the xhr wrapper
 * @return {Object} the callback hook function return value, the modified or new options Object.
*/
const callAllRequestHooks = (requestSet, options) => {
if (!requestSet || !requestSet.size) {
return;
}
let newOptions = options;
requestSet.forEach((requestCallback) => {
newOptions = requestCallback(newOptions);
});
return newOptions;
};
/**
* Iterates over the response hooks Set and calls them in order.
*
 * @param {Set} responseSet the response hook Set to iterate over
* @param {Object} request the xhr request object
* @param {Object} error the xhr error object
* @param {Object} response the xhr response object
*/
const callAllResponseHooks = (responseSet, request, error, response) => {
if (!responseSet || !responseSet.size) {
return;
}
responseSet.forEach((responseCallback) => {
responseCallback(request, error, response);
});
};
const xhrFactory = function() {
const xhr = function XhrFunction(options, callback) {
// Add a default timeout
options = merge({
timeout: 45e3
}, options);
// Allow an optional user-specified function to modify the option
// object before we construct the xhr request
// TODO: Remove beforeRequest in the next major release.
const beforeRequest = XhrFunction.beforeRequest || videojs.Vhs.xhr.beforeRequest;
// onRequest and onResponse hooks as a Set, at either the player or global level.
// TODO: new Set added here for beforeRequest alias. Remove this when beforeRequest is removed.
const _requestCallbackSet = XhrFunction._requestCallbackSet || videojs.Vhs.xhr._requestCallbackSet || new Set();
const _responseCallbackSet = XhrFunction._responseCallbackSet || videojs.Vhs.xhr._responseCallbackSet;
if (beforeRequest && typeof beforeRequest === 'function') {
videojs.log.warn('beforeRequest is deprecated, use onRequest instead.');
_requestCallbackSet.add(beforeRequest);
}
// Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
// TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
const xhrMethod = videojs.Vhs.xhr.original === true ? videojs.xhr : videojs.Vhs.xhr;
// call all registered onRequest hooks, assign new options.
const beforeRequestOptions = callAllRequestHooks(_requestCallbackSet, options);
// Remove the beforeRequest function from the hooks set so stale beforeRequest functions are not called.
_requestCallbackSet.delete(beforeRequest);
// xhrMethod will call XMLHttpRequest.open and XMLHttpRequest.send
const request = xhrMethod(beforeRequestOptions || options, function(error, response) {
// call all registered onResponse hooks
callAllResponseHooks(_responseCallbackSet, request, error, response);
return callbackWrapper(request, error, response, callback);
});
const originalAbort = request.abort;
request.abort = function() {
request.aborted = true;
return originalAbort.apply(request, arguments);
};
request.uri = options.uri;
request.requestType = options.requestType;
request.requestTime = Date.now();
return request;
};
xhr.original = true;
return xhr;
};
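// Editor's note: a hedged sketch (not part of the original source) of how the
// request/response hook Sets consulted above are typically populated through the
// library's onRequest/onResponse helpers (defined outside this file); the header
// added here is hypothetical.
const xhrHooksExample = (player) => {
  player.tech(true).vhs.xhr.onRequest((options) => {
    options.headers = Object.assign({}, options.headers, { Authorization: 'Bearer token' });
    return options; // the returned options object replaces the original
  });
  player.tech(true).vhs.xhr.onResponse((request, error, response) => {
    // e.g., inspect response.statusCode or request.uri for logging
  });
};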
/**
* Turns segment byterange into a string suitable for use in
* HTTP Range requests
*
* @param {Object} byterange - an object with two values defining the start and end
* of a byte-range
*/
export const byterangeStr = function(byterange) {
// `byterangeEnd` is one less than `offset + length` because the HTTP range
// header uses inclusive ranges
let byterangeEnd;
const byterangeStart = byterange.offset;
if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
byterangeEnd = window.BigInt(byterange.offset) + window.BigInt(byterange.length) - window.BigInt(1);
} else {
byterangeEnd = byterange.offset + byterange.length - 1;
}
return 'bytes=' + byterangeStart + '-' + byterangeEnd;
};
/**
* Defines headers for use in the xhr request for a particular segment.
*
* @param {Object} segment - a simplified copy of the segmentInfo object
* from SegmentLoader
*/
const segmentXhrHeaders = function(segment) {
const headers = {};
if (segment.byterange) {
headers.Range = byterangeStr(segment.byterange);
}
return headers;
};
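// Editor's note: a minimal usage sketch (not part of the original source).
const byterangeExample = () => {
  byterangeStr({ offset: 0, length: 100 }); // 'bytes=0-99' (inclusive end)
  return segmentXhrHeaders({ byterange: { offset: 600, length: 300 } }); // { Range: 'bytes=600-899' }
};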
export {segmentXhrHeaders, callbackWrapper, xhrFactory};
export default xhrFactory;