/**
* @license
* Video.js 8.17.4
* Copyright Brightcove, Inc.
* Available under Apache License Version 2.0
*
*
* Includes vtt.js
* Available under Apache License Version 2.0
*
*/
import window$1 from 'global/window';
import document$1 from 'global/document';
import XHR from '@videojs/xhr';
import vtt from 'videojs-vtt.js';
import _extends from '@babel/runtime/helpers/extends';
import _resolveUrl from '@videojs/vhs-utils/es/resolve-url.js';
import { Parser } from 'm3u8-parser';
import { DEFAULT_VIDEO_CODEC, DEFAULT_AUDIO_CODEC, parseCodecs, muxerSupportsCodec, browserSupportsCodec, translateLegacyCodec, codecsFromDefault, isAudioCodec, getMimeForCodec } from '@videojs/vhs-utils/es/codecs.js';
import { simpleTypeFromSourceType } from '@videojs/vhs-utils/es/media-types.js';
import { isArrayBufferView, concatTypedArrays, stringToBytes, toUint8 } from '@videojs/vhs-utils/es/byte-helpers';
import { generateSidxKey, parseUTCTiming, parse, addSidxSegmentsToPlaylist } from 'mpd-parser';
import parseSidx from 'mux.js/lib/tools/parse-sidx';
import { getId3Offset } from '@videojs/vhs-utils/es/id3-helpers';
import { detectContainerForBytes, isLikelyFmp4MediaSegment } from '@videojs/vhs-utils/es/containers';
import { ONE_SECOND_IN_TS } from 'mux.js/lib/utils/clock';
var version$6 = "8.17.4";
/**
* An Object that contains lifecycle hooks as keys which point to an array
* of functions that are run when a lifecycle is triggered
*
* @private
*/
const hooks_ = {};
/**
* Get a list of hooks for a specific lifecycle
*
* @param {string} type
* the lifecycle to get hooks from
*
* @param {Function|Function[]} [fn]
* Optionally add a hook (or hooks) to the lifecycle that you are getting.
*
* @return {Array}
* an array of hooks, or an empty array if there are none.
*/
const hooks = function (type, fn) {
hooks_[type] = hooks_[type] || [];
if (fn) {
hooks_[type] = hooks_[type].concat(fn);
}
return hooks_[type];
};
/**
* Add a function hook to a specific videojs lifecycle.
*
* @param {string} type
* the lifecycle to hook the function to.
*
* @param {Function|Function[]} fn
* The function or array of functions to attach.
*/
const hook = function (type, fn) {
hooks(type, fn);
};
/**
* Remove a hook from a specific videojs lifecycle.
*
* @param {string} type
* the lifecycle that the function was hooked to
*
* @param {Function} fn
* The hooked function to remove
*
* @return {boolean}
* `true` if the hook was found and removed, `false` otherwise.
*/
const removeHook = function (type, fn) {
const index = hooks(type).indexOf(fn);
if (index <= -1) {
return false;
}
hooks_[type] = hooks_[type].slice();
hooks_[type].splice(index, 1);
return true;
};
/**
* Add a function hook that will only run once to a specific videojs lifecycle.
*
* @param {string} type
* the lifecycle to hook the function to.
*
* @param {Function|Function[]} fn
* The function or array of functions to attach.
*/
const hookOnce = function (type, fn) {
hooks(type, [].concat(fn).map(original => {
const wrapper = (...args) => {
removeHook(type, wrapper);
return original(...args);
};
return wrapper;
}));
};
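// Usage sketch (illustrative, not part of the library): the hook helpers above
// back the public `videojs.hook`/`videojs.hookOnce`/`videojs.removeHook` API.
// The 'beforesetup' lifecycle name below is an assumed example.
//
//   const logSetup = (videoEl, options) => options;
//   hook('beforesetup', logSetup);       // run on every matching lifecycle trigger
//   hookOnce('beforesetup', logSetup);   // run a single time, then remove itself
//   removeHook('beforesetup', logSetup); // => true if the hook was found and removed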
/**
* @file fullscreen-api.js
* @module fullscreen-api
*/
/**
* Store the browser-specific methods for the fullscreen API.
*
* @type {Object}
* @see [Specification]{@link https://fullscreen.spec.whatwg.org}
* @see [Map Approach From Screenfull.js]{@link https://github.com/sindresorhus/screenfull.js}
*/
const FullscreenApi = {
prefixed: true
};
// browser API methods
const apiMap = [['requestFullscreen', 'exitFullscreen', 'fullscreenElement', 'fullscreenEnabled', 'fullscreenchange', 'fullscreenerror', 'fullscreen'],
// WebKit
['webkitRequestFullscreen', 'webkitExitFullscreen', 'webkitFullscreenElement', 'webkitFullscreenEnabled', 'webkitfullscreenchange', 'webkitfullscreenerror', '-webkit-full-screen']];
const specApi = apiMap[0];
let browserApi;
// determine the supported set of functions
for (let i = 0; i < apiMap.length; i++) {
// check for exitFullscreen function
if (apiMap[i][1] in document$1) {
browserApi = apiMap[i];
break;
}
}
// map the browser API names to the spec API names
if (browserApi) {
for (let i = 0; i < browserApi.length; i++) {
FullscreenApi[specApi[i]] = browserApi[i];
}
FullscreenApi.prefixed = browserApi[0] !== specApi[0];
}
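// Usage sketch (illustrative): after the mapping above, callers can use the
// spec names and get whichever prefixed implementation the browser supports:
//
//   el[FullscreenApi.requestFullscreen]();       // requestFullscreen or webkitRequestFullscreen
//   document$1[FullscreenApi.fullscreenElement]; // the element currently in fullscreen, if any
//   FullscreenApi.prefixed;                      // true when the WebKit-prefixed API was chosen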
/**
* @file create-logger.js
* @module create-logger
*/
// This is the private tracking variable for the logging history.
let history = [];
/**
* Log messages to the console and history based on the type of message
*
* @private
* @param {string} name
* The name of the console method to use.
*
* @param {Object} log
* The log function whose `levels` are used to gate output.
*
* @param {string} [styles]
* CSS styles applied to the logger name via `%c`.
*/
const LogByTypeFactory = (name, log, styles) => (type, level, args) => {
const lvl = log.levels[level];
const lvlRegExp = new RegExp(`^(${lvl})$`);
let resultName = name;
if (type !== 'log') {
// Add the type to the front of the message when it's not "log".
args.unshift(type.toUpperCase() + ':');
}
if (styles) {
resultName = `%c${name}`;
args.unshift(styles);
}
// Add the console prefix to the args; the prefixed args are what get recorded in history below.
args.unshift(resultName + ':');
// Add a clone of the args at this point to history.
if (history) {
history.push([].concat(args));
// only store 1000 history entries
const splice = history.length - 1000;
history.splice(0, splice > 0 ? splice : 0);
}
// If there's no console then don't try to output messages, but they will
// still be stored in history.
if (!window$1.console) {
return;
}
// Was setting these once outside of this function, but containing them
// in the function makes it easier to test cases where console doesn't exist
// when the module is executed.
let fn = window$1.console[type];
if (!fn && type === 'debug') {
// Certain browsers don't have support for console.debug. For those, we
// should default to the closest comparable log.
fn = window$1.console.info || window$1.console.log;
}
// Bail out if there's no console or if this type is not allowed by the
// current logging level.
if (!fn || !lvl || !lvlRegExp.test(type)) {
return;
}
fn[Array.isArray(args) ? 'apply' : 'call'](window$1.console, args);
};
function createLogger$1(name, delimiter = ':', styles = '') {
// This is the private tracking variable for logging level.
let level = 'info';
// the curried logByType bound to the specific log and history
let logByType;
/**
* Logs plain debug messages. Similar to `console.log`.
*
* Due to [limitations](https://github.com/jsdoc3/jsdoc/issues/955#issuecomment-313829149)
* of our JSDoc template, we cannot properly document this as both a function
* and a namespace, so its function signature is documented here.
*
* #### Arguments
* ##### *args
* *[]
*
* Any combination of values that could be passed to `console.log()`.
*
* #### Return Value
*
* `undefined`
*
* @namespace
* @param {...*} args
* One or more messages or objects that should be logged.
*/
function log(...args) {
logByType('log', level, args);
}
// This is the logByType helper that the logging methods below use
logByType = LogByTypeFactory(name, log, styles);
/**
* Create a new subLogger which chains the old name to the new name.
*
* For example, doing `mylogger = videojs.log.createLogger('player')` and then using that logger will log the following:
* ```js
* mylogger('foo');
* // > VIDEOJS: player: foo
* ```
*
* @param {string} subName
* The name for the new sub-logger, appended to the current name
* @param {string} [subDelimiter]
* Optional delimiter
* @param {string} [subStyles]
* Optional styles
* @return {Object}
*/
log.createLogger = (subName, subDelimiter, subStyles) => {
const resultDelimiter = subDelimiter !== undefined ? subDelimiter : delimiter;
const resultStyles = subStyles !== undefined ? subStyles : styles;
const resultName = `${name} ${resultDelimiter} ${subName}`;
return createLogger$1(resultName, resultDelimiter, resultStyles);
};
/**
* Create a new logger.
*
* @param {string} newName
* The name for the new logger
* @param {string} [newDelimiter]
* Optional delimiter
* @param {string} [newStyles]
* Optional styles
* @return {Object}
*/
log.createNewLogger = (newName, newDelimiter, newStyles) => {
return createLogger$1(newName, newDelimiter, newStyles);
};
/**
* Enumeration of available logging levels, where the keys are the level names
* and the values are `|`-separated strings containing logging methods allowed
* in that logging level. These strings are used to create a regular expression
* matching the function name being called.
*
* Levels provided by Video.js are:
*
* - `off`: Matches no calls. Any value that can be cast to `false` will have
* this effect. The most restrictive.
* - `all`: Matches all of the Video.js-provided functions (`log.debug`, `log`,
* `log.warn`, and `log.error`).
* - `debug`: Matches `log.debug`, `log`, `log.warn`, and `log.error` calls.
* - `info` (default): Matches `log`, `log.warn`, and `log.error` calls.
* - `warn`: Matches `log.warn` and `log.error` calls.
* - `error`: Matches only `log.error` calls.
*
* @type {Object}
*/
log.levels = {
all: 'debug|log|warn|error',
off: '',
debug: 'debug|log|warn|error',
info: 'log|warn|error',
warn: 'warn|error',
error: 'error',
DEFAULT: level
};
/**
* Get or set the current logging level.
*
* If a string matching a key from {@link module:log.levels} is provided, acts
* as a setter.
*
* @param {'all'|'debug'|'info'|'warn'|'error'|'off'} [lvl]
* Pass a valid level to set a new logging level.
*
* @return {string}
* The current logging level.
*/
log.level = lvl => {
if (typeof lvl === 'string') {
if (!log.levels.hasOwnProperty(lvl)) {
throw new Error(`"${lvl}" is not a valid log level`);
}
level = lvl;
}
return level;
};
/**
* Returns an array containing everything that has been logged to the history.
*
* This array is a shallow clone of the internal history record. However, its
* contents are _not_ cloned; so, mutating objects inside this array will
* mutate them in history.
*
* @return {Array}
*/
log.history = () => history ? [].concat(history) : [];
/**
* Allows you to filter the history by the given logger name
*
* @param {string} fname
* The name to filter by
*
* @return {Array}
* The filtered list to return
*/
log.history.filter = fname => {
return (history || []).filter(historyItem => {
// if the first item in each historyItem includes `fname`, then it's a match
return new RegExp(`.*${fname}.*`).test(historyItem[0]);
});
};
/**
* Clears the internal history tracking, but does not prevent further history
* tracking.
*/
log.history.clear = () => {
if (history) {
history.length = 0;
}
};
/**
* Disable history tracking if it is currently enabled.
*/
log.history.disable = () => {
if (history !== null) {
history.length = 0;
history = null;
}
};
/**
* Enable history tracking if it is currently disabled.
*/
log.history.enable = () => {
if (history === null) {
history = [];
}
};
/**
* Logs error messages. Similar to `console.error`.
*
* @param {...*} args
* One or more messages or objects that should be logged as an error
*/
log.error = (...args) => logByType('error', level, args);
/**
* Logs warning messages. Similar to `console.warn`.
*
* @param {...*} args
* One or more messages or objects that should be logged as a warning.
*/
log.warn = (...args) => logByType('warn', level, args);
/**
* Logs debug messages. Similar to `console.debug`, but falls back to a
* comparable console method if `console.debug` is not available.
*
* @param {...*} args
* One or more messages or objects that should be logged as debug.
*/
log.debug = (...args) => logByType('debug', level, args);
return log;
}
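// Usage sketch (illustrative) for loggers produced by createLogger$1:
//
//   const mylog = createLogger$1('VIDEOJS');
//   mylog.level('debug');            // allow debug|log|warn|error output
//   mylog('loading source');         // logged via console.log with a "VIDEOJS:" prefix
//   const sub = mylog.createLogger('player');
//   sub.warn('playback stalled');    // prefixed with the chained name and "WARN:"
//   mylog.history().length;          // entries recorded while history tracking is on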
/**
* @file log.js
* @module log
*/
const log$1 = createLogger$1('VIDEOJS');
const createLogger = log$1.createLogger;
/**
* @file obj.js
* @module obj
*/
/**
* @callback obj:EachCallback
*
* @param {*} value
* The value for the current key of the object being iterated over.
*
* @param {string} key
* The current key of the object being iterated over.
*/
/**
* @callback obj:ReduceCallback
*
* @param {*} accum
* The value that is accumulating over the reduce loop.
*
* @param {*} value
* The value for the current key of the object being iterated over.
*
* @param {string} key
* The current key of the object being iterated over.
*
* @return {*}
* The new accumulated value.
*/
const toString = Object.prototype.toString;
/**
* Get the keys of an Object
*
* @param {Object} object
* The Object to get the keys from
*
* @return {string[]}
* An array of the keys from the object. Returns an empty array if the
* object passed in was invalid or had no keys.
*
* @private
*/
const keys = function (object) {
return isObject(object) ? Object.keys(object) : [];
};
/**
* Array-like iteration for objects.
*
* @param {Object} object
* The object to iterate over
*
* @param {obj:EachCallback} fn
* The callback function which is called for each key in the object.
*/
function each(object, fn) {
keys(object).forEach(key => fn(object[key], key));
}
/**
* Array-like reduce for objects.
*
* @param {Object} object
* The Object that you want to reduce.
*
* @param {Function} fn
* A callback function which is called for each key in the object. It
* receives the accumulated value and the per-iteration value and key
* as arguments.
*
* @param {*} [initial = 0]
* Starting value
*
* @return {*}
* The final accumulated value.
*/
function reduce(object, fn, initial = 0) {
return keys(object).reduce((accum, key) => fn(accum, object[key], key), initial);
}
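// Usage sketch (illustrative) for each/reduce over plain objects:
//
//   each({a: 1, b: 2}, (value, key) => console.log(key, value));
//   reduce({a: 1, b: 2}, (sum, value) => sum + value, 0); // => 3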
/**
* Returns whether a value is an object of any kind - including DOM nodes,
* arrays, regular expressions, etc. Not functions, though.
*
* This avoids the gotcha where using `typeof` on a `null` value
* results in `'object'`.
*
* @param {Object} value
* @return {boolean}
*/
function isObject(value) {
return !!value && typeof value === 'object';
}
/**
* Returns whether an object appears to be a "plain" object - that is, a
* direct instance of `Object`.
*
* @param {Object} value
* @return {boolean}
*/
function isPlain(value) {
return isObject(value) && toString.call(value) === '[object Object]' && value.constructor === Object;
}
/**
* Merge two objects recursively.
*
* Performs a deep merge like
* {@link https://lodash.com/docs/4.17.10#merge|lodash.merge}, but only merges
* plain objects (not arrays, elements, or anything else).
*
* Non-plain object values will be copied directly from the right-most
* argument.
*
* @param {Object[]} sources
* One or more objects to merge into a new object.
*
* @return {Object}
* A new object that is the merged result of all sources.
*/
function merge$1(...sources) {
const result = {};
sources.forEach(source => {
if (!source) {
return;
}
each(source, (value, key) => {
if (!isPlain(value)) {
result[key] = value;
return;
}
if (!isPlain(result[key])) {
result[key] = {};
}
result[key] = merge$1(result[key], value);
});
});
return result;
}
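// Behavior sketch (illustrative) for merge$1: only plain objects are merged
// deeply; arrays, elements, and other values are copied from the right-most source.
//
//   merge$1({a: {b: 1}, list: [1]}, {a: {c: 2}, list: [2]});
//   // => {a: {b: 1, c: 2}, list: [2]}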
/**
* Returns an array of values for a given object
*
* @param {Object} source - target object
* @return {Array} - object values
*/
function values(source = {}) {
const result = [];
for (const key in source) {
if (source.hasOwnProperty(key)) {
const value = source[key];
result.push(value);
}
}
return result;
}
/**
* Object.defineProperty but "lazy", which means that the value is only set after
* it is retrieved the first time, rather than being set right away.
*
* @param {Object} obj the object to set the property on
* @param {string} key the key for the property to set
* @param {Function} getValue the function used to get the value when it is needed.
* @param {boolean} [setter=true] whether a setter should be allowed or not
*/
function defineLazyProperty(obj, key, getValue, setter = true) {
const set = value => Object.defineProperty(obj, key, {
value,
enumerable: true,
writable: true
});
const options = {
configurable: true,
enumerable: true,
get() {
const value = getValue();
set(value);
return value;
}
};
if (setter) {
options.set = set;
}
return Object.defineProperty(obj, key, options);
}
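// Usage sketch (illustrative) for defineLazyProperty: the getter runs only on
// first access, after which the computed value is stored as a plain writable property.
//
//   const cache = {};
//   defineLazyProperty(cache, 'ua', () => window$1.navigator.userAgent);
//   cache.ua; // getValue() runs here and the result is cached
//   cache.ua; // plain property read from now on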
var Obj = /*#__PURE__*/Object.freeze({
__proto__: null,
each: each,
reduce: reduce,
isObject: isObject,
isPlain: isPlain,
merge: merge$1,
values: values,
defineLazyProperty: defineLazyProperty
});
/**
* @file browser.js
* @module browser
*/
/**
* Whether or not this device is an iPod.
*
* @static
* @type {Boolean}
*/
let IS_IPOD = false;
/**
* The detected iOS version - or `null`.
*
* @static
* @type {string|null}
*/
let IOS_VERSION = null;
/**
* Whether or not this is an Android device.
*
* @static
* @type {Boolean}
*/
let IS_ANDROID = false;
/**
* The detected Android version - or `null` if not Android or indeterminable.
*
* @static
* @type {number|string|null}
*/
let ANDROID_VERSION;
/**
* Whether or not this is Mozilla Firefox.
*
* @static
* @type {Boolean}
*/
let IS_FIREFOX = false;
/**
* Whether or not this is Microsoft Edge.
*
* @static
* @type {Boolean}
*/
let IS_EDGE = false;
/**
* Whether or not this is any Chromium Browser
*
* @static
* @type {Boolean}
*/
let IS_CHROMIUM = false;
/**
* Whether or not this is any Chromium browser that is not Edge.
*
* This will also be `true` for Chrome on iOS, which will have different support
* as it is actually Safari under the hood.
*
* Deprecated, as the behaviour to not match Edge was to prevent Legacy Edge's UA matching.
* IS_CHROMIUM should be used instead.
* "Chromium but not Edge" could be explicitly tested with IS_CHROMIUM && !IS_EDGE
*
* @static
* @deprecated
* @type {Boolean}
*/
let IS_CHROME = false;
/**
* The detected Chromium version - or `null`.
*
* @static
* @type {number|null}
*/
let CHROMIUM_VERSION = null;
/**
* The detected Google Chrome version - or `null`.
* This has always been the _Chromium_ version, i.e. it is also set on Chromium-based Edge.
* Deprecated, use CHROMIUM_VERSION instead.
*
* @static
* @deprecated
* @type {number|null}
*/
let CHROME_VERSION = null;
/**
* Whether or not this is a Chromecast receiver application.
*
* @static
* @type {Boolean}
*/
const IS_CHROMECAST_RECEIVER = Boolean(window$1.cast && window$1.cast.framework && window$1.cast.framework.CastReceiverContext);
/**
* The detected Internet Explorer version - or `null`.
*
* @static
* @deprecated
* @type {number|null}
*/
let IE_VERSION = null;
/**
* Whether or not this is desktop Safari.
*
* @static
* @type {Boolean}
*/
let IS_SAFARI = false;
/**
* Whether or not this is a Windows machine.
*
* @static
* @type {Boolean}
*/
let IS_WINDOWS = false;
/**
* Whether or not this device is an iPad.
*
* @static
* @type {Boolean}
*/
let IS_IPAD = false;
/**
* Whether or not this device is an iPhone.
*
* @static
* @type {Boolean}
*/
// The Facebook app's UIWebView identifies as both an iPhone and iPad, so
// to identify iPhones, we need to exclude iPads.
// http://artsy.github.io/blog/2012/10/18/the-perils-of-ios-user-agent-sniffing/
let IS_IPHONE = false;
/**
* Whether or not this is a Tizen device.
*
* @static
* @type {Boolean}
*/
let IS_TIZEN = false;
/**
* Whether or not this is a WebOS device.
*
* @static
* @type {Boolean}
*/
let IS_WEBOS = false;
/**
* Whether or not this is a Smart TV (Tizen or WebOS) device.
*
* @static
* @type {Boolean}
*/
let IS_SMART_TV = false;
/**
* Whether or not this device is touch-enabled.
*
* @static
* @const
* @type {Boolean}
*/
const TOUCH_ENABLED = Boolean(isReal() && ('ontouchstart' in window$1 || window$1.navigator.maxTouchPoints || window$1.DocumentTouch && window$1.document instanceof window$1.DocumentTouch));
const UAD = window$1.navigator && window$1.navigator.userAgentData;
if (UAD && UAD.platform && UAD.brands) {
// If userAgentData is present, use it instead of userAgent to avoid warnings
// Currently only implemented on Chromium
// userAgentData does not expose Android version, so ANDROID_VERSION remains `null`
IS_ANDROID = UAD.platform === 'Android';
IS_EDGE = Boolean(UAD.brands.find(b => b.brand === 'Microsoft Edge'));
IS_CHROMIUM = Boolean(UAD.brands.find(b => b.brand === 'Chromium'));
IS_CHROME = !IS_EDGE && IS_CHROMIUM;
CHROMIUM_VERSION = CHROME_VERSION = (UAD.brands.find(b => b.brand === 'Chromium') || {}).version || null;
IS_WINDOWS = UAD.platform === 'Windows';
}
// If the browser is not Chromium, either userAgentData is not present (which could be an old Chromium browser),
// or it's a browser that has since added userAgentData and that we don't have checks for yet. In either case,
// the checks need to be made against the regular userAgent string.
if (!IS_CHROMIUM) {
const USER_AGENT = window$1.navigator && window$1.navigator.userAgent || '';
IS_IPOD = /iPod/i.test(USER_AGENT);
IOS_VERSION = function () {
const match = USER_AGENT.match(/OS (\d+)_/i);
if (match && match[1]) {
return match[1];
}
return null;
}();
IS_ANDROID = /Android/i.test(USER_AGENT);
ANDROID_VERSION = function () {
// This matches Android Major.Minor.Patch versions
// ANDROID_VERSION is Major.Minor as a Number, if Minor isn't available, then only Major is returned
const match = USER_AGENT.match(/Android (\d+)(?:\.(\d+))?(?:\.(\d+))*/i);
if (!match) {
return null;
}
const major = match[1] && parseFloat(match[1]);
const minor = match[2] && parseFloat(match[2]);
if (major && minor) {
return parseFloat(match[1] + '.' + match[2]);
} else if (major) {
return major;
}
return null;
}();
IS_FIREFOX = /Firefox/i.test(USER_AGENT);
IS_EDGE = /Edg/i.test(USER_AGENT);
IS_CHROMIUM = /Chrome/i.test(USER_AGENT) || /CriOS/i.test(USER_AGENT);
IS_CHROME = !IS_EDGE && IS_CHROMIUM;
CHROMIUM_VERSION = CHROME_VERSION = function () {
const match = USER_AGENT.match(/(Chrome|CriOS)\/(\d+)/);
if (match && match[2]) {
return parseFloat(match[2]);
}
return null;
}();
IE_VERSION = function () {
const result = /MSIE\s(\d+)\.\d/.exec(USER_AGENT);
let version = result && parseFloat(result[1]);
if (!version && /Trident\/7.0/i.test(USER_AGENT) && /rv:11.0/.test(USER_AGENT)) {
// IE 11 has a different user agent string than other IE versions
version = 11.0;
}
return version;
}();
IS_TIZEN = /Tizen/i.test(USER_AGENT);
IS_WEBOS = /Web0S/i.test(USER_AGENT);
IS_SMART_TV = IS_TIZEN || IS_WEBOS;
IS_SAFARI = /Safari/i.test(USER_AGENT) && !IS_CHROME && !IS_ANDROID && !IS_EDGE && !IS_SMART_TV;
IS_WINDOWS = /Windows/i.test(USER_AGENT);
IS_IPAD = /iPad/i.test(USER_AGENT) || IS_SAFARI && TOUCH_ENABLED && !/iPhone/i.test(USER_AGENT);
IS_IPHONE = /iPhone/i.test(USER_AGENT) && !IS_IPAD;
}
/**
* Whether or not this is an iOS device.
*
* @static
* @const
* @type {Boolean}
*/
const IS_IOS = IS_IPHONE || IS_IPAD || IS_IPOD;
/**
* Whether or not this is any flavor of Safari - including iOS.
*
* @static
* @const
* @type {Boolean}
*/
const IS_ANY_SAFARI = (IS_SAFARI || IS_IOS) && !IS_CHROME;
var browser = /*#__PURE__*/Object.freeze({
__proto__: null,
get IS_IPOD () { return IS_IPOD; },
get IOS_VERSION () { return IOS_VERSION; },
get IS_ANDROID () { return IS_ANDROID; },
get ANDROID_VERSION () { return ANDROID_VERSION; },
get IS_FIREFOX () { return IS_FIREFOX; },
get IS_EDGE () { return IS_EDGE; },
get IS_CHROMIUM () { return IS_CHROMIUM; },
get IS_CHROME () { return IS_CHROME; },
get CHROMIUM_VERSION () { return CHROMIUM_VERSION; },
get CHROME_VERSION () { return CHROME_VERSION; },
IS_CHROMECAST_RECEIVER: IS_CHROMECAST_RECEIVER,
get IE_VERSION () { return IE_VERSION; },
get IS_SAFARI () { return IS_SAFARI; },
get IS_WINDOWS () { return IS_WINDOWS; },
get IS_IPAD () { return IS_IPAD; },
get IS_IPHONE () { return IS_IPHONE; },
get IS_TIZEN () { return IS_TIZEN; },
get IS_WEBOS () { return IS_WEBOS; },
get IS_SMART_TV () { return IS_SMART_TV; },
TOUCH_ENABLED: TOUCH_ENABLED,
IS_IOS: IS_IOS,
IS_ANY_SAFARI: IS_ANY_SAFARI
});
/**
* @file dom.js
* @module dom
*/
/**
* Detect if a value is a string with any non-whitespace characters.
*
* @private
* @param {string} str
* The string to check
*
* @return {boolean}
* Will be `true` if the string is non-blank, `false` otherwise.
*
*/
function isNonBlankString(str) {
// We use str.trim as it removes whitespace from the front and back of the string.
// Any string that contains non-whitespace characters will still contain them
// after `trim`, but whitespace-only strings will have a length of 0 and fail
// this check.
return typeof str === 'string' && Boolean(str.trim());
}
/**
* Throws an error if the passed string has whitespace. This is used by
* class methods to be relatively consistent with the classList API.
*
* @private
* @param {string} str
* The string to check for whitespace.
*
* @throws {Error}
* Throws an error if there is whitespace in the string.
*/
function throwIfWhitespace(str) {
// str.indexOf instead of regex because str.indexOf is faster performance wise.
if (str.indexOf(' ') >= 0) {
throw new Error('class has illegal whitespace characters');
}
}
/**
* Whether the current DOM interface appears to be real (i.e. not simulated).
*
* @return {boolean}
* Will be `true` if the DOM appears to be real, `false` otherwise.
*/
function isReal() {
// Both document and window will never be undefined thanks to `global`.
return document$1 === window$1.document;
}
/**
* Determines, via duck typing, whether or not a value is a DOM element.
*
* @param {*} value
* The value to check.
*
* @return {boolean}
* Will be `true` if the value is a DOM element, `false` otherwise.
*/
function isEl(value) {
return isObject(value) && value.nodeType === 1;
}
/**
* Determines if the current DOM is embedded in an iframe.
*
* @return {boolean}
* Will be `true` if the DOM is embedded in an iframe, `false`
* otherwise.
*/
function isInFrame() {
// We need a try/catch here because Safari will throw errors when attempting
// to get either `parent` or `self`
try {
return window$1.parent !== window$1.self;
} catch (x) {
return true;
}
}
/**
* Creates functions to query the DOM using a given method.
*
* @private
* @param {string} method
* The method to create the query with.
*
* @return {Function}
* The query method
*/
function createQuerier(method) {
return function (selector, context) {
if (!isNonBlankString(selector)) {
return document$1[method](null);
}
if (isNonBlankString(context)) {
context = document$1.querySelector(context);
}
const ctx = isEl(context) ? context : document$1;
return ctx[method] && ctx[method](selector);
};
}
/**
* Creates an element and applies properties, attributes, and inserts content.
*
* @param {string} [tagName='div']
* Name of tag to be created.
*
* @param {Object} [properties={}]
* Element properties to be applied.
*
* @param {Object} [attributes={}]
* Element attributes to be applied.
*
* @param {ContentDescriptor} [content]
* A content descriptor object.
*
* @return {Element}
* The element that was created.
*/
function createEl(tagName = 'div', properties = {}, attributes = {}, content) {
const el = document$1.createElement(tagName);
Object.getOwnPropertyNames(properties).forEach(function (propName) {
const val = properties[propName];
// Handle textContent since it's not supported everywhere and we have a
// method for it.
if (propName === 'textContent') {
textContent(el, val);
} else if (el[propName] !== val || propName === 'tabIndex') {
el[propName] = val;
}
});
Object.getOwnPropertyNames(attributes).forEach(function (attrName) {
el.setAttribute(attrName, attributes[attrName]);
});
if (content) {
appendContent(el, content);
}
return el;
}
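// Usage sketch (illustrative) for createEl: properties are assigned directly,
// attributes go through setAttribute, and content is normalized and appended.
// The class name and label below are made-up examples.
//
//   const btn = createEl('button',
//     {className: 'my-play-button', textContent: 'Play'},
//     {type: 'button', 'aria-label': 'Play'});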
/**
* Injects text into an element, replacing any existing contents entirely.
*
* @param {HTMLElement} el
* The element to add text content into
*
* @param {string} text
* The text content to add.
*
* @return {Element}
* The element with added text content.
*/
function textContent(el, text) {
if (typeof el.textContent === 'undefined') {
el.innerText = text;
} else {
el.textContent = text;
}
return el;
}
/**
* Insert an element as the first child node of another
*
* @param {Element} child
* Element to insert
*
* @param {Element} parent
* Element to insert child into
*/
function prependTo(child, parent) {
if (parent.firstChild) {
parent.insertBefore(child, parent.firstChild);
} else {
parent.appendChild(child);
}
}
/**
* Check if an element has a class name.
*
* @param {Element} element
* Element to check
*
* @param {string} classToCheck
* Class name to check for
*
* @return {boolean}
* Will be `true` if the element has a class, `false` otherwise.
*
* @throws {Error}
* Throws an error if `classToCheck` has white space.
*/
function hasClass(element, classToCheck) {
throwIfWhitespace(classToCheck);
return element.classList.contains(classToCheck);
}
/**
* Add a class name to an element.
*
* @param {Element} element
* Element to add class name to.
*
* @param {...string} classesToAdd
* One or more class name to add.
*
* @return {Element}
* The DOM element with the added class name.
*/
function addClass(element, ...classesToAdd) {
element.classList.add(...classesToAdd.reduce((prev, current) => prev.concat(current.split(/\s+/)), []));
return element;
}
/**
* Remove a class name from an element.
*
* @param {Element} element
* Element to remove a class name from.
*
* @param {...string} classesToRemove
* One or more class name to remove.
*
* @return {Element}
* The DOM element with class name removed.
*/
function removeClass(element, ...classesToRemove) {
// Protect in case the player gets disposed
if (!element) {
log$1.warn("removeClass was called with an element that doesn't exist");
return null;
}
element.classList.remove(...classesToRemove.reduce((prev, current) => prev.concat(current.split(/\s+/)), []));
return element;
}
/**
* The callback definition for toggleClass.
*
* @callback PredicateCallback
* @param {Element} element
* The DOM element of the Component.
*
* @param {string} classToToggle
* The `className` that wants to be toggled
*
* @return {boolean|undefined}
* If `true` is returned, the `classToToggle` will be added to the
* `element`, but not removed. If `false`, the `classToToggle` will be removed from
* the `element`, but not added. If `undefined`, the callback will be ignored.
*
*/
/**
* Adds or removes a class name to/from an element depending on an optional
* condition or the presence/absence of the class name.
*
* @param {Element} element
* The element to toggle a class name on.
*
* @param {string} classToToggle
* The class that should be toggled.
*
* @param {boolean|PredicateCallback} [predicate]
* See the return value for {@link module:dom~PredicateCallback}
*
* @return {Element}
* The element with a class that has been toggled.
*/
function toggleClass(element, classToToggle, predicate) {
if (typeof predicate === 'function') {
predicate = predicate(element, classToToggle);
}
if (typeof predicate !== 'boolean') {
predicate = undefined;
}
classToToggle.split(/\s+/).forEach(className => element.classList.toggle(className, predicate));
return element;
}
/**
* Apply attributes to an HTML element.
*
* @param {Element} el
* Element to add attributes to.
*
* @param {Object} [attributes]
* Attributes to be applied.
*/
function setAttributes(el, attributes) {
Object.getOwnPropertyNames(attributes).forEach(function (attrName) {
const attrValue = attributes[attrName];
if (attrValue === null || typeof attrValue === 'undefined' || attrValue === false) {
el.removeAttribute(attrName);
} else {
el.setAttribute(attrName, attrValue === true ? '' : attrValue);
}
});
}
/**
* Get an element's attribute values, as defined on the HTML tag.
*
* Attributes are not the same as properties. They're defined on the tag
* or with setAttribute.
*
* @param {Element} tag
* Element from which to get tag attributes.
*
* @return {Object}
* All attributes of the element. Boolean attributes will be `true` or
* `false`, others will be strings.
*/
function getAttributes(tag) {
const obj = {};
// known boolean attributes
// we can check for matching boolean properties, but not all browsers
// and not all tags know about these attributes, so, we still want to check them manually
const knownBooleans = ['autoplay', 'controls', 'playsinline', 'loop', 'muted', 'default', 'defaultMuted'];
if (tag && tag.attributes && tag.attributes.length > 0) {
const attrs = tag.attributes;
for (let i = attrs.length - 1; i >= 0; i--) {
const attrName = attrs[i].name;
/** @type {boolean|string} */
let attrVal = attrs[i].value;
// check for known booleans
// the matching element property will return a value for typeof
if (knownBooleans.includes(attrName)) {
// the value of an included boolean attribute is typically an empty
// string ('') which would equal false if we just check for a false value.
// we also don't want to support bad code like autoplay='false'
attrVal = attrVal !== null ? true : false;
}
obj[attrName] = attrVal;
}
}
return obj;
}
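// Behavior sketch (illustrative) for getAttributes: given
// <video controls autoplay="false" id="v">, it returns
// {controls: true, autoplay: true, id: 'v'}; any known boolean attribute that
// is present becomes `true`, regardless of its written value.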
/**
* Get the value of an element's attribute.
*
* @param {Element} el
* A DOM element.
*
* @param {string} attribute
* Attribute to get the value of.
*
* @return {string}
* The value of the attribute.
*/
function getAttribute(el, attribute) {
return el.getAttribute(attribute);
}
/**
* Set the value of an element's attribute.
*
* @param {Element} el
* A DOM element.
*
* @param {string} attribute
* Attribute to set.
*
* @param {string} value
* Value to set the attribute to.
*/
function setAttribute(el, attribute, value) {
el.setAttribute(attribute, value);
}
/**
* Remove an element's attribute.
*
* @param {Element} el
* A DOM element.
*
* @param {string} attribute
* Attribute to remove.
*/
function removeAttribute(el, attribute) {
el.removeAttribute(attribute);
}
/**
* Attempt to block the ability to select text.
*/
function blockTextSelection() {
document$1.body.focus();
document$1.onselectstart = function () {
return false;
};
}
/**
* Turn off text selection blocking.
*/
function unblockTextSelection() {
document$1.onselectstart = function () {
return true;
};
}
/**
* Identical to the native `getBoundingClientRect` function, but ensures that
* the method is supported at all (it is in all browsers we claim to support)
* and that the element is in the DOM before continuing.
*
* This wrapper function also shims properties which are not provided by some
* older browsers (namely, IE8).
*
* Additionally, some browsers do not support adding properties to a
* `ClientRect`/`DOMRect` object; so, we shallow-copy it with the standard
* properties (except `x` and `y` which are not widely supported). This helps
* avoid implementations where keys are non-enumerable.
*
* @param {Element} el
* Element whose `ClientRect` we want to calculate.
*
* @return {Object|undefined}
* Always returns a plain object - or `undefined` if it cannot.
*/
function getBoundingClientRect(el) {
if (el && el.getBoundingClientRect && el.parentNode) {
const rect = el.getBoundingClientRect();
const result = {};
['bottom', 'height', 'left', 'right', 'top', 'width'].forEach(k => {
if (rect[k] !== undefined) {
result[k] = rect[k];
}
});
if (!result.height) {
result.height = parseFloat(computedStyle(el, 'height'));
}
if (!result.width) {
result.width = parseFloat(computedStyle(el, 'width'));
}
return result;
}
}
/**
* Represents the position of a DOM element on the page.
*
* @typedef {Object} module:dom~Position
*
* @property {number} left
* Pixels to the left.
*
* @property {number} top
* Pixels from the top.
*/
/**
* Get the position of an element in the DOM.
*
* Uses `getBoundingClientRect` technique from John Resig.
*
* @see http://ejohn.org/blog/getboundingclientrect-is-awesome/
*
* @param {Element} el
* Element from which to get offset.
*
* @return {module:dom~Position}
* The position of the element that was passed in.
*/
function findPosition(el) {
if (!el || el && !el.offsetParent) {
return {
left: 0,
top: 0,
width: 0,
height: 0
};
}
const width = el.offsetWidth;
const height = el.offsetHeight;
let left = 0;
let top = 0;
while (el.offsetParent && el !== document$1[FullscreenApi.fullscreenElement]) {
left += el.offsetLeft;
top += el.offsetTop;
el = el.offsetParent;
}
return {
left,
top,
width,
height
};
}
/**
* Represents x and y coordinates for a DOM element or mouse pointer.
*
* @typedef {Object} module:dom~Coordinates
*
* @property {number} x
* x coordinate in pixels
*
* @property {number} y
* y coordinate in pixels
*/
/**
* Get the pointer position within an element.
*
* The coordinates are relative to the bottom-left corner of the element.
*
* @param {Element} el
* Element on which to get the pointer position on.
*
* @param {Event} event
* Event object.
*
* @return {module:dom~Coordinates}
* A coordinates object corresponding to the mouse position.
*
*/
function getPointerPosition(el, event) {
const translated = {
x: 0,
y: 0
};
if (IS_IOS) {
let item = el;
while (item && item.nodeName.toLowerCase() !== 'html') {
const transform = computedStyle(item, 'transform');
if (/^matrix/.test(transform)) {
const values = transform.slice(7, -1).split(/,\s/).map(Number);
translated.x += values[4];
translated.y += values[5];
} else if (/^matrix3d/.test(transform)) {
const values = transform.slice(9, -1).split(/,\s/).map(Number);
translated.x += values[12];
translated.y += values[13];
}
if (item.assignedSlot && item.assignedSlot.parentElement && window$1.WebKitCSSMatrix) {
const transformValue = window$1.getComputedStyle(item.assignedSlot.parentElement).transform;
const matrix = new window$1.WebKitCSSMatrix(transformValue);
translated.x += matrix.m41;
translated.y += matrix.m42;
}
item = item.parentNode || item.host;
}
}
const position = {};
const boxTarget = findPosition(event.target);
const box = findPosition(el);
const boxW = box.width;
const boxH = box.height;
let offsetY = event.offsetY - (box.top - boxTarget.top);
let offsetX = event.offsetX - (box.left - boxTarget.left);
if (event.changedTouches) {
offsetX = event.changedTouches[0].pageX - box.left;
offsetY = event.changedTouches[0].pageY + box.top;
if (IS_IOS) {
offsetX -= translated.x;
offsetY -= translated.y;
}
}
position.y = 1 - Math.max(0, Math.min(1, offsetY / boxH));
position.x = Math.max(0, Math.min(1, offsetX / boxW));
return position;
}
/**
* Determines, via duck typing, whether or not a value is a text node.
*
* @param {*} value
* Check if this value is a text node.
*
* @return {boolean}
* Will be `true` if the value is a text node, `false` otherwise.
*/
function isTextNode(value) {
return isObject(value) && value.nodeType === 3;
}
/**
* Empties the contents of an element.
*
* @param {Element} el
* The element to empty children from
*
* @return {Element}
* The element with no children
*/
function emptyEl(el) {
while (el.firstChild) {
el.removeChild(el.firstChild);
}
return el;
}
/**
* This is a mixed value that describes content to be injected into the DOM
* via some method. It can be of the following types:
*
* Type | Description
* -----------|-------------
* `string` | The value will be normalized into a text node.
* `Element` | The value will be accepted as-is.
* `Text` | A TextNode. The value will be accepted as-is.
* `Array` | A one-dimensional array of strings, elements, text nodes, or functions. These functions should return a string, element, or text node (any other return value, like an array, will be ignored).
* `Function` | A function, which is expected to return a string, element, text node, or array - any of the other possible values described above. This means that a content descriptor could be a function that returns an array of functions, but those second-level functions must return strings, elements, or text nodes.
*
* @typedef {string|Element|Text|Array|Function} ContentDescriptor
*/
/**
* Normalizes content for eventual insertion into the DOM.
*
* This allows a wide range of content definition methods, but helps protect
* from falling into the trap of simply writing to `innerHTML`, which could
* be an XSS concern.
*
* The content for an element can be passed in multiple types and
* combinations, as described by {@link module:dom~ContentDescriptor}.
*
* @param {ContentDescriptor} content
* A content descriptor value.
*
* @return {Array}
* All of the content that was passed in, normalized to an array of
* elements or text nodes.
*/
function normalizeContent(content) {
// First, invoke content if it is a function. If it produces an array,
// that needs to happen before normalization.
if (typeof content === 'function') {
content = content();
}
// Next up, normalize to an array, so one or many items can be normalized,
// filtered, and returned.
return (Array.isArray(content) ? content : [content]).map(value => {
// First, invoke value if it is a function to produce a new value,
// which will be subsequently normalized to a Node of some kind.
if (typeof value === 'function') {
value = value();
}
if (isEl(value) || isTextNode(value)) {
return value;
}
if (typeof value === 'string' && /\S/.test(value)) {
return document$1.createTextNode(value);
}
}).filter(value => value);
}
/**
* Normalizes and appends content to an element.
*
* @param {Element} el
* Element to append normalized content to.
*
* @param {ContentDescriptor} content
* A content descriptor value.
*
* @return {Element}
* The element with appended normalized content.
*/
function appendContent(el, content) {
normalizeContent(content).forEach(node => el.appendChild(node));
return el;
}
/**
* Normalizes and inserts content into an element; this is identical to
* `appendContent()`, except it empties the element first.
*
* @param {Element} el
* Element to insert normalized content into.
*
* @param {ContentDescriptor} content
* A content descriptor value.
*
* @return {Element}
* The element with inserted normalized content.
*/
function insertContent(el, content) {
return appendContent(emptyEl(el), content);
}
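// Usage sketch (illustrative) for content descriptors with appendContent and
// insertContent: strings, elements, text nodes, and functions returning any of
// those may be mixed in an array. `el` is an assumed existing element and the
// strong element is a made-up example.
//
//   insertContent(el, ['Now playing: ', () => createEl('strong', {textContent: 'Episode 1'})]);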
/**
* Check if an event was a single left click.
*
* @param {MouseEvent} event
* Event object.
*
* @return {boolean}
* Will be `true` if a single left click, `false` otherwise.
*/
function isSingleLeftClick(event) {
// Note: if you create something draggable, be sure to
// call it on both `mousedown` and `mousemove` event,
// otherwise `mousedown` should be enough for a button
if (event.button === undefined && event.buttons === undefined) {
// Why do we need `buttons`?
// Because a middle-mouse click can report e.button === 0 and e.buttons === 4.
// Furthermore, we want to prevent combination clicks, e.g. holding the middle
// button while left-clicking, which reports e.button === 0, e.buttons === 5,
// so checking `button` alone is not enough.
// So what does this block do? When neither property is defined (e.g. Chrome's
// "simulate mobile devices" mode), we treat the event as a left click, since
// we want to support that case as well.
return true;
}
if (event.button === 0 && event.buttons === undefined) {
// Touch screens: on some specific devices, `buttons` is undefined
// (Safari on iOS, BlackBerry...)
return true;
}
// `mouseup` event on a single left click has
// `button` and `buttons` equal to 0
if (event.type === 'mouseup' && event.button === 0 && event.buttons === 0) {
return true;
}
// MacOS Sonoma trackpad when "tap to click enabled"
if (event.type === 'mousedown' && event.button === 0 && event.buttons === 0) {
return true;
}
if (event.button !== 0 || event.buttons !== 1) {
// This is the reason for the special-case blocks above:
// any case we can catch and let slide is handled there.
// If we get to here, this is definitely not a single left click.
return false;
}
return true;
}
/**
* Finds a single DOM element matching `selector` within the optional
* `context` of another DOM element (defaulting to `document`).
*
* @param {string} selector
* A valid CSS selector, which will be passed to `querySelector`.
*
* @param {Element|String} [context=document]
* A DOM element within which to query. Can also be a selector
* string in which case the first matching element will be used
* as context. If missing (or no element matches selector), falls
* back to `document`.
*
* @return {Element|null}
* The element that was found or null.
*/
const $ = createQuerier('querySelector');
/**
* Finds all DOM elements matching `selector` within the optional
* `context` of another DOM element (defaulting to `document`).
*
* @param {string} selector
* A valid CSS selector, which will be passed to `querySelectorAll`.
*
* @param {Element|String} [context=document]
* A DOM element within which to query. Can also be a selector
* string in which case the first matching element will be used
* as context. If missing (or no element matches selector), falls
* back to `document`.
*
* @return {NodeList}
* A list of the elements that were found. Will be empty if none
* were found.
*
*/
const $$ = createQuerier('querySelectorAll');
/**
* A safe getComputedStyle.
*
* This is needed because in Firefox, if the player is loaded in an iframe with
* `display:none`, then `getComputedStyle` returns `null`, so we do a
* null-check to make sure that the player doesn't break in these cases.
*
* @param {Element} el
* The element you want the computed style of
*
* @param {string} prop
* The property name you want
*
* @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
*/
function computedStyle(el, prop) {
if (!el || !prop) {
return '';
}
if (typeof window$1.getComputedStyle === 'function') {
let computedStyleValue;
try {
computedStyleValue = window$1.getComputedStyle(el);
} catch (e) {
return '';
}
return computedStyleValue ? computedStyleValue.getPropertyValue(prop) || computedStyleValue[prop] : '';
}
return '';
}
/**
* Copy document style sheets to another window.
*
* @param {Window} win
* The window element you want to copy the document style sheets to.
*
*/
function copyStyleSheetsToWindow(win) {
[...document$1.styleSheets].forEach(styleSheet => {
try {
const cssRules = [...styleSheet.cssRules].map(rule => rule.cssText).join('');
const style = document$1.createElement('style');
style.textContent = cssRules;
win.document.head.appendChild(style);
} catch (e) {
const link = document$1.createElement('link');
link.rel = 'stylesheet';
link.type = styleSheet.type;
// For older Safari this has to be the string; on other browsers setting the MediaList works
link.media = styleSheet.media.mediaText;
link.href = styleSheet.href;
win.document.head.appendChild(link);
}
});
}
var Dom = /*#__PURE__*/Object.freeze({
__proto__: null,
isReal: isReal,
isEl: isEl,
isInFrame: isInFrame,
createEl: createEl,
textContent: textContent,
prependTo: prependTo,
hasClass: hasClass,
addClass: addClass,
removeClass: removeClass,
toggleClass: toggleClass,
setAttributes: setAttributes,
getAttributes: getAttributes,
getAttribute: getAttribute,
setAttribute: setAttribute,
removeAttribute: removeAttribute,
blockTextSelection: blockTextSelection,
unblockTextSelection: unblockTextSelection,
getBoundingClientRect: getBoundingClientRect,
findPosition: findPosition,
getPointerPosition: getPointerPosition,
isTextNode: isTextNode,
emptyEl: emptyEl,
normalizeContent: normalizeContent,
appendContent: appendContent,
insertContent: insertContent,
isSingleLeftClick: isSingleLeftClick,
$: $,
$$: $$,
computedStyle: computedStyle,
copyStyleSheetsToWindow: copyStyleSheetsToWindow
});
/**
* @file setup.js - Functions for setting up a player without
* user interaction based on the data-setup `attribute` of the video tag.
*
* @module setup
*/
let _windowLoaded = false;
let videojs$1;
/**
* Set up any tags that have a data-setup `attribute` when the player is started.
*/
const autoSetup = function () {
if (videojs$1.options.autoSetup === false) {
return;
}
const vids = Array.prototype.slice.call(document$1.getElementsByTagName('video'));
const audios = Array.prototype.slice.call(document$1.getElementsByTagName('audio'));
const divs = Array.prototype.slice.call(document$1.getElementsByTagName('video-js'));
const mediaEls = vids.concat(audios, divs);
// Check if any media elements exist
if (mediaEls && mediaEls.length > 0) {
for (let i = 0, e = mediaEls.length; i < e; i++) {
const mediaEl = mediaEls[i];
// Check if element exists, has getAttribute func.
if (mediaEl && mediaEl.getAttribute) {
// Make sure this player hasn't already been set up.
if (mediaEl.player === undefined) {
const options = mediaEl.getAttribute('data-setup');
// Check if data-setup attr exists.
// We only auto-setup if they've added the data-setup attr.
if (options !== null) {
// Create new video.js instance.
videojs$1(mediaEl);
}
}
// If getAttribute isn't defined, we need to wait for the DOM.
} else {
autoSetupTimeout(1);
break;
}
}
// No videos were found, so keep looping unless page is finished loading.
} else if (!_windowLoaded) {
autoSetupTimeout(1);
}
};
/**
* Wait until the page is loaded before running autoSetup. This will be called in
* autoSetup if no media elements have been found yet and the window has not finished loading.
*
* @param {number} wait
* How long to wait in ms
*
* @param {module:videojs} [vjs]
* The videojs library function
*/
function autoSetupTimeout(wait, vjs) {
// Protect against breakage in non-browser environments
if (!isReal()) {
return;
}
if (vjs) {
videojs$1 = vjs;
}
window$1.setTimeout(autoSetup, wait);
}
/**
* Used to set the internal tracking of window loaded state to true.
*
* @private
*/
function setWindowLoaded() {
_windowLoaded = true;
window$1.removeEventListener('load', setWindowLoaded);
}
if (isReal()) {
if (document$1.readyState === 'complete') {
setWindowLoaded();
} else {
/**
* Listen for the load event on window, and set _windowLoaded to true.
*
* We use a standard event listener here to avoid incrementing the GUID
* before any players are created.
*
* @listens load
*/
window$1.addEventListener('load', setWindowLoaded);
}
}
/**
* @file stylesheet.js
* @module stylesheet
*/
/**
* Create a DOM style element given a className for it.
*
* @param {string} className
* The className to add to the created style element.
*
* @return {Element}
* The element that was created.
*/
const createStyleElement = function (className) {
const style = document$1.createElement('style');
style.className = className;
return style;
};
/**
* Add text to a DOM element.
*
* @param {Element} el
* The Element to add text content to.
*
* @param {string} content
* The text to add to the element.
*/
const setTextContent = function (el, content) {
if (el.styleSheet) {
el.styleSheet.cssText = content;
} else {
el.textContent = content;
}
};
/**
* @file dom-data.js
* @module dom-data
*/
/**
* Element Data Store.
*
* Allows for binding data to an element without putting it directly on the
* element. Ex. Event listeners are stored here.
* (also from jsninja.com, slightly modified and updated for closure compiler)
*
* @type {Object}
* @private
*/
var DomData = new WeakMap();
/**
* @file guid.js
* @module guid
*/
// Default value for GUIDs. This allows us to reset the GUID counter in tests.
//
// The initial GUID is 3 because some users have come to rely on the first
// default player ID ending up as `vjs_video_3`.
//
// See: https://github.com/videojs/video.js/pull/6216
const _initialGuid = 3;
/**
* Unique ID for an element or function
*
* @type {Number}
*/
let _guid = _initialGuid;
/**
* Get a unique auto-incrementing ID by number that has not been returned before.
*
* @return {number}
* A new unique ID.
*/
function newGUID() {
return _guid++;
}
/**
* @file events.js. An Event System (John Resig - Secrets of a JS Ninja http://jsninja.com/)
* (Original book version wasn't completely usable, so fixed some things and made Closure Compiler compatible)
* This should work very similarly to jQuery's events; however, it's based off the book version, which isn't as
* robust as jQuery's, so there are probably some differences.
*
* @file events.js
* @module events
*/
/**
* Clean up the listener cache and dispatchers
*
* @param {Element|Object} elem
* Element to clean up
*
* @param {string} type
* Type of event to clean up
*/
function _cleanUpEvents(elem, type) {
if (!DomData.has(elem)) {
return;
}
const data = DomData.get(elem);
// Remove the events of a particular type if there are none left
if (data.handlers[type].length === 0) {
delete data.handlers[type];
// data.handlers[type] = null;
// Setting to null was causing an error with data.handlers
// Remove the meta-handler from the element
if (elem.removeEventListener) {
elem.removeEventListener(type, data.dispatcher, false);
} else if (elem.detachEvent) {
elem.detachEvent('on' + type, data.dispatcher);
}
}
// Remove the events object if there are no types left
if (Object.getOwnPropertyNames(data.handlers).length <= 0) {
delete data.handlers;
delete data.dispatcher;
delete data.disabled;
}
// Finally remove the element data if there is no data left
if (Object.getOwnPropertyNames(data).length === 0) {
DomData.delete(elem);
}
}
/**
* Loops through an array of event types and calls the requested method for each type.
*
* @param {Function} fn
* The event method we want to use.
*
* @param {Element|Object} elem
* Element or object to bind listeners to
*
* @param {string[]} types
* Type of event to bind to.
*
* @param {Function} callback
* Event listener.
*/
function _handleMultipleEvents(fn, elem, types, callback) {
types.forEach(function (type) {
// Call the event method for each one of the types
fn(elem, type, callback);
});
}
/**
* Fix a native event to have standard property values
*
* @param {Object} event
* Event object to fix.
*
* @return {Object}
* Fixed event object.
*/
function fixEvent(event) {
if (event.fixed_) {
return event;
}
function returnTrue() {
return true;
}
function returnFalse() {
return false;
}
// Test if fixing up is needed
// We used to check `!event.stopPropagation` instead of `isPropagationStopped`,
// but native events return true for stopPropagation while lacking
// other expected methods like isPropagationStopped. Seems to be a problem
// with the JavaScript Ninja code. So we're just overriding all events now.
if (!event || !event.isPropagationStopped || !event.isImmediatePropagationStopped) {
const old = event || window$1.event;
event = {};
// Clone the old object so that we can modify the values.
// IE8 Doesn't like when you mess with native event properties
// Firefox returns false for event.hasOwnProperty('type') and other props
// which makes copying more difficult.
// TODO: Probably best to create an allowlist of event props
const deprecatedProps = ['layerX', 'layerY', 'keyLocation', 'path', 'webkitMovementX', 'webkitMovementY', 'mozPressure', 'mozInputSource'];
for (const key in old) {
// Safari 6.0.3 warns you if you try to copy deprecated layerX/Y
// Chrome warns you if you try to copy deprecated keyboardEvent.keyLocation
// and webkitMovementX/Y
// Lighthouse complains if Event.path is copied
if (!deprecatedProps.includes(key)) {
// Chrome 32+ warns if you try to copy deprecated returnValue, but
// we still want to if preventDefault isn't supported (IE8).
if (!(key === 'returnValue' && old.preventDefault)) {
event[key] = old[key];
}
}
}
// The event occurred on this element
if (!event.target) {
event.target = event.srcElement || document$1;
}
// Handle which other element the event is related to
if (!event.relatedTarget) {
event.relatedTarget = event.fromElement === event.target ? event.toElement : event.fromElement;
}
// Stop the default browser action
event.preventDefault = function () {
if (old.preventDefault) {
old.preventDefault();
}
event.returnValue = false;
old.returnValue = false;
event.defaultPrevented = true;
};
event.defaultPrevented = false;
// Stop the event from bubbling
event.stopPropagation = function () {
if (old.stopPropagation) {
old.stopPropagation();
}
event.cancelBubble = true;
old.cancelBubble = true;
event.isPropagationStopped = returnTrue;
};
event.isPropagationStopped = returnFalse;
// Stop the event from bubbling and executing other handlers
event.stopImmediatePropagation = function () {
if (old.stopImmediatePropagation) {
old.stopImmediatePropagation();
}
event.isImmediatePropagationStopped = returnTrue;
event.stopPropagation();
};
event.isImmediatePropagationStopped = returnFalse;
// Handle mouse position
if (event.clientX !== null && event.clientX !== undefined) {
const doc = document$1.documentElement;
const body = document$1.body;
event.pageX = event.clientX + (doc && doc.scrollLeft || body && body.scrollLeft || 0) - (doc && doc.clientLeft || body && body.clientLeft || 0);
event.pageY = event.clientY + (doc && doc.scrollTop || body && body.scrollTop || 0) - (doc && doc.clientTop || body && body.clientTop || 0);
}
// Handle key presses
event.which = event.charCode || event.keyCode;
// Fix button for mouse clicks:
// 0 == left; 1 == middle; 2 == right
if (event.button !== null && event.button !== undefined) {
// The following is disabled because it does not pass videojs-standard
// and... yikes.
/* eslint-disable */
event.button = event.button & 1 ? 0 : event.button & 4 ? 1 : event.button & 2 ? 2 : 0;
/* eslint-enable */
}
}
event.fixed_ = true;
// Returns fixed-up instance
return event;
}
/**
* Whether passive event listeners are supported
*/
let _supportsPassive;
const supportsPassive = function () {
if (typeof _supportsPassive !== 'boolean') {
_supportsPassive = false;
try {
const opts = Object.defineProperty({}, 'passive', {
get() {
_supportsPassive = true;
}
});
window$1.addEventListener('test', null, opts);
window$1.removeEventListener('test', null, opts);
} catch (e) {
// disregard
}
}
return _supportsPassive;
};
/**
* Touch events Chrome expects to be passive
*/
const passiveEvents = ['touchstart', 'touchmove'];
/**
* Add an event listener to element
* It stores the handler function in a separate cache object
* and adds a generic handler to the element's event,
* along with a unique id (guid) to the element.
*
* @param {Element|Object} elem
* Element or object to bind listeners to
*
* @param {string|string[]} type
* Type of event to bind to.
*
* @param {Function} fn
* Event listener.
*/
function on(elem, type, fn) {
if (Array.isArray(type)) {
return _handleMultipleEvents(on, elem, type, fn);
}
if (!DomData.has(elem)) {
DomData.set(elem, {});
}
const data = DomData.get(elem);
// We need a place to store all our handler data
if (!data.handlers) {
data.handlers = {};
}
if (!data.handlers[type]) {
data.handlers[type] = [];
}
if (!fn.guid) {
fn.guid = newGUID();
}
data.handlers[type].push(fn);
if (!data.dispatcher) {
data.disabled = false;
data.dispatcher = function (event, hash) {
if (data.disabled) {
return;
}
event = fixEvent(event);
const handlers = data.handlers[event.type];
if (handlers) {
// Copy handlers so if handlers are added/removed during the process it doesn't throw everything off.
const handlersCopy = handlers.slice(0);
for (let m = 0, n = handlersCopy.length; m < n; m++) {
if (event.isImmediatePropagationStopped()) {
break;
} else {
try {
handlersCopy[m].call(elem, event, hash);
} catch (e) {
log$1.error(e);
}
}
}
}
};
}
if (data.handlers[type].length === 1) {
if (elem.addEventListener) {
let options = false;
if (supportsPassive() && passiveEvents.indexOf(type) > -1) {
options = {
passive: true
};
}
elem.addEventListener(type, data.dispatcher, options);
} else if (elem.attachEvent) {
elem.attachEvent('on' + type, data.dispatcher);
}
}
}
/**
* Removes event listeners from an element
*
* @param {Element|Object} elem
* Object to remove listeners from.
*
* @param {string|string[]} [type]
* Type of listener to remove. Don't include to remove all events from element.
*
* @param {Function} [fn]
* Specific listener to remove. Don't include to remove listeners for an event
* type.
*/
function off(elem, type, fn) {
// Don't want to add a cache object through getElData if not needed
if (!DomData.has(elem)) {
return;
}
const data = DomData.get(elem);
// If no events exist, nothing to unbind
if (!data.handlers) {
return;
}
if (Array.isArray(type)) {
return _handleMultipleEvents(off, elem, type, fn);
}
// Utility function
const removeType = function (el, t) {
data.handlers[t] = [];
_cleanUpEvents(el, t);
};
// Are we removing all bound events?
if (type === undefined) {
for (const t in data.handlers) {
if (Object.prototype.hasOwnProperty.call(data.handlers || {}, t)) {
removeType(elem, t);
}
}
return;
}
const handlers = data.handlers[type];
// If no handlers exist, nothing to unbind
if (!handlers) {
return;
}
// If no listener was provided, remove all listeners for type
if (!fn) {
removeType(elem, type);
return;
}
// We're only removing a single handler
if (fn.guid) {
for (let n = 0; n < handlers.length; n++) {
if (handlers[n].guid === fn.guid) {
handlers.splice(n--, 1);
}
}
}
_cleanUpEvents(elem, type);
}
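// Usage sketch (illustrative only, kept in a comment so it is not executed):
// binding and unbinding a handler with the `on`/`off` helpers above. The element
// lookup and handler body are arbitrary assumptions.
//
//   const button = document$1.querySelector('button');
//   const handleClick = (event) => console.log('clicked', event.type);
//   on(button, 'click', handleClick);   // first handler for a type installs the dispatcher
//   off(button, 'click', handleClick);  // matched and removed via handleClick.guid
//   off(button);                        // or remove every handler this module added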
/**
* Trigger an event for an element
*
* @param {Element|Object} elem
* Element to trigger an event on
*
* @param {EventTarget~Event|string} event
* A string (the type) or an event object with a type attribute
*
* @param {Object} [hash]
* data hash to pass along with the event
*
* @return {boolean}
* Returns the opposite of `defaultPrevented`: `false` if the default
* was prevented by a listener, `true` otherwise.
*/
function trigger(elem, event, hash) {
// Fetches element data and a reference to the parent (for bubbling).
// Don't want to add a data object to cache for every parent,
// so checking hasElData first.
const elemData = DomData.has(elem) ? DomData.get(elem) : {};
const parent = elem.parentNode || elem.ownerDocument;
// type = event.type || event,
// handler;
// If an event name was passed as a string, creates an event out of it
if (typeof event === 'string') {
event = {
type: event,
target: elem
};
} else if (!event.target) {
event.target = elem;
}
// Normalizes the event properties.
event = fixEvent(event);
// If the passed element has a dispatcher, executes the established handlers.
if (elemData.dispatcher) {
elemData.dispatcher.call(elem, event, hash);
}
// Unless explicitly stopped or the event does not bubble (e.g. media events)
// recursively calls this function to bubble the event up the DOM.
if (parent && !event.isPropagationStopped() && event.bubbles === true) {
trigger.call(null, parent, event, hash);
// If at the top of the DOM, triggers the default action unless disabled.
} else if (!parent && !event.defaultPrevented && event.target && event.target[event.type]) {
if (!DomData.has(event.target)) {
DomData.set(event.target, {});
}
const targetData = DomData.get(event.target);
// Checks if the target has a default action for this event.
if (event.target[event.type]) {
// Temporarily disables event dispatching on the target as we have already executed the handler.
targetData.disabled = true;
// Executes the default action.
if (typeof event.target[event.type] === 'function') {
event.target[event.type]();
}
// Re-enables event dispatching.
targetData.disabled = false;
}
}
// Inform the triggerer if the default was prevented by returning false
return !event.defaultPrevented;
}
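// Usage sketch (illustrative only, kept in a comment so it is not executed):
// triggering by event name or with an event object plus a data hash. The element,
// event names and hash contents are arbitrary assumptions.
//
//   const el = document$1.querySelector('button');
//   trigger(el, 'click');
//   const ok = trigger(el, { type: 'tap', bubbles: true }, { x: 10 });
//   // `ok` is false only if some listener called event.preventDefault()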
/**
* Trigger a listener only once for an event.
*
* @param {Element|Object} elem
* Element or object to bind to.
*
* @param {string|string[]} type
* Name/type of event
*
* @param {Event~EventListener} fn
* Event listener function
*/
function one(elem, type, fn) {
if (Array.isArray(type)) {
return _handleMultipleEvents(one, elem, type, fn);
}
const func = function () {
off(elem, type, func);
fn.apply(this, arguments);
};
// copy the guid to the new function so it can be removed using the original function's ID
func.guid = fn.guid = fn.guid || newGUID();
on(elem, type, func);
}
/**
* Trigger a listener only once and then turn it off for all
* configured events
*
* @param {Element|Object} elem
* Element or object to bind to.
*
* @param {string|string[]} type
* Name/type of event
*
* @param {Event~EventListener} fn
* Event listener function
*/
function any(elem, type, fn) {
const func = function () {
off(elem, type, func);
fn.apply(this, arguments);
};
// copy the guid to the new function so it can be removed using the original function's ID
func.guid = fn.guid = fn.guid || newGUID();
// multiple ons, but one off for everything
on(elem, type, func);
}
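// Usage sketch (illustrative only, kept in a comment so it is not executed):
// `one` fires once per listed event type, while `any` fires once in total and is
// then removed from every listed type. Names and handler are arbitrary.
//
//   const el = document$1.querySelector('video');
//   const handler = () => console.log('fired');
//   one(el, ['play', 'pause'], handler);  // may run twice, once per type
//   any(el, ['play', 'pause'], handler);  // runs once, then removed from both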
var Events = /*#__PURE__*/Object.freeze({
__proto__: null,
fixEvent: fixEvent,
on: on,
off: off,
trigger: trigger,
one: one,
any: any
});
/**
* @file fn.js
* @module fn
*/
const UPDATE_REFRESH_INTERVAL = 30;
/**
* A private, internal-only function for changing the context of a function.
*
* It also stores a unique id on the function so it can be easily removed from
* events.
*
* @private
* @function
* @param {*} context
* The object to bind as scope.
*
* @param {Function} fn
* The function to be bound to a scope.
*
* @param {number} [uid]
* An optional unique ID for the function to be set
*
* @return {Function}
* The new function that will be bound into the context given
*/
const bind_ = function (context, fn, uid) {
// Make sure the function has a unique ID
if (!fn.guid) {
fn.guid = newGUID();
}
// Create the new function that changes the context
const bound = fn.bind(context);
// Allow for the ability to individualize this function
// Needed in the case where multiple objects might share the same prototype
// If both items add an event listener with the same function and you then try to remove just one,
// it will remove both because they both have the same guid.
// When using this, you need to use the bind method when you remove the listener as well.
// currently used in text tracks
bound.guid = uid ? uid + '_' + fn.guid : fn.guid;
return bound;
};
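// Usage sketch (illustrative only, kept in a comment so it is not executed):
// because the bound wrapper shares the original function's guid, the listener can
// be removed with either reference. The element, context object and handler are arbitrary.
//
//   const el = document$1.querySelector('button');
//   const ctx = { label: 'example' };
//   const handler = function () { console.log(this.label); };
//   on(el, 'click', bind_(ctx, handler));
//   off(el, 'click', handler);   // matches the bound wrapper via the shared guid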
/**
* Wraps the given function, `fn`, with a new function that only invokes `fn`
* at most once per every `wait` milliseconds.
*
* @function
* @param {Function} fn
* The function to be throttled.
*
* @param {number} wait
* The number of milliseconds by which to throttle.
*
* @return {Function}
*/
const throttle = function (fn, wait) {
let last = window$1.performance.now();
const throttled = function (...args) {
const now = window$1.performance.now();
if (now - last >= wait) {
fn(...args);
last = now;
}
};
return throttled;
};
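// Usage sketch (illustrative only, kept in a comment so it is not executed):
// throttling a scroll handler so it runs at most once per UPDATE_REFRESH_INTERVAL
// milliseconds. The handler body is an arbitrary assumption.
//
//   const onScroll = throttle(() => console.log(window$1.scrollY), UPDATE_REFRESH_INTERVAL);
//   on(window$1, 'scroll', onScroll);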
/**
* Creates a debounced function that delays invoking `func` until after `wait`
* milliseconds have elapsed since the last time the debounced function was
* invoked.
*
* Inspired by lodash and underscore implementations.
*
* @function
* @param {Function} func
* The function to wrap with debounce behavior.
*
* @param {number} wait
* The number of milliseconds to wait after the last invocation.
*
* @param {boolean} [immediate]
* Whether or not to invoke the function on the leading edge of the
* `wait` interval (i.e. immediately on the first call) rather than the trailing edge.
*
* @param {Object} [context=window]
* The "context" in which the debounced function should debounce. For
* example, if this function should be tied to a Video.js player,
* the player can be passed here. Alternatively, defaults to the
* global `window` object.
*
* @return {Function}
* A debounced function.
*/
const debounce = function (func, wait, immediate, context = window$1) {
let timeout;
const cancel = () => {
context.clearTimeout(timeout);
timeout = null;
};
/* eslint-disable consistent-this */
const debounced = function () {
const self = this;
const args = arguments;
let later = function () {
timeout = null;
later = null;
if (!immediate) {
func.apply(self, args);
}
};
if (!timeout && immediate) {
func.apply(self, args);
}
context.clearTimeout(timeout);
timeout = context.setTimeout(later, wait);
};
/* eslint-enable consistent-this */
debounced.cancel = cancel;
return debounced;
};
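// Usage sketch (illustrative only, kept in a comment so it is not executed):
// debouncing a resize handler; the 250ms wait and handler body are arbitrary.
// Calling `cancel()` drops any pending trailing invocation.
//
//   const onResize = debounce(() => console.log('resize settled'), 250);
//   on(window$1, 'resize', onResize);
//   onResize.cancel();   // e.g. on teardown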
var Fn = /*#__PURE__*/Object.freeze({
__proto__: null,
UPDATE_REFRESH_INTERVAL: UPDATE_REFRESH_INTERVAL,
bind_: bind_,
throttle: throttle,
debounce: debounce
});
/**
* @file src/js/event-target.js
*/
let EVENT_MAP;
/**
* `EventTarget` is a class that can have the same API as the DOM `EventTarget`. It
* adds shorthand functions that wrap around lengthy functions. For example:
* the `on` function is a wrapper around `addEventListener`.
*
* @see [EventTarget Spec]{@link https://www.w3.org/TR/DOM-Level-2-Events/events.html#Events-EventTarget}
* @class EventTarget
*/
class EventTarget$2 {
/**
* Adds an `event listener` to an instance of an `EventTarget`. An `event listener` is a
* function that will get called when an event with a certain name gets triggered.
*
* @param {string|string[]} type
* An event name or an array of event names.
*
* @param {Function} fn
* The function to call when an event of the given type is triggered.
*/
on(type, fn) {
// Remove the addEventListener alias before calling Events.on
// so we don't get into an infinite type loop
const ael = this.addEventListener;
this.addEventListener = () => {};
on(this, type, fn);
this.addEventListener = ael;
}
/**
* Removes an `event listener` for a specific event from an instance of `EventTarget`.
* This makes it so that the `event listener` will no longer get called when the
* named event happens.
*
* @param {string|string[]} type
* An event name or an array of event names.
*
* @param {Function} fn
* The function to remove.
*/
off(type, fn) {
off(this, type, fn);
}
/**
* This function will add an `event listener` that gets triggered only once. After the
* first trigger it will get removed. This is like adding an `event listener`
* with {@link EventTarget#on} that calls {@link EventTarget#off} on itself.
*
* @param {string|string[]} type
* An event name or an array of event names.
*
* @param {Function} fn
* The function to be called once for each event name.
*/
one(type, fn) {
// Remove the addEventListener alias before calling Events.one
// so we don't get into an infinite type loop
const ael = this.addEventListener;
this.addEventListener = () => {};
one(this, type, fn);
this.addEventListener = ael;
}
/**
* This function will add an `event listener` that gets triggered only once and is
* removed from all events. This is like adding an array of `event listener`s
* with {@link EventTarget#on} that calls {@link EventTarget#off} on all events the
* first time it is triggered.
*
* @param {string|string[]} type
* An event name or an array of event names.
*
* @param {Function} fn
* The function to be called once for each event name.
*/
any(type, fn) {
// Remove the addEventListener alias before calling Events.any
// so we don't get into an infinite type loop
const ael = this.addEventListener;
this.addEventListener = () => {};
any(this, type, fn);
this.addEventListener = ael;
}
/**
* This function causes an event to happen. This will then cause any `event listeners`
* that are waiting for that event to get called. If there are no `event listeners`
* for an event then nothing will happen.
*
* If the name of the `Event` that is being triggered is in `EventTarget.allowedEvents_`,
* trigger will also call the `on` + `uppercaseEventName` function.
*
* Example:
* 'click' is in `EventTarget.allowedEvents_`, so, trigger will attempt to call
* `onClick` if it exists.
*
* @param {string|EventTarget~Event|Object} event
* The name of the event, an `Event`, or an object with a key of type set to
* an event name.
*/
trigger(event) {
const type = event.type || event;
// deprecation
// In a future version we should default target to `this`
// similar to how we default the target to `elem` in
// `Events.trigger`. Right now the default `target` will be
// `document` due to the `Event.fixEvent` call.
if (typeof event === 'string') {
event = {
type
};
}
event = fixEvent(event);
if (this.allowedEvents_[type] && this['on' + type]) {
this['on' + type](event);
}
trigger(this, event);
}
queueTrigger(event) {
// only set up EVENT_MAP if it'll be used
if (!EVENT_MAP) {
EVENT_MAP = new Map();
}
const type = event.type || event;
let map = EVENT_MAP.get(this);
if (!map) {
map = new Map();
EVENT_MAP.set(this, map);
}
const oldTimeout = map.get(type);
map.delete(type);
window$1.clearTimeout(oldTimeout);
const timeout = window$1.setTimeout(() => {
map.delete(type);
// if we cleared out all timeouts for the current target, delete its map
if (map.size === 0) {
map = null;
EVENT_MAP.delete(this);
}
this.trigger(event);
}, 0);
map.set(type, timeout);
}
}
/**
* A Custom DOM event.
*
* @typedef {CustomEvent} Event
* @see [Properties]{@link https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent}
*/
/**
* All event listeners should follow the following format.
*
* @callback EventListener
* @this {EventTarget}
*
* @param {Event} event
* the event that triggered this function
*
* @param {Object} [hash]
* hash of data sent during the event
*/
/**
* An object containing event names as keys and booleans as values.
*
* > NOTE: If an event name is set to a true value here {@link EventTarget#trigger}
* will have extra functionality. See that function for more information.
*
* @property EventTarget.prototype.allowedEvents_
* @protected
*/
EventTarget$2.prototype.allowedEvents_ = {};
/**
* An alias of {@link EventTarget#on}. Allows `EventTarget` to mimic
* the standard DOM API.
*
* @function
* @see {@link EventTarget#on}
*/
EventTarget$2.prototype.addEventListener = EventTarget$2.prototype.on;
/**
* An alias of {@link EventTarget#off}. Allows `EventTarget` to mimic
* the standard DOM API.
*
* @function
* @see {@link EventTarget#off}
*/
EventTarget$2.prototype.removeEventListener = EventTarget$2.prototype.off;
/**
* An alias of {@link EventTarget#trigger}. Allows `EventTarget` to mimic
* the standard DOM API.
*
* @function
* @see {@link EventTarget#trigger}
*/
EventTarget$2.prototype.dispatchEvent = EventTarget$2.prototype.trigger;
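// Usage sketch (illustrative only, kept in a comment so it is not executed):
// a minimal EventTarget$2 subclass. Because 'customready' is listed in
// allowedEvents_, trigger() would also invoke an `oncustomready` method if one
// were defined. All names below are arbitrary examples.
//
//   class Loader extends EventTarget$2 {}
//   Loader.prototype.allowedEvents_ = { customready: true };
//   const loader = new Loader();
//   loader.on('customready', (e) => console.log('ready', e.type));
//   loader.trigger('customready');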
/**
* @file mixins/evented.js
* @module evented
*/
const objName = obj => {
if (typeof obj.name === 'function') {
return obj.name();
}
if (typeof obj.name === 'string') {
return obj.name;
}
if (obj.name_) {
return obj.name_;
}
if (obj.constructor && obj.constructor.name) {
return obj.constructor.name;
}
return typeof obj;
};
/**
* Returns whether or not an object has had the evented mixin applied.
*
* @param {Object} object
* An object to test.
*
* @return {boolean}
* Whether or not the object appears to be evented.
*/
const isEvented = object => object instanceof EventTarget$2 || !!object.eventBusEl_ && ['on', 'one', 'off', 'trigger'].every(k => typeof object[k] === 'function');
/**
* Adds a callback to run after the evented mixin is applied.
*
* @param {Object} target
* The object to add the callback to.
* @param {Function} callback
* The callback to run.
*/
const addEventedCallback = (target, callback) => {
if (isEvented(target)) {
callback();
} else {
if (!target.eventedCallbacks) {
target.eventedCallbacks = [];
}
target.eventedCallbacks.push(callback);
}
};
/**
* Whether a value is a valid event type - non-empty string or array.
*
* @private
* @param {string|Array} type
* The type value to test.
*
* @return {boolean}
* Whether or not the type is a valid event type.
*/
const isValidEventType = type =>
// The regex here verifies that the `type` contains at least one non-
// whitespace character.
typeof type === 'string' && /\S/.test(type) || Array.isArray(type) && !!type.length;
/**
* Validates a value to determine if it is a valid event target. Throws if not.
*
* @private
* @throws {Error}
* If the target does not appear to be a valid event target.
*
* @param {Object} target
* The object to test.
*
* @param {Object} obj
* The evented object we are validating for
*
* @param {string} fnName
* The name of the evented mixin function that called this.
*/
const validateTarget = (target, obj, fnName) => {
if (!target || !target.nodeName && !isEvented(target)) {
throw new Error(`Invalid target for ${objName(obj)}#${fnName}; must be a DOM node or evented object.`);
}
};
/**
* Validates a value to determine if it is a valid event type. Throws if not.
*
* @private
* @throws {Error}
* If the type does not appear to be a valid event type.
*
* @param {string|Array} type
* The type to test.
*
* @param {Object} obj
* The evented object we are validating for
*
* @param {string} fnName
* The name of the evented mixin function that called this.
*/
const validateEventType = (type, obj, fnName) => {
if (!isValidEventType(type)) {
throw new Error(`Invalid event type for ${objName(obj)}#${fnName}; must be a non-empty string or array.`);
}
};
/**
* Validates a value to determine if it is a valid listener. Throws if not.
*
* @private
* @throws {Error}
* If the listener is not a function.
*
* @param {Function} listener
* The listener to test.
*
* @param {Object} obj
* The evented object we are validating for
*
* @param {string} fnName
* The name of the evented mixin function that called this.
*/
const validateListener = (listener, obj, fnName) => {
if (typeof listener !== 'function') {
throw new Error(`Invalid listener for ${objName(obj)}#${fnName}; must be a function.`);
}
};
/**
* Takes an array of arguments given to `on()` or `one()`, validates them, and
* normalizes them into an object.
*
* @private
* @param {Object} self
* The evented object on which `on()` or `one()` was called. This
* object will be bound as the `this` value for the listener.
*
* @param {Array} args
* An array of arguments passed to `on()` or `one()`.
*
* @param {string} fnName
* The name of the evented mixin function that called this.
*
* @return {Object}
* An object containing useful values for `on()` or `one()` calls.
*/
const normalizeListenArgs = (self, args, fnName) => {
// If the number of arguments is less than 3, the target is always the
// evented object itself.
const isTargetingSelf = args.length < 3 || args[0] === self || args[0] === self.eventBusEl_;
let target;
let type;
let listener;
if (isTargetingSelf) {
target = self.eventBusEl_;
// Deal with cases where we got 3 arguments, but we are still listening to
// the evented object itself.
if (args.length >= 3) {
args.shift();
}
[type, listener] = args;
} else {
// This was `[target, type, listener] = args;` but this block needs more than
// one statement to produce minified output compatible with Chrome 53.
// See https://github.com/videojs/video.js/pull/8810
target = args[0];
type = args[1];
listener = args[2];
}
validateTarget(target, self, fnName);
validateEventType(type, self, fnName);
validateListener(listener, self, fnName);
listener = bind_(self, listener);
return {
isTargetingSelf,
target,
type,
listener
};
};
/**
* Adds the listener to the event type(s) on the target, normalizing for
* the type of target.
*
* @private
* @param {Element|Object} target
* A DOM node or evented object.
*
* @param {string} method
* The event binding method to use ("on" or "one").
*
* @param {string|Array} type
* One or more event type(s).
*
* @param {Function} listener
* A listener function.
*/
const listen = (target, method, type, listener) => {
validateTarget(target, target, method);
if (target.nodeName) {
Events[method](target, type, listener);
} else {
target[method](type, listener);
}
};
/**
* Contains methods that provide event capabilities to an object which is passed
* to {@link module:evented|evented}.
*
* @mixin EventedMixin
*/
const EventedMixin = {
/**
* Add a listener to an event (or events) on this object or another evented
* object.
*
* @param {string|Array|Element|Object} targetOrType
* If this is a string or array, it represents the event type(s)
* that will trigger the listener.
*
* Another evented object can be passed here instead, which will
* cause the listener to listen for events on _that_ object.
*
* In either case, the listener's `this` value will be bound to
* this object.
*
* @param {string|Array|Function} typeOrListener
* If the first argument was a string or array, this should be the
* listener function. Otherwise, this is a string or array of event
* type(s).
*
* @param {Function} [listener]
* If the first argument was another evented object, this will be
* the listener function.
*/
on(...args) {
const {
isTargetingSelf,
target,
type,
listener
} = normalizeListenArgs(this, args, 'on');
listen(target, 'on', type, listener);
// If this object is listening to another evented object.
if (!isTargetingSelf) {
// If this object is disposed, remove the listener.
const removeListenerOnDispose = () => this.off(target, type, listener);
// Use the same function ID as the listener so we can remove it later
// using the ID of the original listener.
removeListenerOnDispose.guid = listener.guid;
// Add a listener to the target's dispose event as well. This ensures
// that if the target is disposed BEFORE this object, we remove the
// removal listener that was just added. Otherwise, we create a memory leak.
const removeRemoverOnTargetDispose = () => this.off('dispose', removeListenerOnDispose);
// Use the same function ID as the listener so we can remove it later
// using the ID of the original listener.
removeRemoverOnTargetDispose.guid = listener.guid;
listen(this, 'on', 'dispose', removeListenerOnDispose);
listen(target, 'on', 'dispose', removeRemoverOnTargetDispose);
}
},
/**
* Add a listener to an event (or events) on this object or another evented
* object. The listener will be called once per event and then removed.
*
* @param {string|Array|Element|Object} targetOrType
* If this is a string or array, it represents the event type(s)
* that will trigger the listener.
*
* Another evented object can be passed here instead, which will
* cause the listener to listen for events on _that_ object.
*
* In either case, the listener's `this` value will be bound to
* this object.
*
* @param {string|Array|Function} typeOrListener
* If the first argument was a string or array, this should be the
* listener function. Otherwise, this is a string or array of event
* type(s).
*
* @param {Function} [listener]
* If the first argument was another evented object, this will be
* the listener function.
*/
one(...args) {
const {
isTargetingSelf,
target,
type,
listener
} = normalizeListenArgs(this, args, 'one');
// Targeting this evented object.
if (isTargetingSelf) {
listen(target, 'one', type, listener);
// Targeting another evented object.
} else {
// TODO: This wrapper is incorrect! It should only
// remove the wrapper for the event type that called it.
// Instead all listeners are removed on the first trigger!
// see https://github.com/videojs/video.js/issues/5962
const wrapper = (...largs) => {
this.off(target, type, wrapper);
listener.apply(null, largs);
};
// Use the same function ID as the listener so we can remove it later
// using the ID of the original listener.
wrapper.guid = listener.guid;
listen(target, 'one', type, wrapper);
}
},
/**
* Add a listener to an event (or events) on this object or another evented
* object. The listener will be called only once, for the first of the configured
* events that is triggered, and then removed.
*
* @param {string|Array|Element|Object} targetOrType
* If this is a string or array, it represents the event type(s)
* that will trigger the listener.
*
* Another evented object can be passed here instead, which will
* cause the listener to listen for events on _that_ object.
*
* In either case, the listener's `this` value will be bound to
* this object.
*
* @param {string|Array|Function} typeOrListener
* If the first argument was a string or array, this should be the
* listener function. Otherwise, this is a string or array of event
* type(s).
*
* @param {Function} [listener]
* If the first argument was another evented object, this will be
* the listener function.
*/
any(...args) {
const {
isTargetingSelf,
target,
type,
listener
} = normalizeListenArgs(this, args, 'any');
// Targeting this evented object.
if (isTargetingSelf) {
listen(target, 'any', type, listener);
// Targeting another evented object.
} else {
const wrapper = (...largs) => {
this.off(target, type, wrapper);
listener.apply(null, largs);
};
// Use the same function ID as the listener so we can remove it later
// using the ID of the original listener.
wrapper.guid = listener.guid;
listen(target, 'any', type, wrapper);
}
},
/**
* Removes listener(s) from event(s) on an evented object.
*
* @param {string|Array|Element|Object} [targetOrType]
* If this is a string or array, it represents the event type(s).
*
* Another evented object can be passed here instead, in which case
* ALL 3 arguments are _required_.
*
* @param {string|Array|Function} [typeOrListener]
* If the first argument was a string or array, this may be the
* listener function. Otherwise, this is a string or array of event
* type(s).
*
* @param {Function} [listener]
* If the first argument was another evented object, this will be
* the listener function; otherwise, _all_ listeners bound to the
* event type(s) will be removed.
*/
off(targetOrType, typeOrListener, listener) {
// Targeting this evented object.
if (!targetOrType || isValidEventType(targetOrType)) {
off(this.eventBusEl_, targetOrType, typeOrListener);
// Targeting another evented object.
} else {
const target = targetOrType;
const type = typeOrListener;
// Fail fast and in a meaningful way!
validateTarget(target, this, 'off');
validateEventType(type, this, 'off');
validateListener(listener, this, 'off');
// Ensure there's at least a guid, even if the function hasn't been used
listener = bind_(this, listener);
// Remove the dispose listener on this evented object, which was given
// the same guid as the event listener in on().
this.off('dispose', listener);
if (target.nodeName) {
off(target, type, listener);
off(target, 'dispose', listener);
} else if (isEvented(target)) {
target.off(type, listener);
target.off('dispose', listener);
}
}
},
/**
* Fire an event on this evented object, causing its listeners to be called.
*
* @param {string|Object} event
* An event type or an object with a type property.
*
* @param {Object} [hash]
* An additional object to pass along to listeners.
*
* @return {boolean}
* Whether or not the default behavior was prevented.
*/
trigger(event, hash) {
validateTarget(this.eventBusEl_, this, 'trigger');
const type = event && typeof event !== 'string' ? event.type : event;
if (!isValidEventType(type)) {
throw new Error(`Invalid event type for ${objName(this)}#trigger; ` + 'must be a non-empty string or object with a type key that has a non-empty value.');
}
return trigger(this.eventBusEl_, event, hash);
}
};
/**
* Applies {@link module:evented~EventedMixin|EventedMixin} to a target object.
*
* @param {Object} target
* The object to which to add event methods.
*
* @param {Object} [options={}]
* Options for customizing the mixin behavior.
*
* @param {string} [options.eventBusKey]
* By default, adds an `eventBusEl_` DOM element to the target object,
* which is used as an event bus. If the target object already has a
* DOM element that should be used, pass its key here.
*
* @return {Object}
* The target object.
*/
function evented(target, options = {}) {
const {
eventBusKey
} = options;
// Set or create the eventBusEl_.
if (eventBusKey) {
if (!target[eventBusKey].nodeName) {
throw new Error(`The eventBusKey "${eventBusKey}" does not refer to an element.`);
}
target.eventBusEl_ = target[eventBusKey];
} else {
target.eventBusEl_ = createEl('span', {
className: 'vjs-event-bus'
});
}
Object.assign(target, EventedMixin);
if (target.eventedCallbacks) {
target.eventedCallbacks.forEach(callback => {
callback();
});
}
// When any evented object is disposed, it removes all its listeners.
target.on('dispose', () => {
target.off();
[target, target.el_, target.eventBusEl_].forEach(function (val) {
if (val && DomData.has(val)) {
DomData.delete(val);
}
});
window$1.setTimeout(() => {
target.eventBusEl_ = null;
}, 0);
});
return target;
}
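// Usage sketch (illustrative only, kept in a comment so it is not executed):
// mixing evented behavior into a plain object. No eventBusKey is passed, so a
// detached span is created as the event bus. Event name and hash are arbitrary.
//
//   const bus = evented({});
//   bus.on('loaded', (e, hash) => console.log(hash.count));
//   bus.trigger({ type: 'loaded' }, { count: 1 });
//   bus.trigger('dispose');   // removes all listeners and clears DomData entries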
/**
* @file mixins/stateful.js
* @module stateful
*/
/**
* Contains methods that provide statefulness to an object which is passed
* to {@link module:stateful}.
*
* @mixin StatefulMixin
*/
const StatefulMixin = {
/**
* A hash containing arbitrary keys and values representing the state of
* the object.
*
* @type {Object}
*/
state: {},
/**
* Set the state of an object by mutating its
* {@link module:stateful~StatefulMixin.state|state} object in place.
*
* @fires module:stateful~StatefulMixin#statechanged
* @param {Object|Function} stateUpdates
* A new set of properties to shallow-merge into the plugin state.
* Can be a plain object or a function returning a plain object.
*
* @return {Object|undefined}
* An object containing changes that occurred. If no changes
* occurred, returns `undefined`.
*/
setState(stateUpdates) {
// Support providing the `stateUpdates` state as a function.
if (typeof stateUpdates === 'function') {
stateUpdates = stateUpdates();
}
let changes;
each(stateUpdates, (value, key) => {
// Record the change if the value is different from what's in the
// current state.
if (this.state[key] !== value) {
changes = changes || {};
changes[key] = {
from: this.state[key],
to: value
};
}
this.state[key] = value;
});
// Only trigger "statechange" if there were changes AND we have a trigger
// function. This allows us to not require that the target object be an
// evented object.
if (changes && isEvented(this)) {
/**
* An event triggered on an object that is both
* {@link module:stateful|stateful} and {@link module:evented|evented}
* indicating that its state has changed.
*
* @event module:stateful~StatefulMixin#statechanged
* @type {Object}
* @property {Object} changes
* A hash containing the properties that were changed and
* the values they were changed `from` and `to`.
*/
this.trigger({
changes,
type: 'statechanged'
});
}
return changes;
}
};
/**
* Applies {@link module:stateful~StatefulMixin|StatefulMixin} to a target
* object.
*
* If the target object is {@link module:evented|evented} and has a
* `handleStateChanged` method, that method will be automatically bound to the
* `statechanged` event on itself.
*
* @param {Object} target
* The object to be made stateful.
*
* @param {Object} [defaultState]
* A default set of properties to populate the newly-stateful object's
* `state` property.
*
* @return {Object}
* Returns the `target`.
*/
function stateful(target, defaultState) {
Object.assign(target, StatefulMixin);
// This happens after the mixing-in because we need to replace the `state`
// added in that step.
target.state = Object.assign({}, target.state, defaultState);
// Auto-bind the `handleStateChanged` method of the target object if it exists.
if (typeof target.handleStateChanged === 'function' && isEvented(target)) {
target.on('statechanged', target.handleStateChanged);
}
return target;
}
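// Usage sketch (illustrative only, kept in a comment so it is not executed):
// combining the evented and stateful mixins so setState() fires "statechanged".
// The default state and listener are arbitrary assumptions.
//
//   const store = stateful(evented({}), { count: 0 });
//   store.on('statechanged', (e) => console.log(e.changes.count.from, e.changes.count.to));
//   store.setState({ count: 1 });   // changes: { count: { from: 0, to: 1 } }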
/**
* @file str.js
* @module to-lower-case
*/
/**
* Lowercase the first letter of a string.
*
* @param {string} string
* String to be lowercased
*
* @return {string}
* The string with a lowercased first letter
*/
const toLowerCase = function (string) {
if (typeof string !== 'string') {
return string;
}
return string.replace(/./, w => w.toLowerCase());
};
/**
* Uppercase the first letter of a string.
*
* @param {string} string
* String to be uppercased
*
* @return {string}
* The string with an uppercased first letter
*/
const toTitleCase$1 = function (string) {
if (typeof string !== 'string') {
return string;
}
return string.replace(/./, w => w.toUpperCase());
};
/**
* Compares the TitleCase versions of the two strings for equality.
*
* @param {string} str1
* The first string to compare
*
* @param {string} str2
* The second string to compare
*
* @return {boolean}
* Whether the TitleCase versions of the strings are equal
*/
const titleCaseEquals = function (str1, str2) {
return toTitleCase$1(str1) === toTitleCase$1(str2);
};
var Str = /*#__PURE__*/Object.freeze({
__proto__: null,
toLowerCase: toLowerCase,
toTitleCase: toTitleCase$1,
titleCaseEquals: titleCaseEquals
});
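// Usage sketch (illustrative only, kept in a comment so it is not executed):
// these helpers only touch the first character, which is enough for
// component-name comparisons.
//
//   toTitleCase$1('controlBar');                  // 'ControlBar'
//   toLowerCase('ControlBar');                    // 'controlBar'
//   titleCaseEquals('controlBar', 'ControlBar');  // true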
/**
* Player Component - Base class for all UI objects
*
* @file component.js
*/
/** @import Player from './player' */
/**
* A callback to be called if and when the component is ready.
* `this` will be the Component instance.
*
* @callback ReadyCallback
* @returns {void}
*/
/**
* Base class for all UI Components.
* Components are UI objects which represent both a javascript object and an element
* in the DOM. They can be children of other components, and can have
* children themselves.
*
* Components can also use methods from {@link EventTarget}
*/
class Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of component options.
*
* @param {Object[]} [options.children]
* An array of children objects to initialize this component with. Children objects have
* a name property that will be used if more than one component of the same type needs to be
* added.
*
* @param {string} [options.className]
* A class or space separated list of classes to add to the component
*
* @param {ReadyCallback} [ready]
* Function that gets called when the `Component` is ready.
*/
constructor(player, options, ready) {
// The component might be the player itself and we can't pass `this` to super
if (!player && this.play) {
this.player_ = player = this; // eslint-disable-line
} else {
this.player_ = player;
}
this.isDisposed_ = false;
// Hold the reference to the parent component via `addChild` method
this.parentComponent_ = null;
// Make a copy of prototype.options_ to protect against overriding defaults
this.options_ = merge$1({}, this.options_);
// Update options with supplied options
options = this.options_ = merge$1(this.options_, options);
// Get ID from options or options element if one is supplied
this.id_ = options.id || options.el && options.el.id;
// If there was no ID from the options, generate one
if (!this.id_) {
// Don't require the player ID function in the case of mock players
const id = player && player.id && player.id() || 'no_player';
this.id_ = `${id}_component_${newGUID()}`;
}
this.name_ = options.name || null;
// Create element if one wasn't provided in options
if (options.el) {
this.el_ = options.el;
} else if (options.createEl !== false) {
this.el_ = this.createEl();
}
if (options.className && this.el_) {
options.className.split(' ').forEach(c => this.addClass(c));
}
// Remove the placeholder event methods. If the component is evented, the
// real methods are added next
['on', 'off', 'one', 'any', 'trigger'].forEach(fn => {
this[fn] = undefined;
});
// if evented is anything except false, we want to mix in evented
if (options.evented !== false) {
// Make this an evented object and use `el_`, if available, as its event bus
evented(this, {
eventBusKey: this.el_ ? 'el_' : null
});
this.handleLanguagechange = this.handleLanguagechange.bind(this);
this.on(this.player_, 'languagechange', this.handleLanguagechange);
}
stateful(this, this.constructor.defaultState);
this.children_ = [];
this.childIndex_ = {};
this.childNameIndex_ = {};
this.setTimeoutIds_ = new Set();
this.setIntervalIds_ = new Set();
this.rafIds_ = new Set();
this.namedRafs_ = new Map();
this.clearingTimersOnDispose_ = false;
// Add any child components in options
if (options.initChildren !== false) {
this.initChildren();
}
// Don't want to trigger ready here or it will go before init is actually
// finished for all children that run this constructor
this.ready(ready);
if (options.reportTouchActivity !== false) {
this.enableTouchActivity();
}
}
// `on`, `off`, `one`, `any` and `trigger` are here so tsc includes them in definitions.
// They are replaced or removed in the constructor
/**
* Adds an `event listener` to an instance of an `EventTarget`. An `event listener` is a
* function that will get called when an event with a certain name gets triggered.
*
* @param {string|string[]} type
* An event name or an array of event names.
*
* @param {Function} fn
* The function to call when an event of the given type is triggered.
*/
/**
* Removes an `event listener` for a specific event from an instance of `EventTarget`.
* This makes it so that the `event listener` will no longer get called when the
* named event happens.
*
* @param {string|string[]} type
* An event name or an array of event names.
*
* @param {Function} [fn]
* The function to remove. If not specified, all listeners managed by Video.js will be removed.
*/
/**
* This function will add an `event listener` that gets triggered only once. After the
* first trigger it will get removed. This is like adding an `event listener`
* with {@link EventTarget#on} that calls {@link EventTarget#off} on itself.
*
* @param {string|string[]} type
* An event name or an array of event names.
*
* @param {Function} fn
* The function to be called once for each event name.
*/
/**
* This function will add an `event listener` that gets triggered only once and is
* removed from all events. This is like adding an array of `event listener`s
* with {@link EventTarget#on} that calls {@link EventTarget#off} on all events the
* first time it is triggered.
*
* @param {string|string[]} type
* An event name or an array of event names.
*
* @param {Function} fn
* The function to be called once for each event name.
*/
/**
* This function causes an event to happen. This will then cause any `event listeners`
* that are waiting for that event to get called. If there are no `event listeners`
* for an event then nothing will happen.
*
* If the name of the `Event` that is being triggered is in `EventTarget.allowedEvents_`,
* trigger will also call the `on` + `uppercaseEventName` function.
*
* Example:
* 'click' is in `EventTarget.allowedEvents_`, so, trigger will attempt to call
* `onClick` if it exists.
*
* @param {string|Event|Object} event
* The name of the event, an `Event`, or an object with a key of type set to
* an event name.
*
* @param {Object} [hash]
* Optionally extra argument to pass through to an event listener
*/
/**
* Dispose of the `Component` and all child components.
*
* @fires Component#dispose
*
* @param {Object} options
* @param {Element} options.originalEl element with which to replace player element
*/
dispose(options = {}) {
// Bail out if the component has already been disposed.
if (this.isDisposed_) {
return;
}
if (this.readyQueue_) {
this.readyQueue_.length = 0;
}
/**
* Triggered when a `Component` is disposed.
*
* @event Component#dispose
* @type {Event}
*
* @property {boolean} [bubbles=false]
* set to false so that the dispose event does not
* bubble up
*/
this.trigger({
type: 'dispose',
bubbles: false
});
this.isDisposed_ = true;
// Dispose all children.
if (this.children_) {
for (let i = this.children_.length - 1; i >= 0; i--) {
if (this.children_[i].dispose) {
this.children_[i].dispose();
}
}
}
// Delete child references
this.children_ = null;
this.childIndex_ = null;
this.childNameIndex_ = null;
this.parentComponent_ = null;
if (this.el_) {
// Remove element from DOM
if (this.el_.parentNode) {
if (options.restoreEl) {
this.el_.parentNode.replaceChild(options.restoreEl, this.el_);
} else {
this.el_.parentNode.removeChild(this.el_);
}
}
this.el_ = null;
}
// remove reference to the player after disposing of the element
this.player_ = null;
}
/**
* Determine whether or not this component has been disposed.
*
* @return {boolean}
* If the component has been disposed, will be `true`. Otherwise, `false`.
*/
isDisposed() {
return Boolean(this.isDisposed_);
}
/**
* Return the {@link Player} that the `Component` has attached to.
*
* @return {Player}
* The player that this `Component` has attached to.
*/
player() {
return this.player_;
}
/**
* Deep merge of options objects with new options.
* > Note: When both `obj` and `options` contain properties whose values are objects,
* the two properties get merged using {@link module:obj.merge}.
*
* @param {Object} obj
* The object that contains new options.
*
* @return {Object}
* A new object of `this.options_` and `obj` merged together.
*/
options(obj) {
if (!obj) {
return this.options_;
}
this.options_ = merge$1(this.options_, obj);
return this.options_;
}
/**
* Get the `Component`s DOM element
*
* @return {Element}
* The DOM element for this `Component`.
*/
el() {
return this.el_;
}
/**
* Create the `Component`s DOM element.
*
* @param {string} [tagName]
* Element's DOM node type. e.g. 'div'
*
* @param {Object} [properties]
* An object of properties that should be set.
*
* @param {Object} [attributes]
* An object of attributes that should be set.
*
* @return {Element}
* The element that gets created.
*/
createEl(tagName, properties, attributes) {
return createEl(tagName, properties, attributes);
}
/**
* Localize a string given the string in english.
*
* If tokens are provided, it'll try and run a simple token replacement on the provided string.
* The tokens it looks for look like `{1}` with the index being 1-indexed into the tokens array.
*
* If a `defaultValue` is provided, it'll use that over `string`,
* if a value isn't found in provided language files.
* This is useful if you want to have a descriptive key for token replacement
* but have a succinct localized string and not require `en.json` to be included.
*
* Currently, it is used for the progress bar timing.
* ```js
* {
* "progress bar timing: currentTime={1} duration={2}": "{1} of {2}"
* }
* ```
* It is then used like so:
* ```js
* this.localize('progress bar timing: currentTime={1} duration={2}',
* [this.player_.currentTime(), this.player_.duration()],
* '{1} of {2}');
* ```
*
* Which outputs something like: `01:23 of 24:56`.
*
*
* @param {string} string
* The string to localize and the key to lookup in the language files.
* @param {string[]} [tokens]
* If the current item has token replacements, provide the tokens here.
* @param {string} [defaultValue]
* Defaults to `string`. Can be a default value to use for token replacement
* if the lookup key is needed to be separate.
*
* @return {string}
* The localized string or, if no localization exists, the English string.
*/
localize(string, tokens, defaultValue = string) {
const code = this.player_.language && this.player_.language();
const languages = this.player_.languages && this.player_.languages();
const language = languages && languages[code];
const primaryCode = code && code.split('-')[0];
const primaryLang = languages && languages[primaryCode];
let localizedString = defaultValue;
if (language && language[string]) {
localizedString = language[string];
} else if (primaryLang && primaryLang[string]) {
localizedString = primaryLang[string];
}
if (tokens) {
localizedString = localizedString.replace(/\{(\d+)\}/g, function (match, index) {
const value = tokens[index - 1];
let ret = value;
if (typeof value === 'undefined') {
ret = match;
}
return ret;
});
}
return localizedString;
}
/**
* Handles language change for the player in components. Should be overridden by sub-components.
*
* @abstract
*/
handleLanguagechange() {}
/**
* Return the `Component`s DOM element. This is where children get inserted.
* This will usually be the same as the element returned in {@link Component#el}.
*
* @return {Element}
* The content element for this `Component`.
*/
contentEl() {
return this.contentEl_ || this.el_;
}
/**
* Get this `Component`s ID
*
* @return {string}
* The id of this `Component`
*/
id() {
return this.id_;
}
/**
* Get the `Component`s name. The name gets used to reference the `Component`
* and is set during registration.
*
* @return {string}
* The name of this `Component`.
*/
name() {
return this.name_;
}
/**
* Get an array of all child components
*
* @return {Array}
* The children
*/
children() {
return this.children_;
}
/**
* Returns the child `Component` with the given `id`.
*
* @param {string} id
* The id of the child `Component` to get.
*
* @return {Component|undefined}
* The child `Component` with the given `id` or undefined.
*/
getChildById(id) {
return this.childIndex_[id];
}
/**
* Returns the child `Component` with the given `name`.
*
* @param {string} name
* The name of the child `Component` to get.
*
* @return {Component|undefined}
* The child `Component` with the given `name` or undefined.
*/
getChild(name) {
if (!name) {
return;
}
return this.childNameIndex_[name];
}
/**
* Returns the descendant `Component` following the given
* descendant `names`. For instance ['foo', 'bar', 'baz'] would
* try to get 'foo' on the current component, 'bar' on the 'foo'
* component and 'baz' on the 'bar' component and return undefined
* if any of those don't exist.
*
* @param {...string[]|...string} names
* The name of the child `Component` to get.
*
* @return {Component|undefined}
* The descendant `Component` following the given descendant
* `names` or undefined.
*/
getDescendant(...names) {
// flatten array argument into the main array
names = names.reduce((acc, n) => acc.concat(n), []);
let currentChild = this;
for (let i = 0; i < names.length; i++) {
currentChild = currentChild.getChild(names[i]);
if (!currentChild || !currentChild.getChild) {
return;
}
}
return currentChild;
}
/**
* Adds an SVG icon element to another element or component.
*
* @param {string} iconName
* The name of the icon. A list of all the icon names can be found at 'sandbox/svg-icons.html'
*
* @param {Element} [el=this.el()]
* Element to add the icon to. Defaults to the current Component's element.
*
* @return {Element}
* The newly created icon element.
*/
setIcon(iconName, el = this.el()) {
// TODO: In v9 of video.js, we will want to remove font icons entirely.
// This means this check, as well as the others throughout the code, and
// the unnecessary CSS for font icons, will need to be removed.
// See https://github.com/videojs/video.js/pull/8260 as to which components
// need updating.
if (!this.player_.options_.experimentalSvgIcons) {
return;
}
const xmlnsURL = 'http://www.w3.org/2000/svg';
// The below creates an element in the format of:
// <span class="vjs-icon-placeholder vjs-svg-icon" aria-hidden="true">
//   <svg viewBox="0 0 512 512"><use href="#vjs-icon-{iconName}"></use></svg>
// </span>
const iconContainer = createEl('span', {
className: 'vjs-icon-placeholder vjs-svg-icon'
}, {
'aria-hidden': 'true'
});
const svgEl = document$1.createElementNS(xmlnsURL, 'svg');
svgEl.setAttributeNS(null, 'viewBox', '0 0 512 512');
const useEl = document$1.createElementNS(xmlnsURL, 'use');
svgEl.appendChild(useEl);
useEl.setAttributeNS(null, 'href', `#vjs-icon-${iconName}`);
iconContainer.appendChild(svgEl);
// Replace a pre-existing icon if one exists.
if (this.iconIsSet_) {
el.replaceChild(iconContainer, el.querySelector('.vjs-icon-placeholder'));
} else {
el.appendChild(iconContainer);
}
this.iconIsSet_ = true;
return iconContainer;
}
/**
* Add a child `Component` inside the current `Component`.
*
* @param {string|Component} child
* The name or instance of a child to add.
*
* @param {Object} [options={}]
* The key/value store of options that will get passed to children of
* the child.
*
* @param {number} [index=this.children_.length]
* The index to attempt to add a child into.
*
*
* @return {Component}
* The `Component` that gets added as a child. When using a string the
* `Component` will get created by this process.
*/
addChild(child, options = {}, index = this.children_.length) {
let component;
let componentName;
// If child is a string, create component with options
if (typeof child === 'string') {
componentName = toTitleCase$1(child);
const componentClassName = options.componentClass || componentName;
// Set name through options
options.name = componentName;
// Create a new object & element for this controls set
// If there's no .player_, this is a player
const ComponentClass = Component$1.getComponent(componentClassName);
if (!ComponentClass) {
throw new Error(`Component ${componentClassName} does not exist`);
}
// data stored directly on the videojs object may be
// misidentified as a component to retain
// backwards-compatibility with 4.x. check to make sure the
// component class can be instantiated.
if (typeof ComponentClass !== 'function') {
return null;
}
component = new ComponentClass(this.player_ || this, options);
// child is a component instance
} else {
component = child;
}
if (component.parentComponent_) {
component.parentComponent_.removeChild(component);
}
this.children_.splice(index, 0, component);
component.parentComponent_ = this;
if (typeof component.id === 'function') {
this.childIndex_[component.id()] = component;
}
// If a name wasn't used to create the component, check if we can use the
// name function of the component
componentName = componentName || component.name && toTitleCase$1(component.name());
if (componentName) {
this.childNameIndex_[componentName] = component;
this.childNameIndex_[toLowerCase(componentName)] = component;
}
// Add the UI object's element to the container div (box)
// Having an element is not required
if (typeof component.el === 'function' && component.el()) {
// If inserting before a component, insert before that component's element
let refNode = null;
if (this.children_[index + 1]) {
// Most children are components, but the video tech is an HTML element
if (this.children_[index + 1].el_) {
refNode = this.children_[index + 1].el_;
} else if (isEl(this.children_[index + 1])) {
refNode = this.children_[index + 1];
}
}
this.contentEl().insertBefore(component.el(), refNode);
}
// Return so it can be stored on parent object if desired.
return component;
}
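// Usage sketch (illustrative only, kept in a comment so it is not executed):
// adding a child by registered component name. 'Button' and the className are
// arbitrary examples, and `parent` stands in for any existing Component.
//
//   const button = parent.addChild('Button', { className: 'vjs-demo-button' });
//   parent.getChild('Button') === button;   // also indexed under 'button'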
/**
* Remove a child `Component` from this `Component`s list of children. Also removes
* the child `Component`s element from this `Component`s element.
*
* @param {Component} component
* The child `Component` to remove.
*/
removeChild(component) {
if (typeof component === 'string') {
component = this.getChild(component);
}
if (!component || !this.children_) {
return;
}
let childFound = false;
for (let i = this.children_.length - 1; i >= 0; i--) {
if (this.children_[i] === component) {
childFound = true;
this.children_.splice(i, 1);
break;
}
}
if (!childFound) {
return;
}
component.parentComponent_ = null;
this.childIndex_[component.id()] = null;
this.childNameIndex_[toTitleCase$1(component.name())] = null;
this.childNameIndex_[toLowerCase(component.name())] = null;
const compEl = component.el();
if (compEl && compEl.parentNode === this.contentEl()) {
this.contentEl().removeChild(component.el());
}
}
/**
* Add and initialize default child `Component`s based upon options.
*/
initChildren() {
const children = this.options_.children;
if (children) {
// `this` is `parent`
const parentOptions = this.options_;
const handleAdd = child => {
const name = child.name;
let opts = child.opts;
// Allow options for children to be set at the parent options
// e.g. videojs(id, { controlBar: false });
// instead of videojs(id, { children: { controlBar: false } });
if (parentOptions[name] !== undefined) {
opts = parentOptions[name];
}
// Allow for disabling default components
// e.g. options['children']['posterImage'] = false
if (opts === false) {
return;
}
// Allow options to be passed as a simple boolean if no configuration
// is necessary.
if (opts === true) {
opts = {};
}
// We also want to pass the original player options
// to each component as well so they don't need to
// reach back into the player for options later.
opts.playerOptions = this.options_.playerOptions;
// Create and add the child component.
// Add a direct reference to the child by name on the parent instance.
// If two of the same component are used, different names should be supplied
// for each
const newChild = this.addChild(name, opts);
if (newChild) {
this[name] = newChild;
}
};
// Allow for an array of children details to be passed in the options
let workingChildren;
const Tech = Component$1.getComponent('Tech');
if (Array.isArray(children)) {
workingChildren = children;
} else {
workingChildren = Object.keys(children);
}
workingChildren
// children that are in this.options_ but also in workingChildren would
// give us extra children we do not want. So, we want to filter them out.
.concat(Object.keys(this.options_).filter(function (child) {
return !workingChildren.some(function (wchild) {
if (typeof wchild === 'string') {
return child === wchild;
}
return child === wchild.name;
});
})).map(child => {
let name;
let opts;
if (typeof child === 'string') {
name = child;
opts = children[name] || this.options_[name] || {};
} else {
name = child.name;
opts = child;
}
return {
name,
opts
};
}).filter(child => {
// we have to make sure that child.name isn't in the techOrder since
// techs are registered as Components but aren't compatible
// See https://github.com/videojs/video.js/issues/2772
const c = Component$1.getComponent(child.opts.componentClass || toTitleCase$1(child.name));
return c && !Tech.isTech(c);
}).forEach(handleAdd);
}
}
/**
* Builds the default DOM class name. Should be overridden by sub-components.
*
* @return {string}
* The DOM class name for this object.
*
* @abstract
*/
buildCSSClass() {
// Child classes can include a function that does:
// return 'CLASS NAME' + this._super();
return '';
}
/**
* Bind a listener to the component's ready state.
* Different from event listeners in that if the ready event has already happened
* it will trigger the function immediately.
*
* @param {ReadyCallback} fn
* Function that gets called when the `Component` is ready.
*
* @param {boolean} [sync=false]
* Call the listener synchronously if the `Component` is already ready;
* otherwise it is called asynchronously via `setTimeout`.
*/
ready(fn, sync = false) {
if (!fn) {
return;
}
if (!this.isReady_) {
this.readyQueue_ = this.readyQueue_ || [];
this.readyQueue_.push(fn);
return;
}
if (sync) {
fn.call(this);
} else {
// Call the function asynchronously by default for consistency
this.setTimeout(fn, 1);
}
}
/**
* Trigger all the ready listeners for this `Component`.
*
* @fires Component#ready
*/
triggerReady() {
this.isReady_ = true;
// Ensure ready is triggered asynchronously
this.setTimeout(function () {
const readyQueue = this.readyQueue_;
// Reset Ready Queue
this.readyQueue_ = [];
if (readyQueue && readyQueue.length > 0) {
readyQueue.forEach(function (fn) {
fn.call(this);
}, this);
}
// Allow for using event listeners also
/**
* Triggered when a `Component` is ready.
*
* @event Component#ready
* @type {Event}
*/
this.trigger('ready');
}, 1);
}
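// Usage sketch (illustrative only, kept in a comment so it is not executed):
// ready() queues callbacks until triggerReady() runs; afterwards new callbacks
// fire asynchronously by default. `comp` stands in for any Component instance.
//
//   comp.ready(function () { console.log('ready', this.id()); });
//   comp.triggerReady();                              // flushes the queue on the next tick
//   comp.ready(() => console.log('already ready'));   // scheduled via setTimeout(fn, 1)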
/**
* Find a single DOM element matching a `selector`. This can be within the `Component`s
* `contentEl()` or another custom context.
*
* @param {string} selector
* A valid CSS selector, which will be passed to `querySelector`.
*
* @param {Element|string} [context=this.contentEl()]
* A DOM element within which to query. Can also be a selector string in
* which case the first matching element will get used as context. If
* missing `this.contentEl()` gets used. If `this.contentEl()` returns
* nothing it falls back to `document`.
*
* @return {Element|null}
* the dom element that was found, or null
*
* @see [Information on CSS Selectors](https://developer.mozilla.org/en-US/docs/Web/Guide/CSS/Getting_Started/Selectors)
*/
$(selector, context) {
return $(selector, context || this.contentEl());
}
/**
* Finds all DOM elements matching a `selector`. This can be within the `Component`s
* `contentEl()` or another custom context.
*
* @param {string} selector
* A valid CSS selector, which will be passed to `querySelectorAll`.
*
* @param {Element|string} [context=this.contentEl()]
* A DOM element within which to query. Can also be a selector string in
* which case the first matching element will get used as context. If
* missing `this.contentEl()` gets used. If `this.contentEl()` returns
* nothing it falls back to `document`.
*
* @return {NodeList}
* a list of dom elements that were found
*
* @see [Information on CSS Selectors](https://developer.mozilla.org/en-US/docs/Web/Guide/CSS/Getting_Started/Selectors)
*/
$$(selector, context) {
return $$(selector, context || this.contentEl());
}
/**
* Check if a component's element has a CSS class name.
*
* @param {string} classToCheck
* CSS class name to check.
*
* @return {boolean}
* - True if the `Component` has the class.
* - False if the `Component` does not have the class.
*/
hasClass(classToCheck) {
return hasClass(this.el_, classToCheck);
}
/**
* Add a CSS class name to the `Component`s element.
*
* @param {...string} classesToAdd
* One or more CSS class names to add.
*/
addClass(...classesToAdd) {
addClass(this.el_, ...classesToAdd);
}
/**
* Remove a CSS class name from the `Component`s element.
*
* @param {...string} classesToRemove
* One or more CSS class names to remove.
*/
removeClass(...classesToRemove) {
removeClass(this.el_, ...classesToRemove);
}
/**
* Add or remove a CSS class name from the component's element.
* - `classToToggle` gets added when {@link Component#hasClass} would return false.
* - `classToToggle` gets removed when {@link Component#hasClass} would return true.
*
* @param {string} classToToggle
* The class to add or remove. Passed to DOMTokenList's toggle()
*
* @param {boolean|Dom.PredicateCallback} [predicate]
* A boolean or function that returns a boolean. Passed to DOMTokenList's toggle().
*/
toggleClass(classToToggle, predicate) {
toggleClass(this.el_, classToToggle, predicate);
}
/**
* Show the `Component`s element if it is hidden by removing the
* 'vjs-hidden' class name from it.
*/
show() {
this.removeClass('vjs-hidden');
}
/**
* Hide the `Component`s element if it is currently showing by adding the
* 'vjs-hidden' class name to it.
*/
hide() {
this.addClass('vjs-hidden');
}
/**
* Lock a `Component`s element in its visible state by adding the 'vjs-lock-showing'
* class name to it. Used during fadeIn/fadeOut.
*
* @private
*/
lockShowing() {
this.addClass('vjs-lock-showing');
}
/**
* Unlock a `Component`s element from its visible state by removing the 'vjs-lock-showing'
* class name from it. Used during fadeIn/fadeOut.
*
* @private
*/
unlockShowing() {
this.removeClass('vjs-lock-showing');
}
/**
* Get the value of an attribute on the `Component`s element.
*
* @param {string} attribute
* Name of the attribute to get the value from.
*
* @return {string|null}
* - The value of the attribute that was asked for.
* - Can be an empty string on some browsers if the attribute does not exist
* or has no value
* - Most browsers will return null if the attribute does not exist or has
* no value.
*
* @see [DOM API]{@link https://developer.mozilla.org/en-US/docs/Web/API/Element/getAttribute}
*/
getAttribute(attribute) {
return getAttribute(this.el_, attribute);
}
/**
* Set the value of an attribute on the `Component`'s element
*
* @param {string} attribute
* Name of the attribute to set.
*
* @param {string} value
* Value to set the attribute to.
*
* @see [DOM API]{@link https://developer.mozilla.org/en-US/docs/Web/API/Element/setAttribute}
*/
setAttribute(attribute, value) {
setAttribute(this.el_, attribute, value);
}
/**
* Remove an attribute from the `Component`s element.
*
* @param {string} attribute
* Name of the attribute to remove.
*
* @see [DOM API]{@link https://developer.mozilla.org/en-US/docs/Web/API/Element/removeAttribute}
*/
removeAttribute(attribute) {
removeAttribute(this.el_, attribute);
}
/**
* Get or set the width of the component based upon the CSS styles.
* See {@link Component#dimension} for more detailed information.
*
* @param {number|string} [num]
* The width that you want to set postfixed with '%', 'px' or nothing.
*
* @param {boolean} [skipListeners]
* Skip the componentresize event trigger
*
* @return {number|undefined}
* The width when getting, zero if there is no width
*/
width(num, skipListeners) {
return this.dimension('width', num, skipListeners);
}
/**
* Get or set the height of the component based upon the CSS styles.
* See {@link Component#dimension} for more detailed information.
*
* @param {number|string} [num]
* The height that you want to set postfixed with '%', 'px' or nothing.
*
* @param {boolean} [skipListeners]
* Skip the componentresize event trigger
*
* @return {number|undefined}
* The height when getting, zero if there is no height
*/
height(num, skipListeners) {
return this.dimension('height', num, skipListeners);
}
/**
* Set both the width and height of the `Component` element at the same time.
*
* @param {number|string} width
* Width to set the `Component`s element to.
*
* @param {number|string} height
* Height to set the `Component`s element to.
*/
dimensions(width, height) {
// Skip componentresize listeners on width for optimization
this.width(width, true);
this.height(height);
}
/**
* Get or set width or height of the `Component` element. This is the shared code
* for the {@link Component#width} and {@link Component#height}.
*
* Things to know:
* - If the width or height is a number this will return the number postfixed with 'px'.
* - If the width/height is a percent this will return the percent postfixed with '%'
* - Hidden elements have a width of 0 with `window.getComputedStyle`. This function
* defaults to the `Component`s `style.width` and falls back to `window.getComputedStyle`.
* See [this]{@link http://www.foliotek.com/devblog/getting-the-width-of-a-hidden-element-with-jquery-using-width/}
* for more information
* - If you want the computed style of the component, use {@link Component#currentWidth}
* and {@link Component#currentHeight}.
*
* @fires Component#componentresize
*
* @param {string} widthOrHeight
* 'width' or 'height'
*
* @param {number|string} [num]
* New dimension
*
* @param {boolean} [skipListeners]
* Skip componentresize event trigger
*
* @return {number|undefined}
* The dimension when getting or 0 if unset
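*
* A sketch of the getter/setter behavior (`myComponent` is an illustrative
* Component instance):
*
* @example
* myComponent.width(640);    // sets el.style.width to '640px'
* myComponent.width();       // -> 640, parsed back from the 'px' style value
* myComponent.width('100%'); // sets el.style.width to '100%'
* myComponent.width();       // '%' styles fall back to offsetWidth, so this
*                            // returns the rendered width in pixels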
*/
dimension(widthOrHeight, num, skipListeners) {
if (num !== undefined) {
// Set to zero if null or literally NaN (NaN !== NaN)
if (num === null || num !== num) {
num = 0;
}
// Check if using css width/height (% or px) and adjust
if (('' + num).indexOf('%') !== -1 || ('' + num).indexOf('px') !== -1) {
this.el_.style[widthOrHeight] = num;
} else if (num === 'auto') {
this.el_.style[widthOrHeight] = '';
} else {
this.el_.style[widthOrHeight] = num + 'px';
}
// skipListeners allows us to avoid triggering the resize event when setting both width and height
if (!skipListeners) {
/**
* Triggered when a component is resized.
*
* @event Component#componentresize
* @type {Event}
*/
this.trigger('componentresize');
}
return;
}
// Not setting a value, so getting it
// Make sure element exists
if (!this.el_) {
return 0;
}
// Get dimension value from style
const val = this.el_.style[widthOrHeight];
const pxIndex = val.indexOf('px');
if (pxIndex !== -1) {
// Return the pixel value with no 'px'
return parseInt(val.slice(0, pxIndex), 10);
}
// No px so using % or no style was set, so falling back to offsetWidth/height
// If component has display:none, offset will return 0
// TODO: handle display:none and no dimension style using px
return parseInt(this.el_['offset' + toTitleCase$1(widthOrHeight)], 10);
}
/**
* Get the computed width or the height of the component's element.
*
* Uses `window.getComputedStyle`.
*
* @param {string} widthOrHeight
* A string containing 'width' or 'height'. Whichever one you want to get.
*
* @return {number}
* The dimension that gets asked for or 0 if nothing was set
* for that dimension.
*/
currentDimension(widthOrHeight) {
let computedWidthOrHeight = 0;
if (widthOrHeight !== 'width' && widthOrHeight !== 'height') {
throw new Error('currentDimension only accepts width or height value');
}
computedWidthOrHeight = computedStyle(this.el_, widthOrHeight);
// remove 'px' from the value and parse as a float
computedWidthOrHeight = parseFloat(computedWidthOrHeight);
// if the computed value is still 0, it's possible that the browser is lying
// and we want to check the offset values.
// This code also runs wherever getComputedStyle doesn't exist.
if (computedWidthOrHeight === 0 || isNaN(computedWidthOrHeight)) {
const rule = `offset${toTitleCase$1(widthOrHeight)}`;
computedWidthOrHeight = this.el_[rule];
}
return computedWidthOrHeight;
}
/**
* An object that contains width and height values of the `Component`s
* computed style. Uses `window.getComputedStyle`.
*
* @typedef {Object} Component~DimensionObject
*
* @property {number} width
* The width of the `Component`s computed style.
*
* @property {number} height
* The height of the `Component`s computed style.
*/
/**
* Get an object that contains computed width and height values of the
* component's element.
*
* Uses `window.getComputedStyle`.
*
* @return {Component~DimensionObject}
* The computed dimensions of the component's element.
*/
currentDimensions() {
return {
width: this.currentDimension('width'),
height: this.currentDimension('height')
};
}
/**
* Get the computed width of the component's element.
*
* Uses `window.getComputedStyle`.
*
* @return {number}
* The computed width of the component's element.
*/
currentWidth() {
return this.currentDimension('width');
}
/**
* Get the computed height of the component's element.
*
* Uses `window.getComputedStyle`.
*
* @return {number}
* The computed height of the component's element.
*/
currentHeight() {
return this.currentDimension('height');
}
/**
* Retrieves the position and size information of the component's element.
*
* @return {Object} An object with `boundingClientRect` and `center` properties.
* - `boundingClientRect`: An object with properties `x`, `y`, `width`,
* `height`, `top`, `right`, `bottom`, and `left`, representing
* the bounding rectangle of the element.
* - `center`: An object with properties `x` and `y`, representing
* the center point of the element. `width` and `height` are set to 0.
*/
getPositions() {
const rect = this.el_.getBoundingClientRect();
// Creating objects that mirror DOMRectReadOnly for boundingClientRect and center
const boundingClientRect = {
x: rect.x,
y: rect.y,
width: rect.width,
height: rect.height,
top: rect.top,
right: rect.right,
bottom: rect.bottom,
left: rect.left
};
// Calculating the center position
const center = {
x: rect.left + rect.width / 2,
y: rect.top + rect.height / 2,
width: 0,
height: 0,
top: rect.top + rect.height / 2,
right: rect.left + rect.width / 2,
bottom: rect.top + rect.height / 2,
left: rect.left + rect.width / 2
};
return {
boundingClientRect,
center
};
}
/**
* Set the focus to this component
*/
focus() {
this.el_.focus();
}
/**
* Remove the focus from this component
*/
blur() {
this.el_.blur();
}
/**
* When this Component receives a `keydown` event which it does not process,
* it passes the event to the Player for handling.
*
* @param {KeyboardEvent} event
* The `keydown` event that caused this function to be called.
*/
handleKeyDown(event) {
if (this.player_) {
// We only stop propagation here because we want unhandled events to fall
// back to the browser. Exclude Tab for focus trapping, exclude also when spatialNavigation is enabled.
if (event.key !== 'Tab' && !(this.player_.options_.playerOptions.spatialNavigation && this.player_.options_.playerOptions.spatialNavigation.enabled)) {
event.stopPropagation();
}
this.player_.handleKeyDown(event);
}
}
/**
* Many components used to have a `handleKeyPress` method, which was poorly
* named because it listened to a `keydown` event. This method name now
* delegates to `handleKeyDown`. This means anyone calling `handleKeyPress`
* will not see their method calls stop working.
*
* @param {KeyboardEvent} event
* The event that caused this function to be called.
*/
handleKeyPress(event) {
this.handleKeyDown(event);
}
/**
* Emit 'tap' events when touch event support gets detected. This gets used to
* support toggling the controls through a tap on the video. Tap events are only
* enabled on demand, because enabling them on every sub-component would add
* extra overhead.
*
* @protected
* @fires Component#tap
* @listens Component#touchstart
* @listens Component#touchmove
* @listens Component#touchleave
* @listens Component#touchcancel
* @listens Component#touchend
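*
* A sketch of opting a component into tap events (`myComponent` is
* illustrative):
*
* @example
* myComponent.emitTapEvents();
* myComponent.on('tap', function() {
*   // react to a quick touch that did not move or hold
* });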
*/
emitTapEvents() {
// Track the start time so we can determine how long the touch lasted
let touchStart = 0;
let firstTouch = null;
// Maximum movement allowed during a touch event to still be considered a tap
// Other popular libs use anywhere from 2 (hammer.js) to 15,
// so 10 seems like a nice, round number.
const tapMovementThreshold = 10;
// The maximum length a touch can be while still being considered a tap
const touchTimeThreshold = 200;
let couldBeTap;
this.on('touchstart', function (event) {
// If more than one finger, don't consider treating this as a click
if (event.touches.length === 1) {
// Copy pageX/pageY from the object
firstTouch = {
pageX: event.touches[0].pageX,
pageY: event.touches[0].pageY
};
// Record start time so we can detect a tap vs. "touch and hold"
touchStart = window$1.performance.now();
// Reset couldBeTap tracking
couldBeTap = true;
}
});
this.on('touchmove', function (event) {
// If more than one finger, don't consider treating this as a click
if (event.touches.length > 1) {
couldBeTap = false;
} else if (firstTouch) {
// Some devices will throw touchmoves for all but the slightest of taps.
// So, if we moved only a small distance, this could still be a tap
const xdiff = event.touches[0].pageX - firstTouch.pageX;
const ydiff = event.touches[0].pageY - firstTouch.pageY;
const touchDistance = Math.sqrt(xdiff * xdiff + ydiff * ydiff);
if (touchDistance > tapMovementThreshold) {
couldBeTap = false;
}
}
});
const noTap = function () {
couldBeTap = false;
};
// TODO: Listen to the original target. http://youtu.be/DujfpXOKUp8?t=13m8s
this.on('touchleave', noTap);
this.on('touchcancel', noTap);
// When the touch ends, measure how long it took and trigger the appropriate
// event
this.on('touchend', function (event) {
firstTouch = null;
// Proceed only if the touchmove/leave/cancel event didn't happen
if (couldBeTap === true) {
// Measure how long the touch lasted
const touchTime = window$1.performance.now() - touchStart;
// Make sure the touch was less than the threshold to be considered a tap
if (touchTime < touchTimeThreshold) {
// Don't let browser turn this into a click
event.preventDefault();
/**
* Triggered when a `Component` is tapped.
*
* @event Component#tap
* @type {MouseEvent}
*/
this.trigger('tap');
// It may be good to copy the touchend event object and change the
// type to tap, if the other event properties aren't exact after
// Events.fixEvent runs (e.g. event.target)
}
}
});
}
/**
* This function reports user activity whenever touch events happen. This can get
* turned off by any sub-components that want touch events to act another way.
*
* Report user touch activity when touch events occur. User activity gets used to
* determine when controls should show/hide. It is simple when it comes to mouse
* events, because any mouse event should show the controls. So we capture mouse
* events that bubble up to the player and report activity when that happens.
* With touch events it isn't as easy, because a `touchstart` and `touchend` toggle the
* player controls. So touch events can't help us at the player level either.
*
* User activity gets checked asynchronously. So what could happen is a tap event
* on the video turns the controls off. Then the `touchend` event bubbles up to
* the player. Which, if it reported user activity, would turn the controls right
* back on. We also don't want to completely block touch events from bubbling up.
* Furthermore a `touchmove` event and anything other than a tap, should not turn
* controls back on.
*
* @listens Component#touchstart
* @listens Component#touchmove
* @listens Component#touchend
* @listens Component#touchcancel
*/
enableTouchActivity() {
// Don't continue if the root player doesn't support reporting user activity
if (!this.player() || !this.player().reportUserActivity) {
return;
}
// listener for reporting that the user is active
const report = bind_(this.player(), this.player().reportUserActivity);
let touchHolding;
this.on('touchstart', function () {
report();
// For as long as they are touching the device or have their mouse down,
// we consider them active even if they're not moving their finger or mouse.
// So we want to continue to update that they are active
this.clearInterval(touchHolding);
// report at the same interval as activityCheck
touchHolding = this.setInterval(report, 250);
});
const touchEnd = function (event) {
report();
// stop the interval that maintains activity if the touch is holding
this.clearInterval(touchHolding);
};
this.on('touchmove', report);
this.on('touchend', touchEnd);
this.on('touchcancel', touchEnd);
}
/**
* A callback that has no parameters and is bound into `Component`s context.
*
* @callback Component~GenericCallback
* @this Component
*/
/**
* Creates a function that runs after an `x` millisecond timeout. This function is a
* wrapper around `window.setTimeout`. There are a few reasons to use this one
* instead though:
* 1. It gets cleared via {@link Component#clearTimeout} when
* {@link Component#dispose} gets called.
* 2. The function callback will get turned into a {@link Component~GenericCallback}
*
* > Note: You can't use `window.clearTimeout` on the id returned by this function. This
* will cause its dispose listener not to get cleaned up! Please use
* {@link Component#clearTimeout} or {@link Component#dispose} instead.
*
* @param {Component~GenericCallback} fn
* The function that will be run after `timeout`.
*
* @param {number} timeout
* Timeout in milliseconds to delay before executing the specified function.
*
* @return {number}
* Returns a timeout ID that gets used to identify the timeout. It can also
* get used in {@link Component#clearTimeout} to clear the timeout that
* was set.
*
* @listens Component#dispose
* @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/setTimeout}
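*
* A usage sketch (`myComponent` is an illustrative Component instance):
*
* @example
* const timeoutId = myComponent.setTimeout(function() {
*   // Bound to the component; cleared automatically on dispose.
*   this.addClass('vjs-example-delayed');
* }, 1000);
* // Clear through the component rather than window.clearTimeout:
* myComponent.clearTimeout(timeoutId);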
*/
setTimeout(fn, timeout) {
// declare as variables so they are properly available in timeout function
// eslint-disable-next-line
var timeoutId;
fn = bind_(this, fn);
this.clearTimersOnDispose_();
timeoutId = window$1.setTimeout(() => {
if (this.setTimeoutIds_.has(timeoutId)) {
this.setTimeoutIds_.delete(timeoutId);
}
fn();
}, timeout);
this.setTimeoutIds_.add(timeoutId);
return timeoutId;
}
/**
* Clears a timeout that gets created via `window.setTimeout` or
* {@link Component#setTimeout}. If you set a timeout via {@link Component#setTimeout}
* use this function instead of `window.clearTimeout`. If you don't, your dispose
* listener will not get cleaned up until {@link Component#dispose}!
*
* @param {number} timeoutId
* The id of the timeout to clear. The return value of
* {@link Component#setTimeout} or `window.setTimeout`.
*
* @return {number}
* Returns the timeout id that was cleared.
*
* @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/clearTimeout}
*/
clearTimeout(timeoutId) {
if (this.setTimeoutIds_.has(timeoutId)) {
this.setTimeoutIds_.delete(timeoutId);
window$1.clearTimeout(timeoutId);
}
return timeoutId;
}
/**
* Creates a function that gets run every `x` milliseconds. This function is a wrapper
* around `window.setInterval`. There are a few reasons to use this one instead though.
* 1. It gets cleared via {@link Component#clearInterval} when
* {@link Component#dispose} gets called.
* 2. The function callback will be a {@link Component~GenericCallback}
*
* @param {Component~GenericCallback} fn
* The function to run every `x` milliseconds.
*
* @param {number} interval
* Execute the specified function every `x` milliseconds.
*
* @return {number}
* Returns an id that can be used to identify the interval. It can also be used in
* {@link Component#clearInterval} to clear the interval.
*
* @listens Component#dispose
* @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/setInterval}
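*
* A usage sketch under the same assumptions as `setTimeout` above:
*
* @example
* const intervalId = myComponent.setInterval(function() {
*   // Runs every 250 milliseconds until cleared or the component is disposed.
* }, 250);
* myComponent.clearInterval(intervalId);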
*/
setInterval(fn, interval) {
fn = bind_(this, fn);
this.clearTimersOnDispose_();
const intervalId = window$1.setInterval(fn, interval);
this.setIntervalIds_.add(intervalId);
return intervalId;
}
/**
* Clears an interval that gets created via `window.setInterval` or
* {@link Component#setInterval}. If you set an interval via {@link Component#setInterval}
* use this function instead of `window.clearInterval`. If you don't, your dispose
* listener will not get cleaned up until {@link Component#dispose}!
*
* @param {number} intervalId
* The id of the interval to clear. The return value of
* {@link Component#setInterval} or `window.setInterval`.
*
* @return {number}
* Returns the interval id that was cleared.
*
* @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/clearInterval}
*/
clearInterval(intervalId) {
if (this.setIntervalIds_.has(intervalId)) {
this.setIntervalIds_.delete(intervalId);
window$1.clearInterval(intervalId);
}
return intervalId;
}
/**
* Queues up a callback to be passed to requestAnimationFrame (rAF), but
* with a few extra bonuses:
*
* - Supports browsers that do not support rAF by falling back to
* {@link Component#setTimeout}.
*
* - The callback is turned into a {@link Component~GenericCallback} (i.e.
* bound to the component).
*
* - Automatic cancellation of the rAF callback is handled if the component
* is disposed before it is called.
*
* @param {Component~GenericCallback} fn
* A function that will be bound to this component and executed just
* before the browser's next repaint.
*
* @return {number}
* Returns an rAF ID that gets used to identify the timeout. It can
* also be used in {@link Component#cancelAnimationFrame} to cancel
* the animation frame callback.
*
* @listens Component#dispose
* @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/window/requestAnimationFrame}
*/
requestAnimationFrame(fn) {
this.clearTimersOnDispose_();
// declare as variables so they are properly available in rAF function
// eslint-disable-next-line
var id;
fn = bind_(this, fn);
id = window$1.requestAnimationFrame(() => {
if (this.rafIds_.has(id)) {
this.rafIds_.delete(id);
}
fn();
});
this.rafIds_.add(id);
return id;
}
/**
* Request an animation frame, but only one callback per name can be
* queued at a time. Requesting again with the same name cancels the
* still-pending callback before queueing the new one.
*
* @param {string} name
* The name to give this requestAnimationFrame
*
* @param {Component~GenericCallback} fn
* A function that will be bound to this component and executed just
* before the browser's next repaint.
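*
* A sketch; the name 'example-update' is illustrative:
*
* @example
* myComponent.requestNamedAnimationFrame('example-update', function() {
*   // Only one 'example-update' callback is pending at any time.
* });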
*/
requestNamedAnimationFrame(name, fn) {
if (this.namedRafs_.has(name)) {
this.cancelNamedAnimationFrame(name);
}
this.clearTimersOnDispose_();
fn = bind_(this, fn);
const id = this.requestAnimationFrame(() => {
fn();
if (this.namedRafs_.has(name)) {
this.namedRafs_.delete(name);
}
});
this.namedRafs_.set(name, id);
return name;
}
/**
* Cancels a current named animation frame if it exists.
*
* @param {string} name
* The name of the requestAnimationFrame to cancel.
*/
cancelNamedAnimationFrame(name) {
if (!this.namedRafs_.has(name)) {
return;
}
this.cancelAnimationFrame(this.namedRafs_.get(name));
this.namedRafs_.delete(name);
}
/**
* Cancels a queued callback passed to {@link Component#requestAnimationFrame}
* (rAF).
*
* If you queue an rAF callback via {@link Component#requestAnimationFrame},
* use this function instead of `window.cancelAnimationFrame`. If you don't,
* your dispose listener will not get cleaned up until {@link Component#dispose}!
*
* @param {number} id
* The rAF ID to clear. The return value of {@link Component#requestAnimationFrame}.
*
* @return {number}
* Returns the rAF ID that was cleared.
*
* @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/window/cancelAnimationFrame}
*/
cancelAnimationFrame(id) {
if (this.rafIds_.has(id)) {
this.rafIds_.delete(id);
window$1.cancelAnimationFrame(id);
}
return id;
}
/**
* A function to setup `requestAnimationFrame`, `setTimeout`,
* and `setInterval`, clearing on dispose.
*
* > Previously each timer added and removed dispose listeners on its own.
* For better performance it was decided to batch them all, and use `Set`s
* to track outstanding timer ids.
*
* @private
*/
clearTimersOnDispose_() {
if (this.clearingTimersOnDispose_) {
return;
}
this.clearingTimersOnDispose_ = true;
this.one('dispose', () => {
[['namedRafs_', 'cancelNamedAnimationFrame'], ['rafIds_', 'cancelAnimationFrame'], ['setTimeoutIds_', 'clearTimeout'], ['setIntervalIds_', 'clearInterval']].forEach(([idName, cancelName]) => {
// For a `Set`, the key is the value again (forEach((val, val) => ...)),
// but for a `Map` we want to use the key.
this[idName].forEach((val, key) => this[cancelName](key));
});
this.clearingTimersOnDispose_ = false;
});
}
/**
* Decide whether this component's element is actually disabled or not.
*
* @return {boolean}
* `true` if the element is disabled, `false` otherwise.
*
* @see {@link https://html.spec.whatwg.org/multipage/semantics-other.html#concept-element-disabled}
*/
getIsDisabled() {
return Boolean(this.el_.disabled);
}
/**
* Decide whether this component's element is expressly inert or not.
*
* @see {@link https://html.spec.whatwg.org/multipage/interaction.html#expressly-inert}
* @return {boolean}
* `true` if the element is expressly inert, `false` otherwise.
*/
getIsExpresslyInert() {
return this.el_.inert && !this.el_.ownerDocument.documentElement.inert;
}
/**
* Determine whether or not this component can be considered as a focusable component.
*
* @param {HTMLElement} el - The HTML element representing the component.
* @return {boolean}
* If the component can be focused, will be `true`. Otherwise, `false`.
*/
getIsFocusable(el) {
const element = el || this.el_;
return element.tabIndex >= 0 && !(this.getIsDisabled() || this.getIsExpresslyInert());
}
/**
* Determine whether or not this component is currently visible/enabled/etc...
*
* @param {HTMLElement} el - The HTML element representing the component.
* @return {boolean}
* If the component is currently visible & enabled, will be `true`. Otherwise, `false`.
*/
getIsAvailableToBeFocused(el) {
/**
* Decide whether the element's computed style indicates that it is visible.
*
* @function isVisibleStyleProperty
* @param element {Node}
* @return {boolean}
*/
function isVisibleStyleProperty(element) {
const elementStyle = window$1.getComputedStyle(element, null);
const thisVisibility = elementStyle.getPropertyValue('visibility');
const thisDisplay = elementStyle.getPropertyValue('display');
const invisibleStyle = ['hidden', 'collapse'];
return thisDisplay !== 'none' && !invisibleStyle.includes(thisVisibility);
}
/**
* Decide whether the element is being rendered or not.
* 1. If an element has the style as "visibility: hidden | collapse" or "display: none", it is not being rendered.
* 2. If an element has the style as "opacity: 0", it is not being rendered (that is, it is invisible).
* 3. If width and height of an element are explicitly set to 0, it is not being rendered.
* 4. If a parent element is hidden, an element itself is not being rendered.
* (CSS visibility property and display property are inherited.)
*
* @see {@link https://html.spec.whatwg.org/multipage/rendering.html#being-rendered}
* @function isBeingRendered
* @param element {Node}
* @return {boolean}
*/
function isBeingRendered(element) {
if (!isVisibleStyleProperty(element.parentElement)) {
return false;
}
if (!isVisibleStyleProperty(element) || element.style.opacity === '0' || window$1.getComputedStyle(element).height === '0px' || window$1.getComputedStyle(element).width === '0px') {
return false;
}
return true;
}
/**
* Determine if the element is visible for the user or not.
* 1. If the sum of an element's offsetWidth, offsetHeight and its bounding rect's width and height is 0, it is not visible.
* 2. If elementCenter.x is less than 0, it is not visible.
* 3. If elementCenter.x is greater than the document's width, it is not visible.
* 4. If elementCenter.y is less than 0, it is not visible.
* 5. If elementCenter.y is greater than the document's height, it is not visible.
*
* @function isVisible
* @param element {Node}
* @return {boolean}
*/
function isVisible(element) {
if (element.offsetWidth + element.offsetHeight + element.getBoundingClientRect().height + element.getBoundingClientRect().width === 0) {
return false;
}
// Define elementCenter object with props of x and y
// x: Left position relative to the viewport plus half the element's width (no margin).
// y: Top position relative to the viewport plus half the element's height (no margin).
const elementCenter = {
x: element.getBoundingClientRect().left + element.offsetWidth / 2,
y: element.getBoundingClientRect().top + element.offsetHeight / 2
};
if (elementCenter.x < 0) {
return false;
}
if (elementCenter.x > (document$1.documentElement.clientWidth || window$1.innerWidth)) {
return false;
}
if (elementCenter.y < 0) {
return false;
}
if (elementCenter.y > (document$1.documentElement.clientHeight || window$1.innerHeight)) {
return false;
}
let pointContainer = document$1.elementFromPoint(elementCenter.x, elementCenter.y);
while (pointContainer) {
if (pointContainer === element) {
return true;
}
if (pointContainer.parentNode) {
pointContainer = pointContainer.parentNode;
} else {
return false;
}
}
}
// If no DOM element was passed as argument use this component's element.
if (!el) {
el = this.el();
}
// If element is visible, is being rendered & either does not have a parent element or its tabIndex is not negative.
if (isVisible(el) && isBeingRendered(el) && (!el.parentElement || el.tabIndex >= 0)) {
return true;
}
return false;
}
/**
* Register a `Component` with `videojs` given the name and the component.
*
* > NOTE: {@link Tech}s should not be registered as a `Component`. {@link Tech}s
* should be registered using {@link Tech.registerTech} or
* {@link videojs:videojs.registerTech}.
*
* > NOTE: This function can also be seen on videojs as
* {@link videojs:videojs.registerComponent}.
*
* @param {string} name
* The name of the `Component` to register.
*
* @param {Component} ComponentToRegister
* The `Component` class to register.
*
* @return {Component}
* The `Component` that was registered.
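*
* A minimal registration sketch; `MyTitleBar` is an illustrative subclass
* name, not a built-in component:
*
* @example
* const Component = videojs.getComponent('Component');
* class MyTitleBar extends Component {
*   buildCSSClass() {
*     return `vjs-my-title-bar ${super.buildCSSClass()}`;
*   }
* }
* videojs.registerComponent('MyTitleBar', MyTitleBar);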
*/
static registerComponent(name, ComponentToRegister) {
if (typeof name !== 'string' || !name) {
throw new Error(`Illegal component name, "${name}"; must be a non-empty string.`);
}
const Tech = Component$1.getComponent('Tech');
// We need to make sure this check is only done if Tech has been registered.
const isTech = Tech && Tech.isTech(ComponentToRegister);
const isComp = Component$1 === ComponentToRegister || Component$1.prototype.isPrototypeOf(ComponentToRegister.prototype);
if (isTech || !isComp) {
let reason;
if (isTech) {
reason = 'techs must be registered using Tech.registerTech()';
} else {
reason = 'must be a Component subclass';
}
throw new Error(`Illegal component, "${name}"; ${reason}.`);
}
name = toTitleCase$1(name);
if (!Component$1.components_) {
Component$1.components_ = {};
}
const Player = Component$1.getComponent('Player');
if (name === 'Player' && Player && Player.players) {
const players = Player.players;
const playerNames = Object.keys(players);
// If we have players that were disposed, then their name will still be
// in Players.players. So, we must loop through and verify that the value
// for each item is not null. This allows registration of the Player component
// after all players have been disposed or before any were created.
if (players && playerNames.length > 0 && playerNames.map(pname => players[pname]).every(Boolean)) {
throw new Error('Can not register Player component after player has been created.');
}
}
Component$1.components_[name] = ComponentToRegister;
Component$1.components_[toLowerCase(name)] = ComponentToRegister;
return ComponentToRegister;
}
/**
* Get a `Component` based on the name it was registered with.
*
* @param {string} name
* The Name of the component to get.
*
* @return {typeof Component}
* The `Component` that got registered under the given name.
*/
static getComponent(name) {
if (!name || !Component$1.components_) {
return;
}
return Component$1.components_[name];
}
}
Component$1.registerComponent('Component', Component$1);
/**
* @file time.js
* @module time
*/
/**
* Returns the time for the specified index at the start or end
* of a TimeRange object.
*
* @typedef {Function} TimeRangeIndex
*
* @param {number} [index=0]
* The range number to return the time for.
*
* @return {number}
* The time offset at the specified index.
*
* @deprecated The index argument must be provided.
* In the future, leaving it out will throw an error.
*/
/**
* An object that contains ranges of time, which mimics {@link TimeRanges}.
*
* @typedef {Object} TimeRange
*
* @property {number} length
* The number of time ranges represented by this object.
*
* @property {module:time~TimeRangeIndex} start
* Returns the time offset at which a specified time range begins.
*
* @property {module:time~TimeRangeIndex} end
* Returns the time offset at which a specified time range ends.
*
* @see https://developer.mozilla.org/en-US/docs/Web/API/TimeRanges
*/
/**
* Check if any of the time ranges are over the maximum index.
*
* @private
* @param {string} fnName
* The function name to use for logging
*
* @param {number} index
* The index to check
*
* @param {number} maxIndex
* The maximum possible index
*
* @throws {Error} if the timeRanges provided are over the maxIndex
*/
function rangeCheck(fnName, index, maxIndex) {
if (typeof index !== 'number' || index < 0 || index > maxIndex) {
throw new Error(`Failed to execute '${fnName}' on 'TimeRanges': The index provided (${index}) is non-numeric or out of bounds (0-${maxIndex}).`);
}
}
/**
* Get the time for the specified index at the start or end
* of a TimeRange object.
*
* @private
* @param {string} fnName
* The function name to use for logging
*
* @param {number} valueIndex
* The array index that should be used to get the time: 0 for the
* 'start' value or 1 for the 'end' value
*
* @param {Array} ranges
* An array of time ranges
*
* @param {number} [rangeIndex=0]
* The index of the range to get the time from
*
* @return {number}
* The time offset at the specified index.
*
* @deprecated rangeIndex must be set to a value, in the future this will throw an error.
* @throws {Error} if rangeIndex is more than the length of ranges
*/
function getRange(fnName, valueIndex, ranges, rangeIndex) {
rangeCheck(fnName, rangeIndex, ranges.length - 1);
return ranges[rangeIndex][valueIndex];
}
/**
* Create a time range object given ranges of time.
*
* @private
* @param {Array} [ranges]
* An array of time ranges.
*
* @return {TimeRange}
*/
function createTimeRangesObj(ranges) {
let timeRangesObj;
if (ranges === undefined || ranges.length === 0) {
timeRangesObj = {
length: 0,
start() {
throw new Error('This TimeRanges object is empty');
},
end() {
throw new Error('This TimeRanges object is empty');
}
};
} else {
timeRangesObj = {
length: ranges.length,
start: getRange.bind(null, 'start', 0, ranges),
end: getRange.bind(null, 'end', 1, ranges)
};
}
if (window$1.Symbol && window$1.Symbol.iterator) {
timeRangesObj[window$1.Symbol.iterator] = () => (ranges || []).values();
}
return timeRangesObj;
}
/**
* Create a `TimeRange` object which mimics an
* {@link https://developer.mozilla.org/en-US/docs/Web/API/TimeRanges|HTML5 TimeRanges instance}.
*
* @param {number|Array[]} start
* The start of a single range (a number) or an array of ranges (an
* array of arrays of two numbers each).
*
* @param {number} end
* The end of a single range. Cannot be used with the array form of
* the `start` argument.
*
* @return {TimeRange}
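*
* A sketch of both call forms (the function is exported as
* `createTimeRanges`):
*
* @example
* createTimeRanges(0, 30);               // a single range from 0s to 30s
* createTimeRanges([[0, 10], [20, 30]]); // two ranges
* // The result mimics native TimeRanges: .length, .start(i), .end(i)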
*/
function createTimeRanges$1(start, end) {
if (Array.isArray(start)) {
return createTimeRangesObj(start);
} else if (start === undefined || end === undefined) {
return createTimeRangesObj();
}
return createTimeRangesObj([[start, end]]);
}
/**
* Format seconds as a time string, H:MM:SS or M:SS. Supplying a guide (in
* seconds) will force a number of leading zeros to cover the length of the
* guide.
*
* @private
* @param {number} seconds
* Number of seconds to be turned into a string
*
* @param {number} guide
* Number (in seconds) to model the string after
*
* @return {string}
* Time formatted as H:MM:SS or M:SS
*/
const defaultImplementation = function (seconds, guide) {
seconds = seconds < 0 ? 0 : seconds;
let s = Math.floor(seconds % 60);
let m = Math.floor(seconds / 60 % 60);
let h = Math.floor(seconds / 3600);
const gm = Math.floor(guide / 60 % 60);
const gh = Math.floor(guide / 3600);
// handle invalid times
if (isNaN(seconds) || seconds === Infinity) {
// '-' is false for all relational operators (e.g. <, >=) so this setting
// will add the minimum number of fields specified by the guide
h = m = s = '-';
}
// Check if we need to show hours
h = h > 0 || gh > 0 ? h + ':' : '';
// If hours are showing, we may need to add a leading zero.
// Always show at least one digit of minutes.
m = ((h || gm >= 10) && m < 10 ? '0' + m : m) + ':';
// Check if a leading zero is needed for seconds
s = s < 10 ? '0' + s : s;
return h + m + s;
};
// Internal pointer to the current implementation.
let implementation = defaultImplementation;
/**
* Replaces the default formatTime implementation with a custom implementation.
*
* @param {Function} customImplementation
* A function which will be used in place of the default formatTime
* implementation. Will receive the current time in seconds and the
* guide (in seconds) as arguments.
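*
* A sketch of a purely illustrative replacement implementation:
*
* @example
* setFormatTime((seconds, guide) => `${Math.round(seconds)}s`);
* formatTime(125); // -> '125s'
* resetFormatTime(); // restore the default M:SS / H:MM:SS formatting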
*/
function setFormatTime(customImplementation) {
implementation = customImplementation;
}
/**
* Resets formatTime to the default implementation.
*/
function resetFormatTime() {
implementation = defaultImplementation;
}
/**
* Delegates to either the default time formatting function or a custom
* function supplied via `setFormatTime`.
*
* Formats seconds as a time string (H:MM:SS or M:SS). Supplying a
* guide (in seconds) will force a number of leading zeros to cover the
* length of the guide.
*
* @example formatTime(125, 600) === "02:05"
* @param {number} seconds
* Number of seconds to be turned into a string
*
* @param {number} guide
* Number (in seconds) to model the string after
*
* @return {string}
* Time formatted as H:MM:SS or M:SS
*/
function formatTime(seconds, guide = seconds) {
return implementation(seconds, guide);
}
var Time = /*#__PURE__*/Object.freeze({
__proto__: null,
createTimeRanges: createTimeRanges$1,
createTimeRange: createTimeRanges$1,
setFormatTime: setFormatTime,
resetFormatTime: resetFormatTime,
formatTime: formatTime
});
/**
* @file buffer.js
* @module buffer
*/
/** @import { TimeRange } from './time' */
/**
* Compute the percentage of the media that has been buffered.
*
* @param {TimeRange} buffered
* The current `TimeRanges` object representing buffered time ranges
*
* @param {number} duration
* Total duration of the media
*
* @return {number}
* Percent buffered of the total duration in decimal form.
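*
* A sketch with illustrative numbers:
*
* @example
* const buffered = createTimeRanges([[0, 10], [20, 30]]);
* bufferedPercent(buffered, 100); // -> 0.2 (20 of 100 seconds buffered)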
*/
function bufferedPercent(buffered, duration) {
let bufferedDuration = 0;
let start;
let end;
if (!duration) {
return 0;
}
if (!buffered || !buffered.length) {
buffered = createTimeRanges$1(0, 0);
}
for (let i = 0; i < buffered.length; i++) {
start = buffered.start(i);
end = buffered.end(i);
// buffered end can be bigger than duration by a very small fraction
if (end > duration) {
end = duration;
}
bufferedDuration += end - start;
}
return bufferedDuration / duration;
}
/**
* @file media-error.js
*/
/**
* A Custom `MediaError` class which mimics the standard HTML5 `MediaError` class.
*
* @param {number|string|Object|MediaError} value
* This can be of multiple types:
* - number: should be a standard error code
* - string: an error message (the code will be 0)
* - Object: arbitrary properties
* - `MediaError` (native): used to populate a video.js `MediaError` object
* - `MediaError` (video.js): will return itself if it's already a
* video.js `MediaError` object.
*
* @see [MediaError Spec]{@link https://dev.w3.org/html5/spec-author-view/video.html#mediaerror}
* @see [Encrypted MediaError Spec]{@link https://www.w3.org/TR/2013/WD-encrypted-media-20130510/#error-codes}
*
* @class MediaError
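*
* A sketch of the accepted value types:
*
* @example
* new MediaError(4);                       // code 4, default message
* new MediaError('Something went wrong');  // code 0, custom message
* new MediaError({ code: 2, message: 'A network problem occurred' });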
*/
function MediaError(value) {
// Allow redundant calls to this constructor to avoid having `instanceof`
// checks peppered around the code.
if (value instanceof MediaError) {
return value;
}
if (typeof value === 'number') {
this.code = value;
} else if (typeof value === 'string') {
// default code is zero, so this is a custom error
this.message = value;
} else if (isObject(value)) {
// We assign the `code` property manually because native `MediaError` objects
// do not expose it as an own/enumerable property of the object.
if (typeof value.code === 'number') {
this.code = value.code;
}
Object.assign(this, value);
}
if (!this.message) {
this.message = MediaError.defaultMessages[this.code] || '';
}
}
/**
* The error code that refers to one of the defined `MediaError` types
*
* @type {Number}
*/
MediaError.prototype.code = 0;
/**
* An optional message to show with the error. Message is not part of the HTML5
* video spec but allows for more informative custom errors.
*
* @type {String}
*/
MediaError.prototype.message = '';
/**
* An optional status code that can be set by plugins to allow even more detail about
* the error. For example a plugin might provide a specific HTTP status code and an
* error message for that code. Then when the plugin gets that error this class will
* know how to display an error message for it. This allows a custom message to show
* up on the `Player` error overlay.
*
* @type {number|null}
*/
MediaError.prototype.status = null;
/**
* An object containing an error type, as well as other information regarding the error.
*
* @typedef {{errorType: string, [key: string]: any}} ErrorMetadata
*/
/**
* An optional object to give more detail about the error. This can be used to give
* a higher level of specificity to an error versus the more generic MediaError codes.
* `metadata` expects an `errorType` string that should align with the values from videojs.Error.
*
* @type {ErrorMetadata}
*/
MediaError.prototype.metadata = null;
/**
* Errors indexed by the W3C standard. The order **CANNOT CHANGE**! See the
* specification listed under {@link MediaError} for more information.
*
* @enum {array}
* @readonly
* @property {string} 0 - MEDIA_ERR_CUSTOM
* @property {string} 1 - MEDIA_ERR_ABORTED
* @property {string} 2 - MEDIA_ERR_NETWORK
* @property {string} 3 - MEDIA_ERR_DECODE
* @property {string} 4 - MEDIA_ERR_SRC_NOT_SUPPORTED
* @property {string} 5 - MEDIA_ERR_ENCRYPTED
*/
MediaError.errorTypes = ['MEDIA_ERR_CUSTOM', 'MEDIA_ERR_ABORTED', 'MEDIA_ERR_NETWORK', 'MEDIA_ERR_DECODE', 'MEDIA_ERR_SRC_NOT_SUPPORTED', 'MEDIA_ERR_ENCRYPTED'];
/**
* The default `MediaError` messages based on the {@link MediaError.errorTypes}.
*
* @type {Object}
* @constant
*/
MediaError.defaultMessages = {
1: 'You aborted the media playback',
2: 'A network error caused the media download to fail part-way.',
3: 'The media playback was aborted due to a corruption problem or because the media used features your browser did not support.',
4: 'The media could not be loaded, either because the server or network failed or because the format is not supported.',
5: 'The media is encrypted and we do not have the keys to decrypt it.'
};
/**
* W3C error code for any custom error.
*
* @member MediaError#MEDIA_ERR_CUSTOM
* @constant {number}
* @default 0
*/
MediaError.MEDIA_ERR_CUSTOM = 0;
/**
* W3C error code for any custom error.
*
* @member MediaError.MEDIA_ERR_CUSTOM
* @constant {number}
* @default 0
*/
MediaError.prototype.MEDIA_ERR_CUSTOM = 0;
/**
* W3C error code for media error aborted.
*
* @member MediaError#MEDIA_ERR_ABORTED
* @constant {number}
* @default 1
*/
MediaError.MEDIA_ERR_ABORTED = 1;
/**
* W3C error code for media error aborted.
*
* @member MediaError.MEDIA_ERR_ABORTED
* @constant {number}
* @default 1
*/
MediaError.prototype.MEDIA_ERR_ABORTED = 1;
/**
* W3C error code for any network error.
*
* @member MediaError#MEDIA_ERR_NETWORK
* @constant {number}
* @default 2
*/
MediaError.MEDIA_ERR_NETWORK = 2;
/**
* W3C error code for any network error.
*
* @member MediaError.MEDIA_ERR_NETWORK
* @constant {number}
* @default 2
*/
MediaError.prototype.MEDIA_ERR_NETWORK = 2;
/**
* W3C error code for any decoding error.
*
* @member MediaError#MEDIA_ERR_DECODE
* @constant {number}
* @default 3
*/
MediaError.MEDIA_ERR_DECODE = 3;
/**
* W3C error code for any decoding error.
*
* @member MediaError.MEDIA_ERR_DECODE
* @constant {number}
* @default 3
*/
MediaError.prototype.MEDIA_ERR_DECODE = 3;
/**
* W3C error code for any time that a source is not supported.
*
* @member MediaError#MEDIA_ERR_SRC_NOT_SUPPORTED
* @constant {number}
* @default 4
*/
MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED = 4;
/**
* W3C error code for any time that a source is not supported.
*
* @member MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED
* @constant {number}
* @default 4
*/
MediaError.prototype.MEDIA_ERR_SRC_NOT_SUPPORTED = 4;
/**
* W3C error code for any time that a source is encrypted.
*
* @member MediaError#MEDIA_ERR_ENCRYPTED
* @constant {number}
* @default 5
*/
MediaError.MEDIA_ERR_ENCRYPTED = 5;
/**
* W3C error code for any time that a source is encrypted.
*
* @member MediaError.MEDIA_ERR_ENCRYPTED
* @constant {number}
* @default 5
*/
MediaError.prototype.MEDIA_ERR_ENCRYPTED = 5;
/**
* Returns whether an object is `Promise`-like (i.e. has a `then` method).
*
* @param {Object} value
* An object that may or may not be `Promise`-like.
*
* @return {boolean}
* Whether or not the object is `Promise`-like.
*/
function isPromise(value) {
return value !== undefined && value !== null && typeof value.then === 'function';
}
/**
* Silence a Promise-like object.
*
* This is useful for avoiding non-harmful, but potentially confusing "uncaught
* play promise" rejection error messages.
*
* @param {Object} value
* An object that may or may not be `Promise`-like.
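*
* Typical use, assuming `player` is an existing Player instance:
*
* @example
* // `play()` may return a Promise; swallow a possible rejection quietly.
* silencePromise(player.play());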
*/
function silencePromise(value) {
if (isPromise(value)) {
value.then(null, e => {});
}
}
/**
* @file text-track-list-converter.js Utilities for capturing text track state and
* re-creating tracks based on a capture.
*
* @module text-track-list-converter
*/
/** @import Tech from '../tech/tech' */
/**
* Examine a single {@link TextTrack} and return a JSON-compatible javascript object that
* represents the {@link TextTrack}'s state.
*
* @param {TextTrack} track
* The text track to query.
*
* @return {Object}
* A serializable javascript representation of the TextTrack.
* @private
*/
const trackToJson_ = function (track) {
const ret = ['kind', 'label', 'language', 'id', 'inBandMetadataTrackDispatchType', 'mode', 'src'].reduce((acc, prop, i) => {
if (track[prop]) {
acc[prop] = track[prop];
}
return acc;
}, {
cues: track.cues && Array.prototype.map.call(track.cues, function (cue) {
return {
startTime: cue.startTime,
endTime: cue.endTime,
text: cue.text,
id: cue.id
};
})
});
return ret;
};
/**
* Examine a {@link Tech} and return a JSON-compatible javascript array that represents the
* state of all {@link TextTrack}s currently configured. The return array is compatible with
* {@link text-track-list-converter:jsonToTextTracks}.
*
* @param {Tech} tech
* The tech object to query
*
* @return {Array}
* A serializable javascript representation of the {@link Tech}s
* {@link TextTrackList}.
*/
const textTracksToJson = function (tech) {
const trackEls = tech.$$('track');
const trackObjs = Array.prototype.map.call(trackEls, t => t.track);
const tracks = Array.prototype.map.call(trackEls, function (trackEl) {
const json = trackToJson_(trackEl.track);
if (trackEl.src) {
json.src = trackEl.src;
}
return json;
});
return tracks.concat(Array.prototype.filter.call(tech.textTracks(), function (track) {
return trackObjs.indexOf(track) === -1;
}).map(trackToJson_));
};
/**
* Create a set of remote {@link TextTrack}s on a {@link Tech} based on an array of javascript
* object {@link TextTrack} representations.
*
* @param {Array} json
* An array of `TextTrack` representation objects, like those that would be
* produced by `textTracksToJson`.
*
* @param {Tech} tech
* The `Tech` to create the `TextTrack`s on.
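*
* A sketch of capturing tracks from one tech and restoring them on another
* (`oldTech` and `newTech` are illustrative Tech instances):
*
* @example
* const json = textTracksToJson(oldTech);
* jsonToTextTracks(json, newTech);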
*/
const jsonToTextTracks = function (json, tech) {
json.forEach(function (track) {
const addedTrack = tech.addRemoteTextTrack(track).track;
if (!track.src && track.cues) {
track.cues.forEach(cue => addedTrack.addCue(cue));
}
});
return tech.textTracks();
};
var textTrackConverter = {
textTracksToJson,
jsonToTextTracks,
trackToJson_
};
/**
* @file modal-dialog.js
*/
/** @import Player from './player' */
/** @import { ContentDescriptor } from './utils/dom' */
const MODAL_CLASS_NAME = 'vjs-modal-dialog';
/**
* The `ModalDialog` displays over the video and its controls, which blocks
* interaction with the player until it is closed.
*
* Modal dialogs include a "Close" button and will close when that button
* is activated - or when ESC is pressed anywhere.
*
* @extends Component
*/
class ModalDialog extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {ContentDescriptor} [options.content=undefined]
* Provide customized content for this modal.
*
* @param {string} [options.description]
* A text description for the modal, primarily for accessibility.
*
* @param {boolean} [options.fillAlways=false]
* Normally, modals are automatically filled only the first time
* they open. This tells the modal to refresh its content
* every time it opens.
*
* @param {string} [options.label]
* A text label for the modal, primarily for accessibility.
*
* @param {boolean} [options.pauseOnOpen=true]
* If `true`, playback will be paused if playing when
* the modal opens, and resumed when it closes.
*
* @param {boolean} [options.temporary=true]
* If `true`, the modal can only be opened once; it will be
* disposed as soon as it's closed.
*
* @param {boolean} [options.uncloseable=false]
* If `true`, the user will not be able to close the modal
* through the UI in the normal ways. Programmatic closing is
* still possible.
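*
* A construction sketch; `player` is assumed to be an existing Player
* instance and the string content is illustrative:
*
* @example
* const modal = new ModalDialog(player, {
*   content: 'Playback is paused.',
*   temporary: false
* });
* player.addChild(modal);
* modal.open();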
*/
constructor(player, options) {
super(player, options);
this.handleKeyDown_ = e => this.handleKeyDown(e);
this.close_ = e => this.close(e);
this.opened_ = this.hasBeenOpened_ = this.hasBeenFilled_ = false;
this.closeable(!this.options_.uncloseable);
this.content(this.options_.content);
// Make sure the contentEl is defined AFTER any children are initialized
// because we only want the contents of the modal in the contentEl
// (not the UI elements like the close button).
this.contentEl_ = createEl('div', {
className: `${MODAL_CLASS_NAME}-content`
}, {
role: 'document'
});
this.descEl_ = createEl('p', {
className: `${MODAL_CLASS_NAME}-description vjs-control-text`,
id: this.el().getAttribute('aria-describedby')
});
textContent(this.descEl_, this.description());
this.el_.appendChild(this.descEl_);
this.el_.appendChild(this.contentEl_);
}
/**
* Create the `ModalDialog`'s DOM element
*
* @return {Element}
* The DOM element that gets created.
*/
createEl() {
return super.createEl('div', {
className: this.buildCSSClass(),
tabIndex: -1
}, {
'aria-describedby': `${this.id()}_description`,
'aria-hidden': 'true',
'aria-label': this.label(),
'role': 'dialog',
'aria-live': 'polite'
});
}
dispose() {
this.contentEl_ = null;
this.descEl_ = null;
this.previouslyActiveEl_ = null;
super.dispose();
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `${MODAL_CLASS_NAME} vjs-hidden ${super.buildCSSClass()}`;
}
/**
* Returns the label string for this modal. Primarily used for accessibility.
*
* @return {string}
* the localized or raw label of this modal.
*/
label() {
return this.localize(this.options_.label || 'Modal Window');
}
/**
* Returns the description string for this modal. Primarily used for
* accessibility.
*
* @return {string}
* The localized or raw description of this modal.
*/
description() {
let desc = this.options_.description || this.localize('This is a modal window.');
// Append a universal closeability message if the modal is closeable.
if (this.closeable()) {
desc += ' ' + this.localize('This modal can be closed by pressing the Escape key or activating the close button.');
}
return desc;
}
/**
* Opens the modal.
*
* @fires ModalDialog#beforemodalopen
* @fires ModalDialog#modalopen
*/
open() {
if (this.opened_) {
if (this.options_.fillAlways) {
this.fill();
}
return;
}
const player = this.player();
/**
* Fired just before a `ModalDialog` is opened.
*
* @event ModalDialog#beforemodalopen
* @type {Event}
*/
this.trigger('beforemodalopen');
this.opened_ = true;
// Fill content if the modal has never opened before and
// never been filled.
if (this.options_.fillAlways || !this.hasBeenOpened_ && !this.hasBeenFilled_) {
this.fill();
}
// If the player was playing, pause it and take note of its previously
// playing state.
this.wasPlaying_ = !player.paused();
if (this.options_.pauseOnOpen && this.wasPlaying_) {
player.pause();
}
this.on('keydown', this.handleKeyDown_);
// Hide controls and note if they were enabled.
this.hadControls_ = player.controls();
player.controls(false);
this.show();
this.conditionalFocus_();
this.el().setAttribute('aria-hidden', 'false');
/**
* Fired just after a `ModalDialog` is opened.
*
* @event ModalDialog#modalopen
* @type {Event}
*/
this.trigger('modalopen');
this.hasBeenOpened_ = true;
}
/**
* If the `ModalDialog` is currently open or closed.
*
* @param {boolean} [value]
* If given, it will open (`true`) or close (`false`) the modal.
*
* @return {boolean}
* the current open state of the modaldialog
*/
opened(value) {
if (typeof value === 'boolean') {
this[value ? 'open' : 'close']();
}
return this.opened_;
}
/**
* Closes the modal, does nothing if the `ModalDialog` is
* not open.
*
* @fires ModalDialog#beforemodalclose
* @fires ModalDialog#modalclose
*/
close() {
if (!this.opened_) {
return;
}
const player = this.player();
/**
* Fired just before a `ModalDialog` is closed.
*
* @event ModalDialog#beforemodalclose
* @type {Event}
*/
this.trigger('beforemodalclose');
this.opened_ = false;
if (this.wasPlaying_ && this.options_.pauseOnOpen) {
player.play();
}
this.off('keydown', this.handleKeyDown_);
if (this.hadControls_) {
player.controls(true);
}
this.hide();
this.el().setAttribute('aria-hidden', 'true');
/**
* Fired just after a `ModalDialog` is closed.
*
* @event ModalDialog#modalclose
* @type {Event}
*
* @property {boolean} [bubbles=true]
*/
this.trigger({
type: 'modalclose',
bubbles: true
});
this.conditionalBlur_();
if (this.options_.temporary) {
this.dispose();
}
}
/**
* Check to see if the `ModalDialog` is closeable via the UI.
*
* @param {boolean} [value]
* If given as a boolean, it will set the `closeable` option.
*
* @return {boolean}
* Returns the final value of the closeable option.
*/
closeable(value) {
if (typeof value === 'boolean') {
const closeable = this.closeable_ = !!value;
let close = this.getChild('closeButton');
// If this is being made closeable and has no close button, add one.
if (closeable && !close) {
// The close button should be a child of the modal - not its
// content element, so temporarily change the content element.
const temp = this.contentEl_;
this.contentEl_ = this.el_;
close = this.addChild('closeButton', {
controlText: 'Close Modal Dialog'
});
this.contentEl_ = temp;
this.on(close, 'close', this.close_);
}
// If this is being made uncloseable and has a close button, remove it.
if (!closeable && close) {
this.off(close, 'close', this.close_);
this.removeChild(close);
close.dispose();
}
}
return this.closeable_;
}
/**
* Fill the modal's content element with the modal's "content" option.
* The content element will be emptied before this change takes place.
*/
fill() {
this.fillWith(this.content());
}
/**
* Fill the modal's content element with arbitrary content.
* The content element will be emptied before this change takes place.
*
* @fires ModalDialog#beforemodalfill
* @fires ModalDialog#modalfill
*
* @param {ContentDescriptor} [content]
* The same rules apply to this as apply to the `content` option.
*/
fillWith(content) {
const contentEl = this.contentEl();
const parentEl = contentEl.parentNode;
const nextSiblingEl = contentEl.nextSibling;
/**
* Fired just before a `ModalDialog` is filled with content.
*
* @event ModalDialog#beforemodalfill
* @type {Event}
*/
this.trigger('beforemodalfill');
this.hasBeenFilled_ = true;
// Detach the content element from the DOM before performing
// manipulation to avoid modifying the live DOM multiple times.
parentEl.removeChild(contentEl);
this.empty();
insertContent(contentEl, content);
/**
* Fired just after a `ModalDialog` is filled with content.
*
* @event ModalDialog#modalfill
* @type {Event}
*/
this.trigger('modalfill');
// Re-inject the re-filled content element.
if (nextSiblingEl) {
parentEl.insertBefore(contentEl, nextSiblingEl);
} else {
parentEl.appendChild(contentEl);
}
// make sure that the close button is last in the dialog DOM
const closeButton = this.getChild('closeButton');
if (closeButton) {
parentEl.appendChild(closeButton.el_);
}
/**
* Fired after `ModalDialog` is re-filled with content & close button is appended.
*
* @event ModalDialog#aftermodalfill
* @type {Event}
*/
this.trigger('aftermodalfill');
}
/**
* Empties the content element. This happens anytime the modal is filled.
*
* @fires ModalDialog#beforemodalempty
* @fires ModalDialog#modalempty
*/
empty() {
/**
* Fired just before a `ModalDialog` is emptied.
*
* @event ModalDialog#beforemodalempty
* @type {Event}
*/
this.trigger('beforemodalempty');
emptyEl(this.contentEl());
/**
* Fired just after a `ModalDialog` is emptied.
*
* @event ModalDialog#modalempty
* @type {Event}
*/
this.trigger('modalempty');
}
/**
* Gets or sets the modal content, which gets normalized before being
* rendered into the DOM.
*
* This does not update the DOM or fill the modal, but it is called during
* that process.
*
* @param {ContentDescriptor} [value]
* If defined, sets the internal content value to be used on the
* next call(s) to `fill`. This value is normalized before being
* inserted. To "clear" the internal content value, pass `null`.
*
* @return {ContentDescriptor}
* The current content of the modal dialog
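*
* @example
* // Minimal usage sketch; assumes `modal` is an existing ModalDialog instance.
* modal.content('Playback will resume shortly.');
* modal.fill(); // renders the stored content into the DOM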
*/
content(value) {
if (typeof value !== 'undefined') {
this.content_ = value;
}
return this.content_;
}
/**
* Conditionally focus the modal dialog if focus was previously on the player.
*
* @private
*/
conditionalFocus_() {
const activeEl = document$1.activeElement;
const playerEl = this.player_.el_;
this.previouslyActiveEl_ = null;
if (playerEl.contains(activeEl) || playerEl === activeEl) {
this.previouslyActiveEl_ = activeEl;
this.focus();
}
}
/**
* Conditionally blur the element and refocus the last focused element.
*
* @private
*/
conditionalBlur_() {
if (this.previouslyActiveEl_) {
this.previouslyActiveEl_.focus();
this.previouslyActiveEl_ = null;
}
}
/**
* Keydown handler. Attached when modal is focused.
*
* @listens keydown
*/
handleKeyDown(event) {
/**
* Fires a custom, bubbling `modalKeydown` event.
*
* @event ModalDialog#modalKeydown
* @type {Event}
*/
this.trigger({
type: 'modalKeydown',
originalEvent: event,
target: this,
bubbles: true
});
// Do not allow keydowns to reach out of the modal dialog.
event.stopPropagation();
if (event.key === 'Escape' && this.closeable()) {
event.preventDefault();
this.close();
return;
}
// exit early if it isn't a tab key
if (event.key !== 'Tab') {
return;
}
const focusableEls = this.focusableEls_();
const activeEl = this.el_.querySelector(':focus');
let focusIndex;
for (let i = 0; i < focusableEls.length; i++) {
if (activeEl === focusableEls[i]) {
focusIndex = i;
break;
}
}
if (document$1.activeElement === this.el_) {
focusIndex = 0;
}
if (event.shiftKey && focusIndex === 0) {
focusableEls[focusableEls.length - 1].focus();
event.preventDefault();
} else if (!event.shiftKey && focusIndex === focusableEls.length - 1) {
focusableEls[0].focus();
event.preventDefault();
}
}
/**
* get all focusable elements
*
* @private
*/
focusableEls_() {
const allChildren = this.el_.querySelectorAll('*');
return Array.prototype.filter.call(allChildren, child => {
// `getAttribute('tabindex')` returns a string, so compare against '-1' rather than the number -1.
return (child instanceof window$1.HTMLAnchorElement || child instanceof window$1.HTMLAreaElement) && child.hasAttribute('href') || (child instanceof window$1.HTMLInputElement || child instanceof window$1.HTMLSelectElement || child instanceof window$1.HTMLTextAreaElement || child instanceof window$1.HTMLButtonElement) && !child.hasAttribute('disabled') || child instanceof window$1.HTMLIFrameElement || child instanceof window$1.HTMLObjectElement || child instanceof window$1.HTMLEmbedElement || child.hasAttribute('tabindex') && child.getAttribute('tabindex') !== '-1' || child.hasAttribute('contenteditable');
});
}
}
/**
* Default options for `ModalDialog`.
*
* @type {Object}
* @private
*/
ModalDialog.prototype.options_ = {
pauseOnOpen: true,
temporary: true
};
Component$1.registerComponent('ModalDialog', ModalDialog);
/**
* @file track-list.js
*/
/** @import Track from './track' */
/**
* Common functionality between {@link TextTrackList}, {@link AudioTrackList}, and
* {@link VideoTrackList}
*
* @extends EventTarget
*/
class TrackList extends EventTarget$2 {
/**
* Create an instance of this class
*
* @param { Track[] } tracks
* A list of tracks to initialize the list with.
*
* @abstract
*/
constructor(tracks = []) {
super();
this.tracks_ = [];
/**
* @memberof TrackList
* @member {number} length
* The current number of `Track`s in this `TrackList`.
* @instance
*/
Object.defineProperty(this, 'length', {
get() {
return this.tracks_.length;
}
});
for (let i = 0; i < tracks.length; i++) {
this.addTrack(tracks[i]);
}
}
/**
* Add a {@link Track} to the `TrackList`
*
* @param {Track} track
* The audio, video, or text track to add to the list.
*
* @fires TrackList#addtrack
*/
addTrack(track) {
const index = this.tracks_.length;
if (!('' + index in this)) {
Object.defineProperty(this, index, {
get() {
return this.tracks_[index];
}
});
}
// Do not add duplicate tracks
if (this.tracks_.indexOf(track) === -1) {
this.tracks_.push(track);
/**
* Triggered when a track is added to a track list.
*
* @event TrackList#addtrack
* @type {Event}
* @property {Track} track
* A reference to the track that was added.
*/
this.trigger({
track,
type: 'addtrack',
target: this
});
}
/**
* Triggered when a track label is changed.
*
* @event TrackList#labelchange
* @type {Event}
* @property {Track} track
* A reference to the track whose label was changed.
*/
track.labelchange_ = () => {
this.trigger({
track,
type: 'labelchange',
target: this
});
};
if (isEvented(track)) {
track.addEventListener('labelchange', track.labelchange_);
}
}
/**
* Remove a {@link Track} from the `TrackList`
*
* @param {Track} rtrack
* The audio, video, or text track to remove from the list.
*
* @fires TrackList#removetrack
*/
removeTrack(rtrack) {
let track;
for (let i = 0, l = this.length; i < l; i++) {
if (this[i] === rtrack) {
track = this[i];
if (track.off) {
track.off();
}
this.tracks_.splice(i, 1);
break;
}
}
if (!track) {
return;
}
/**
* Triggered when a track is removed from track list.
*
* @event TrackList#removetrack
* @type {Event}
* @property {Track} track
* A reference to the track that was removed.
*/
this.trigger({
track,
type: 'removetrack',
target: this
});
}
/**
* Get a Track from the TrackList by a tracks id
*
* @param {string} id - the id of the track to get
* @method getTrackById
* @return {Track}
* @private
*/
getTrackById(id) {
let result = null;
for (let i = 0, l = this.length; i < l; i++) {
const track = this[i];
if (track.id === id) {
result = track;
break;
}
}
return result;
}
}
/**
* Triggered when a different track is selected/enabled.
*
* @event TrackList#change
* @type {Event}
*/
/**
* Events that can be called with on + eventName. See {@link EventHandler}.
*
* @property {Object} TrackList#allowedEvents_
* @protected
*/
TrackList.prototype.allowedEvents_ = {
change: 'change',
addtrack: 'addtrack',
removetrack: 'removetrack',
labelchange: 'labelchange'
};
// emulate attribute EventHandler support to allow for feature detection
for (const event in TrackList.prototype.allowedEvents_) {
TrackList.prototype['on' + event] = null;
}
/**
* @file audio-track-list.js
*/
/** @import AudioTrack from './audio-track' */
/**
* Anywhere we call this function we diverge from the spec
* as we only support one enabled audio track at a time
*
* @param {AudioTrackList} list
* list to work on
*
* @param {AudioTrack} track
* The track to skip
*
* @private
*/
const disableOthers$1 = function (list, track) {
for (let i = 0; i < list.length; i++) {
if (!Object.keys(list[i]).length || track.id === list[i].id) {
continue;
}
// another audio track is enabled, disable it
list[i].enabled = false;
}
};
/**
* The current list of {@link AudioTrack} for a media file.
*
* @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#audiotracklist}
* @extends TrackList
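*
* @example
* // Minimal usage sketch; assumes `player` is an existing Player instance
* // whose source exposes multiple audio renditions.
* const audioTracks = player.audioTracks();
*
* for (let i = 0; i < audioTracks.length; i++) {
*   if (audioTracks[i].language === 'es') {
*     // enabling one track disables every other track in the list
*     audioTracks[i].enabled = true;
*   }
* }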
*/
class AudioTrackList extends TrackList {
/**
* Create an instance of this class.
*
* @param {AudioTrack[]} [tracks=[]]
* A list of `AudioTrack` to instantiate the list with.
*/
constructor(tracks = []) {
// make sure only 1 track is enabled
// sorted from last index to first index
for (let i = tracks.length - 1; i >= 0; i--) {
if (tracks[i].enabled) {
disableOthers$1(tracks, tracks[i]);
break;
}
}
super(tracks);
this.changing_ = false;
}
/**
* Add an {@link AudioTrack} to the `AudioTrackList`.
*
* @param {AudioTrack} track
* The AudioTrack to add to the list
*
* @fires TrackList#addtrack
*/
addTrack(track) {
if (track.enabled) {
disableOthers$1(this, track);
}
super.addTrack(track);
// native tracks don't have this
if (!track.addEventListener) {
return;
}
track.enabledChange_ = () => {
// when we are disabling other tracks (since we don't support
// more than one track at a time) we will set changing_
// to true so that we don't trigger additional change events
if (this.changing_) {
return;
}
this.changing_ = true;
disableOthers$1(this, track);
this.changing_ = false;
this.trigger('change');
};
/**
* @listens AudioTrack#enabledchange
* @fires TrackList#change
*/
track.addEventListener('enabledchange', track.enabledChange_);
}
removeTrack(rtrack) {
super.removeTrack(rtrack);
if (rtrack.removeEventListener && rtrack.enabledChange_) {
rtrack.removeEventListener('enabledchange', rtrack.enabledChange_);
rtrack.enabledChange_ = null;
}
}
}
/**
* @file video-track-list.js
*/
/** @import VideoTrack from './video-track' */
/**
* Un-select all other {@link VideoTrack}s that are selected.
*
* @param {VideoTrackList} list
* list to work on
*
* @param {VideoTrack} track
* The track to skip
*
* @private
*/
const disableOthers = function (list, track) {
for (let i = 0; i < list.length; i++) {
if (!Object.keys(list[i]).length || track.id === list[i].id) {
continue;
}
// another video track is selected, deselect it
list[i].selected = false;
}
};
/**
* The current list of {@link VideoTrack} for a video.
*
* @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#videotracklist}
* @extends TrackList
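*
* @example
* // Minimal usage sketch; assumes `player` is an existing Player instance.
* const videoTracks = player.videoTracks();
*
* videoTracks.addEventListener('change', () => {
*   console.log('selected video track index:', videoTracks.selectedIndex);
* });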
*/
class VideoTrackList extends TrackList {
/**
* Create an instance of this class.
*
* @param {VideoTrack[]} [tracks=[]]
* A list of `VideoTrack` to instantiate the list with.
*/
constructor(tracks = []) {
// make sure only 1 track is enabled
// sorted from last index to first index
for (let i = tracks.length - 1; i >= 0; i--) {
if (tracks[i].selected) {
disableOthers(tracks, tracks[i]);
break;
}
}
super(tracks);
this.changing_ = false;
/**
* @member {number} VideoTrackList#selectedIndex
* The current index of the selected {@link VideoTrack}.
*/
Object.defineProperty(this, 'selectedIndex', {
get() {
for (let i = 0; i < this.length; i++) {
if (this[i].selected) {
return i;
}
}
return -1;
},
set() {}
});
}
/**
* Add a {@link VideoTrack} to the `VideoTrackList`.
*
* @param {VideoTrack} track
* The VideoTrack to add to the list
*
* @fires TrackList#addtrack
*/
addTrack(track) {
if (track.selected) {
disableOthers(this, track);
}
super.addTrack(track);
// native tracks don't have this
if (!track.addEventListener) {
return;
}
track.selectedChange_ = () => {
if (this.changing_) {
return;
}
this.changing_ = true;
disableOthers(this, track);
this.changing_ = false;
this.trigger('change');
};
/**
* @listens VideoTrack#selectedchange
* @fires TrackList#change
*/
track.addEventListener('selectedchange', track.selectedChange_);
}
removeTrack(rtrack) {
super.removeTrack(rtrack);
if (rtrack.removeEventListener && rtrack.selectedChange_) {
rtrack.removeEventListener('selectedchange', rtrack.selectedChange_);
rtrack.selectedChange_ = null;
}
}
}
/**
* @file text-track-list.js
*/
/** @import TextTrack from './text-track' */
/**
* The current list of {@link TextTrack} for a media file.
*
* @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttracklist}
* @extends TrackList
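*
* @example
* // Minimal usage sketch; assumes `player` is an existing Player instance.
* const textTracks = player.textTracks();
*
* textTracks.addEventListener('change', () => {
*   for (let i = 0; i < textTracks.length; i++) {
*     if (textTracks[i].mode === 'showing') {
*       console.log('now showing:', textTracks[i].label);
*     }
*   }
* });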
*/
class TextTrackList extends TrackList {
/**
* Add a {@link TextTrack} to the `TextTrackList`
*
* @param {TextTrack} track
* The text track to add to the list.
*
* @fires TrackList#addtrack
*/
addTrack(track) {
super.addTrack(track);
if (!this.queueChange_) {
this.queueChange_ = () => this.queueTrigger('change');
}
if (!this.triggerSelectedlanguagechange_) {
this.triggerSelectedlanguagechange_ = () => this.trigger('selectedlanguagechange');
}
/**
* @listens TextTrack#modechange
* @fires TrackList#change
*/
track.addEventListener('modechange', this.queueChange_);
const nonLanguageTextTrackKind = ['metadata', 'chapters'];
if (nonLanguageTextTrackKind.indexOf(track.kind) === -1) {
track.addEventListener('modechange', this.triggerSelectedlanguagechange_);
}
}
removeTrack(rtrack) {
super.removeTrack(rtrack);
// manually remove the event handlers we added
if (rtrack.removeEventListener) {
if (this.queueChange_) {
rtrack.removeEventListener('modechange', this.queueChange_);
}
if (this.triggerSelectedlanguagechange_) {
rtrack.removeEventListener('modechange', this.triggerSelectedlanguagechange_);
}
}
}
}
/**
* @file html-track-element-list.js
*/
/**
* The current list of {@link HtmlTrackElement}s.
*/
class HtmlTrackElementList {
/**
* Create an instance of this class.
*
* @param {HtmlTrackElement[]} [trackElements=[]]
* A list of `HtmlTrackElement` to instantiate the list with.
*/
constructor(trackElements = []) {
this.trackElements_ = [];
/**
* @memberof HtmlTrackElementList
* @member {number} length
* The current number of `HtmlTrackElement`s in this `HtmlTrackElementList`.
* @instance
*/
Object.defineProperty(this, 'length', {
get() {
return this.trackElements_.length;
}
});
for (let i = 0, length = trackElements.length; i < length; i++) {
this.addTrackElement_(trackElements[i]);
}
}
/**
* Add an {@link HtmlTrackElement} to the `HtmlTrackElementList`
*
* @param {HtmlTrackElement} trackElement
* The track element to add to the list.
*
* @private
*/
addTrackElement_(trackElement) {
const index = this.trackElements_.length;
if (!('' + index in this)) {
Object.defineProperty(this, index, {
get() {
return this.trackElements_[index];
}
});
}
// Do not add duplicate elements
if (this.trackElements_.indexOf(trackElement) === -1) {
this.trackElements_.push(trackElement);
}
}
/**
* Get an {@link HtmlTrackElement} from the `HtmlTrackElementList` given an
* {@link TextTrack}.
*
* @param {TextTrack} track
* The track associated with a track element.
*
* @return {HtmlTrackElement|undefined}
* The track element that was found or undefined.
*
* @private
*/
getTrackElementByTrack_(track) {
let trackElement_;
for (let i = 0, length = this.trackElements_.length; i < length; i++) {
if (track === this.trackElements_[i].track) {
trackElement_ = this.trackElements_[i];
break;
}
}
return trackElement_;
}
/**
* Remove a {@link HtmlTrackElement} from the `HtmlTrackElementList`
*
* @param {HtmlTrackElement} trackElement
* The track element to remove from the list.
*
* @private
*/
removeTrackElement_(trackElement) {
for (let i = 0, length = this.trackElements_.length; i < length; i++) {
if (trackElement === this.trackElements_[i]) {
if (this.trackElements_[i].track && typeof this.trackElements_[i].track.off === 'function') {
this.trackElements_[i].track.off();
}
if (typeof this.trackElements_[i].off === 'function') {
this.trackElements_[i].off();
}
this.trackElements_.splice(i, 1);
break;
}
}
}
}
/**
* @file text-track-cue-list.js
*/
/**
* @typedef {Object} TextTrackCueList~TextTrackCue
*
* @property {string} id
* The unique id for this text track cue
*
* @property {number} startTime
* The start time for this text track cue
*
* @property {number} endTime
* The end time for this text track cue
*
* @property {boolean} pauseOnExit
* Pause when the end time is reached if true.
*
* @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttrackcue}
*/
/**
* A List of TextTrackCues.
*
* @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttrackcuelist}
*/
class TextTrackCueList {
/**
* Create an instance of this class.
*
* @param {Array} cues
* A list of cues to be initialized with
*/
constructor(cues) {
TextTrackCueList.prototype.setCues_.call(this, cues);
/**
* @memberof TextTrackCueList
* @member {number} length
* The current number of `TextTrackCue`s in the TextTrackCueList.
* @instance
*/
Object.defineProperty(this, 'length', {
get() {
return this.length_;
}
});
}
/**
* A setter for cues in this list. Creates getters and
* an index for the cues.
*
* @param {Array} cues
* An array of cues to set
*
* @private
*/
setCues_(cues) {
const oldLength = this.length || 0;
let i = 0;
const l = cues.length;
this.cues_ = cues;
this.length_ = cues.length;
const defineProp = function (index) {
if (!('' + index in this)) {
Object.defineProperty(this, '' + index, {
get() {
return this.cues_[index];
}
});
}
};
if (oldLength < l) {
i = oldLength;
for (; i < l; i++) {
defineProp.call(this, i);
}
}
}
/**
* Get a `TextTrackCue` that is currently in the `TextTrackCueList` by id.
*
* @param {string} id
* The id of the cue that should be searched for.
*
* @return {TextTrackCueList~TextTrackCue|null}
* A single cue or null if none was found.
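*
* @example
* // Minimal usage sketch; assumes `track` is a TextTrack whose cues have
* // loaded and that one of them uses the (hypothetical) id 'intro'.
* const cue = track.cues.getCueById('intro');
*
* if (cue) {
*   console.log(cue.startTime, cue.endTime);
* }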
*/
getCueById(id) {
let result = null;
for (let i = 0, l = this.length; i < l; i++) {
const cue = this[i];
if (cue.id === id) {
result = cue;
break;
}
}
return result;
}
}
/**
* @file track-kinds.js
*/
/**
* All possible `VideoTrackKind`s
*
* @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-videotrack-kind
* @typedef VideoTrack~Kind
* @enum
*/
const VideoTrackKind = {
alternative: 'alternative',
captions: 'captions',
main: 'main',
sign: 'sign',
subtitles: 'subtitles',
commentary: 'commentary'
};
/**
* All possible `AudioTrackKind`s
*
* @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-audiotrack-kind
* @typedef AudioTrack~Kind
* @enum
*/
const AudioTrackKind = {
'alternative': 'alternative',
'descriptions': 'descriptions',
'main': 'main',
'main-desc': 'main-desc',
'translation': 'translation',
'commentary': 'commentary'
};
/**
* All possible `TextTrackKind`s
*
* @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-texttrack-kind
* @typedef TextTrack~Kind
* @enum
*/
const TextTrackKind = {
subtitles: 'subtitles',
captions: 'captions',
descriptions: 'descriptions',
chapters: 'chapters',
metadata: 'metadata'
};
/**
* All possible `TextTrackMode`s
*
* @see https://html.spec.whatwg.org/multipage/embedded-content.html#texttrackmode
* @typedef TextTrack~Mode
* @enum
*/
const TextTrackMode = {
disabled: 'disabled',
hidden: 'hidden',
showing: 'showing'
};
/**
* @file track.js
*/
/**
* A Track class that contains all of the common functionality for {@link AudioTrack},
* {@link VideoTrack}, and {@link TextTrack}.
*
* > Note: This class should not be used directly
*
* @see {@link https://html.spec.whatwg.org/multipage/embedded-content.html}
* @extends EventTarget
* @abstract
*/
class Track extends EventTarget$2 {
/**
* Create an instance of this class.
*
* @param {Object} [options={}]
* Object of option names and values
*
* @param {string} [options.kind='']
* A valid kind for the track type you are creating.
*
* @param {string} [options.id='vjs_track_' + Guid.newGUID()]
* A unique id for this AudioTrack.
*
* @param {string} [options.label='']
* The menu label for this track.
*
* @param {string} [options.language='']
* A valid two character language code.
*
* @abstract
*/
constructor(options = {}) {
super();
const trackProps = {
id: options.id || 'vjs_track_' + newGUID(),
kind: options.kind || '',
language: options.language || ''
};
let label = options.label || '';
/**
* @memberof Track
* @member {string} id
* The id of this track. Cannot be changed after creation.
* @instance
*
* @readonly
*/
/**
* @memberof Track
* @member {string} kind
* The kind of track that this is. Cannot be changed after creation.
* @instance
*
* @readonly
*/
/**
* @memberof Track
* @member {string} language
* The two letter language code for this track. Cannot be changed after
* creation.
* @instance
*
* @readonly
*/
for (const key in trackProps) {
Object.defineProperty(this, key, {
get() {
return trackProps[key];
},
set() {}
});
}
/**
* @memberof Track
* @member {string} label
* The label of this track. Cannot be changed after creation.
* @instance
*
* @fires Track#labelchange
*/
Object.defineProperty(this, 'label', {
get() {
return label;
},
set(newLabel) {
if (newLabel !== label) {
label = newLabel;
/**
* An event that fires when label changes on this track.
*
* > Note: This is not part of the spec!
*
* @event Track#labelchange
* @type {Event}
*/
this.trigger('labelchange');
}
}
});
}
}
/**
* @file url.js
* @module url
*/
/**
* Resolve and parse the elements of a URL.
*
* @function
* @param {string} url
* The url to parse
*
* @return {URL}
* An object of url details
*/
const parseUrl = function (url) {
return new URL(url, document$1.baseURI);
};
/**
* Get absolute version of relative URL.
*
* @function
* @param {string} url
* URL to make absolute
*
* @return {string}
* Absolute URL
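*
* @example
* // Resolution happens against document.baseURI; assuming the page is
* // https://example.com/pages/index.html:
* getAbsoluteURL('../media/video.mp4'); // 'https://example.com/media/video.mp4'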
*/
const getAbsoluteURL = function (url) {
return new URL(url, document$1.baseURI).href;
};
/**
* Returns the extension of the passed file name. It will return an empty string
* if passed an invalid path.
*
* @function
* @param {string} path
* The fileName path like '/path/to/file.mp4'
*
* @return {string}
* The extension in lower case or an empty string if no
* extension could be found.
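*
* @example
* getFileExtension('/path/to/file.mp4');                 // 'mp4'
* getFileExtension('http://example.com/video.MOV?t=10'); // 'mov'
* getFileExtension('no-extension');                      // ''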
*/
const getFileExtension = function (path) {
if (typeof path === 'string') {
const splitPathRe = /^(\/?)([\s\S]*?)((?:\.{1,2}|[^\/]+?)(\.([^\.\/\?]+)))(?:[\/]*|[\?].*)$/;
const pathParts = splitPathRe.exec(path);
if (pathParts) {
return pathParts.pop().toLowerCase();
}
}
return '';
};
/**
* Returns whether the url passed is a cross domain request or not.
*
* @function
* @param {string} url
* The url to check.
*
* @param {URL} [winLoc]
* the domain to check the url against, defaults to window.location
*
* @return {boolean}
* Whether it is a cross domain request or not.
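*
* @example
* // Assuming the current page is served from https://example.com:
* isCrossOrigin('/media/video.mp4');                  // false
* isCrossOrigin('https://cdn.example.net/video.mp4'); // true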
*/
const isCrossOrigin = function (url, winLoc = window$1.location) {
return parseUrl(url).origin !== winLoc.origin;
};
var Url = /*#__PURE__*/Object.freeze({
__proto__: null,
parseUrl: parseUrl,
getAbsoluteURL: getAbsoluteURL,
getFileExtension: getFileExtension,
isCrossOrigin: isCrossOrigin
});
/**
* @file text-track.js
*/
/** @import Tech from '../tech/tech' */
/**
* Takes a webvtt file contents and parses it into cues
*
* @param {string} srcContent
* webVTT file contents
*
* @param {TextTrack} track
* TextTrack to add cues to. Cues come from the srcContent.
*
* @private
*/
const parseCues = function (srcContent, track) {
const parser = new window$1.WebVTT.Parser(window$1, window$1.vttjs, window$1.WebVTT.StringDecoder());
const errors = [];
parser.oncue = function (cue) {
track.addCue(cue);
};
parser.onparsingerror = function (error) {
errors.push(error);
};
parser.onflush = function () {
track.trigger({
type: 'loadeddata',
target: track
});
};
parser.parse(srcContent);
if (errors.length > 0) {
if (window$1.console && window$1.console.groupCollapsed) {
window$1.console.groupCollapsed(`Text Track parsing errors for ${track.src}`);
}
errors.forEach(error => log$1.error(error));
if (window$1.console && window$1.console.groupEnd) {
window$1.console.groupEnd();
}
}
parser.flush();
};
/**
* Load a `TextTrack` from a specified url.
*
* @param {string} src
* Url to load track from.
*
* @param {TextTrack} track
* Track to add cues to. Comes from the content at the end of `url`.
*
* @private
*/
const loadTrack = function (src, track) {
const opts = {
uri: src
};
const crossOrigin = isCrossOrigin(src);
if (crossOrigin) {
opts.cors = crossOrigin;
}
const withCredentials = track.tech_.crossOrigin() === 'use-credentials';
if (withCredentials) {
opts.withCredentials = withCredentials;
}
XHR(opts, bind_(this, function (err, response, responseBody) {
if (err) {
return log$1.error(err, response);
}
track.loaded_ = true;
// Make sure that vttjs has loaded, otherwise, wait till it finished loading
// NOTE: this is only used for the alt/video.novtt.js build
if (typeof window$1.WebVTT !== 'function') {
if (track.tech_) {
// to prevent use before define eslint error, we define loadHandler
// as a let here
track.tech_.any(['vttjsloaded', 'vttjserror'], event => {
if (event.type === 'vttjserror') {
log$1.error(`vttjs failed to load, stopping trying to process ${track.src}`);
return;
}
return parseCues(responseBody, track);
});
}
} else {
parseCues(responseBody, track);
}
}));
};
/**
* A representation of a single `TextTrack`.
*
* @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttrack}
* @extends Track
*/
class TextTrack extends Track {
/**
* Create an instance of this class.
*
* @param {Object} [options={}]
* Object of option names and values
*
* @param {Tech} options.tech
* A reference to the tech that owns this TextTrack.
*
* @param {TextTrack~Kind} [options.kind='subtitles']
* A valid text track kind.
*
* @param {TextTrack~Mode} [options.mode='disabled']
* A valid text track mode.
*
* @param {string} [options.id='vjs_track_' + Guid.newGUID()]
* A unique id for this TextTrack.
*
* @param {string} [options.label='']
* The menu label for this track.
*
* @param {string} [options.language='']
* A valid two character language code.
*
* @param {string} [options.srclang='']
* A valid two character language code. An alternative, but deprioritized
* version of `options.language`
*
* @param {string} [options.src]
* A url to TextTrack cues.
*
* @param {boolean} [options.default]
* If this track should default to on or off.
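*
* @example
* // Minimal usage sketch; assumes `player` is an existing Player instance and
* // that 'captions.vtt' is a reachable WebVTT file.
* const trackEl = player.addRemoteTextTrack({
*   kind: 'captions',
*   srclang: 'en',
*   label: 'English',
*   src: 'captions.vtt'
* }, false);
*
* trackEl.track.mode = 'showing';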
*/
constructor(options = {}) {
if (!options.tech) {
throw new Error('A tech was not provided.');
}
const settings = merge$1(options, {
kind: TextTrackKind[options.kind] || 'subtitles',
language: options.language || options.srclang || ''
});
let mode = TextTrackMode[settings.mode] || 'disabled';
const default_ = settings.default;
if (settings.kind === 'metadata' || settings.kind === 'chapters') {
mode = 'hidden';
}
super(settings);
this.tech_ = settings.tech;
this.cues_ = [];
this.activeCues_ = [];
this.preload_ = this.tech_.preloadTextTracks !== false;
const cues = new TextTrackCueList(this.cues_);
const activeCues = new TextTrackCueList(this.activeCues_);
let changed = false;
this.timeupdateHandler = bind_(this, function (event = {}) {
if (this.tech_.isDisposed()) {
return;
}
if (!this.tech_.isReady_) {
if (event.type !== 'timeupdate') {
this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);
}
return;
}
// Accessing this.activeCues for the side-effects of updating itself
// due to its nature as a getter function. Do not remove or cues will
// stop updating!
// Use the setter to prevent deletion from uglify (pure_getters rule)
this.activeCues = this.activeCues;
if (changed) {
this.trigger('cuechange');
changed = false;
}
if (event.type !== 'timeupdate') {
this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);
}
});
const disposeHandler = () => {
this.stopTracking();
};
this.tech_.one('dispose', disposeHandler);
if (mode !== 'disabled') {
this.startTracking();
}
Object.defineProperties(this, {
/**
* @memberof TextTrack
* @member {boolean} default
* If this track was set to be on or off by default. Cannot be changed after
* creation.
* @instance
*
* @readonly
*/
default: {
get() {
return default_;
},
set() {}
},
/**
* @memberof TextTrack
* @member {string} mode
* Set the mode of this TextTrack to a valid {@link TextTrack~Mode}. Will
* not be set if setting to an invalid mode.
* @instance
*
* @fires TextTrack#modechange
*/
mode: {
get() {
return mode;
},
set(newMode) {
if (!TextTrackMode[newMode]) {
return;
}
if (mode === newMode) {
return;
}
mode = newMode;
if (!this.preload_ && mode !== 'disabled' && this.cues.length === 0) {
// On-demand load.
loadTrack(this.src, this);
}
this.stopTracking();
if (mode !== 'disabled') {
this.startTracking();
}
/**
* An event that fires when mode changes on this track. This allows
* the TextTrackList that holds this track to act accordingly.
*
* > Note: This is not part of the spec!
*
* @event TextTrack#modechange
* @type {Event}
*/
this.trigger('modechange');
}
},
/**
* @memberof TextTrack
* @member {TextTrackCueList} cues
* The text track cue list for this TextTrack.
* @instance
*/
cues: {
get() {
if (!this.loaded_) {
return null;
}
return cues;
},
set() {}
},
/**
* @memberof TextTrack
* @member {TextTrackCueList} activeCues
* The list of text track cues that are currently active for this TextTrack.
* @instance
*/
activeCues: {
get() {
if (!this.loaded_) {
return null;
}
// nothing to do
if (this.cues.length === 0) {
return activeCues;
}
const ct = this.tech_.currentTime();
const active = [];
for (let i = 0, l = this.cues.length; i < l; i++) {
const cue = this.cues[i];
if (cue.startTime <= ct && cue.endTime >= ct) {
active.push(cue);
}
}
changed = false;
if (active.length !== this.activeCues_.length) {
changed = true;
} else {
for (let i = 0; i < active.length; i++) {
if (this.activeCues_.indexOf(active[i]) === -1) {
changed = true;
}
}
}
this.activeCues_ = active;
activeCues.setCues_(this.activeCues_);
return activeCues;
},
// /!\ Keep this setter empty (see the timeupdate handler above)
set() {}
}
});
if (settings.src) {
this.src = settings.src;
if (!this.preload_) {
// Tracks will load on-demand.
// Act like we're loaded for other purposes.
this.loaded_ = true;
}
if (this.preload_ || settings.kind !== 'subtitles' && settings.kind !== 'captions') {
loadTrack(this.src, this);
}
} else {
this.loaded_ = true;
}
}
startTracking() {
// More precise cues based on requestVideoFrameCallback with a requestAnimationFrame fallback
this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);
// Also listen to timeupdate in case rVFC/rAF stops (window in background, audio in video el)
this.tech_.on('timeupdate', this.timeupdateHandler);
}
stopTracking() {
if (this.rvf_) {
this.tech_.cancelVideoFrameCallback(this.rvf_);
this.rvf_ = undefined;
}
this.tech_.off('timeupdate', this.timeupdateHandler);
}
/**
* Add a cue to the internal list of cues.
*
* @param {TextTrack~Cue} originalCue
* The cue to add to our internal list
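*
* @example
* // Minimal usage sketch; assumes `track` is an existing TextTrack and the
* // browser (or vtt.js) provides a global VTTCue constructor.
* track.addCue(new VTTCue(0, 5, 'Hello, world!'));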
*/
addCue(originalCue) {
let cue = originalCue;
// Testing if the cue is a VTTCue in a way that survives minification
if (!('getCueAsHTML' in cue)) {
cue = new window$1.vttjs.VTTCue(originalCue.startTime, originalCue.endTime, originalCue.text);
for (const prop in originalCue) {
if (!(prop in cue)) {
cue[prop] = originalCue[prop];
}
}
// make sure that `id` is copied over
cue.id = originalCue.id;
cue.originalCue_ = originalCue;
}
const tracks = this.tech_.textTracks();
for (let i = 0; i < tracks.length; i++) {
if (tracks[i] !== this) {
tracks[i].removeCue(cue);
}
}
this.cues_.push(cue);
this.cues.setCues_(this.cues_);
}
/**
* Remove a cue from our internal list
*
* @param {TextTrack~Cue} removeCue
* The cue to remove from our internal list
*/
removeCue(removeCue) {
let i = this.cues_.length;
while (i--) {
const cue = this.cues_[i];
if (cue === removeCue || cue.originalCue_ && cue.originalCue_ === removeCue) {
this.cues_.splice(i, 1);
this.cues.setCues_(this.cues_);
break;
}
}
}
}
/**
* cuechange - One or more cues in the track have become active or stopped being active.
*
* @protected
*/
TextTrack.prototype.allowedEvents_ = {
cuechange: 'cuechange'
};
/**
* A representation of a single `AudioTrack`. If it is part of an {@link AudioTrackList}
* only one `AudioTrack` in the list will be enabled at a time.
*
* @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#audiotrack}
* @extends Track
*/
class AudioTrack extends Track {
/**
* Create an instance of this class.
*
* @param {Object} [options={}]
* Object of option names and values
*
* @param {AudioTrack~Kind} [options.kind='']
* A valid audio track kind
*
* @param {string} [options.id='vjs_track_' + Guid.newGUID()]
* A unique id for this AudioTrack.
*
* @param {string} [options.label='']
* The menu label for this track.
*
* @param {string} [options.language='']
* A valid two character language code.
*
* @param {boolean} [options.enabled]
* If this track is the one that is currently playing. If this track is part of
* an {@link AudioTrackList}, only one {@link AudioTrack} will be enabled.
*/
constructor(options = {}) {
const settings = merge$1(options, {
kind: AudioTrackKind[options.kind] || ''
});
super(settings);
let enabled = false;
/**
* @memberof AudioTrack
* @member {boolean} enabled
* If this `AudioTrack` is enabled or not. When setting this will
* fire {@link AudioTrack#enabledchange} if the state of enabled is changed.
* @instance
*
* @fires AudioTrack#enabledchange
*/
Object.defineProperty(this, 'enabled', {
get() {
return enabled;
},
set(newEnabled) {
// an invalid or unchanged value
if (typeof newEnabled !== 'boolean' || newEnabled === enabled) {
return;
}
enabled = newEnabled;
/**
* An event that fires when enabled changes on this track. This allows
* the AudioTrackList that holds this track to act accordingly.
*
* > Note: This is not part of the spec! Native tracks will do
* this internally without an event.
*
* @event AudioTrack#enabledchange
* @type {Event}
*/
this.trigger('enabledchange');
}
});
// if the user sets this track to enabled then
// set enabled to that true value, otherwise
// we keep it false
if (settings.enabled) {
this.enabled = settings.enabled;
}
this.loaded_ = true;
}
}
/**
* A representation of a single `VideoTrack`.
*
* @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#videotrack}
* @extends Track
*/
class VideoTrack extends Track {
/**
* Create an instance of this class.
*
* @param {Object} [options={}]
* Object of option names and values
*
* @param {string} [options.kind='']
* A valid {@link VideoTrack~Kind}
*
* @param {string} [options.id='vjs_track_' + Guid.newGUID()]
* A unique id for this AudioTrack.
*
* @param {string} [options.label='']
* The menu label for this track.
*
* @param {string} [options.language='']
* A valid two character language code.
*
* @param {boolean} [options.selected]
* If this track is the one that is currently playing.
*/
constructor(options = {}) {
const settings = merge$1(options, {
kind: VideoTrackKind[options.kind] || ''
});
super(settings);
let selected = false;
/**
* @memberof VideoTrack
* @member {boolean} selected
* If this `VideoTrack` is selected or not. When setting this will
* fire {@link VideoTrack#selectedchange} if the state of selected changed.
* @instance
*
* @fires VideoTrack#selectedchange
*/
Object.defineProperty(this, 'selected', {
get() {
return selected;
},
set(newSelected) {
// an invalid or unchanged value
if (typeof newSelected !== 'boolean' || newSelected === selected) {
return;
}
selected = newSelected;
/**
* An event that fires when selected changes on this track. This allows
* the VideoTrackList that holds this track to act accordingly.
*
* > Note: This is not part of the spec! Native tracks will do
* this internally without an event.
*
* @event VideoTrack#selectedchange
* @type {Event}
*/
this.trigger('selectedchange');
}
});
// if the user sets this track to selected then
// set selected to that true value otherwise
// we keep it false
if (settings.selected) {
this.selected = settings.selected;
}
}
}
/**
* @file html-track-element.js
*/
/** @import Tech from '../tech/tech' */
/**
* A single track represented in the DOM.
*
* @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#htmltrackelement}
* @extends EventTarget
*/
class HTMLTrackElement extends EventTarget$2 {
/**
* Create an instance of this class.
*
* @param {Object} [options={}]
* Object of option names and values
*
* @param {Tech} options.tech
* A reference to the tech that owns this HTMLTrackElement.
*
* @param {TextTrack~Kind} [options.kind='subtitles']
* A valid text track kind.
*
* @param {TextTrack~Mode} [options.mode='disabled']
* A valid text track mode.
*
* @param {string} [options.id='vjs_track_' + Guid.newGUID()]
* A unique id for this TextTrack.
*
* @param {string} [options.label='']
* The menu label for this track.
*
* @param {string} [options.language='']
* A valid two character language code.
*
* @param {string} [options.srclang='']
* A valid two character language code. An alternative, but deprioritized
* version of `options.language`
*
* @param {string} [options.src]
* A url to TextTrack cues.
*
* @param {boolean} [options.default]
* If this track should default to on or off.
*/
constructor(options = {}) {
super();
let readyState;
const track = new TextTrack(options);
this.kind = track.kind;
this.src = track.src;
this.srclang = track.language;
this.label = track.label;
this.default = track.default;
Object.defineProperties(this, {
/**
* @memberof HTMLTrackElement
* @member {HTMLTrackElement~ReadyState} readyState
* The current ready state of the track element.
* @instance
*/
readyState: {
get() {
return readyState;
}
},
/**
* @memberof HTMLTrackElement
* @member {TextTrack} track
* The underlying TextTrack object.
* @instance
*
*/
track: {
get() {
return track;
}
}
});
readyState = HTMLTrackElement.NONE;
/**
* @listens TextTrack#loadeddata
* @fires HTMLTrackElement#load
*/
track.addEventListener('loadeddata', () => {
readyState = HTMLTrackElement.LOADED;
this.trigger({
type: 'load',
target: this
});
});
}
}
/**
* @protected
*/
HTMLTrackElement.prototype.allowedEvents_ = {
load: 'load'
};
/**
* The text track not loaded state.
*
* @type {number}
* @static
*/
HTMLTrackElement.NONE = 0;
/**
* The text track loading state.
*
* @type {number}
* @static
*/
HTMLTrackElement.LOADING = 1;
/**
* The text track loaded state.
*
* @type {number}
* @static
*/
HTMLTrackElement.LOADED = 2;
/**
* The text track failed to load state.
*
* @type {number}
* @static
*/
HTMLTrackElement.ERROR = 3;
/*
* This file contains all track properties that are used in
* player.js, tech.js, html5.js and possibly other techs in the future.
*/
const NORMAL = {
audio: {
ListClass: AudioTrackList,
TrackClass: AudioTrack,
capitalName: 'Audio'
},
video: {
ListClass: VideoTrackList,
TrackClass: VideoTrack,
capitalName: 'Video'
},
text: {
ListClass: TextTrackList,
TrackClass: TextTrack,
capitalName: 'Text'
}
};
Object.keys(NORMAL).forEach(function (type) {
NORMAL[type].getterName = `${type}Tracks`;
NORMAL[type].privateName = `${type}Tracks_`;
});
const REMOTE = {
remoteText: {
ListClass: TextTrackList,
TrackClass: TextTrack,
capitalName: 'RemoteText',
getterName: 'remoteTextTracks',
privateName: 'remoteTextTracks_'
},
remoteTextEl: {
ListClass: HtmlTrackElementList,
TrackClass: HTMLTrackElement,
capitalName: 'RemoteTextTrackEls',
getterName: 'remoteTextTrackEls',
privateName: 'remoteTextTrackEls_'
}
};
const ALL = Object.assign({}, NORMAL, REMOTE);
REMOTE.names = Object.keys(REMOTE);
NORMAL.names = Object.keys(NORMAL);
ALL.names = [].concat(REMOTE.names).concat(NORMAL.names);
/**
* @file tech.js
*/
/** @import { TimeRange } from '../utils/time' */
/**
* An Object containing a structure like: `{src: 'url', type: 'mimetype'}` or string
* that just contains the src url alone.
* * `var SourceObject = {src: 'http://ex.com/video.mp4', type: 'video/mp4'};`
* * `var SourceString = 'http://example.com/some-video.mp4';`
*
* @typedef {Object|string} SourceObject
*
* @property {string} src
* The url to the source
*
* @property {string} type
* The mime type of the source
*/
/**
* A function used by {@link Tech} to create a new {@link TextTrack}.
*
* @private
*
* @param {Tech} self
* An instance of the Tech class.
*
* @param {string} kind
* `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata)
*
* @param {string} [label]
* Label to identify the text track
*
* @param {string} [language]
* Two letter language abbreviation
*
* @param {Object} [options={}]
* An object with additional text track options
*
* @return {TextTrack}
* The text track that was created.
*/
function createTrackHelper(self, kind, label, language, options = {}) {
const tracks = self.textTracks();
options.kind = kind;
if (label) {
options.label = label;
}
if (language) {
options.language = language;
}
options.tech = self;
const track = new ALL.text.TrackClass(options);
tracks.addTrack(track);
return track;
}
/**
* This is the base class for media playback technology controllers, such as
* {@link HTML5}
*
* @extends Component
*/
class Tech extends Component$1 {
/**
* Create an instance of this Tech.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {Function} [ready]
* Callback function to call when the `HTML5` Tech is ready.
*/
constructor(options = {}, ready = function () {}) {
// we don't want the tech to report user activity automatically.
// This is done manually in addControlsListeners
options.reportTouchActivity = false;
super(null, options, ready);
this.onDurationChange_ = e => this.onDurationChange(e);
this.trackProgress_ = e => this.trackProgress(e);
this.trackCurrentTime_ = e => this.trackCurrentTime(e);
this.stopTrackingCurrentTime_ = e => this.stopTrackingCurrentTime(e);
this.disposeSourceHandler_ = e => this.disposeSourceHandler(e);
this.queuedHanders_ = new Set();
// keep track of whether the current source has played at all to
// implement a very limited played()
this.hasStarted_ = false;
this.on('playing', function () {
this.hasStarted_ = true;
});
this.on('loadstart', function () {
this.hasStarted_ = false;
});
ALL.names.forEach(name => {
const props = ALL[name];
if (options && options[props.getterName]) {
this[props.privateName] = options[props.getterName];
}
});
// Manually track progress in cases where the browser/tech doesn't report it.
if (!this.featuresProgressEvents) {
this.manualProgressOn();
}
// Manually track timeupdates in cases where the browser/tech doesn't report it.
if (!this.featuresTimeupdateEvents) {
this.manualTimeUpdatesOn();
}
['Text', 'Audio', 'Video'].forEach(track => {
if (options[`native${track}Tracks`] === false) {
this[`featuresNative${track}Tracks`] = false;
}
});
if (options.nativeCaptions === false || options.nativeTextTracks === false) {
this.featuresNativeTextTracks = false;
} else if (options.nativeCaptions === true || options.nativeTextTracks === true) {
this.featuresNativeTextTracks = true;
}
if (!this.featuresNativeTextTracks) {
this.emulateTextTracks();
}
this.preloadTextTracks = options.preloadTextTracks !== false;
this.autoRemoteTextTracks_ = new ALL.text.ListClass();
this.initTrackListeners();
// Turn on component tap events only if not using native controls
if (!options.nativeControlsForTouch) {
this.emitTapEvents();
}
if (this.constructor) {
this.name_ = this.constructor.name || 'Unknown Tech';
}
}
/**
* A special function to trigger source set in a way that will allow player
* to re-trigger if the player or tech are not ready yet.
*
* @fires Tech#sourceset
* @param {string} src The source string at the time of the source changing.
*/
triggerSourceset(src) {
if (!this.isReady_) {
// on initial ready we have to trigger source set
// 1ms after ready so that player can watch for it.
this.one('ready', () => this.setTimeout(() => this.triggerSourceset(src), 1));
}
/**
* Fired when the source is set on the tech causing the media element
* to reload.
*
* @see {@link Player#event:sourceset}
* @event Tech#sourceset
* @type {Event}
*/
this.trigger({
src,
type: 'sourceset'
});
}
/* Fallbacks for unsupported event types
================================================================================ */
/**
* Polyfill the `progress` event for browsers that don't support it natively.
*
* @see {@link Tech#trackProgress}
*/
manualProgressOn() {
this.on('durationchange', this.onDurationChange_);
this.manualProgress = true;
// Trigger progress watching when a source begins loading
this.one('ready', this.trackProgress_);
}
/**
* Turn off the polyfill for `progress` events that was created in
* {@link Tech#manualProgressOn}
*/
manualProgressOff() {
this.manualProgress = false;
this.stopTrackingProgress();
this.off('durationchange', this.onDurationChange_);
}
/**
* This is used to trigger a `progress` event when the buffered percent changes. It
* sets an interval function that will be called every 500 milliseconds to check if the
* buffer end percent has changed.
*
* > This function is called by {@link Tech#manualProgressOn}
*
* @param {Event} event
* The `ready` event that caused this to run.
*
* @listens Tech#ready
* @fires Tech#progress
*/
trackProgress(event) {
this.stopTrackingProgress();
this.progressInterval = this.setInterval(bind_(this, function () {
// Don't trigger unless buffered amount is greater than last time
const numBufferedPercent = this.bufferedPercent();
if (this.bufferedPercent_ !== numBufferedPercent) {
/**
* See {@link Player#progress}
*
* @event Tech#progress
* @type {Event}
*/
this.trigger('progress');
}
this.bufferedPercent_ = numBufferedPercent;
if (numBufferedPercent === 1) {
this.stopTrackingProgress();
}
}), 500);
}
/**
* Update our internal duration on a `durationchange` event by calling
* {@link Tech#duration}.
*
* @param {Event} event
* The `durationchange` event that caused this to run.
*
* @listens Tech#durationchange
*/
onDurationChange(event) {
this.duration_ = this.duration();
}
/**
* Get and create a `TimeRange` object for buffering.
*
* @return {TimeRange}
* The time range object that was created.
*/
buffered() {
return createTimeRanges$1(0, 0);
}
/**
* Get the percentage of the current video that is currently buffered.
*
* @return {number}
* A number from 0 to 1 that represents the decimal percentage of the
* video that is buffered.
*
*/
bufferedPercent() {
return bufferedPercent(this.buffered(), this.duration_);
}
/**
* Turn off the polyfill for `progress` events that was created in
* {@link Tech#manualProgressOn}
* Stop manually tracking progress events by clearing the interval that was set in
* {@link Tech#trackProgress}.
*/
stopTrackingProgress() {
this.clearInterval(this.progressInterval);
}
/**
* Polyfill the `timeupdate` event for browsers that don't support it.
*
* @see {@link Tech#trackCurrentTime}
*/
manualTimeUpdatesOn() {
this.manualTimeUpdates = true;
this.on('play', this.trackCurrentTime_);
this.on('pause', this.stopTrackingCurrentTime_);
}
/**
* Turn off the polyfill for `timeupdate` events that was created in
* {@link Tech#manualTimeUpdatesOn}
*/
manualTimeUpdatesOff() {
this.manualTimeUpdates = false;
this.stopTrackingCurrentTime();
this.off('play', this.trackCurrentTime_);
this.off('pause', this.stopTrackingCurrentTime_);
}
/**
* Sets up an interval function to track current time and trigger `timeupdate` every
* 250 milliseconds.
*
* @listens Tech#play
* @triggers Tech#timeupdate
*/
trackCurrentTime() {
if (this.currentTimeInterval) {
this.stopTrackingCurrentTime();
}
this.currentTimeInterval = this.setInterval(function () {
/**
* Triggered at an interval of 250ms to indicate that time is passing in the video.
*
* @event Tech#timeupdate
* @type {Event}
*/
this.trigger({
type: 'timeupdate',
target: this,
manuallyTriggered: true
});
// 42 = 24 fps // 250 is what Webkit uses // FF uses 15
}, 250);
}
/**
* Stop the interval function created in {@link Tech#trackCurrentTime} so that the
* `timeupdate` event is no longer triggered.
*
* @listens {Tech#pause}
*/
stopTrackingCurrentTime() {
this.clearInterval(this.currentTimeInterval);
// #1002 - if the video ends right before the next timeupdate would happen,
// the progress bar won't make it all the way to the end
this.trigger({
type: 'timeupdate',
target: this,
manuallyTriggered: true
});
}
/**
* Turn off all event polyfills, clear the `Tech`s {@link AudioTrackList},
* {@link VideoTrackList}, and {@link TextTrackList}, and dispose of this Tech.
*
* @fires Component#dispose
*/
dispose() {
// clear out all tracks because we can't reuse them between techs
this.clearTracks(NORMAL.names);
// Turn off any manual progress or timeupdate tracking
if (this.manualProgress) {
this.manualProgressOff();
}
if (this.manualTimeUpdates) {
this.manualTimeUpdatesOff();
}
super.dispose();
}
/**
* Clear out a single `TrackList` or an array of `TrackLists` given their names.
*
* > Note: Techs without source handlers should call this between sources for `video`
* & `audio` tracks. You don't want to use them between tracks!
*
* @param {string[]|string} types
* TrackList names to clear, valid names are `video`, `audio`, and
* `text`.
*/
clearTracks(types) {
types = [].concat(types);
// clear out all tracks because we can't reuse them between techs
types.forEach(type => {
const list = this[`${type}Tracks`]() || [];
let i = list.length;
while (i--) {
const track = list[i];
if (type === 'text') {
this.removeRemoteTextTrack(track);
}
list.removeTrack(track);
}
});
}
/**
* Remove any TextTracks added via addRemoteTextTrack that are
* flagged for automatic garbage collection
*/
cleanupAutoTextTracks() {
const list = this.autoRemoteTextTracks_ || [];
let i = list.length;
while (i--) {
const track = list[i];
this.removeRemoteTextTrack(track);
}
}
/**
* Reset the tech, which removes all sources and resets the internal readyState.
*
* @abstract
*/
reset() {}
/**
* Get the value of `crossOrigin` from the tech.
*
* @abstract
*
* @see {Html5#crossOrigin}
*/
crossOrigin() {}
/**
* Set the value of `crossOrigin` on the tech.
*
* @abstract
*
* @param {string} crossOrigin the crossOrigin value
* @see {Html5#setCrossOrigin}
*/
setCrossOrigin() {}
/**
* Get or set an error on the Tech.
*
* @param {MediaError} [err]
* Error to set on the Tech
*
* @return {MediaError|null}
* The current error object on the tech, or null if there isn't one.
*/
error(err) {
if (err !== undefined) {
this.error_ = new MediaError(err);
this.trigger('error');
}
return this.error_;
}
/**
* Returns the `TimeRange`s that have been played through for the current source.
*
* > NOTE: This implementation is incomplete. It does not track the played `TimeRange`.
* It only checks whether the source has played at all or not.
*
* @return {TimeRange}
* - A single time range if this video has played
* - An empty set of ranges if not.
*/
played() {
if (this.hasStarted_) {
return createTimeRanges$1(0, 0);
}
return createTimeRanges$1();
}
/**
* Start playback
*
* @abstract
*
* @see {Html5#play}
*/
play() {}
/**
* Set whether we are scrubbing or not
*
* @abstract
* @param {boolean} _isScrubbing
* - true for we are currently scrubbing
* - false for we are no longer scrubbing
*
* @see {Html5#setScrubbing}
*/
setScrubbing(_isScrubbing) {}
/**
* Get whether we are scrubbing or not
*
* @abstract
*
* @see {Html5#scrubbing}
*/
scrubbing() {}
/**
* Causes a manual time update to occur if {@link Tech#manualTimeUpdatesOn} was
* previously called.
*
* @param {number} _seconds
* Set the current time of the media to this.
* @fires Tech#timeupdate
*/
setCurrentTime(_seconds) {
// improve the accuracy of manual timeupdates
if (this.manualTimeUpdates) {
/**
* A manual `timeupdate` event.
*
* @event Tech#timeupdate
* @type {Event}
*/
this.trigger({
type: 'timeupdate',
target: this,
manuallyTriggered: true
});
}
}
/**
* Turn on listeners for {@link VideoTrackList}, {@link AudioTrackList}, and
* {@link TextTrackList} events.
*
* This adds {@link EventTarget~EventListeners} for `addtrack`, and `removetrack`.
*
* @fires Tech#audiotrackchange
* @fires Tech#videotrackchange
* @fires Tech#texttrackchange
*/
initTrackListeners() {
/**
* Triggered when tracks are added or removed on the Tech {@link AudioTrackList}
*
* @event Tech#audiotrackchange
* @type {Event}
*/
/**
* Triggered when tracks are added or removed on the Tech {@link VideoTrackList}
*
* @event Tech#videotrackchange
* @type {Event}
*/
/**
* Triggered when tracks are added or removed on the Tech {@link TextTrackList}
*
* @event Tech#texttrackchange
* @type {Event}
*/
NORMAL.names.forEach(name => {
const props = NORMAL[name];
const trackListChanges = () => {
this.trigger(`${name}trackchange`);
};
const tracks = this[props.getterName]();
tracks.addEventListener('removetrack', trackListChanges);
tracks.addEventListener('addtrack', trackListChanges);
this.on('dispose', () => {
tracks.removeEventListener('removetrack', trackListChanges);
tracks.removeEventListener('addtrack', trackListChanges);
});
});
}
/**
* Emulate TextTracks using vtt.js if necessary
*
* @fires Tech#vttjsloaded
* @fires Tech#vttjserror
*/
addWebVttScript_() {
if (window$1.WebVTT) {
return;
}
// Initially, Tech.el_ is a child of a dummy div. Wait until the Component system
// signals that the Tech is ready, at which point Tech.el_ is part of the DOM,
// before inserting the WebVTT script.
if (document$1.body.contains(this.el())) {
// load via require if available and vtt.js script location was not passed in
// as an option. novtt builds will turn the above require call into an empty object
// which will cause this if check to always fail.
if (!this.options_['vtt.js'] && isPlain(vtt) && Object.keys(vtt).length > 0) {
this.trigger('vttjsloaded');
return;
}
// load vtt.js via the script location option, or from the CDN if no location was
// passed in
const script = document$1.createElement('script');
script.src = this.options_['vtt.js'] || 'https://vjs.zencdn.net/vttjs/0.14.1/vtt.min.js';
script.onload = () => {
/**
* Fired when vtt.js is loaded.
*
* @event Tech#vttjsloaded
* @type {Event}
*/
this.trigger('vttjsloaded');
};
script.onerror = () => {
/**
* Fired when vtt.js was not loaded due to an error
*
* @event Tech#vttjserror
* @type {Event}
*/
this.trigger('vttjserror');
};
this.on('dispose', () => {
script.onload = null;
script.onerror = null;
});
// The script has been created but has not loaded yet. Set window.WebVTT to true
// before injecting the script so that we don't overwrite the injected
// window.WebVTT if it loads right away.
window$1.WebVTT = true;
this.el().parentNode.appendChild(script);
} else {
this.ready(this.addWebVttScript_);
}
}
/**
* Emulate texttracks
*
*/
emulateTextTracks() {
const tracks = this.textTracks();
const remoteTracks = this.remoteTextTracks();
const handleAddTrack = e => tracks.addTrack(e.track);
const handleRemoveTrack = e => tracks.removeTrack(e.track);
remoteTracks.on('addtrack', handleAddTrack);
remoteTracks.on('removetrack', handleRemoveTrack);
this.addWebVttScript_();
const updateDisplay = () => this.trigger('texttrackchange');
const textTracksChanges = () => {
updateDisplay();
for (let i = 0; i < tracks.length; i++) {
const track = tracks[i];
track.removeEventListener('cuechange', updateDisplay);
if (track.mode === 'showing') {
track.addEventListener('cuechange', updateDisplay);
}
}
};
textTracksChanges();
tracks.addEventListener('change', textTracksChanges);
tracks.addEventListener('addtrack', textTracksChanges);
tracks.addEventListener('removetrack', textTracksChanges);
this.on('dispose', function () {
remoteTracks.off('addtrack', handleAddTrack);
remoteTracks.off('removetrack', handleRemoveTrack);
tracks.removeEventListener('change', textTracksChanges);
tracks.removeEventListener('addtrack', textTracksChanges);
tracks.removeEventListener('removetrack', textTracksChanges);
for (let i = 0; i < tracks.length; i++) {
const track = tracks[i];
track.removeEventListener('cuechange', updateDisplay);
}
});
}
/**
* Create and returns a remote {@link TextTrack} object.
*
* @param {string} kind
* `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata)
*
* @param {string} [label]
* Label to identify the text track
*
* @param {string} [language]
* Two letter language abbreviation
*
* @return {TextTrack}
* The TextTrack that gets created.
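*
* @example
* // Minimal usage sketch; assumes `tech` is an existing Tech instance.
* const metadataTrack = tech.addTextTrack('metadata', 'Timed metadata');
* // metadata tracks default to the 'hidden' mode, so cues fire without rendering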
*/
addTextTrack(kind, label, language) {
if (!kind) {
throw new Error('TextTrack kind is required but was not provided');
}
return createTrackHelper(this, kind, label, language);
}
/**
* Create an emulated TextTrack for use by addRemoteTextTrack
*
* This is intended to be overridden by classes that inherit from
* Tech in order to create native or custom TextTracks.
*
* @param {Object} options
* The object should contain the options to initialize the TextTrack with.
*
* @param {string} [options.kind]
* `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata).
*
* @param {string} [options.label]
* Label to identify the text track
*
* @param {string} [options.language]
* Two letter language abbreviation.
*
* @return {HTMLTrackElement}
* The track element that gets created.
*/
createRemoteTextTrack(options) {
const track = merge$1(options, {
tech: this
});
return new REMOTE.remoteTextEl.TrackClass(track);
}
/**
* Creates a remote text track object and returns an html track element.
*
* > Note: This can be an emulated {@link HTMLTrackElement} or a native one.
*
* @param {Object} options
* See {@link Tech#createRemoteTextTrack} for more detailed properties.
*
* @param {boolean} [manualCleanup=false]
* - When false: the TextTrack will be automatically removed from the video
* element whenever the source changes
* - When true: the TextTrack will have to be cleaned up manually
*
* @return {HTMLTrackElement}
* An Html Track Element.
*
*/
addRemoteTextTrack(options = {}, manualCleanup) {
const htmlTrackElement = this.createRemoteTextTrack(options);
if (typeof manualCleanup !== 'boolean') {
manualCleanup = false;
}
// store HTMLTrackElement and TextTrack to remote list
this.remoteTextTrackEls().addTrackElement_(htmlTrackElement);
this.remoteTextTracks().addTrack(htmlTrackElement.track);
if (manualCleanup === false) {
// add the track to the auto-cleanup list once the tech is ready, so it is removed on the next source change
this.ready(() => this.autoRemoteTextTracks_.addTrack(htmlTrackElement.track));
}
return htmlTrackElement;
}
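// Illustrative sketch (assumes an initialized `tech`; URL is hypothetical): adding a
// remote track that is cleaned up automatically on the next source change.
//
//   const trackEl = tech.addRemoteTextTrack({
//     kind: 'subtitles',
//     label: 'English',
//     language: 'en',
//     src: '//example.com/subs.vtt'
//   }, false);
//   trackEl.track.mode = 'showing';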
/**
* Remove a remote text track from the remote `TextTrackList`.
*
* @param {TextTrack} track
* `TextTrack` to remove from the `TextTrackList`
*/
removeRemoteTextTrack(track) {
const trackElement = this.remoteTextTrackEls().getTrackElementByTrack_(track);
// remove HTMLTrackElement and TextTrack from remote list
this.remoteTextTrackEls().removeTrackElement_(trackElement);
this.remoteTextTracks().removeTrack(track);
this.autoRemoteTextTracks_.removeTrack(track);
}
/**
* Gets available media playback quality metrics as specified by the W3C's Media
* Playback Quality API.
*
* @see [Spec]{@link https://wicg.github.io/media-playback-quality}
*
* @return {Object}
* An object with supported media playback quality metrics
*
* @abstract
*/
getVideoPlaybackQuality() {
return {};
}
/**
* Attempt to create a floating video window always on top of other windows
* so that users may continue consuming media while they interact with other
* content sites, or applications on their device.
*
* @see [Spec]{@link https://wicg.github.io/picture-in-picture}
*
* @return {Promise|undefined}
* A promise with a Picture-in-Picture window if the browser supports
* Promises (or one was passed in as an option). It returns undefined
* otherwise.
*
* @abstract
*/
requestPictureInPicture() {
return Promise.reject();
}
/**
* A method to check for the value of the 'disablePictureInPicture' property.
* Defaults to true, as it should be considered disabled if the tech does not support PiP.
*
* @abstract
*/
disablePictureInPicture() {
return true;
}
/**
* A method to set or unset the 'disablePictureInPicture' property.
*
* @abstract
*/
setDisablePictureInPicture() {}
/**
* A fallback implementation of requestVideoFrameCallback using requestAnimationFrame
*
* @param {function} cb
* @return {number} request id
*/
requestVideoFrameCallback(cb) {
const id = newGUID();
if (!this.isReady_ || this.paused()) {
this.queuedHanders_.add(id);
this.one('playing', () => {
if (this.queuedHanders_.has(id)) {
this.queuedHanders_.delete(id);
cb();
}
});
} else {
this.requestNamedAnimationFrame(id, cb);
}
return id;
}
/**
* A fallback implementation of cancelVideoFrameCallback
*
* @param {number} id id of callback to be cancelled
*/
cancelVideoFrameCallback(id) {
if (this.queuedHanders_.has(id)) {
this.queuedHanders_.delete(id);
} else {
this.cancelNamedAnimationFrame(id);
}
}
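// Illustrative sketch: using the requestAnimationFrame-based fallback above to run a
// callback roughly once per rendered frame while playing. Assumes `tech` is a Tech instance.
//
//   const id = tech.requestVideoFrameCallback(() => {
//     // runs on the next named animation frame while the tech is ready and not paused
//   });
//   // later, if the callback is no longer needed:
//   tech.cancelVideoFrameCallback(id);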
/**
* A method to set a poster from a `Tech`.
*
* @abstract
*/
setPoster() {}
/**
* A method to check for the presence of the 'playsinline' attribute.
*
* @abstract
*/
playsinline() {}
/**
* A method to set or unset the 'playsinline' attribute.
*
* @abstract
*/
setPlaysinline() {}
/**
* Attempt to force override of native audio tracks.
*
* @param {boolean} override - If set to true native audio will be overridden,
* otherwise native audio will potentially be used.
*
* @abstract
*/
overrideNativeAudioTracks(override) {}
/**
* Attempt to force override of native video tracks.
*
* @param {boolean} override - If set to true native video will be overridden,
* otherwise native video will potentially be used.
*
* @abstract
*/
overrideNativeVideoTracks(override) {}
/**
* Check if the tech can support the given mime-type.
*
* The base tech does not support any type, but source handlers might
* overwrite this.
*
* @param {string} _type
* The mimetype to check for support
*
* @return {string}
* 'probably', 'maybe', or empty string
*
* @see [Spec]{@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/canPlayType}
*
* @abstract
*/
canPlayType(_type) {
return '';
}
/**
* Check if the type is supported by this tech.
*
* The base tech does not support any type, but source handlers might
* overwrite this.
*
* @param {string} _type
* The media type to check
* @return {string} Returns the native video element's response
*/
static canPlayType(_type) {
return '';
}
/**
* Check if the tech can support the given source
*
* @param {Object} srcObj
* The source object
* @param {Object} options
* The options passed to the tech
* @return {string} 'probably', 'maybe', or '' (empty string)
*/
static canPlaySource(srcObj, options) {
return Tech.canPlayType(srcObj.type);
}
/*
* Return whether the argument is a Tech or not.
* Can be passed either a Class like `Html5` or an instance like `player.tech_`
*
* @param {Object} component
* The item to check
*
* @return {boolean}
* Whether it is a tech or not
* - True if it is a tech
* - False if it is not
*/
static isTech(component) {
return component.prototype instanceof Tech || component instanceof Tech || component === Tech;
}
/**
* Registers a `Tech` into a shared list for videojs.
*
* @param {string} name
* Name of the `Tech` to register.
*
* @param {Object} tech
* The `Tech` class to register.
*/
static registerTech(name, tech) {
if (!Tech.techs_) {
Tech.techs_ = {};
}
if (!Tech.isTech(tech)) {
throw new Error(`Tech ${name} must be a Tech`);
}
if (!tech.canPlayType) {
throw new Error('Techs must have a static canPlayType method on them');
}
if (!tech.canPlaySource) {
throw new Error('Techs must have a static canPlaySource method on them');
}
name = toTitleCase$1(name);
Tech.techs_[name] = tech;
Tech.techs_[toLowerCase(name)] = tech;
if (name !== 'Tech') {
// camel case the techName for use in techOrder
Tech.defaultTechOrder_.push(name);
}
return tech;
}
/**
* Get a `Tech` from the shared list by name.
*
* @param {string} name
* `camelCase` or `TitleCase` name of the Tech to get
*
* @return {Tech|undefined}
* The `Tech` or undefined if there was no tech with the name requested.
*/
static getTech(name) {
if (!name) {
return;
}
if (Tech.techs_ && Tech.techs_[name]) {
return Tech.techs_[name];
}
name = toTitleCase$1(name);
if (window$1 && window$1.videojs && window$1.videojs[name]) {
log$1.warn(`The ${name} tech was added to the videojs object when it should be registered using videojs.registerTech(name, tech)`);
return window$1.videojs[name];
}
}
}
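// Illustrative sketch (names hypothetical, not part of the library): registering a
// custom tech and retrieving it by name afterwards.
//
//   class MyTech extends Tech {
//     static isSupported() { return true; }
//   }
//   Tech.registerTech('MyTech', MyTech);
//   Tech.getTech('MyTech'); // === MyTech
//   // 'MyTech' is now also appended to Tech.defaultTechOrder_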
/**
* Get the {@link VideoTrackList}
*
* @returns {VideoTrackList}
* @method Tech.prototype.videoTracks
*/
/**
* Get the {@link AudioTrackList}
*
* @returns {AudioTrackList}
* @method Tech.prototype.audioTracks
*/
/**
* Get the {@link TextTrackList}
*
* @returns {TextTrackList}
* @method Tech.prototype.textTracks
*/
/**
* Get the remote element {@link TextTrackList}
*
* @returns {TextTrackList}
* @method Tech.prototype.remoteTextTracks
*/
/**
* Get the remote element {@link HtmlTrackElementList}
*
* @returns {HtmlTrackElementList}
* @method Tech.prototype.remoteTextTrackEls
*/
ALL.names.forEach(function (name) {
const props = ALL[name];
Tech.prototype[props.getterName] = function () {
this[props.privateName] = this[props.privateName] || new props.ListClass();
return this[props.privateName];
};
});
/**
* List of associated text tracks
*
* @type {TextTrackList}
* @private
* @property Tech#textTracks_
*/
/**
* List of associated audio tracks.
*
* @type {AudioTrackList}
* @private
* @property Tech#audioTracks_
*/
/**
* List of associated video tracks.
*
* @type {VideoTrackList}
* @private
* @property Tech#videoTracks_
*/
/**
* Boolean indicating whether the `Tech` supports volume control.
*
* @type {boolean}
* @default
*/
Tech.prototype.featuresVolumeControl = true;
/**
* Boolean indicating whether the `Tech` supports muting volume.
*
* @type {boolean}
* @default
*/
Tech.prototype.featuresMuteControl = true;
/**
* Boolean indicating whether the `Tech` supports fullscreen resize control.
* Resizing plugins using request fullscreen reloads the plugin
*
* @type {boolean}
* @default
*/
Tech.prototype.featuresFullscreenResize = false;
/**
* Boolean indicating whether the `Tech` supports changing the speed at which the video
* plays. Examples:
* - Set player to play 2x (twice) as fast
* - Set player to play 0.5x (half) as fast
*
* @type {boolean}
* @default
*/
Tech.prototype.featuresPlaybackRate = false;
/**
* Boolean indicating whether the `Tech` supports the `progress` event.
* This will be used to determine if {@link Tech#manualProgressOn} should be called.
*
* @type {boolean}
* @default
*/
Tech.prototype.featuresProgressEvents = false;
/**
* Boolean indicating whether the `Tech` supports the `sourceset` event.
*
* A tech should set this to `true` and then use {@link Tech#triggerSourceset}
* to trigger a {@link Tech#event:sourceset} at the earliest time after getting
* a new source.
*
* @type {boolean}
* @default
*/
Tech.prototype.featuresSourceset = false;
/**
* Boolean indicating whether the `Tech` supports the `timeupdate` event.
* This will be used to determine if {@link Tech#manualTimeUpdates} should be called.
*
* @type {boolean}
* @default
*/
Tech.prototype.featuresTimeupdateEvents = false;
/**
* Boolean indicating whether the `Tech` supports the native `TextTrack`s.
* This will help us integrate with native `TextTrack`s if the browser supports them.
*
* @type {boolean}
* @default
*/
Tech.prototype.featuresNativeTextTracks = false;
/**
* Boolean indicating whether the `Tech` supports `requestVideoFrameCallback`.
*
* @type {boolean}
* @default
*/
Tech.prototype.featuresVideoFrameCallback = false;
/**
* A functional mixin for techs that want to use the Source Handler pattern.
* Source handlers are scripts for handling specific formats.
* The source handler pattern is used for adaptive formats (HLS, DASH) that
* manually load video data and feed it into a Source Buffer (Media Source Extensions)
* Example: `Tech.withSourceHandlers.call(MyTech);`
*
* @param {Tech} _Tech
* The tech to add source handler functions to.
*
* @mixes Tech~SourceHandlerAdditions
*/
Tech.withSourceHandlers = function (_Tech) {
/**
* Register a source handler
*
* @param {Function} handler
* The source handler class
*
* @param {number} [index]
* Register it at the following index
*/
_Tech.registerSourceHandler = function (handler, index) {
let handlers = _Tech.sourceHandlers;
if (!handlers) {
handlers = _Tech.sourceHandlers = [];
}
if (index === undefined) {
// add to the end of the list
index = handlers.length;
}
handlers.splice(index, 0, handler);
};
/**
* Check if the tech can support the given type. Also checks the
* Techs sourceHandlers.
*
* @param {string} type
* The mimetype to check.
*
* @return {string}
* 'probably', 'maybe', or '' (empty string)
*/
_Tech.canPlayType = function (type) {
const handlers = _Tech.sourceHandlers || [];
let can;
for (let i = 0; i < handlers.length; i++) {
can = handlers[i].canPlayType(type);
if (can) {
return can;
}
}
return '';
};
/**
* Returns the first source handler that supports the source.
*
* TODO: Answer question: should 'probably' be prioritized over 'maybe'
*
* @param {SourceObject} source
* The source object
*
* @param {Object} options
* The options passed to the tech
*
* @return {SourceHandler|null}
* The first source handler that supports the source or null if
* no SourceHandler supports the source
*/
_Tech.selectSourceHandler = function (source, options) {
const handlers = _Tech.sourceHandlers || [];
let can;
for (let i = 0; i < handlers.length; i++) {
can = handlers[i].canHandleSource(source, options);
if (can) {
return handlers[i];
}
}
return null;
};
/**
* Check if the tech can support the given source.
*
* @param {SourceObject} srcObj
* The source object
*
* @param {Object} options
* The options passed to the tech
*
* @return {string}
* 'probably', 'maybe', or '' (empty string)
*/
_Tech.canPlaySource = function (srcObj, options) {
const sh = _Tech.selectSourceHandler(srcObj, options);
if (sh) {
return sh.canHandleSource(srcObj, options);
}
return '';
};
/**
* When using a source handler, prefer its implementation of
* any function normally provided by the tech.
*/
const deferrable = ['seekable', 'seeking', 'duration'];
/**
* A wrapper around {@link Tech#seekable} that will call a `SourceHandler`'s seekable
* function if it exists, with a fallback to the Tech's seekable function.
*
* @method _Tech.seekable
*/
/**
* A wrapper around {@link Tech#duration} that will call a `SourceHandler`'s duration
* function if it exists, otherwise it will fall back to the Tech's duration function.
*
* @method _Tech.duration
*/
deferrable.forEach(function (fnName) {
const originalFn = this[fnName];
if (typeof originalFn !== 'function') {
return;
}
this[fnName] = function () {
if (this.sourceHandler_ && this.sourceHandler_[fnName]) {
return this.sourceHandler_[fnName].apply(this.sourceHandler_, arguments);
}
return originalFn.apply(this, arguments);
};
}, _Tech.prototype);
/**
* Create a function for setting the source using a source object
* and source handlers.
* Should never be called unless a source handler was found.
*
* @param {SourceObject} source
* A source object with src and type keys
*/
_Tech.prototype.setSource = function (source) {
let sh = _Tech.selectSourceHandler(source, this.options_);
if (!sh) {
// Fall back to a native source handler when unsupported sources are
// deliberately set
if (_Tech.nativeSourceHandler) {
sh = _Tech.nativeSourceHandler;
} else {
log$1.error('No source handler found for the current source.');
}
}
// Dispose any existing source handler
this.disposeSourceHandler();
this.off('dispose', this.disposeSourceHandler_);
if (sh !== _Tech.nativeSourceHandler) {
this.currentSource_ = source;
}
this.sourceHandler_ = sh.handleSource(source, this, this.options_);
this.one('dispose', this.disposeSourceHandler_);
};
/**
* Clean up any existing SourceHandlers and listeners when the Tech is disposed.
*
* @listens Tech#dispose
*/
_Tech.prototype.disposeSourceHandler = function () {
// if we have a source and get another one
// then we are loading something new,
// so clear all of our current tracks
if (this.currentSource_) {
this.clearTracks(['audio', 'video']);
this.currentSource_ = null;
}
// always clean up auto-text tracks
this.cleanupAutoTextTracks();
if (this.sourceHandler_) {
if (this.sourceHandler_.dispose) {
this.sourceHandler_.dispose();
}
this.sourceHandler_ = null;
}
};
};
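// Illustrative sketch (names hypothetical): after mixing the helpers above into a tech
// with `Tech.withSourceHandlers.call(MyTech)`, a minimal source handler looks like:
//
//   MyTech.registerSourceHandler({
//     canPlayType(type) { return type === 'application/x-demo' ? 'maybe' : ''; },
//     canHandleSource(source, options) { return this.canPlayType(source.type); },
//     handleSource(source, tech, options) {
//       // begin loading `source` here and return a per-source handler instance
//       return { dispose() { /* tear down any loaders or source buffers */ } };
//     }
//   });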
// The base Tech class needs to be registered as a Component. It is the only
// Tech that can be registered as a Component.
Component$1.registerComponent('Tech', Tech);
Tech.registerTech('Tech', Tech);
/**
* A list of techs that should be added to techOrder on Players
*
* @private
*/
Tech.defaultTechOrder_ = [];
/**
* @file middleware.js
* @module middleware
*/
/** @import Player from '../player' */
/** @import Tech from '../tech/tech' */
const middlewares = {};
const middlewareInstances = {};
const TERMINATOR = {};
/**
* A middleware object is a plain JavaScript object that has methods that
* match the {@link Tech} methods found in the lists of allowed
* {@link module:middleware.allowedGetters|getters},
* {@link module:middleware.allowedSetters|setters}, and
* {@link module:middleware.allowedMediators|mediators}.
*
* @typedef {Object} MiddlewareObject
*/
/**
* A middleware factory function that should return a
* {@link module:middleware~MiddlewareObject|MiddlewareObject}.
*
* This factory will be called for each player when needed, with the player
* passed in as an argument.
*
* @callback MiddlewareFactory
* @param {Player} player
* A Video.js player.
*/
/**
* Define a middleware that the player should use by way of a factory function
* that returns a middleware object.
*
* @param {string} type
* The MIME type to match or `"*"` for all MIME types.
*
* @param {MiddlewareFactory} middleware
* A middleware factory function that will be executed for
* matching types.
*/
function use(type, middleware) {
middlewares[type] = middlewares[type] || [];
middlewares[type].push(middleware);
}
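// Illustrative sketch: registering a middleware factory for every source type. The
// returned object may implement `setSource` and any of the allowed getters/setters.
//
//   use('*', function (player) {
//     return {
//       setSource(srcObj, next) {
//         // pass the (possibly rewritten) source on to the next middleware
//         next(null, srcObj);
//       },
//       currentTime(ct) { return ct; },       // getter: intercepts tech -> player
//       setCurrentTime(time) { return time; } // setter: intercepts player -> tech
//     };
//   });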
/**
* Asynchronously sets a source using middleware by recursing through any
* matching middlewares and calling `setSource` on each, passing along the
* previous returned value each time.
*
* @param {Player} player
* A {@link Player} instance.
*
* @param {Tech~SourceObject} src
* A source object.
*
* @param {Function} next
* The next middleware to run.
*/
function setSource(player, src, next) {
player.setTimeout(() => setSourceHelper(src, middlewares[src.type], next, player), 1);
}
/**
* When the tech is set, passes the tech to each middleware's `setTech` method.
*
* @param {Object[]} middleware
* An array of middleware instances.
*
* @param {Tech} tech
* A Video.js tech.
*/
function setTech(middleware, tech) {
middleware.forEach(mw => mw.setTech && mw.setTech(tech));
}
/**
* Calls a getter on the tech first, through each middleware
* from right to left to the player.
*
* @param {Object[]} middleware
* An array of middleware instances.
*
* @param {Tech} tech
* The current tech.
*
* @param {string} method
* A method name.
*
* @return {*}
* The final value from the tech after middleware has intercepted it.
*/
function get(middleware, tech, method) {
return middleware.reduceRight(middlewareIterator(method), tech[method]());
}
/**
* Takes the argument given to the player and calls the setter method on each
* middleware from left to right to the tech.
*
* @param {Object[]} middleware
* An array of middleware instances.
*
* @param {Tech} tech
* The current tech.
*
* @param {string} method
* A method name.
*
* @param {*} arg
* The value to set on the tech.
*
* @return {*}
* The return value of the `method` of the `tech`.
*/
function set(middleware, tech, method, arg) {
return tech[method](middleware.reduce(middlewareIterator(method), arg));
}
/**
* Takes the argument given to the player and calls the `call` version of the
* method on each middleware from left to right.
*
* Then, call the passed in method on the tech and return the result unchanged
* back to the player, through middleware, this time from right to left.
*
* @param {Object[]} middleware
* An array of middleware instances.
*
* @param {Tech} tech
* The current tech.
*
* @param {string} method
* A method name.
*
* @param {*} arg
* The value to set on the tech.
*
* @return {*}
* The return value of the `method` of the `tech`, regardless of the
* return values of middlewares.
*/
function mediate(middleware, tech, method, arg = null) {
const callMethod = 'call' + toTitleCase$1(method);
const middlewareValue = middleware.reduce(middlewareIterator(callMethod), arg);
const terminated = middlewareValue === TERMINATOR;
// deprecated. The `null` return value should instead return TERMINATOR to
// prevent confusion if a tech's method actually returns null.
const returnValue = terminated ? null : tech[method](middlewareValue);
executeRight(middleware, method, returnValue, terminated);
return returnValue;
}
/**
* Enumeration of allowed getters where the keys are method names.
*
* @type {Object}
*/
const allowedGetters = {
buffered: 1,
currentTime: 1,
duration: 1,
muted: 1,
played: 1,
paused: 1,
seekable: 1,
volume: 1,
ended: 1
};
/**
* Enumeration of allowed setters where the keys are method names.
*
* @type {Object}
*/
const allowedSetters = {
setCurrentTime: 1,
setMuted: 1,
setVolume: 1
};
/**
* Enumeration of allowed mediators where the keys are method names.
*
* @type {Object}
*/
const allowedMediators = {
play: 1,
pause: 1
};
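// Illustrative sketch of a mediator: for `play`, the player side calls `callPlay` on each
// middleware left-to-right before the tech's `play()` runs, and `play(terminated, value)`
// is then invoked right-to-left with the tech's return value. Names are hypothetical.
//
//   use('*', (player) => ({
//     callPlay(arg) { return arg; }, // let the call through unchanged
//     play(terminated, value) {
//       // observe the tech's return value here (e.g. the play() Promise)
//     }
//   }));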
function middlewareIterator(method) {
return (value, mw) => {
// if the previous middleware terminated, pass along the termination
if (value === TERMINATOR) {
return TERMINATOR;
}
if (mw[method]) {
return mw[method](value);
}
return value;
};
}
function executeRight(mws, method, value, terminated) {
for (let i = mws.length - 1; i >= 0; i--) {
const mw = mws[i];
if (mw[method]) {
mw[method](terminated, value);
}
}
}
/**
* Clear the middleware cache for a player.
*
* @param {Player} player
* A {@link Player} instance.
*/
function clearCacheForPlayer(player) {
if (middlewareInstances.hasOwnProperty(player.id())) {
delete middlewareInstances[player.id()];
}
}
/**
* {
* [playerId]: [[mwFactory, mwInstance], ...]
* }
*
* @private
*/
function getOrCreateFactory(player, mwFactory) {
const mws = middlewareInstances[player.id()];
let mw = null;
if (mws === undefined || mws === null) {
mw = mwFactory(player);
middlewareInstances[player.id()] = [[mwFactory, mw]];
return mw;
}
for (let i = 0; i < mws.length; i++) {
const [mwf, mwi] = mws[i];
if (mwf !== mwFactory) {
continue;
}
mw = mwi;
}
if (mw === null) {
mw = mwFactory(player);
mws.push([mwFactory, mw]);
}
return mw;
}
function setSourceHelper(src = {}, middleware = [], next, player, acc = [], lastRun = false) {
const [mwFactory, ...mwrest] = middleware;
// if mwFactory is a string, then we're at a fork in the road
if (typeof mwFactory === 'string') {
setSourceHelper(src, middlewares[mwFactory], next, player, acc, lastRun);
// if we have an mwFactory, call it with the player to get the mw,
// then call the mw's setSource method
} else if (mwFactory) {
const mw = getOrCreateFactory(player, mwFactory);
// if setSource isn't present, implicitly select this middleware
if (!mw.setSource) {
acc.push(mw);
return setSourceHelper(src, mwrest, next, player, acc, lastRun);
}
mw.setSource(Object.assign({}, src), function (err, _src) {
// something happened, try the next middleware on the current level
// make sure to use the old src
if (err) {
return setSourceHelper(src, mwrest, next, player, acc, lastRun);
}
// we've succeeded, now we need to go deeper
acc.push(mw);
// if it's the same type, continue down the current chain
// otherwise, we want to go down the new chain
setSourceHelper(_src, src.type === _src.type ? mwrest : middlewares[_src.type], next, player, acc, lastRun);
});
} else if (mwrest.length) {
setSourceHelper(src, mwrest, next, player, acc, lastRun);
} else if (lastRun) {
next(src, acc);
} else {
setSourceHelper(src, middlewares['*'], next, player, acc, true);
}
}
/** @import Player from '../player' */
/**
* Mimetypes
*
* @see https://www.iana.org/assignments/media-types/media-types.xhtml
* @typedef Mimetypes~Kind
* @enum
*/
const MimetypesKind = {
opus: 'video/ogg',
ogv: 'video/ogg',
mp4: 'video/mp4',
mov: 'video/mp4',
m4v: 'video/mp4',
mkv: 'video/x-matroska',
m4a: 'audio/mp4',
mp3: 'audio/mpeg',
aac: 'audio/aac',
caf: 'audio/x-caf',
flac: 'audio/flac',
oga: 'audio/ogg',
wav: 'audio/wav',
m3u8: 'application/x-mpegURL',
mpd: 'application/dash+xml',
jpg: 'image/jpeg',
jpeg: 'image/jpeg',
gif: 'image/gif',
png: 'image/png',
svg: 'image/svg+xml',
webp: 'image/webp'
};
/**
* Get the mimetype of a given src url if possible
*
* @param {string} src
* The url to the src
*
* @return {string}
* return the mimetype if it was known or empty string otherwise
*/
const getMimetype = function (src = '') {
const ext = getFileExtension(src);
const mimetype = MimetypesKind[ext.toLowerCase()];
return mimetype || '';
};
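// Illustrative examples of the extension-based lookup above (URLs hypothetical):
//
//   getMimetype('//example.com/video.mp4'); // 'video/mp4'
//   getMimetype('movie.m3u8');              // 'application/x-mpegURL'
//   getMimetype('no-extension');            // ''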
/**
* Find the mime type of a given source string if possible. Uses the player
* source cache.
*
* @param {Player} player
* The player object
*
* @param {string} src
* The source string
*
* @return {string}
* The type that was found
*/
const findMimetype = (player, src) => {
if (!src) {
return '';
}
// 1. check for the type in the `source` cache
if (player.cache_.source.src === src && player.cache_.source.type) {
return player.cache_.source.type;
}
// 2. see if we have this source in our `currentSources` cache
const matchingSources = player.cache_.sources.filter(s => s.src === src);
if (matchingSources.length) {
return matchingSources[0].type;
}
// 3. look for the src url in source elements and use the type there
const sources = player.$$('source');
for (let i = 0; i < sources.length; i++) {
const s = sources[i];
if (s.type && s.src && s.src === src) {
return s.type;
}
}
// 4. finally fallback to our list of mime types based on src url extension
return getMimetype(src);
};
/**
* @module filter-source
*/
/**
* Filter out single bad source objects or multiple source objects in an
* array. Also flattens nested source object arrays into a 1 dimensional
* array of source objects.
*
* @param {Tech~SourceObject|Tech~SourceObject[]} src
* The src object to filter
*
* @return {Tech~SourceObject[]}
* An array of source objects containing only valid sources
*
* @private
*/
const filterSource = function (src) {
// traverse array
if (Array.isArray(src)) {
let newsrc = [];
src.forEach(function (srcobj) {
srcobj = filterSource(srcobj);
if (Array.isArray(srcobj)) {
newsrc = newsrc.concat(srcobj);
} else if (isObject(srcobj)) {
newsrc.push(srcobj);
}
});
src = newsrc;
} else if (typeof src === 'string' && src.trim()) {
// convert string into object
src = [fixSource({
src
})];
} else if (isObject(src) && typeof src.src === 'string' && src.src && src.src.trim()) {
// src is already valid
src = [fixSource(src)];
} else {
// invalid source, turn it into an empty array
src = [];
}
return src;
};
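// Illustrative examples of the filtering above (URLs hypothetical):
//
//   filterSource('//example.com/video.mp4');
//   // -> [{ src: '//example.com/video.mp4', type: 'video/mp4' }]
//
//   filterSource([{ src: '//example.com/a.webm', type: 'video/webm' }, null, '']);
//   // -> [{ src: '//example.com/a.webm', type: 'video/webm' }]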
/**
* Checks src mimetype, adding it when possible
*
* @param {Tech~SourceObject} src
* The src object to check
* @return {Tech~SourceObject}
* src Object with known type
*/
function fixSource(src) {
if (!src.type) {
const mimetype = getMimetype(src.src);
if (mimetype) {
src.type = mimetype;
}
}
return src;
}
var icons = "\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n ";
// Determine the keycode for the 'back' key based on the platform
const backKeyCode = IS_TIZEN ? 10009 : IS_WEBOS ? 461 : 8;
const SpatialNavKeyCodes = {
codes: {
play: 415,
pause: 19,
ff: 417,
rw: 412,
back: backKeyCode
},
names: {
415: 'play',
19: 'pause',
417: 'ff',
412: 'rw',
[backKeyCode]: 'back'
},
isEventKey(event, keyName) {
keyName = keyName.toLowerCase();
if (this.names[event.keyCode] && this.names[event.keyCode] === keyName) {
return true;
}
return false;
},
getEventName(event) {
if (this.names[event.keyCode]) {
return this.names[event.keyCode];
} else if (this.codes[event.code]) {
const code = this.codes[event.code];
return this.names[code];
}
return null;
}
};
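// Illustrative sketch of how the key map above is consulted:
//
//   SpatialNavKeyCodes.isEventKey({ keyCode: 415 }, 'play'); // true
//   SpatialNavKeyCodes.getEventName({ keyCode: 412 });       // 'rw'
//   SpatialNavKeyCodes.getEventName({ keyCode: 1 });         // null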
/**
* @file spatial-navigation.js
*/
/** @import Component from './component' */
/** @import Player from './player' */
// The number of seconds the `step*` functions move the timeline.
const STEP_SECONDS$1 = 5;
/**
* Spatial Navigation in Video.js enhances user experience and accessibility on smartTV devices,
* enabling seamless navigation through interactive elements within the player using remote control arrow keys.
* This functionality allows users to effortlessly navigate through focusable components.
*
* @extends EventTarget
*/
class SpatialNavigation extends EventTarget$2 {
/**
* Constructs a SpatialNavigation instance with initial settings.
* Sets up the player instance, and prepares the spatial navigation system.
*
* @class
* @param {Player} player - The Video.js player instance to which the spatial navigation is attached.
*/
constructor(player) {
super();
this.player_ = player;
this.focusableComponents = [];
this.isListening_ = false;
this.isPaused_ = false;
this.onKeyDown_ = this.onKeyDown_.bind(this);
this.lastFocusedComponent_ = null;
}
/**
* Starts the spatial navigation by adding a keydown event listener to the video container.
* This method ensures that the event listener is added only once.
*/
start() {
// If the listener is already active, exit early.
if (this.isListening_) {
return;
}
// Add the event listener since the listener is not yet active.
this.player_.on('keydown', this.onKeyDown_);
this.player_.on('modalKeydown', this.onKeyDown_);
// Listen for source change events
this.player_.on('loadedmetadata', () => {
this.focus(this.updateFocusableComponents()[0]);
});
this.player_.on('modalclose', () => {
this.refocusComponent();
});
this.player_.on('focusin', this.handlePlayerFocus_.bind(this));
this.player_.on('focusout', this.handlePlayerBlur_.bind(this));
this.isListening_ = true;
if (this.player_.errorDisplay) {
this.player_.errorDisplay.on('aftermodalfill', () => {
this.updateFocusableComponents();
if (this.focusableComponents.length) {
// The modal has focusable components:
if (this.focusableComponents.length > 1) {
// The modal has a close button plus some additional buttons.
// Focusing the first additional button:
this.focusableComponents[1].focus();
} else {
// The modal has only the close button,
// focusing it:
this.focusableComponents[0].focus();
}
}
});
}
}
/**
* Stops the spatial navigation by removing the keydown event listener from the video container.
* Also sets the `isListening_` flag to false.
*/
stop() {
this.player_.off('keydown', this.onKeyDown_);
this.isListening_ = false;
}
/**
* Responds to keydown events for spatial navigation and media control.
*
* Determines if spatial navigation or media control is active and handles key inputs accordingly.
*
* @param {KeyboardEvent} event - The keydown event to be handled.
*/
onKeyDown_(event) {
// Determine if the event is a custom modalKeydown event
const actualEvent = event.originalEvent ? event.originalEvent : event;
if (['ArrowLeft', 'ArrowRight', 'ArrowUp', 'ArrowDown'].includes(actualEvent.key)) {
// Handle directional navigation
if (this.isPaused_) {
return;
}
actualEvent.preventDefault();
// "ArrowLeft" => "left" etc
const direction = actualEvent.key.substring(5).toLowerCase();
this.move(direction);
} else if (SpatialNavKeyCodes.isEventKey(actualEvent, 'play') || SpatialNavKeyCodes.isEventKey(actualEvent, 'pause') || SpatialNavKeyCodes.isEventKey(actualEvent, 'ff') || SpatialNavKeyCodes.isEventKey(actualEvent, 'rw')) {
// Handle media actions
actualEvent.preventDefault();
const action = SpatialNavKeyCodes.getEventName(actualEvent);
this.performMediaAction_(action);
} else if (SpatialNavKeyCodes.isEventKey(actualEvent, 'Back') && event.target && event.target.closeable()) {
actualEvent.preventDefault();
event.target.close();
}
}
/**
* Performs media control actions based on the given key input.
*
* Controls the playback and seeking functionalities of the media player.
*
* @param {string} key - The key representing the media action to be performed.
* Accepted keys: 'play', 'pause', 'ff' (fast-forward), 'rw' (rewind).
*/
performMediaAction_(key) {
if (this.player_) {
switch (key) {
case 'play':
if (this.player_.paused()) {
this.player_.play();
}
break;
case 'pause':
if (!this.player_.paused()) {
this.player_.pause();
}
break;
case 'ff':
this.userSeek_(this.player_.currentTime() + STEP_SECONDS$1);
break;
case 'rw':
this.userSeek_(this.player_.currentTime() - STEP_SECONDS$1);
break;
}
}
}
/**
* Prevent liveThreshold from causing seeks to seem like they
* are not happening from a user perspective.
*
* @param {number} ct
* current time to seek to
*/
userSeek_(ct) {
if (this.player_.liveTracker && this.player_.liveTracker.isLive()) {
this.player_.liveTracker.nextSeekedFromUser();
}
this.player_.currentTime(ct);
}
/**
* Pauses the spatial navigation functionality.
* This method sets a flag that can be used to temporarily disable the navigation logic.
*/
pause() {
this.isPaused_ = true;
}
/**
* Resumes the spatial navigation functionality if it has been paused.
* This method resets the pause flag, re-enabling the navigation logic.
*/
resume() {
this.isPaused_ = false;
}
/**
* Handles Player Blur.
*
* @param {string|Event|Object} event
* The name of the event, an `Event`, or an object with a key of type set to
* an event name.
*
* Calls for handling of the Player Blur if:
* * The next focused element is not a child of the current focused element and
*   is not a child of the Player, or
* * There is no next focused element.
*/
handlePlayerBlur_(event) {
const nextFocusedElement = event.relatedTarget;
let isChildrenOfPlayer = null;
const currentComponent = this.getCurrentComponent(event.target);
if (nextFocusedElement) {
isChildrenOfPlayer = Boolean(nextFocusedElement.closest('.video-js'));
// If nextFocusedElement is the 'TextTrackSettings' component
if (nextFocusedElement.classList.contains('vjs-text-track-settings') && !this.isPaused_) {
this.searchForTrackSelect_();
}
}
if (!event.currentTarget.contains(event.relatedTarget) && !isChildrenOfPlayer || !nextFocusedElement) {
if (currentComponent && currentComponent.name() === 'CloseButton') {
this.refocusComponent();
} else {
this.pause();
if (currentComponent && currentComponent.el()) {
// Store last focused component
this.lastFocusedComponent_ = currentComponent;
}
}
}
}
/**
* Handles the Player focus event.
*
* Calls for handling of the Player Focus if current element is focusable.
*/
handlePlayerFocus_() {
if (this.getCurrentComponent() && this.getCurrentComponent().getIsFocusable()) {
this.resume();
}
}
/**
* Gets a set of focusable components.
*
* @return {Array}
* Returns an array of focusable components.
*/
updateFocusableComponents() {
const player = this.player_;
const focusableComponents = [];
/**
* Searches for children candidates.
*
* Pushes Components to the array of 'focusableComponents'.
* Calls itself if there are child elements inside the iterated component.
*
* @param {Array} componentsArray - The array of components to search for focusable children.
*/
function searchForChildrenCandidates(componentsArray) {
for (const i of componentsArray) {
if (i.hasOwnProperty('el_') && i.getIsFocusable() && i.getIsAvailableToBeFocused(i.el())) {
focusableComponents.push(i);
}
if (i.hasOwnProperty('children_') && i.children_.length > 0) {
searchForChildrenCandidates(i.children_);
}
}
}
// Iterate inside all children components of the player.
player.children_.forEach(value => {
if (value.hasOwnProperty('el_')) {
// If the component has the required functions 'getIsFocusable' & 'getIsAvailableToBeFocused', and is focusable & available to be focused.
if (value.getIsFocusable && value.getIsAvailableToBeFocused && value.getIsFocusable() && value.getIsAvailableToBeFocused(value.el())) {
focusableComponents.push(value);
return;
// If the component has possible child components as candidates.
} else if (value.hasOwnProperty('children_') && value.children_.length > 0) {
searchForChildrenCandidates(value.children_);
// If the component has possible item components as candidates.
} else if (value.hasOwnProperty('items') && value.items.length > 0) {
searchForChildrenCandidates(value.items);
// If there is a suitable child element within the component's DOM element.
} else if (this.findSuitableDOMChild(value)) {
focusableComponents.push(value);
}
}
// TODO - Refactor the following logic after refactor of videojs-errors elements to be components is done.
if (value.name_ === 'ErrorDisplay' && value.opened_) {
const buttonContainer = value.el_.querySelector('.vjs-errors-ok-button-container');
if (buttonContainer) {
const modalButtons = buttonContainer.querySelectorAll('button');
modalButtons.forEach((element, index) => {
// Add elements as objects to be handled by the spatial navigation
focusableComponents.push({
name: () => {
return 'ModalButton' + (index + 1);
},
el: () => element,
getPositions: () => {
const rect = element.getBoundingClientRect();
// Creating objects that mirror DOMRectReadOnly for boundingClientRect and center
const boundingClientRect = {
x: rect.x,
y: rect.y,
width: rect.width,
height: rect.height,
top: rect.top,
right: rect.right,
bottom: rect.bottom,
left: rect.left
};
// Calculating the center position
const center = {
x: rect.left + rect.width / 2,
y: rect.top + rect.height / 2,
width: 0,
height: 0,
top: rect.top + rect.height / 2,
right: rect.left + rect.width / 2,
bottom: rect.top + rect.height / 2,
left: rect.left + rect.width / 2
};
return {
boundingClientRect,
center
};
},
// Assume that the following are always focusable
getIsAvailableToBeFocused: () => true,
getIsFocusable: el => true,
focus: () => element.focus()
});
});
}
}
});
this.focusableComponents = focusableComponents;
return this.focusableComponents;
}
/**
* Finds a suitable child element within the provided component's DOM element.
*
* @param {Object} component - The component containing the DOM element to search within.
* @return {HTMLElement|null} Returns the suitable child element if found, or null if not found.
*/
findSuitableDOMChild(component) {
/**
* Recursively searches for a suitable child node that can be focused within a given component.
* It first checks if the provided node itself can be focused according to the component's
* `getIsFocusable` and `getIsAvailableToBeFocused` methods. If not, it recursively searches
* through the node's children to find a suitable child node that meets the focusability criteria.
*
* @param {HTMLElement} node - The DOM node to start the search from.
* @return {HTMLElement|null} The first child node that is focusable and available to be focused,
* or `null` if no suitable child is found.
*/
function searchForSuitableChild(node) {
if (component.getIsFocusable(node) && component.getIsAvailableToBeFocused(node)) {
return node;
}
for (let i = 0; i < node.children.length; i++) {
const child = node.children[i];
const suitableChild = searchForSuitableChild(child);
if (suitableChild) {
return suitableChild;
}
}
return null;
}
if (component.el()) {
return searchForSuitableChild(component.el());
}
return null;
}
/**
* Gets the currently focused component from the list of focusable components.
* If a target element is provided, it uses that element to find the corresponding
* component. If no target is provided, it defaults to using the document's currently
* active element.
*
* @param {HTMLElement} [target] - The DOM element to check against the focusable components.
* If not provided, `document.activeElement` is used.
* @return {Component|null} - Returns the focused component if found among the focusable components,
* otherwise returns null if no matching component is found.
*/
getCurrentComponent(target) {
this.updateFocusableComponents();
// eslint-disable-next-line
const curComp = target || document.activeElement;
if (this.focusableComponents.length) {
for (const i of this.focusableComponents) {
// If component Node is equal to the current active element.
if (i.el() === curComp) {
return i;
}
}
}
}
/**
* Adds a component to the array of focusable components.
*
* @param {Component} component
* The `Component` to be added.
*/
add(component) {
const focusableComponents = [...this.focusableComponents];
if (component.hasOwnProperty('el_') && component.getIsFocusable() && component.getIsAvailableToBeFocused(component.el())) {
focusableComponents.push(component);
}
this.focusableComponents = focusableComponents;
// Trigger the notification manually
this.trigger({
type: 'focusableComponentsChanged',
focusableComponents: this.focusableComponents
});
}
/**
* Removes component from the array of focusable components.
*
* @param {Component} component - The component to be removed from the focusable components array.
*/
remove(component) {
for (let i = 0; i < this.focusableComponents.length; i++) {
if (this.focusableComponents[i].name() === component.name()) {
this.focusableComponents.splice(i, 1);
// Trigger the notification manually
this.trigger({
type: 'focusableComponentsChanged',
focusableComponents: this.focusableComponents
});
return;
}
}
}
/**
* Clears array of focusable components.
*/
clear() {
// Check if the array is already empty to avoid unnecessary event triggering
if (this.focusableComponents.length > 0) {
// Clear the array
this.focusableComponents = [];
// Trigger the notification manually
this.trigger({
type: 'focusableComponentsChanged',
focusableComponents: this.focusableComponents
});
}
}
/**
* Navigates to the next focusable component based on the specified direction.
*
* @param {string} direction 'up', 'down', 'left', 'right'
*/
move(direction) {
const currentFocusedComponent = this.getCurrentComponent();
if (!currentFocusedComponent) {
return;
}
const currentPositions = currentFocusedComponent.getPositions();
const candidates = this.focusableComponents.filter(component => component !== currentFocusedComponent && this.isInDirection_(currentPositions.boundingClientRect, component.getPositions().boundingClientRect, direction));
const bestCandidate = this.findBestCandidate_(currentPositions.center, candidates, direction);
if (bestCandidate) {
this.focus(bestCandidate);
} else {
this.trigger({
type: 'endOfFocusableComponents',
direction,
focusedComponent: currentFocusedComponent
});
}
}
/**
* Finds the best candidate based on the current center position,
* the list of candidates, and the specified navigation direction.
*
* @param {Object} currentCenter The center position of the current focused component element.
* @param {Array} candidates An array of candidate components to receive focus.
* @param {string} direction The direction of navigation ('up', 'down', 'left', 'right').
* @return {Object|null} The component that is the best candidate for receiving focus.
*/
findBestCandidate_(currentCenter, candidates, direction) {
let minDistance = Infinity;
let bestCandidate = null;
for (const candidate of candidates) {
const candidateCenter = candidate.getPositions().center;
const distance = this.calculateDistance_(currentCenter, candidateCenter, direction);
if (distance < minDistance) {
minDistance = distance;
bestCandidate = candidate;
}
}
return bestCandidate;
}
/**
* Determines if a target rectangle is in the specified navigation direction
* relative to a source rectangle.
*
* @param {Object} srcRect The bounding rectangle of the source element.
* @param {Object} targetRect The bounding rectangle of the target element.
* @param {string} direction The navigation direction ('up', 'down', 'left', 'right').
* @return {boolean} True if the target is in the specified direction relative to the source.
*/
isInDirection_(srcRect, targetRect, direction) {
switch (direction) {
case 'right':
return targetRect.left >= srcRect.right;
case 'left':
return targetRect.right <= srcRect.left;
case 'down':
return targetRect.top >= srcRect.bottom;
case 'up':
return targetRect.bottom <= srcRect.top;
default:
return false;
}
}
/**
* Focus the last focused component saved before blur on player.
*/
refocusComponent() {
if (this.lastFocusedComponent_) {
// If user is not active, set it to active.
if (!this.player_.userActive()) {
this.player_.userActive(true);
}
this.updateFocusableComponents();
// Search inside array of 'focusableComponents' for a match of name of
// the last focused component.
for (let i = 0; i < this.focusableComponents.length; i++) {
if (this.focusableComponents[i].name() === this.lastFocusedComponent_.name()) {
this.focus(this.focusableComponents[i]);
return;
}
}
} else {
this.focus(this.updateFocusableComponents()[0]);
}
}
/**
* Focuses on a given component.
* If the component is available to be focused, it focuses on the component.
* If not, it attempts to find a suitable DOM child within the component and focuses on it.
*
* @param {Component} component - The component to be focused.
*/
focus(component) {
if (typeof component !== 'object') {
return;
}
if (component.getIsAvailableToBeFocused(component.el())) {
component.focus();
} else if (this.findSuitableDOMChild(component)) {
this.findSuitableDOMChild(component).focus();
}
}
/**
* Calculates the distance between two points, adjusting the calculation based on
* the specified navigation direction.
*
* @param {Object} center1 The center point of the first element.
* @param {Object} center2 The center point of the second element.
* @param {string} direction The direction of navigation ('up', 'down', 'left', 'right').
* @return {number} The calculated distance between the two centers.
*/
calculateDistance_(center1, center2, direction) {
const dx = Math.abs(center1.x - center2.x);
const dy = Math.abs(center1.y - center2.y);
let distance;
switch (direction) {
case 'right':
case 'left':
// Higher weight for vertical distance in horizontal navigation.
distance = dx + dy * 100;
break;
case 'up':
// Strongly prioritize vertical proximity for UP navigation.
// Adjust the weight to ensure that elements directly above are favored.
distance = dy * 2 + dx * 0.5;
break;
case 'down':
// More balanced weight for vertical and horizontal distances.
// Adjust the weights here to find the best balance.
distance = dy * 5 + dx;
break;
default:
distance = dx + dy;
}
return distance;
}
/**
* This gets called by 'handlePlayerBlur_' if 'spatialNavigation' is enabled.
* Searches for the first 'TextTrackSelect' inside of modal to focus.
*
* @private
*/
searchForTrackSelect_() {
const spatialNavigation = this;
for (const component of spatialNavigation.updateFocusableComponents()) {
if (component.constructor.name === 'TextTrackSelect') {
spatialNavigation.focus(component);
break;
}
}
}
}
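// Illustrative sketch (assumes an existing `player` on a smart-TV profile): the player
// normally creates this itself when the `spatialNavigation` option is enabled; a direct
// construction is shown here only to illustrate the API.
//
//   const nav = new SpatialNavigation(player);
//   nav.start();
//   nav.on('endOfFocusableComponents', (event) => {
//     // event.direction tells which edge was hit; e.g. hand focus back to the app UI
//   });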
/**
* @file loader.js
*/
/** @import Player from '../player' */
/**
* The `MediaLoader` is the `Component` that decides which playback technology to load
* when a player is initialized.
*
* @extends Component
*/
class MediaLoader extends Component$1 {
/**
* Create an instance of this class.
*
* @param {Player} player
* The `Player` that this class should attach to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {Function} [ready]
* The function that is run when this component is ready.
*/
constructor(player, options, ready) {
// MediaLoader has no element
const options_ = merge$1({
createEl: false
}, options);
super(player, options_, ready);
// If there are no sources when the player is initialized,
// load the first supported playback technology.
if (!options.playerOptions.sources || options.playerOptions.sources.length === 0) {
for (let i = 0, j = options.playerOptions.techOrder; i < j.length; i++) {
const techName = toTitleCase$1(j[i]);
let tech = Tech.getTech(techName);
// Support old behavior of techs being registered as components.
// Remove once that deprecated behavior is removed.
if (!tech) {
tech = Component$1.getComponent(techName);
}
// Check if the browser supports this technology
if (tech && tech.isSupported()) {
player.loadTech_(techName);
break;
}
}
} else {
// Loop through playback technologies (e.g. HTML5) and check for support.
// Then load the best source.
// A few assumptions here:
// All playback technologies respect preload false.
player.src(options.playerOptions.sources);
}
}
}
Component$1.registerComponent('MediaLoader', MediaLoader);
/**
* @file clickable-component.js
*/
/** @import Player from './player' */
/**
* Component which is clickable or keyboard actionable, but is not a
* native HTML button.
*
* @extends Component
*/
class ClickableComponent extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of component options.
*
* @param {function} [options.clickHandler]
* The function to call when the button is clicked / activated
*
* @param {string} [options.controlText]
* The text to set on the button
*
* @param {string} [options.className]
* A class or space-separated list of classes to add to the component
*
*/
constructor(player, options) {
super(player, options);
if (this.options_.controlText) {
this.controlText(this.options_.controlText);
}
this.handleMouseOver_ = e => this.handleMouseOver(e);
this.handleMouseOut_ = e => this.handleMouseOut(e);
this.handleClick_ = e => this.handleClick(e);
this.handleKeyDown_ = e => this.handleKeyDown(e);
this.emitTapEvents();
this.enable();
}
/**
* Create the `ClickableComponent`s DOM element.
*
* @param {string} [tag=div]
* The element's node type.
*
* @param {Object} [props={}]
* An object of properties that should be set on the element.
*
* @param {Object} [attributes={}]
* An object of attributes that should be set on the element.
*
* @return {Element}
* The element that gets created.
*/
createEl(tag = 'div', props = {}, attributes = {}) {
props = Object.assign({
className: this.buildCSSClass(),
tabIndex: 0
}, props);
if (tag === 'button') {
log$1.error(`Creating a ClickableComponent with an HTML element of ${tag} is not supported; use a Button instead.`);
}
// Add ARIA attributes for clickable element which is not a native HTML button
attributes = Object.assign({
role: 'button'
}, attributes);
this.tabIndex_ = props.tabIndex;
const el = createEl(tag, props, attributes);
if (!this.player_.options_.experimentalSvgIcons) {
el.appendChild(createEl('span', {
className: 'vjs-icon-placeholder'
}, {
'aria-hidden': true
}));
}
this.createControlTextEl(el);
return el;
}
dispose() {
// remove controlTextEl_ on dispose
this.controlTextEl_ = null;
super.dispose();
}
/**
* Create a control text element on this `ClickableComponent`
*
* @param {Element} [el]
* Parent element for the control text.
*
* @return {Element}
* The control text element that gets created.
*/
createControlTextEl(el) {
this.controlTextEl_ = createEl('span', {
className: 'vjs-control-text'
}, {
// let the screen reader user know that the text of the element may change
'aria-live': 'polite'
});
if (el) {
el.appendChild(this.controlTextEl_);
}
this.controlText(this.controlText_, el);
return this.controlTextEl_;
}
/**
* Get or set the localize text to use for the controls on the `ClickableComponent`.
*
* @param {string} [text]
* Control text for element.
*
* @param {Element} [el=this.el()]
* Element to set the title on.
*
* @return {string}
* - The control text when getting
*/
controlText(text, el = this.el()) {
if (text === undefined) {
return this.controlText_ || 'Need Text';
}
const localizedText = this.localize(text);
/** @protected */
this.controlText_ = text;
textContent(this.controlTextEl_, localizedText);
if (!this.nonIconControl && !this.player_.options_.noUITitleAttributes) {
// Set title attribute if only an icon is shown
el.setAttribute('title', localizedText);
}
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-control vjs-button ${super.buildCSSClass()}`;
}
/**
* Enable this `ClickableComponent`
*/
enable() {
if (!this.enabled_) {
this.enabled_ = true;
this.removeClass('vjs-disabled');
this.el_.setAttribute('aria-disabled', 'false');
if (typeof this.tabIndex_ !== 'undefined') {
this.el_.setAttribute('tabIndex', this.tabIndex_);
}
this.on(['tap', 'click'], this.handleClick_);
this.on('keydown', this.handleKeyDown_);
}
}
/**
* Disable this `ClickableComponent`
*/
disable() {
this.enabled_ = false;
this.addClass('vjs-disabled');
this.el_.setAttribute('aria-disabled', 'true');
if (typeof this.tabIndex_ !== 'undefined') {
this.el_.removeAttribute('tabIndex');
}
this.off('mouseover', this.handleMouseOver_);
this.off('mouseout', this.handleMouseOut_);
this.off(['tap', 'click'], this.handleClick_);
this.off('keydown', this.handleKeyDown_);
}
/**
* Handles a language change for the `ClickableComponent` by re-localizing its
* control text.
*/
handleLanguagechange() {
this.controlText(this.controlText_);
}
/**
* Event handler that is called when a `ClickableComponent` receives a
* `click` or `tap` event.
*
* @param {Event} event
* The `tap` or `click` event that caused this function to be called.
*
* @listens tap
* @listens click
* @abstract
*/
handleClick(event) {
if (this.options_.clickHandler) {
this.options_.clickHandler.call(this, arguments);
}
}
/**
* Event handler that is called when a `ClickableComponent` receives a
* `keydown` event.
*
* By default, if the key is Space or Enter, it will trigger a `click` event.
*
* @param {KeyboardEvent} event
* The `keydown` event that caused this function to be called.
*
* @listens keydown
*/
handleKeyDown(event) {
// Support Space or Enter key operation to fire a click event. Also,
// prevent the event from propagating through the DOM and triggering
// Player hotkeys.
if (event.key === ' ' || event.key === 'Enter') {
event.preventDefault();
event.stopPropagation();
this.trigger('click');
} else {
// Pass keypress handling up for unsupported keys
super.handleKeyDown(event);
}
}
}
Component$1.registerComponent('ClickableComponent', ClickableComponent);
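// Illustrative sketch (names hypothetical): subclassing ClickableComponent for a custom
// clickable control.
//
//   class HelloButton extends ClickableComponent {
//     constructor(player, options) {
//       super(player, Object.assign({ controlText: 'Hello' }, options));
//     }
//     handleClick(event) {
//       super.handleClick(event);
//       this.player().pause();
//     }
//   }
//   Component$1.registerComponent('HelloButton', HelloButton);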
/**
* @file poster-image.js
*/
/** @import Player from './player' */
/**
* A `ClickableComponent` that handles showing the poster image for the player.
*
* @extends ClickableComponent
*/
class PosterImage extends ClickableComponent {
/**
* Create an instance of this class.
*
* @param {Player} player
* The `Player` that this class should attach to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.update();
this.update_ = e => this.update(e);
player.on('posterchange', this.update_);
}
/**
* Clean up and dispose of the `PosterImage`.
*/
dispose() {
this.player().off('posterchange', this.update_);
super.dispose();
}
/**
* Create the `PosterImage`s DOM element.
*
* @return {Element}
* The element that gets created.
*/
createEl() {
// The el is an empty div to keep position in the DOM
// A picture and img el will be inserted when a source is set
return createEl('div', {
className: 'vjs-poster'
});
}
/**
* Get or set the `PosterImage`'s crossOrigin option.
*
* @param {string|null} [value]
* The value to set the crossOrigin to. If an argument is
* given, must be one of `'anonymous'` or `'use-credentials'`, or 'null'.
*
* @return {string|null}
* - The current crossOrigin value of the `Player` when getting.
* - undefined when setting
*/
crossOrigin(value) {
// `null` can be set to unset a value
if (typeof value === 'undefined') {
if (this.$('img')) {
// If the poster's element exists, give its value
return this.$('img').crossOrigin;
} else if (this.player_.tech_ && this.player_.tech_.isReady_) {
// If not but the tech is ready, query the tech
return this.player_.crossOrigin();
}
// Otherwise check options as the poster is usually set before the state of crossorigin
// can be retrieved by the getter
return this.player_.options_.crossOrigin || this.player_.options_.crossorigin || null;
}
if (value !== null && value !== 'anonymous' && value !== 'use-credentials') {
this.player_.log.warn(`crossOrigin must be null, "anonymous" or "use-credentials", given "${value}"`);
return;
}
if (this.$('img')) {
this.$('img').crossOrigin = value;
}
return;
}
/**
* An {@link EventTarget~EventListener} for {@link Player#posterchange} events.
*
* @listens Player#posterchange
*
* @param {Event} [event]
* The `Player#posterchange` event that triggered this function.
*/
update(event) {
const url = this.player().poster();
this.setSrc(url);
// If there's no poster source we should display:none on this component
// so it's not still clickable or right-clickable
if (url) {
this.show();
} else {
this.hide();
}
}
/**
* Set the source of the `PosterImage` depending on the display method. (Re)creates
* the inner picture and img elements when needed.
*
* @param {string} [url]
* The URL to the source for the `PosterImage`. If not specified or falsy,
* any source and any inner picture/img are removed.
*/
setSrc(url) {
if (!url) {
this.el_.textContent = '';
return;
}
if (!this.$('img')) {
this.el_.appendChild(createEl('picture', {
className: 'vjs-poster',
// Don't want poster to be tabbable.
tabIndex: -1
}, {}, createEl('img', {
loading: 'lazy',
crossOrigin: this.crossOrigin()
}, {
alt: ''
})));
}
this.$('img').src = url;
}
/**
* An {@link EventTarget~EventListener} for clicks on the `PosterImage`. See
* {@link ClickableComponent#handleClick} for instances where this will be triggered.
*
* @listens tap
* @listens click
* @listens keydown
*
* @param {Event} event
* The `click`, `tap` or `keydown` event that caused this function to be called.
*/
handleClick(event) {
// We don't want a click to trigger playback when controls are disabled
if (!this.player_.controls()) {
return;
}
if (this.player_.tech(true)) {
this.player_.tech(true).focus();
}
if (this.player_.paused()) {
silencePromise(this.player_.play());
} else {
this.player_.pause();
}
}
}
/**
* Get or set the `PosterImage`'s crossorigin option. For the HTML5 player, this
* sets the `crossOrigin` property on the `<img>` tag to control the CORS
* behavior.
*
* @param {string|null} [value]
* The value to set the `PosterImage`'s crossorigin to. If an argument is
* given, must be one of `anonymous` or `use-credentials`.
*
* @return {string|null|undefined}
* - The current crossorigin value of the `Player` when getting.
* - undefined when setting
*/
PosterImage.prototype.crossorigin = PosterImage.prototype.crossOrigin;
Component$1.registerComponent('PosterImage', PosterImage);
/**
* @file text-track-display.js
*/
/** @import Player from '../player' */
const darkGray = '#222';
const lightGray = '#ccc';
const fontMap = {
monospace: 'monospace',
sansSerif: 'sans-serif',
serif: 'serif',
monospaceSansSerif: '"Andale Mono", "Lucida Console", monospace',
monospaceSerif: '"Courier New", monospace',
proportionalSansSerif: 'sans-serif',
proportionalSerif: 'serif',
casual: '"Comic Sans MS", Impact, fantasy',
script: '"Monotype Corsiva", cursive',
smallcaps: '"Andale Mono", "Lucida Console", monospace, sans-serif'
};
/**
* Construct an rgba color from a given hex color code.
*
* @param {string} color
* Hex color code, like #f0e or #f604e2.
*
* @param {number} opacity
* Value for opacity, 0.0 - 1.0.
*
* @return {string}
* The rgba color that was created, like 'rgba(255, 0, 0, 0.3)'.
*/
function constructColor(color, opacity) {
let hex;
if (color.length === 4) {
// color looks like "#f0e"
hex = color[1] + color[1] + color[2] + color[2] + color[3] + color[3];
} else if (color.length === 7) {
// color looks like "#f604e2"
hex = color.slice(1);
} else {
throw new Error('Invalid color code provided, ' + color + '; must be formatted as e.g. #f0e or #f604e2.');
}
return 'rgba(' + parseInt(hex.slice(0, 2), 16) + ',' + parseInt(hex.slice(2, 4), 16) + ',' + parseInt(hex.slice(4, 6), 16) + ',' + opacity + ')';
}
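// For example, constructColor('#f0e', 0.5) expands the short form to 'ff00ee'
// and returns 'rgba(255,0,238,0.5)'; constructColor('#f604e2', 1) returns
// 'rgba(246,4,226,1)'.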
/**
* Try to update the style of a DOM element. Some style changes will throw an error,
* particularly in IE8. Those should be noops.
*
* @param {Element} el
* The DOM element to be styled.
*
* @param {string} style
* The CSS property on the element that should be styled.
*
* @param {string} rule
* The style rule that should be applied to the property.
*
* @private
*/
function tryUpdateStyle(el, style, rule) {
try {
el.style[style] = rule;
} catch (e) {
// Satisfies linter.
return;
}
}
/**
* Converts the CSS top/right/bottom/left property numeric value to string in pixels.
*
* @param {number} position
* The CSS top/right/bottom/left property value.
*
* @return {string}
* The CSS property value that was created, like '10px'.
*
* @private
*/
function getCSSPositionValue(position) {
return position ? `${position}px` : '';
}
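// For example, getCSSPositionValue(12) returns '12px', while a falsy position
// such as 0 returns the empty string.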
/**
* The component for displaying text track cues.
*
* @extends Component
*/
class TextTrackDisplay extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {Function} [ready]
* The function to call when `TextTrackDisplay` is ready.
*/
constructor(player, options, ready) {
super(player, options, ready);
const updateDisplayTextHandler = e => this.updateDisplay(e);
const updateDisplayHandler = e => {
this.updateDisplayOverlay();
this.updateDisplay(e);
};
player.on('loadstart', e => this.toggleDisplay(e));
player.on('texttrackchange', updateDisplayTextHandler);
player.on('loadedmetadata', e => {
this.updateDisplayOverlay();
this.preselectTrack(e);
});
// This used to be called during player init, but was causing an error
// if a track should show by default and the display hadn't loaded yet.
// Should probably be moved to an external track loader when we support
// tracks that don't need a display.
player.ready(bind_(this, function () {
if (player.tech_ && player.tech_.featuresNativeTextTracks) {
this.hide();
return;
}
player.on('fullscreenchange', updateDisplayHandler);
player.on('playerresize', updateDisplayHandler);
const screenOrientation = window$1.screen.orientation || window$1;
const changeOrientationEvent = window$1.screen.orientation ? 'change' : 'orientationchange';
screenOrientation.addEventListener(changeOrientationEvent, updateDisplayHandler);
player.on('dispose', () => screenOrientation.removeEventListener(changeOrientationEvent, updateDisplayHandler));
const tracks = this.options_.playerOptions.tracks || [];
for (let i = 0; i < tracks.length; i++) {
this.player_.addRemoteTextTrack(tracks[i], true);
}
this.preselectTrack();
}));
}
/**
* Preselect a track following this precedence:
* - matches the previously selected {@link TextTrack}'s language and kind
* - matches the previously selected {@link TextTrack}'s language only
* - is the first default captions track
* - is the first default descriptions track
*
* @listens Player#loadstart
*/
preselectTrack() {
const modes = {
captions: 1,
subtitles: 1
};
const trackList = this.player_.textTracks();
const userPref = this.player_.cache_.selectedLanguage;
let firstDesc;
let firstCaptions;
let preferredTrack;
for (let i = 0; i < trackList.length; i++) {
const track = trackList[i];
if (userPref && userPref.enabled && userPref.language && userPref.language === track.language && track.kind in modes) {
// Always choose the track that matches both language and kind
if (track.kind === userPref.kind) {
preferredTrack = track;
// or choose the first track that matches language
} else if (!preferredTrack) {
preferredTrack = track;
}
// clear everything if offTextTrackMenuItem was clicked
} else if (userPref && !userPref.enabled) {
preferredTrack = null;
firstDesc = null;
firstCaptions = null;
} else if (track.default) {
if (track.kind === 'descriptions' && !firstDesc) {
firstDesc = track;
} else if (track.kind in modes && !firstCaptions) {
firstCaptions = track;
}
}
}
// The preferredTrack matches the user preference and takes
// precedence over all the other tracks.
// So, display the preferredTrack before the first default track
// and the subtitles/captions track before the descriptions track
if (preferredTrack) {
preferredTrack.mode = 'showing';
} else if (firstCaptions) {
firstCaptions.mode = 'showing';
} else if (firstDesc) {
firstDesc.mode = 'showing';
}
}
/**
* Toggle display of {@link TextTrack}s from the current state to the other state.
* There are only two states:
* - 'shown'
* - 'hidden'
*
* @listens Player#loadstart
*/
toggleDisplay() {
if (this.player_.tech_ && this.player_.tech_.featuresNativeTextTracks) {
this.hide();
} else {
this.show();
}
}
/**
* Create the {@link Component}'s DOM element.
*
* @return {Element}
* The element that was created.
*/
createEl() {
return super.createEl('div', {
className: 'vjs-text-track-display'
}, {
'translate': 'yes',
'aria-live': 'off',
'aria-atomic': 'true'
});
}
/**
* Clear all displayed {@link TextTrack}s.
*/
clearDisplay() {
if (typeof window$1.WebVTT === 'function') {
window$1.WebVTT.processCues(window$1, [], this.el_);
}
}
/**
* Update the displayed TextTrack when either a {@link Player#texttrackchange} or
* a {@link Player#fullscreenchange} is fired.
*
* @listens Player#texttrackchange
* @listens Player#fullscreenchange
*/
updateDisplay() {
const tracks = this.player_.textTracks();
const allowMultipleShowingTracks = this.options_.allowMultipleShowingTracks;
this.clearDisplay();
if (allowMultipleShowingTracks) {
const showingTracks = [];
for (let i = 0; i < tracks.length; ++i) {
const track = tracks[i];
if (track.mode !== 'showing') {
continue;
}
showingTracks.push(track);
}
this.updateForTrack(showingTracks);
return;
}
// Track display prioritization model: if multiple tracks are 'showing',
// display the first 'subtitles' or 'captions' track which is 'showing',
// otherwise display the first 'descriptions' track which is 'showing'
let descriptionsTrack = null;
let captionsSubtitlesTrack = null;
let i = tracks.length;
while (i--) {
const track = tracks[i];
if (track.mode === 'showing') {
if (track.kind === 'descriptions') {
descriptionsTrack = track;
} else {
captionsSubtitlesTrack = track;
}
}
}
if (captionsSubtitlesTrack) {
if (this.getAttribute('aria-live') !== 'off') {
this.setAttribute('aria-live', 'off');
}
this.updateForTrack(captionsSubtitlesTrack);
} else if (descriptionsTrack) {
if (this.getAttribute('aria-live') !== 'assertive') {
this.setAttribute('aria-live', 'assertive');
}
this.updateForTrack(descriptionsTrack);
}
if (!window$1.CSS.supports('inset', '10px')) {
const textTrackDisplay = this.el_;
const vjsTextTrackCues = textTrackDisplay.querySelectorAll('.vjs-text-track-cue');
const controlBarHeight = this.player_.controlBar.el_.getBoundingClientRect().height;
const playerHeight = this.player_.el_.getBoundingClientRect().height;
// Clear inline style before getting actual height of textTrackDisplay
textTrackDisplay.style = '';
// text track style updates; these styles are required to be inline
tryUpdateStyle(textTrackDisplay, 'position', 'relative');
tryUpdateStyle(textTrackDisplay, 'height', playerHeight - controlBarHeight + 'px');
tryUpdateStyle(textTrackDisplay, 'top', 'unset');
if (IS_SMART_TV) {
tryUpdateStyle(textTrackDisplay, 'bottom', playerHeight + 'px');
} else {
tryUpdateStyle(textTrackDisplay, 'bottom', '0px');
}
// vjsTextTrackCue style updates
if (vjsTextTrackCues.length > 0) {
vjsTextTrackCues.forEach(vjsTextTrackCue => {
// verify if inset styles are inline
if (vjsTextTrackCue.style.inset) {
const insetStyles = vjsTextTrackCue.style.inset.split(' ');
// the inset shorthand is expected to contain exactly 3 values here
if (insetStyles.length === 3) {
Object.assign(vjsTextTrackCue.style, {
top: insetStyles[0],
right: insetStyles[1],
bottom: insetStyles[2],
left: 'unset'
});
}
}
});
}
}
}
/**
* Updates the displayed TextTrack to be sure it overlays the video when either
* a {@link Player#texttrackchange} or a {@link Player#fullscreenchange} is fired.
*/
updateDisplayOverlay() {
// inset-inline and inset-block are not supported on older Chrome, but these are
// only likely to be used on TV devices
if (!this.player_.videoHeight() || !window$1.CSS.supports('inset-inline: 10px')) {
return;
}
const playerWidth = this.player_.currentWidth();
const playerHeight = this.player_.currentHeight();
const playerAspectRatio = playerWidth / playerHeight;
const videoAspectRatio = this.player_.videoWidth() / this.player_.videoHeight();
let insetInlineMatch = 0;
let insetBlockMatch = 0;
if (Math.abs(playerAspectRatio - videoAspectRatio) > 0.1) {
if (playerAspectRatio > videoAspectRatio) {
insetInlineMatch = Math.round((playerWidth - playerHeight * videoAspectRatio) / 2);
} else {
insetBlockMatch = Math.round((playerHeight - playerWidth / videoAspectRatio) / 2);
}
}
tryUpdateStyle(this.el_, 'insetInline', getCSSPositionValue(insetInlineMatch));
tryUpdateStyle(this.el_, 'insetBlock', getCSSPositionValue(insetBlockMatch));
}
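// Worked example: a 1000x500 player (aspect ratio 2.0) showing a 16:9 video
// (aspect ratio ~1.78) is wider than the video, so insetInlineMatch becomes
// Math.round((1000 - 500 * 16 / 9) / 2) = 56 while insetBlockMatch stays 0,
// and the cue container is pulled in by 56px on each inline edge so it sits
// over the video rather than the pillarbox bars.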
/**
* Style {@link TextTrack} activeCues according to {@link TextTrackSettings}.
*
* @param {TextTrack} track
* Text track object containing active cues to style.
*/
updateDisplayState(track) {
const overrides = this.player_.textTrackSettings.getValues();
const cues = track.activeCues;
let i = cues.length;
while (i--) {
const cue = cues[i];
if (!cue) {
continue;
}
const cueDiv = cue.displayState;
if (overrides.color) {
cueDiv.firstChild.style.color = overrides.color;
}
if (overrides.textOpacity) {
tryUpdateStyle(cueDiv.firstChild, 'color', constructColor(overrides.color || '#fff', overrides.textOpacity));
}
if (overrides.backgroundColor) {
cueDiv.firstChild.style.backgroundColor = overrides.backgroundColor;
}
if (overrides.backgroundOpacity) {
tryUpdateStyle(cueDiv.firstChild, 'backgroundColor', constructColor(overrides.backgroundColor || '#000', overrides.backgroundOpacity));
}
if (overrides.windowColor) {
if (overrides.windowOpacity) {
tryUpdateStyle(cueDiv, 'backgroundColor', constructColor(overrides.windowColor, overrides.windowOpacity));
} else {
cueDiv.style.backgroundColor = overrides.windowColor;
}
}
if (overrides.edgeStyle) {
if (overrides.edgeStyle === 'dropshadow') {
cueDiv.firstChild.style.textShadow = `2px 2px 3px ${darkGray}, 2px 2px 4px ${darkGray}, 2px 2px 5px ${darkGray}`;
} else if (overrides.edgeStyle === 'raised') {
cueDiv.firstChild.style.textShadow = `1px 1px ${darkGray}, 2px 2px ${darkGray}, 3px 3px ${darkGray}`;
} else if (overrides.edgeStyle === 'depressed') {
cueDiv.firstChild.style.textShadow = `1px 1px ${lightGray}, 0 1px ${lightGray}, -1px -1px ${darkGray}, 0 -1px ${darkGray}`;
} else if (overrides.edgeStyle === 'uniform') {
cueDiv.firstChild.style.textShadow = `0 0 4px ${darkGray}, 0 0 4px ${darkGray}, 0 0 4px ${darkGray}, 0 0 4px ${darkGray}`;
}
}
if (overrides.fontPercent && overrides.fontPercent !== 1) {
const fontSize = window$1.parseFloat(cueDiv.style.fontSize);
cueDiv.style.fontSize = fontSize * overrides.fontPercent + 'px';
cueDiv.style.height = 'auto';
cueDiv.style.top = 'auto';
}
if (overrides.fontFamily && overrides.fontFamily !== 'default') {
if (overrides.fontFamily === 'small-caps') {
cueDiv.firstChild.style.fontVariant = 'small-caps';
} else {
cueDiv.firstChild.style.fontFamily = fontMap[overrides.fontFamily];
}
}
}
}
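// A sketch of where these overrides come from (assuming the settings component
// also exposes a setValues counterpart to the getValues call used above):
//
//   player.textTrackSettings.setValues({
//     edgeStyle: 'dropshadow',
//     fontPercent: 1.25,
//     backgroundOpacity: '0.5'
//   });
//   // active cues pick up the new values the next time the display updates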
/**
* Update the display for a {@link TextTrack} or an array of {@link TextTrack}s
* by rendering their active cues into this component's element.
*
* @param {TextTrack|TextTrack[]} tracks
* Text track object, or array of text track objects, whose active cues should be displayed.
*/
updateForTrack(tracks) {
if (!Array.isArray(tracks)) {
tracks = [tracks];
}
if (typeof window$1.WebVTT !== 'function' || tracks.every(track => {
return !track.activeCues;
})) {
return;
}
const cues = [];
// push all active track cues
for (let i = 0; i < tracks.length; ++i) {
const track = tracks[i];
for (let j = 0; j < track.activeCues.length; ++j) {
cues.push(track.activeCues[j]);
}
}
// removes all cues before it processes new ones
window$1.WebVTT.processCues(window$1, cues, this.el_);
// add unique class to each language text track & add settings styling if necessary
for (let i = 0; i < tracks.length; ++i) {
const track = tracks[i];
for (let j = 0; j < track.activeCues.length; ++j) {
const cueEl = track.activeCues[j].displayState;
addClass(cueEl, 'vjs-text-track-cue', 'vjs-text-track-cue-' + (track.language ? track.language : i));
if (track.language) {
setAttribute(cueEl, 'lang', track.language);
}
}
if (this.player_.textTrackSettings) {
this.updateDisplayState(track);
}
}
}
}
Component$1.registerComponent('TextTrackDisplay', TextTrackDisplay);
/**
* @file loading-spinner.js
*/
/**
* A loading spinner for use during waiting/loading events.
*
* @extends Component
*/
class LoadingSpinner extends Component$1 {
/**
* Create the `LoadingSpinner`s DOM element.
*
* @return {Element}
* The dom element that gets created.
*/
createEl() {
const isAudio = this.player_.isAudio();
const playerType = this.localize(isAudio ? 'Audio Player' : 'Video Player');
const controlText = createEl('span', {
className: 'vjs-control-text',
textContent: this.localize('{1} is loading.', [playerType])
});
const el = super.createEl('div', {
className: 'vjs-loading-spinner',
dir: 'ltr'
});
el.appendChild(controlText);
return el;
}
/**
* Update control text on languagechange
*/
handleLanguagechange() {
this.$('.vjs-control-text').textContent = this.localize('{1} is loading.', [this.player_.isAudio() ? 'Audio Player' : 'Video Player']);
}
}
Component$1.registerComponent('LoadingSpinner', LoadingSpinner);
/**
* @file button.js
*/
/**
* Base class for all buttons.
*
* @extends ClickableComponent
*/
class Button extends ClickableComponent {
/**
* Create the `Button`s DOM element.
*
* @param {string} [tag="button"]
* The element's node type. This argument is IGNORED: no matter what
* is passed, it will always create a `button` element.
*
* @param {Object} [props={}]
* An object of properties that should be set on the element.
*
* @param {Object} [attributes={}]
* An object of attributes that should be set on the element.
*
* @return {Element}
* The element that gets created.
*/
createEl(tag, props = {}, attributes = {}) {
tag = 'button';
props = Object.assign({
className: this.buildCSSClass()
}, props);
// Add attributes for button element
attributes = Object.assign({
// Necessary since the default button type is "submit"
type: 'button'
}, attributes);
const el = createEl(tag, props, attributes);
if (!this.player_.options_.experimentalSvgIcons) {
el.appendChild(createEl('span', {
className: 'vjs-icon-placeholder'
}, {
'aria-hidden': true
}));
}
this.createControlTextEl(el);
return el;
}
/**
* Add a child `Component` inside of this `Button`.
*
* @param {string|Component} child
* The name or instance of a child to add.
*
* @param {Object} [options={}]
* The key/value store of options that will get passed to children of
* the child.
*
* @return {Component}
* The `Component` that gets added as a child. When using a string the
* `Component` will get created by this process.
*
* @deprecated since version 5
*/
addChild(child, options = {}) {
const className = this.constructor.name;
log$1.warn(`Adding an actionable (user controllable) child to a Button (${className}) is not supported; use a ClickableComponent instead.`);
// Avoid the error message generated by ClickableComponent's addChild method
return Component$1.prototype.addChild.call(this, child, options);
}
/**
* Enable the `Button` element so that it can be activated or clicked. Use this with
* {@link Button#disable}.
*/
enable() {
super.enable();
this.el_.removeAttribute('disabled');
}
/**
* Disable the `Button` element so that it cannot be activated or clicked. Use this with
* {@link Button#enable}.
*/
disable() {
super.disable();
this.el_.setAttribute('disabled', 'disabled');
}
/**
* This gets called when a `Button` has focus and `keydown` is triggered via a key
* press.
*
* @param {KeyboardEvent} event
* The event that caused this function to get called.
*
* @listens keydown
*/
handleKeyDown(event) {
// Ignore Space or Enter key operation, which is handled by the browser for
// a button - though not for its super class, ClickableComponent. Also,
// prevent the event from propagating through the DOM and triggering Player
// hotkeys. We do not preventDefault here because we _want_ the browser to
// handle it.
if (event.key === ' ' || event.key === 'Enter') {
event.stopPropagation();
return;
}
// Pass keypress handling up for unsupported keys
super.handleKeyDown(event);
}
}
Component$1.registerComponent('Button', Button);
/**
* @file big-play-button.js
*/
/**
* The initial play button that shows before the video has played. The hiding of the
* `BigPlayButton` get done via CSS and `Player` states.
*
* @extends Button
*/
class BigPlayButton extends Button {
constructor(player, options) {
super(player, options);
this.mouseused_ = false;
this.setIcon('play');
this.on('mousedown', e => this.handleMouseDown(e));
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object. Always returns 'vjs-big-play-button'.
*/
buildCSSClass() {
return 'vjs-big-play-button';
}
/**
* This gets called when a `BigPlayButton` is "clicked". See {@link ClickableComponent}
* for more detailed information on what a click can be.
*
* @param {KeyboardEvent|MouseEvent|TouchEvent} event
* The `keydown`, `tap`, or `click` event that caused this function to be
* called.
*
* @listens tap
* @listens click
*/
handleClick(event) {
const playPromise = this.player_.play();
// exit early if clicked via the mouse
if (this.mouseused_ && 'clientX' in event && 'clientY' in event) {
silencePromise(playPromise);
if (this.player_.tech(true)) {
this.player_.tech(true).focus();
}
return;
}
const cb = this.player_.getChild('controlBar');
const playToggle = cb && cb.getChild('playToggle');
if (!playToggle) {
this.player_.tech(true).focus();
return;
}
const playFocus = () => playToggle.focus();
if (isPromise(playPromise)) {
playPromise.then(playFocus, () => {});
} else {
this.setTimeout(playFocus, 1);
}
}
/**
* Event handler that is called when a `BigPlayButton` receives a
* `keydown` event.
*
* @param {KeyboardEvent} event
* The `keydown` event that caused this function to be called.
*
* @listens keydown
*/
handleKeyDown(event) {
this.mouseused_ = false;
super.handleKeyDown(event);
}
/**
* Handle `mousedown` events on the `BigPlayButton`.
*
* @param {MouseEvent} event
* `mousedown` or `touchstart` event that triggered this function
*
* @listens mousedown
*/
handleMouseDown(event) {
this.mouseused_ = true;
}
}
/**
* The text that should display over the `BigPlayButton`s controls. Added for localization.
*
* @type {string}
* @protected
*/
BigPlayButton.prototype.controlText_ = 'Play Video';
Component$1.registerComponent('BigPlayButton', BigPlayButton);
/**
* @file close-button.js
*/
/** @import Player from './player' */
/**
* The `CloseButton` is a `{@link Button}` that fires a `close` event when
* it gets clicked.
*
* @extends Button
*/
class CloseButton extends Button {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.setIcon('cancel');
this.controlText(options && options.controlText || this.localize('Close'));
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-close-button ${super.buildCSSClass()}`;
}
/**
* This gets called when a `CloseButton` gets clicked. See
* {@link ClickableComponent#handleClick} for more information on when
* this will be triggered
*
* @param {Event} event
* The `keydown`, `tap`, or `click` event that caused this function to be
* called.
*
* @listens tap
* @listens click
* @fires CloseButton#close
*/
handleClick(event) {
/**
* Triggered when a `CloseButton` is clicked.
*
* @event CloseButton#close
* @type {Event}
*
* @property {boolean} [bubbles=false]
* set to false so that the close event does not
* bubble up to parents if there is no listener
*/
this.trigger({
type: 'close',
bubbles: false
});
}
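// Usage sketch (assuming a CloseButton added to some parent component):
//
//   const closeBtn = parent.addChild('CloseButton');
//   closeBtn.on('close', () => parent.hide());
//
// Because the event does not bubble, the listener must be attached to the
// button itself rather than to an ancestor.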
/**
* Event handler that is called when a `CloseButton` receives a
* `keydown` event.
*
* By default, if the key is Esc, it will trigger a `click` event.
*
* @param {KeyboardEvent} event
* The `keydown` event that caused this function to be called.
*
* @listens keydown
*/
handleKeyDown(event) {
// Esc button will trigger `click` event
if (event.key === 'Escape') {
event.preventDefault();
event.stopPropagation();
this.trigger('click');
} else {
// Pass keypress handling up for unsupported keys
super.handleKeyDown(event);
}
}
}
Component$1.registerComponent('CloseButton', CloseButton);
/**
* @file play-toggle.js
*/
/** @import Player from './player' */
/**
* Button to toggle between play and pause.
*
* @extends Button
*/
class PlayToggle extends Button {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options={}]
* The key/value store of player options.
*/
constructor(player, options = {}) {
super(player, options);
// show or hide replay icon
options.replay = options.replay === undefined || options.replay;
this.setIcon('play');
this.on(player, 'play', e => this.handlePlay(e));
this.on(player, 'pause', e => this.handlePause(e));
if (options.replay) {
this.on(player, 'ended', e => this.handleEnded(e));
}
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-play-control ${super.buildCSSClass()}`;
}
/**
* This gets called when a `PlayToggle` is "clicked". See
* {@link ClickableComponent} for more detailed information on what a click can be.
*
* @param {Event} [event]
* The `keydown`, `tap`, or `click` event that caused this function to be
* called.
*
* @listens tap
* @listens click
*/
handleClick(event) {
if (this.player_.paused()) {
silencePromise(this.player_.play());
} else {
this.player_.pause();
}
}
/**
* This gets called once after the video has ended and the user seeks so that
* we can change the replay button back to a play button.
*
* @param {Event} [event]
* The event that caused this function to run.
*
* @listens Player#seeked
*/
handleSeeked(event) {
this.removeClass('vjs-ended');
if (this.player_.paused()) {
this.handlePause(event);
} else {
this.handlePlay(event);
}
}
/**
* Add the vjs-playing class to the element so it can change appearance.
*
* @param {Event} [event]
* The event that caused this function to run.
*
* @listens Player#play
*/
handlePlay(event) {
this.removeClass('vjs-ended', 'vjs-paused');
this.addClass('vjs-playing');
// change the button text to "Pause"
this.setIcon('pause');
this.controlText('Pause');
}
/**
* Add the vjs-paused class to the element so it can change appearance.
*
* @param {Event} [event]
* The event that caused this function to run.
*
* @listens Player#pause
*/
handlePause(event) {
this.removeClass('vjs-playing');
this.addClass('vjs-paused');
// change the button text to "Play"
this.setIcon('play');
this.controlText('Play');
}
/**
* Add the vjs-ended class to the element so it can change appearance
*
* @param {Event} [event]
* The event that caused this function to run.
*
* @listens Player#ended
*/
handleEnded(event) {
this.removeClass('vjs-playing');
this.addClass('vjs-ended');
// change the button text to "Replay"
this.setIcon('replay');
this.controlText('Replay');
// on the next seek remove the replay button
this.one(this.player_, 'seeked', e => this.handleSeeked(e));
}
}
/**
* The text that should display over the `PlayToggle`s controls. Added for localization.
*
* @type {string}
* @protected
*/
PlayToggle.prototype.controlText_ = 'Play';
Component$1.registerComponent('PlayToggle', PlayToggle);
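// Configuration sketch: the replay icon shown on 'ended' can be disabled via
// the component's options (option names assumed to follow the usual
// controlBar/playToggle nesting):
//
//   videojs('my-player', {
//     controlBar: { playToggle: { replay: false } }
//   });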
/**
* @file time-display.js
*/
/** @import Player from '../../player' */
/**
* Displays time information about the video
*
* @extends Component
*/
class TimeDisplay extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.on(player, ['timeupdate', 'ended', 'seeking'], e => this.update(e));
this.updateTextNode_();
}
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
const className = this.buildCSSClass();
const el = super.createEl('div', {
className: `${className} vjs-time-control vjs-control`
});
const span = createEl('span', {
className: 'vjs-control-text',
textContent: `${this.localize(this.labelText_)}\u00a0`
}, {
role: 'presentation'
});
el.appendChild(span);
this.contentEl_ = createEl('span', {
className: `${className}-display`
}, {
// span elements have no implicit role, but some screen readers (notably VoiceOver)
// treat them as a break between items in the DOM when using arrow keys
// (or left-to-right swipes on iOS) to read contents of a page. Using
// role='presentation' causes VoiceOver to NOT treat this span as a break.
role: 'presentation'
});
el.appendChild(this.contentEl_);
return el;
}
dispose() {
this.contentEl_ = null;
this.textNode_ = null;
super.dispose();
}
/**
* Updates the displayed time according to the `updateContent` function which is defined in the child class.
*
* @param {Event} [event]
* The `timeupdate`, `ended` or `seeking` (if enableSmoothSeeking is true) event that caused this function to be called.
*/
update(event) {
if (!this.player_.options_.enableSmoothSeeking && event.type === 'seeking') {
return;
}
this.updateContent(event);
}
/**
* Updates the time display text node with a new time
*
* @param {number} [time=0] the time to update to
*
* @private
*/
updateTextNode_(time = 0) {
time = formatTime(time);
if (this.formattedTime_ === time) {
return;
}
this.formattedTime_ = time;
this.requestNamedAnimationFrame('TimeDisplay#updateTextNode_', () => {
if (!this.contentEl_) {
return;
}
let oldNode = this.textNode_;
if (oldNode && this.contentEl_.firstChild !== oldNode) {
oldNode = null;
log$1.warn('TimeDisplay#updateTextnode_: Prevented replacement of text node element since it was no longer a child of this node. Appending a new node instead.');
}
this.textNode_ = document$1.createTextNode(this.formattedTime_);
if (!this.textNode_) {
return;
}
if (oldNode) {
this.contentEl_.replaceChild(this.textNode_, oldNode);
} else {
this.contentEl_.appendChild(this.textNode_);
}
});
}
/**
* To be filled out in the child class, should update the displayed time
* in accordance with the fact that the current time has changed.
*
* @param {Event} [event]
* The `timeupdate` event that caused this to run.
*
* @listens Player#timeupdate
*/
updateContent(event) {}
}
/**
* The text that is added to the `TimeDisplay` for screen reader users.
*
* @type {string}
* @private
*/
TimeDisplay.prototype.labelText_ = 'Time';
/**
* The text that should display over the `TimeDisplay`s controls. Added for localization.
*
* @type {string}
* @protected
*
* @deprecated in v7; controlText_ is not used in non-active display Components
*/
TimeDisplay.prototype.controlText_ = 'Time';
Component$1.registerComponent('TimeDisplay', TimeDisplay);
/**
* @file current-time-display.js
*/
/**
* Displays the current time
*
* @extends Component
*/
class CurrentTimeDisplay extends TimeDisplay {
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return 'vjs-current-time';
}
/**
* Update current time display
*
* @param {Event} [event]
* The `timeupdate` event that caused this function to run.
*
* @listens Player#timeupdate
*/
updateContent(event) {
// Allows for smooth scrubbing, when player can't keep up.
let time;
if (this.player_.ended()) {
time = this.player_.duration();
} else {
time = this.player_.scrubbing() ? this.player_.getCache().currentTime : this.player_.currentTime();
}
this.updateTextNode_(time);
}
}
/**
* The text that is added to the `CurrentTimeDisplay` for screen reader users.
*
* @type {string}
* @private
*/
CurrentTimeDisplay.prototype.labelText_ = 'Current Time';
/**
* The text that should display over the `CurrentTimeDisplay`s controls. Added for localization.
*
* @type {string}
* @protected
*
* @deprecated in v7; controlText_ is not used in non-active display Components
*/
CurrentTimeDisplay.prototype.controlText_ = 'Current Time';
Component$1.registerComponent('CurrentTimeDisplay', CurrentTimeDisplay);
/**
* @file duration-display.js
*/
/** @import Player from '../../player' */
/**
* Displays the duration
*
* @extends Component
*/
class DurationDisplay extends TimeDisplay {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
const updateContent = e => this.updateContent(e);
// we do not want to/need to throttle duration changes,
// as the display should always reflect the duration as soon
// as it has changed
this.on(player, 'durationchange', updateContent);
// Listen to loadstart because the player duration is reset when a new media element is loaded,
// but the durationchange on the user agent will not fire.
// @see [Spec]{@link https://www.w3.org/TR/2011/WD-html5-20110113/video.html#media-element-load-algorithm}
this.on(player, 'loadstart', updateContent);
// Also listen for timeupdate (in the parent) and loadedmetadata because removing those
// listeners could have broken dependent applications/libraries. These
// can likely be removed for 7.0.
this.on(player, 'loadedmetadata', updateContent);
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return 'vjs-duration';
}
/**
* Update duration time display.
*
* @param {Event} [event]
* The `durationchange`, `timeupdate`, or `loadedmetadata` event that caused
* this function to be called.
*
* @listens Player#durationchange
* @listens Player#timeupdate
* @listens Player#loadedmetadata
*/
updateContent(event) {
const duration = this.player_.duration();
this.updateTextNode_(duration);
}
}
/**
* The text that is added to the `DurationDisplay` for screen reader users.
*
* @type {string}
* @private
*/
DurationDisplay.prototype.labelText_ = 'Duration';
/**
* The text that should display over the `DurationDisplay`s controls. Added for localization.
*
* @type {string}
* @protected
*
* @deprecated in v7; controlText_ is not used in non-active display Components
*/
DurationDisplay.prototype.controlText_ = 'Duration';
Component$1.registerComponent('DurationDisplay', DurationDisplay);
/**
* @file time-divider.js
*/
/**
* The separator between the current time and duration.
* Can be hidden if it's not needed in the design.
*
* @extends Component
*/
class TimeDivider extends Component$1 {
/**
* Create the component's DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
const el = super.createEl('div', {
className: 'vjs-time-control vjs-time-divider'
}, {
// this element and its contents can be hidden from assistive techs since
// it is made extraneous by the announcement of the control text
// for the current time and duration displays
'aria-hidden': true
});
const div = super.createEl('div');
const span = super.createEl('span', {
textContent: '/'
});
div.appendChild(span);
el.appendChild(div);
return el;
}
}
Component$1.registerComponent('TimeDivider', TimeDivider);
/**
* @file remaining-time-display.js
*/
/** @import Player from '../../player' */
/**
* Displays the time left in the video
*
* @extends Component
*/
class RemainingTimeDisplay extends TimeDisplay {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.on(player, 'durationchange', e => this.updateContent(e));
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return 'vjs-remaining-time';
}
/**
* Create the `Component`'s DOM element with the "minus" character prepended to the time
*
* @return {Element}
* The element that was created.
*/
createEl() {
const el = super.createEl();
if (this.options_.displayNegative !== false) {
el.insertBefore(createEl('span', {}, {
'aria-hidden': true
}, '-'), this.contentEl_);
}
return el;
}
/**
* Update remaining time display.
*
* @param {Event} [event]
* The `timeupdate` or `durationchange` event that caused this to run.
*
* @listens Player#timeupdate
* @listens Player#durationchange
*/
updateContent(event) {
if (typeof this.player_.duration() !== 'number') {
return;
}
let time;
// @deprecated We should only use remainingTimeDisplay
// as of video.js 7
if (this.player_.ended()) {
time = 0;
} else if (this.player_.remainingTimeDisplay) {
time = this.player_.remainingTimeDisplay();
} else {
time = this.player_.remainingTime();
}
this.updateTextNode_(time);
}
}
/**
* The text that is added to the `RemainingTimeDisplay` for screen reader users.
*
* @type {string}
* @private
*/
RemainingTimeDisplay.prototype.labelText_ = 'Remaining Time';
/**
* The text that should display over the `RemainingTimeDisplay`s controls. Added for localization.
*
* @type {string}
* @protected
*
* @deprecated in v7; controlText_ is not used in non-active display Components
*/
RemainingTimeDisplay.prototype.controlText_ = 'Remaining Time';
Component$1.registerComponent('RemainingTimeDisplay', RemainingTimeDisplay);
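// Configuration sketch: the leading '-' can be suppressed through the
// component's displayNegative option (same option-nesting assumption as for
// playToggle above):
//
//   videojs('my-player', {
//     controlBar: { remainingTimeDisplay: { displayNegative: false } }
//   });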
/**
* @file live-display.js
*/
/** @import Player from './player' */
// TODO - Future make it click to snap to live
/**
* Displays the live indicator when duration is Infinity.
*
* @extends Component
*/
class LiveDisplay extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.updateShowing();
this.on(this.player(), 'durationchange', e => this.updateShowing(e));
}
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
const el = super.createEl('div', {
className: 'vjs-live-control vjs-control'
});
this.contentEl_ = createEl('div', {
className: 'vjs-live-display'
}, {
'aria-live': 'off'
});
this.contentEl_.appendChild(createEl('span', {
className: 'vjs-control-text',
textContent: `${this.localize('Stream Type')}\u00a0`
}));
this.contentEl_.appendChild(document$1.createTextNode(this.localize('LIVE')));
el.appendChild(this.contentEl_);
return el;
}
dispose() {
this.contentEl_ = null;
super.dispose();
}
/**
* Check the duration to see if the LiveDisplay should be showing or not. Then show/hide
* it accordingly
*
* @param {Event} [event]
* The {@link Player#durationchange} event that caused this function to run.
*
* @listens Player#durationchange
*/
updateShowing(event) {
if (this.player().duration() === Infinity) {
this.show();
} else {
this.hide();
}
}
}
Component$1.registerComponent('LiveDisplay', LiveDisplay);
/**
* @file seek-to-live.js
*/
/** @import Player from './player' */
/**
* A button that seeks to the live edge when clicked and indicates whether
* playback is currently at the live edge.
*
* @extends Component
*/
class SeekToLive extends Button {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.updateLiveEdgeStatus();
if (this.player_.liveTracker) {
this.updateLiveEdgeStatusHandler_ = e => this.updateLiveEdgeStatus(e);
this.on(this.player_.liveTracker, 'liveedgechange', this.updateLiveEdgeStatusHandler_);
}
}
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
const el = super.createEl('button', {
className: 'vjs-seek-to-live-control vjs-control'
});
this.setIcon('circle', el);
this.textEl_ = createEl('span', {
className: 'vjs-seek-to-live-text',
textContent: this.localize('LIVE')
}, {
'aria-hidden': 'true'
});
el.appendChild(this.textEl_);
return el;
}
/**
* Update the state of this button to reflect whether or not
* we are at the live edge
*/
updateLiveEdgeStatus() {
// default to live edge
if (!this.player_.liveTracker || this.player_.liveTracker.atLiveEdge()) {
this.setAttribute('aria-disabled', true);
this.addClass('vjs-at-live-edge');
this.controlText('Seek to live, currently playing live');
} else {
this.setAttribute('aria-disabled', false);
this.removeClass('vjs-at-live-edge');
this.controlText('Seek to live, currently behind live');
}
}
/**
* On click bring us as near to the live point as possible.
* This requires that we wait for the next `live-seekable-change`
* event which will happen every segment length seconds.
*/
handleClick() {
this.player_.liveTracker.seekToLiveEdge();
}
/**
* Dispose of the element and stop tracking
*/
dispose() {
if (this.player_.liveTracker) {
this.off(this.player_.liveTracker, 'liveedgechange', this.updateLiveEdgeStatusHandler_);
}
this.textEl_ = null;
super.dispose();
}
}
/**
* The text that should display over the `SeekToLive`s control. Added for localization.
*
* @type {string}
* @protected
*/
SeekToLive.prototype.controlText_ = 'Seek to live, currently playing live';
Component$1.registerComponent('SeekToLive', SeekToLive);
/**
* @file num.js
* @module num
*/
/**
* Keep a number between a min and a max value
*
* @param {number} number
* The number to clamp
*
* @param {number} min
* The minimum value
* @param {number} max
* The maximum value
*
* @return {number}
* the clamped number
*/
function clamp(number, min, max) {
number = Number(number);
return Math.min(max, Math.max(min, isNaN(number) ? min : number));
}
var Num = /*#__PURE__*/Object.freeze({
__proto__: null,
clamp: clamp
});
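// For example, clamp(150, 0, 100) returns 100, clamp(-3, 0, 100) returns 0,
// and non-numeric input falls back to the minimum: clamp('x', 0, 100) returns 0.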
/**
* @file slider.js
*/
/** @import Player from '../player' */
/**
* The base functionality for a slider. Can be vertical or horizontal.
* For instance the volume bar or the seek bar on a video is a slider.
*
* @extends Component
*/
class Slider extends Component$1 {
/**
* Create an instance of this class
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.handleMouseDown_ = e => this.handleMouseDown(e);
this.handleMouseUp_ = e => this.handleMouseUp(e);
this.handleKeyDown_ = e => this.handleKeyDown(e);
this.handleClick_ = e => this.handleClick(e);
this.handleMouseMove_ = e => this.handleMouseMove(e);
this.update_ = e => this.update(e);
// Set the `bar` property to the child component that the Slider subclass expects
this.bar = this.getChild(this.options_.barName);
// Set a horizontal or vertical class on the slider depending on the slider type
this.vertical(!!this.options_.vertical);
this.enable();
}
/**
* Whether controls are currently enabled for this slider or not.
*
* @return {boolean}
* true if controls are enabled, false otherwise
*/
enabled() {
return this.enabled_;
}
/**
* Enable controls for this slider if they are disabled
*/
enable() {
if (this.enabled()) {
return;
}
this.on('mousedown', this.handleMouseDown_);
this.on('touchstart', this.handleMouseDown_);
this.on('keydown', this.handleKeyDown_);
this.on('click', this.handleClick_);
// TODO: deprecated, controlsvisible does not seem to be fired
this.on(this.player_, 'controlsvisible', this.update);
if (this.playerEvent) {
this.on(this.player_, this.playerEvent, this.update);
}
this.removeClass('disabled');
this.setAttribute('tabindex', 0);
this.enabled_ = true;
}
/**
* Disable controls for this slider if they are enabled
*/
disable() {
if (!this.enabled()) {
return;
}
const doc = this.bar.el_.ownerDocument;
this.off('mousedown', this.handleMouseDown_);
this.off('touchstart', this.handleMouseDown_);
this.off('keydown', this.handleKeyDown_);
this.off('click', this.handleClick_);
this.off(this.player_, 'controlsvisible', this.update_);
this.off(doc, 'mousemove', this.handleMouseMove_);
this.off(doc, 'mouseup', this.handleMouseUp_);
this.off(doc, 'touchmove', this.handleMouseMove_);
this.off(doc, 'touchend', this.handleMouseUp_);
this.removeAttribute('tabindex');
this.addClass('disabled');
if (this.playerEvent) {
this.off(this.player_, this.playerEvent, this.update);
}
this.enabled_ = false;
}
/**
* Create the `Slider`s DOM element.
*
* @param {string} type
* Type of element to create.
*
* @param {Object} [props={}]
* List of properties in Object form.
*
* @param {Object} [attributes={}]
* list of attributes in Object form.
*
* @return {Element}
* The element that gets created.
*/
createEl(type, props = {}, attributes = {}) {
// Add the slider element class to all sub classes
props.className = props.className + ' vjs-slider';
props = Object.assign({
tabIndex: 0
}, props);
attributes = Object.assign({
'role': 'slider',
'aria-valuenow': 0,
'aria-valuemin': 0,
'aria-valuemax': 100
}, attributes);
return super.createEl(type, props, attributes);
}
/**
* Handle `mousedown` or `touchstart` events on the `Slider`.
*
* @param {MouseEvent} event
* `mousedown` or `touchstart` event that triggered this function
*
* @listens mousedown
* @listens touchstart
* @fires Slider#slideractive
*/
handleMouseDown(event) {
const doc = this.bar.el_.ownerDocument;
if (event.type === 'mousedown') {
event.preventDefault();
}
// Do not call preventDefault() on touchstart in Chrome
// to avoid console warnings. Use a 'touch-action: none' style
// instead to prevent unintended scrolling.
// https://developers.google.com/web/updates/2017/01/scrolling-intervention
if (event.type === 'touchstart' && !IS_CHROME) {
event.preventDefault();
}
blockTextSelection();
this.addClass('vjs-sliding');
/**
* Triggered when the slider is in an active state
*
* @event Slider#slideractive
* @type {MouseEvent}
*/
this.trigger('slideractive');
this.on(doc, 'mousemove', this.handleMouseMove_);
this.on(doc, 'mouseup', this.handleMouseUp_);
this.on(doc, 'touchmove', this.handleMouseMove_);
this.on(doc, 'touchend', this.handleMouseUp_);
this.handleMouseMove(event, true);
}
/**
* Handle the `mousemove`, `touchmove`, and `mousedown` events on this `Slider`.
* The `mousemove` and `touchmove` events will only trigger this function during
* `mousedown` and `touchstart`. This is due to {@link Slider#handleMouseDown} and
* {@link Slider#handleMouseUp}.
*
* @param {MouseEvent} event
* `mousedown`, `mousemove`, `touchstart`, or `touchmove` event that triggered
* this function
* @param {boolean} [mouseDown=false]
* A flag set to true when `handleMouseMove` is called directly from
* `handleMouseDown`; it lets the handler skip work that should only happen
* on a regular mouse move.
*
* @listens mousemove
* @listens touchmove
*/
handleMouseMove(event) {}
/**
* Handle `mouseup` or `touchend` events on the `Slider`.
*
* @param {MouseEvent} event
* `mouseup` or `touchend` event that triggered this function.
*
* @listens touchend
* @listens mouseup
* @fires Slider#sliderinactive
*/
handleMouseUp(event) {
const doc = this.bar.el_.ownerDocument;
unblockTextSelection();
this.removeClass('vjs-sliding');
/**
* Triggered when the slider is no longer in an active state.
*
* @event Slider#sliderinactive
* @type {Event}
*/
this.trigger('sliderinactive');
this.off(doc, 'mousemove', this.handleMouseMove_);
this.off(doc, 'mouseup', this.handleMouseUp_);
this.off(doc, 'touchmove', this.handleMouseMove_);
this.off(doc, 'touchend', this.handleMouseUp_);
this.update();
}
/**
* Update the progress bar of the `Slider`.
*
* @return {number}
* The percentage of progress the progress bar represents as a
* number from 0 to 1.
*/
update() {
// In VolumeBar init we have a setTimeout for update that pushes the update
// to the end of the execution stack. If the player is destroyed before then,
// calling update would throw an error.
// Also bail out if there's no bar.
if (!this.el_ || !this.bar) {
return;
}
// clamp progress between 0 and 1
// and only round to four decimal places, as we round to two below
const progress = this.getProgress();
if (progress === this.progress_) {
return progress;
}
this.progress_ = progress;
this.requestNamedAnimationFrame('Slider#update', () => {
// Set the new bar width or height
const sizeKey = this.vertical() ? 'height' : 'width';
// Convert to a percentage for css value
this.bar.el().style[sizeKey] = (progress * 100).toFixed(2) + '%';
});
return progress;
}
/**
* Get the percentage of the bar that should be filled
* but clamped and rounded.
*
* @return {number}
* percentage filled that the slider is
*/
getProgress() {
return Number(clamp(this.getPercent(), 0, 1).toFixed(4));
}
/**
* Calculate distance for slider
*
* @param {Event} event
* The event that caused this function to run.
*
* @return {number}
* The current position of the Slider.
* - position.y for vertical `Slider`s
* - position.x for horizontal `Slider`s
*/
calculateDistance(event) {
const position = getPointerPosition(this.el_, event);
if (this.vertical()) {
return position.y;
}
return position.x;
}
/**
* Handle a `keydown` event on the `Slider`. Watches for left, right, up, and down
* arrow keys. This function will only be called when the slider has focus. See
* {@link Slider#handleFocus} and {@link Slider#handleBlur}.
*
* @param {KeyboardEvent} event
* the `keydown` event that caused this function to run.
*
* @listens keydown
*/
handleKeyDown(event) {
const spatialNavOptions = this.options_.playerOptions.spatialNavigation;
const spatialNavEnabled = spatialNavOptions && spatialNavOptions.enabled;
const horizontalSeek = spatialNavOptions && spatialNavOptions.horizontalSeek;
if (spatialNavEnabled) {
if (horizontalSeek && event.key === 'ArrowLeft' || !horizontalSeek && event.key === 'ArrowDown') {
event.preventDefault();
event.stopPropagation();
this.stepBack();
} else if (horizontalSeek && event.key === 'ArrowRight' || !horizontalSeek && event.key === 'ArrowUp') {
event.preventDefault();
event.stopPropagation();
this.stepForward();
} else {
super.handleKeyDown(event);
}
// Left and Down Arrows
} else if (event.key === 'ArrowLeft' || event.key === 'ArrowDown') {
event.preventDefault();
event.stopPropagation();
this.stepBack();
// Up and Right Arrows
} else if (event.key === 'ArrowUp' || event.key === 'ArrowRight') {
event.preventDefault();
event.stopPropagation();
this.stepForward();
} else {
// Pass keydown handling up for unsupported keys
super.handleKeyDown(event);
}
}
/**
* Listener for click events on slider, used to prevent clicks
* from bubbling up to parent elements like button menus.
*
* @param {Object} event
* Event that caused this function to run
*/
handleClick(event) {
event.stopPropagation();
event.preventDefault();
}
/**
* Get/set whether the slider is vertical or horizontal
*
* @param {boolean} [bool]
* - true if slider is vertical,
* - false if horizontal
*
* @return {boolean}
* - true if slider is vertical, and getting
* - false if the slider is horizontal, and getting
*/
vertical(bool) {
if (bool === undefined) {
return this.vertical_ || false;
}
this.vertical_ = !!bool;
if (this.vertical_) {
this.addClass('vjs-slider-vertical');
} else {
this.addClass('vjs-slider-horizontal');
}
}
}
Component$1.registerComponent('Slider', Slider);
/**
* @file load-progress-bar.js
*/
/** @import Player from '../../player' */
// get the percent width of a time compared to the total end
const percentify = (time, end) => clamp(time / end * 100, 0, 100).toFixed(2) + '%';
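// For example, percentify(30, 120) returns '25.00%', and values past the end
// are clamped: percentify(150, 120) returns '100.00%'.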
/**
* Shows loading progress
*
* @extends Component
*/
class LoadProgressBar extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.partEls_ = [];
this.on(player, 'progress', e => this.update(e));
}
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
const el = super.createEl('div', {
className: 'vjs-load-progress'
});
const wrapper = createEl('span', {
className: 'vjs-control-text'
});
const loadedText = createEl('span', {
textContent: this.localize('Loaded')
});
const separator = document$1.createTextNode(': ');
this.percentageEl_ = createEl('span', {
className: 'vjs-control-text-loaded-percentage',
textContent: '0%'
});
el.appendChild(wrapper);
wrapper.appendChild(loadedText);
wrapper.appendChild(separator);
wrapper.appendChild(this.percentageEl_);
return el;
}
dispose() {
this.partEls_ = null;
this.percentageEl_ = null;
super.dispose();
}
/**
* Update progress bar
*
* @param {Event} [event]
* The `progress` event that caused this function to run.
*
* @listens Player#progress
*/
update(event) {
this.requestNamedAnimationFrame('LoadProgressBar#update', () => {
const liveTracker = this.player_.liveTracker;
const buffered = this.player_.buffered();
const duration = liveTracker && liveTracker.isLive() ? liveTracker.seekableEnd() : this.player_.duration();
const bufferedEnd = this.player_.bufferedEnd();
const children = this.partEls_;
const percent = percentify(bufferedEnd, duration);
if (this.percent_ !== percent) {
// update the width of the progress bar
this.el_.style.width = percent;
// update the control-text
textContent(this.percentageEl_, percent);
this.percent_ = percent;
}
// add child elements to represent the individual buffered time ranges
for (let i = 0; i < buffered.length; i++) {
const start = buffered.start(i);
const end = buffered.end(i);
let part = children[i];
if (!part) {
part = this.el_.appendChild(createEl());
children[i] = part;
}
// only update if changed
if (part.dataset.start === start && part.dataset.end === end) {
continue;
}
part.dataset.start = start;
part.dataset.end = end;
// set the percent based on the width of the progress bar (bufferedEnd)
part.style.left = percentify(start, bufferedEnd);
part.style.width = percentify(end - start, bufferedEnd);
}
// remove unused buffered range elements
for (let i = children.length; i > buffered.length; i--) {
this.el_.removeChild(children[i - 1]);
}
children.length = buffered.length;
});
}
}
Component$1.registerComponent('LoadProgressBar', LoadProgressBar);
/**
* @file time-tooltip.js
*/
/** @import Player from '../../player' */
/**
* Time tooltips display a time above the progress bar.
*
* @extends Component
*/
class TimeTooltip extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The {@link Player} that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.update = throttle(bind_(this, this.update), UPDATE_REFRESH_INTERVAL);
}
/**
* Create the time tooltip DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
return super.createEl('div', {
className: 'vjs-time-tooltip'
}, {
'aria-hidden': 'true'
});
}
/**
* Updates the position of the time tooltip relative to the `SeekBar`.
*
* @param {Object} seekBarRect
* The `ClientRect` for the {@link SeekBar} element.
*
* @param {number} seekBarPoint
* A number from 0 to 1, representing a horizontal reference point
* from the left edge of the {@link SeekBar}
*
* @param {string} content
* The formatted time string to write into the tooltip.
*/
update(seekBarRect, seekBarPoint, content) {
const tooltipRect = findPosition(this.el_);
const playerRect = getBoundingClientRect(this.player_.el());
const seekBarPointPx = seekBarRect.width * seekBarPoint;
// do nothing if either rect isn't available
// for example, if the player isn't in the DOM for testing
if (!playerRect || !tooltipRect) {
return;
}
// This is the space left of the `seekBarPoint` available within the bounds
// of the player. We calculate any gap between the left edge of the player
// and the left edge of the `SeekBar` and add the number of pixels in the
// `SeekBar` before hitting the `seekBarPoint`
let spaceLeftOfPoint = seekBarRect.left - playerRect.left + seekBarPointPx;
// This is the space right of the `seekBarPoint` available within the bounds
// of the player. We calculate the number of pixels from the `seekBarPoint`
// to the right edge of the `SeekBar` and add to that any gap between the
// right edge of the `SeekBar` and the player.
let spaceRightOfPoint = seekBarRect.width - seekBarPointPx + (playerRect.right - seekBarRect.right);
// spaceRightOfPoint is always NaN for mouse time display
// because the seekbarRect does not have a right property. This causes
// the mouse tool tip to be truncated when it's close to the right edge of the player.
// In such cases, we ignore the `playerRect.right - seekBarRect.right` value when calculating.
// For the sake of consistency, we ignore seekBarRect.left - playerRect.left for the left edge.
if (!spaceRightOfPoint) {
spaceRightOfPoint = seekBarRect.width - seekBarPointPx;
spaceLeftOfPoint = seekBarPointPx;
}
// This is the number of pixels by which the tooltip will need to be pulled
// further to the right to center it over the `seekBarPoint`.
let pullTooltipBy = tooltipRect.width / 2;
// Adjust the `pullTooltipBy` distance to the left or right depending on
// the results of the space calculations above.
if (spaceLeftOfPoint < pullTooltipBy) {
pullTooltipBy += pullTooltipBy - spaceLeftOfPoint;
} else if (spaceRightOfPoint < pullTooltipBy) {
pullTooltipBy = spaceRightOfPoint;
}
// Due to the imprecision of decimal/ratio based calculations and varying
// rounding behaviors, there are cases where the spacing adjustment is off
// by a pixel or two. This adds insurance to these calculations.
if (pullTooltipBy < 0) {
pullTooltipBy = 0;
} else if (pullTooltipBy > tooltipRect.width) {
pullTooltipBy = tooltipRect.width;
}
// prevent small width fluctuations within 0.4px from
// changing the value below.
// This really helps for live to prevent the play
// progress time tooltip from jittering
pullTooltipBy = Math.round(pullTooltipBy);
this.el_.style.right = `-${pullTooltipBy}px`;
this.write(content);
}
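// Worked example: with a 600px seek bar flush against the player edges, a
// point at 0.98 (588px from the left) and a 60px-wide tooltip,
// spaceRightOfPoint is 12px, so pullTooltipBy drops from the centered 30px
// to 12px and the tooltip hugs the right edge instead of overflowing the player.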
/**
* Write the time to the tooltip DOM element.
*
* @param {string} content
* The formatted time for the tooltip.
*/
write(content) {
textContent(this.el_, content);
}
/**
* Updates the position of the time tooltip relative to the `SeekBar`.
*
* @param {Object} seekBarRect
* The `ClientRect` for the {@link SeekBar} element.
*
* @param {number} seekBarPoint
* A number from 0 to 1, representing a horizontal reference point
* from the left edge of the {@link SeekBar}
*
* @param {number} time
* The time to update the tooltip to, not used during live playback
*
* @param {Function} cb
* A function that will be called during the request animation frame
* for tooltips that need to do additional animations from the default
*/
updateTime(seekBarRect, seekBarPoint, time, cb) {
this.requestNamedAnimationFrame('TimeTooltip#updateTime', () => {
let content;
const duration = this.player_.duration();
if (this.player_.liveTracker && this.player_.liveTracker.isLive()) {
const liveWindow = this.player_.liveTracker.liveWindow();
const secondsBehind = liveWindow - seekBarPoint * liveWindow;
content = (secondsBehind < 1 ? '' : '-') + formatTime(secondsBehind, liveWindow);
} else {
content = formatTime(time, duration);
}
this.update(seekBarRect, seekBarPoint, content);
if (cb) {
cb();
}
});
}
}
Component$1.registerComponent('TimeTooltip', TimeTooltip);
/**
* @file play-progress-bar.js
*/
/**
* Used by {@link SeekBar} to display media playback progress as part of the
* {@link ProgressControl}.
*
* @extends Component
*/
class PlayProgressBar extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The {@link Player} that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.setIcon('circle');
this.update = throttle(bind_(this, this.update), UPDATE_REFRESH_INTERVAL);
}
/**
* Create the DOM element for this class.
*
* @return {Element}
* The element that was created.
*/
createEl() {
return super.createEl('div', {
className: 'vjs-play-progress vjs-slider-bar'
}, {
'aria-hidden': 'true'
});
}
/**
* Enqueues updates to its own DOM as well as the DOM of its
* {@link TimeTooltip} child.
*
* @param {Object} seekBarRect
* The `ClientRect` for the {@link SeekBar} element.
*
* @param {number} seekBarPoint
* A number from 0 to 1, representing a horizontal reference point
* from the left edge of the {@link SeekBar}
*/
update(seekBarRect, seekBarPoint) {
const timeTooltip = this.getChild('timeTooltip');
if (!timeTooltip) {
return;
}
const time = this.player_.scrubbing() ? this.player_.getCache().currentTime : this.player_.currentTime();
timeTooltip.updateTime(seekBarRect, seekBarPoint, time);
}
}
/**
* Default options for {@link PlayProgressBar}.
*
* @type {Object}
* @private
*/
PlayProgressBar.prototype.options_ = {
children: []
};
// Time tooltips should not be added to a player on mobile devices
if (!IS_IOS && !IS_ANDROID) {
PlayProgressBar.prototype.options_.children.push('timeTooltip');
}
Component$1.registerComponent('PlayProgressBar', PlayProgressBar);
/**
* @file mouse-time-display.js
*/
/**
* The {@link MouseTimeDisplay} component tracks mouse movement over the
* {@link ProgressControl}. It displays an indicator and a {@link TimeTooltip}
* indicating the time which is represented by a given point in the
* {@link ProgressControl}.
*
* @extends Component
*/
class MouseTimeDisplay extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The {@link Player} that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.update = throttle(bind_(this, this.update), UPDATE_REFRESH_INTERVAL);
}
/**
* Create the DOM element for this class.
*
* @return {Element}
* The element that was created.
*/
createEl() {
return super.createEl('div', {
className: 'vjs-mouse-display'
});
}
/**
* Enqueues updates to its own DOM as well as the DOM of its
* {@link TimeTooltip} child.
*
* @param {Object} seekBarRect
* The `ClientRect` for the {@link SeekBar} element.
*
* @param {number} seekBarPoint
* A number from 0 to 1, representing a horizontal reference point
* from the left edge of the {@link SeekBar}
*/
update(seekBarRect, seekBarPoint) {
const time = seekBarPoint * this.player_.duration();
this.getChild('timeTooltip').updateTime(seekBarRect, seekBarPoint, time, () => {
this.el_.style.left = `${seekBarRect.width * seekBarPoint}px`;
});
}
}
/**
* Default options for `MouseTimeDisplay`
*
* @type {Object}
* @private
*/
MouseTimeDisplay.prototype.options_ = {
children: ['timeTooltip']
};
Component$1.registerComponent('MouseTimeDisplay', MouseTimeDisplay);
/**
* @file seek-bar.js
*/
// The number of seconds the `step*` functions move the timeline.
const STEP_SECONDS = 5;
// The multiplier of STEP_SECONDS that PgUp/PgDown move the timeline.
const PAGE_KEY_MULTIPLIER = 12;
/**
* Seek bar and container for the progress bars. Uses {@link PlayProgressBar}
* as its `bar`.
*
* @extends Slider
*/
class SeekBar extends Slider {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.setEventHandlers_();
}
/**
* Sets the event handlers
*
* @private
*/
setEventHandlers_() {
this.update_ = bind_(this, this.update);
this.update = throttle(this.update_, UPDATE_REFRESH_INTERVAL);
this.on(this.player_, ['durationchange', 'timeupdate'], this.update);
this.on(this.player_, ['ended'], this.update_);
if (this.player_.liveTracker) {
this.on(this.player_.liveTracker, 'liveedgechange', this.update);
}
// when playing, let's ensure we smoothly update the play progress bar
// via an interval
this.updateInterval = null;
this.enableIntervalHandler_ = e => this.enableInterval_(e);
this.disableIntervalHandler_ = e => this.disableInterval_(e);
this.on(this.player_, ['playing'], this.enableIntervalHandler_);
this.on(this.player_, ['ended', 'pause', 'waiting'], this.disableIntervalHandler_);
// we don't need to update the play progress if the document is hidden,
// also, this causes the CPU to spike and eventually crash the page on IE11.
if ('hidden' in document$1 && 'visibilityState' in document$1) {
this.on(document$1, 'visibilitychange', this.toggleVisibility_);
}
}
toggleVisibility_(e) {
if (document$1.visibilityState === 'hidden') {
this.cancelNamedAnimationFrame('SeekBar#update');
this.cancelNamedAnimationFrame('Slider#update');
this.disableInterval_(e);
} else {
if (!this.player_.ended() && !this.player_.paused()) {
this.enableInterval_();
}
// we just switched back to the page and someone may be looking, so, update ASAP
this.update();
}
}
enableInterval_() {
if (this.updateInterval) {
return;
}
this.updateInterval = this.setInterval(this.update, UPDATE_REFRESH_INTERVAL);
}
disableInterval_(e) {
if (this.player_.liveTracker && this.player_.liveTracker.isLive() && e && e.type !== 'ended') {
return;
}
if (!this.updateInterval) {
return;
}
this.clearInterval(this.updateInterval);
this.updateInterval = null;
}
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
return super.createEl('div', {
className: 'vjs-progress-holder'
}, {
'aria-label': this.localize('Progress Bar')
});
}
/**
* This function updates the play progress bar and accessibility
* attributes to whatever is passed in.
*
* @param {Event} [event]
* The `timeupdate` or `ended` event that caused this to run.
*
* @listens Player#timeupdate
*
* @return {number}
* The current percent as a number from 0-1
*/
update(event) {
// ignore updates while the tab is hidden
if (document$1.visibilityState === 'hidden') {
return;
}
const percent = super.update();
this.requestNamedAnimationFrame('SeekBar#update', () => {
const currentTime = this.player_.ended() ? this.player_.duration() : this.getCurrentTime_();
const liveTracker = this.player_.liveTracker;
let duration = this.player_.duration();
if (liveTracker && liveTracker.isLive()) {
duration = this.player_.liveTracker.liveCurrentTime();
}
if (this.percent_ !== percent) {
// machine readable value of progress bar (percentage complete)
this.el_.setAttribute('aria-valuenow', (percent * 100).toFixed(2));
this.percent_ = percent;
}
if (this.currentTime_ !== currentTime || this.duration_ !== duration) {
// human readable value of progress bar (time complete)
this.el_.setAttribute('aria-valuetext', this.localize('progress bar timing: currentTime={1} duration={2}', [formatTime(currentTime, duration), formatTime(duration, duration)], '{1} of {2}'));
this.currentTime_ = currentTime;
this.duration_ = duration;
}
// update the progress bar time tooltip with the current time
if (this.bar) {
this.bar.update(getBoundingClientRect(this.el()), this.getProgress());
}
});
return percent;
}
/**
* Prevent liveThreshold from causing seeks to seem like they
* are not happening from a user perspective.
*
* @param {number} ct
* current time to seek to
*/
userSeek_(ct) {
if (this.player_.liveTracker && this.player_.liveTracker.isLive()) {
this.player_.liveTracker.nextSeekedFromUser();
}
this.player_.currentTime(ct);
}
/**
* Get the value of current time but allows for smooth scrubbing,
* when player can't keep up.
*
* @return {number}
* The current time value to display
*
* @private
*/
getCurrentTime_() {
return this.player_.scrubbing() ? this.player_.getCache().currentTime : this.player_.currentTime();
}
/**
* Get the percentage of media played so far.
*
* @return {number}
* The percentage of media played so far (0 to 1).
*/
getPercent() {
const currentTime = this.getCurrentTime_();
let percent;
const liveTracker = this.player_.liveTracker;
if (liveTracker && liveTracker.isLive()) {
percent = (currentTime - liveTracker.seekableStart()) / liveTracker.liveWindow();
// prevent the percent from changing at the live edge
if (liveTracker.atLiveEdge()) {
percent = 1;
}
} else {
percent = currentTime / this.player_.duration();
}
return percent;
}
/**
* Handle mouse down on seek bar
*
* @param {MouseEvent} event
* The `mousedown` event that caused this to run.
*
* @listens mousedown
*/
handleMouseDown(event) {
if (!isSingleLeftClick(event)) {
return;
}
// Stop event propagation to prevent double fire in progress-control.js
event.stopPropagation();
this.videoWasPlaying = !this.player_.paused();
this.player_.pause();
super.handleMouseDown(event);
}
/**
* Handle mouse move on seek bar
*
* @param {MouseEvent} event
* The `mousemove` event that caused this to run.
* @param {boolean} [mouseDown=false]
* A flag that should be set to true if `handleMouseMove` is called directly.
* It allows us to skip things that should not happen when coming from mouse down,
* but should happen on the regular mouse move handler.
*
* @listens mousemove
*/
handleMouseMove(event, mouseDown = false) {
if (!isSingleLeftClick(event) || isNaN(this.player_.duration())) {
return;
}
if (!mouseDown && !this.player_.scrubbing()) {
this.player_.scrubbing(true);
}
let newTime;
const distance = this.calculateDistance(event);
const liveTracker = this.player_.liveTracker;
if (!liveTracker || !liveTracker.isLive()) {
newTime = distance * this.player_.duration();
// Don't let video end while scrubbing.
if (newTime === this.player_.duration()) {
newTime = newTime - 0.1;
}
} else {
if (distance >= 0.99) {
liveTracker.seekToLiveEdge();
return;
}
const seekableStart = liveTracker.seekableStart();
const seekableEnd = liveTracker.liveCurrentTime();
newTime = seekableStart + distance * liveTracker.liveWindow();
// Don't let video end while scrubbing.
if (newTime >= seekableEnd) {
newTime = seekableEnd;
}
// Compensate for precision differences so that currentTime is not less
// than seekable start
if (newTime <= seekableStart) {
newTime = seekableStart + 0.1;
}
// On android seekableEnd can be Infinity sometimes,
// this will cause newTime to be Infinity, which is
// not a valid currentTime.
if (newTime === Infinity) {
return;
}
}
// Set new time (tell player to seek to new time)
this.userSeek_(newTime);
if (this.player_.options_.enableSmoothSeeking) {
this.update();
}
}
enable() {
super.enable();
const mouseTimeDisplay = this.getChild('mouseTimeDisplay');
if (!mouseTimeDisplay) {
return;
}
mouseTimeDisplay.show();
}
disable() {
super.disable();
const mouseTimeDisplay = this.getChild('mouseTimeDisplay');
if (!mouseTimeDisplay) {
return;
}
mouseTimeDisplay.hide();
}
/**
* Handle mouse up on seek bar
*
* @param {MouseEvent} event
* The `mouseup` event that caused this to run.
*
* @listens mouseup
*/
handleMouseUp(event) {
super.handleMouseUp(event);
// Stop event propagation to prevent double fire in progress-control.js
if (event) {
event.stopPropagation();
}
this.player_.scrubbing(false);
/**
* Trigger timeupdate because we're done seeking and the time has changed.
* This is particularly useful when the player is paused, so that the time displays get updated.
*
* @event Tech#timeupdate
* @type {Event}
*/
this.player_.trigger({
type: 'timeupdate',
target: this,
manuallyTriggered: true
});
if (this.videoWasPlaying) {
silencePromise(this.player_.play());
} else {
// We're done seeking and the time has changed.
// If the player is paused, make sure we display the correct time on the seek bar.
this.update_();
}
}
/**
* Move the playhead forward by STEP_SECONDS for keyboard-only users
*/
stepForward() {
this.userSeek_(this.player_.currentTime() + STEP_SECONDS);
}
/**
* Move the playhead backward by STEP_SECONDS for keyboard-only users
*/
stepBack() {
this.userSeek_(this.player_.currentTime() - STEP_SECONDS);
}
/**
* Toggles the playback state of the player
* This gets called when enter or space is used on the seekbar
*
* @param {KeyboardEvent} event
* The `keydown` event that caused this function to be called
*
*/
handleAction(event) {
if (this.player_.paused()) {
this.player_.play();
} else {
this.player_.pause();
}
}
/**
* Called when this SeekBar has focus and a key gets pressed down.
* Supports the following keys:
*
* Space or Enter key fire a click event
* Home key moves to start of the timeline
* End key moves to end of the timeline
* Digit "0" through "9" keys move to 0%, 10% ... 80%, 90% of the timeline
* PageDown key moves back a larger step than ArrowDown
* PageUp key moves forward a larger step than ArrowUp
*
* @param {KeyboardEvent} event
* The `keydown` event that caused this function to be called.
*
* @listens keydown
*/
handleKeyDown(event) {
const liveTracker = this.player_.liveTracker;
if (event.key === ' ' || event.key === 'Enter') {
event.preventDefault();
event.stopPropagation();
this.handleAction(event);
} else if (event.key === 'Home') {
event.preventDefault();
event.stopPropagation();
this.userSeek_(0);
} else if (event.key === 'End') {
event.preventDefault();
event.stopPropagation();
if (liveTracker && liveTracker.isLive()) {
this.userSeek_(liveTracker.liveCurrentTime());
} else {
this.userSeek_(this.player_.duration());
}
} else if (/^[0-9]$/.test(event.key)) {
event.preventDefault();
event.stopPropagation();
const gotoFraction = parseInt(event.key, 10) * 0.1;
if (liveTracker && liveTracker.isLive()) {
this.userSeek_(liveTracker.seekableStart() + liveTracker.liveWindow() * gotoFraction);
} else {
this.userSeek_(this.player_.duration() * gotoFraction);
}
} else if (event.key === 'PageDown') {
event.preventDefault();
event.stopPropagation();
this.userSeek_(this.player_.currentTime() - STEP_SECONDS * PAGE_KEY_MULTIPLIER);
} else if (event.key === 'PageUp') {
event.preventDefault();
event.stopPropagation();
this.userSeek_(this.player_.currentTime() + STEP_SECONDS * PAGE_KEY_MULTIPLIER);
} else {
// Pass keydown handling up for unsupported keys
super.handleKeyDown(event);
}
}
dispose() {
this.disableInterval_();
this.off(this.player_, ['durationchange', 'timeupdate'], this.update);
this.off(this.player_, ['ended'], this.update_);
if (this.player_.liveTracker) {
this.off(this.player_.liveTracker, 'liveedgechange', this.update);
}
this.off(this.player_, ['playing'], this.enableIntervalHandler_);
this.off(this.player_, ['ended', 'pause', 'waiting'], this.disableIntervalHandler_);
// we don't need to update the play progress if the document is hidden,
// also, this causes the CPU to spike and eventually crash the page on IE11.
if ('hidden' in document$1 && 'visibilityState' in document$1) {
this.off(document$1, 'visibilitychange', this.toggleVisibility_);
}
super.dispose();
}
}
/**
* Default options for the `SeekBar`
*
* @type {Object}
* @private
*/
SeekBar.prototype.options_ = {
children: ['loadProgressBar', 'playProgressBar'],
barName: 'playProgressBar'
};
// MouseTimeDisplay tooltips should not be added to a player on mobile devices
if (!IS_IOS && !IS_ANDROID) {
SeekBar.prototype.options_.children.splice(1, 0, 'mouseTimeDisplay');
}
Component$1.registerComponent('SeekBar', SeekBar);
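/*
 * A minimal configuration sketch, assuming a standard `videojs()` setup: the
 * `enableSmoothSeeking` player option read in `handleMouseMove()` above makes
 * the SeekBar call `update()` on every scrub move rather than waiting for the
 * next `timeupdate`.
 *
 *   const player = videojs('my-video', { enableSmoothSeeking: true });
 */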
/**
* @file progress-control.js
*/
/**
* The Progress Control component contains the seek bar, load progress,
* and play progress.
*
* @extends Component
*/
class ProgressControl extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.handleMouseMove = throttle(bind_(this, this.handleMouseMove), UPDATE_REFRESH_INTERVAL);
this.throttledHandleMouseSeek = throttle(bind_(this, this.handleMouseSeek), UPDATE_REFRESH_INTERVAL);
this.handleMouseUpHandler_ = e => this.handleMouseUp(e);
this.handleMouseDownHandler_ = e => this.handleMouseDown(e);
this.enable();
}
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
return super.createEl('div', {
className: 'vjs-progress-control vjs-control'
});
}
/**
* When the mouse moves over the `ProgressControl`, the pointer position
* gets passed down to the `MouseTimeDisplay` component.
*
* @param {Event} event
* The `mousemove` event that caused this function to run.
*
* @listens mousemove
*/
handleMouseMove(event) {
const seekBar = this.getChild('seekBar');
if (!seekBar) {
return;
}
const playProgressBar = seekBar.getChild('playProgressBar');
const mouseTimeDisplay = seekBar.getChild('mouseTimeDisplay');
if (!playProgressBar && !mouseTimeDisplay) {
return;
}
const seekBarEl = seekBar.el();
const seekBarRect = findPosition(seekBarEl);
let seekBarPoint = getPointerPosition(seekBarEl, event).x;
// The default skin has a gap on either side of the `SeekBar`. This means
// that it's possible to trigger this behavior outside the boundaries of
// the `SeekBar`. This ensures we stay within it at all times.
seekBarPoint = clamp(seekBarPoint, 0, 1);
if (mouseTimeDisplay) {
mouseTimeDisplay.update(seekBarRect, seekBarPoint);
}
if (playProgressBar) {
playProgressBar.update(seekBarRect, seekBar.getProgress());
}
}
/**
* A throttled version of the {@link ProgressControl#handleMouseSeek} listener.
*
* @method ProgressControl#throttledHandleMouseSeek
* @param {Event} event
* The `mousemove` event that caused this function to run.
*
* @listens mousemove
* @listens touchmove
*/
/**
* Handle `mousemove` or `touchmove` events on the `ProgressControl`.
*
* @param {Event} event
* `mousedown` or `touchstart` event that triggered this function
*
* @listens mousemove
* @listens touchmove
*/
handleMouseSeek(event) {
const seekBar = this.getChild('seekBar');
if (seekBar) {
seekBar.handleMouseMove(event);
}
}
/**
* Whether controls are currently enabled for this progress control.
*
* @return {boolean}
* true if controls are enabled, false otherwise
*/
enabled() {
return this.enabled_;
}
/**
* Disable all controls on the progress control and its children
*/
disable() {
this.children().forEach(child => child.disable && child.disable());
if (!this.enabled()) {
return;
}
this.off(['mousedown', 'touchstart'], this.handleMouseDownHandler_);
this.off(this.el_, 'mousemove', this.handleMouseMove);
this.removeListenersAddedOnMousedownAndTouchstart();
this.addClass('disabled');
this.enabled_ = false;
// Restore normal playback state if controls are disabled while scrubbing
if (this.player_.scrubbing()) {
const seekBar = this.getChild('seekBar');
this.player_.scrubbing(false);
if (seekBar.videoWasPlaying) {
silencePromise(this.player_.play());
}
}
}
/**
* Enable all controls on the progress control and its children
*/
enable() {
this.children().forEach(child => child.enable && child.enable());
if (this.enabled()) {
return;
}
this.on(['mousedown', 'touchstart'], this.handleMouseDownHandler_);
this.on(this.el_, 'mousemove', this.handleMouseMove);
this.removeClass('disabled');
this.enabled_ = true;
}
/**
* Cleanup listeners after the user finishes interacting with the progress controls
*/
removeListenersAddedOnMousedownAndTouchstart() {
const doc = this.el_.ownerDocument;
this.off(doc, 'mousemove', this.throttledHandleMouseSeek);
this.off(doc, 'touchmove', this.throttledHandleMouseSeek);
this.off(doc, 'mouseup', this.handleMouseUpHandler_);
this.off(doc, 'touchend', this.handleMouseUpHandler_);
}
/**
* Handle `mousedown` or `touchstart` events on the `ProgressControl`.
*
* @param {Event} event
* `mousedown` or `touchstart` event that triggered this function
*
* @listens mousedown
* @listens touchstart
*/
handleMouseDown(event) {
const doc = this.el_.ownerDocument;
const seekBar = this.getChild('seekBar');
if (seekBar) {
seekBar.handleMouseDown(event);
}
this.on(doc, 'mousemove', this.throttledHandleMouseSeek);
this.on(doc, 'touchmove', this.throttledHandleMouseSeek);
this.on(doc, 'mouseup', this.handleMouseUpHandler_);
this.on(doc, 'touchend', this.handleMouseUpHandler_);
}
/**
* Handle `mouseup` or `touchend` events on the `ProgressControl`.
*
* @param {Event} event
* `mouseup` or `touchend` event that triggered this function.
*
* @listens touchend
* @listens mouseup
*/
handleMouseUp(event) {
const seekBar = this.getChild('seekBar');
if (seekBar) {
seekBar.handleMouseUp(event);
}
this.removeListenersAddedOnMousedownAndTouchstart();
}
}
/**
* Default options for `ProgressControl`
*
* @type {Object}
* @private
*/
ProgressControl.prototype.options_ = {
children: ['seekBar']
};
Component$1.registerComponent('ProgressControl', ProgressControl);
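/*
 * A minimal usage sketch, assuming a player with the default control bar and
 * mirroring the camelCase names this file passes to `getChild()`: the
 * ProgressControl can be toggled at runtime with the enable()/disable()
 * methods above, which also propagate to its SeekBar children.
 *
 *   const progressControl = player.getChild('controlBar').getChild('progressControl');
 *   progressControl.disable(); // stop responding to mouse/touch seeking
 *   progressControl.enable();
 */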
/**
* @file picture-in-picture-toggle.js
*/
/** @import Player from './player' */
/**
* Toggle Picture-in-Picture mode
*
* @extends Button
*/
class PictureInPictureToggle extends Button {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @listens Player#enterpictureinpicture
* @listens Player#leavepictureinpicture
*/
constructor(player, options) {
super(player, options);
this.setIcon('picture-in-picture-enter');
this.on(player, ['enterpictureinpicture', 'leavepictureinpicture'], e => this.handlePictureInPictureChange(e));
this.on(player, ['disablepictureinpicturechanged', 'loadedmetadata'], e => this.handlePictureInPictureEnabledChange(e));
this.on(player, ['loadedmetadata', 'audioonlymodechange', 'audiopostermodechange'], () => this.handlePictureInPictureAudioModeChange());
// TODO: Deactivate button on player emptied event.
this.disable();
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-picture-in-picture-control vjs-hidden ${super.buildCSSClass()}`;
}
/**
* Displays or hides the button depending on the audio mode detection.
* Exits picture-in-picture if it is enabled when switching to audio mode.
*/
handlePictureInPictureAudioModeChange() {
// This audio detection will not detect HLS or DASH audio-only streams because there was no reliable way to detect them at the time
const isSourceAudio = this.player_.currentType().substring(0, 5) === 'audio';
const isAudioMode = isSourceAudio || this.player_.audioPosterMode() || this.player_.audioOnlyMode();
if (!isAudioMode) {
this.show();
return;
}
if (this.player_.isInPictureInPicture()) {
this.player_.exitPictureInPicture();
}
this.hide();
}
/**
* Enables or disables button based on availability of a Picture-In-Picture mode.
*
* Enabled if
* - `player.options().enableDocumentPictureInPicture` is true and
* window.documentPictureInPicture is available; or
* - `player.disablePictureInPicture()` is false and
* element.requestPictureInPicture is available
*/
handlePictureInPictureEnabledChange() {
if (document$1.pictureInPictureEnabled && this.player_.disablePictureInPicture() === false || this.player_.options_.enableDocumentPictureInPicture && 'documentPictureInPicture' in window$1) {
this.enable();
} else {
this.disable();
}
}
/**
* Handles enterpictureinpicture and leavepictureinpicture on the player and change control text accordingly.
*
* @param {Event} [event]
* The {@link Player#enterpictureinpicture} or {@link Player#leavepictureinpicture} event that caused this function to be
* called.
*
* @listens Player#enterpictureinpicture
* @listens Player#leavepictureinpicture
*/
handlePictureInPictureChange(event) {
if (this.player_.isInPictureInPicture()) {
this.setIcon('picture-in-picture-exit');
this.controlText('Exit Picture-in-Picture');
} else {
this.setIcon('picture-in-picture-enter');
this.controlText('Picture-in-Picture');
}
this.handlePictureInPictureEnabledChange();
}
/**
* This gets called when a `PictureInPictureToggle` is "clicked". See
* {@link ClickableComponent} for more detailed information on what a click can be.
*
* @param {Event} [event]
* The `keydown`, `tap`, or `click` event that caused this function to be
* called.
*
* @listens tap
* @listens click
*/
handleClick(event) {
if (!this.player_.isInPictureInPicture()) {
this.player_.requestPictureInPicture();
} else {
this.player_.exitPictureInPicture();
}
}
/**
* Show the `Component`s element if it is hidden by removing the
* 'vjs-hidden' class name from it only in browsers that support the Picture-in-Picture API.
*/
show() {
// Do not display the PictureInPictureToggle in browsers that do not support the Picture-in-Picture API, e.g. Firefox.
if (typeof document$1.exitPictureInPicture !== 'function') {
return;
}
super.show();
}
}
/**
* The text that should display over the `PictureInPictureToggle`s controls. Added for localization.
*
* @type {string}
* @protected
*/
PictureInPictureToggle.prototype.controlText_ = 'Picture-in-Picture';
Component$1.registerComponent('PictureInPictureToggle', PictureInPictureToggle);
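/*
 * A minimal configuration sketch for the enabling conditions described in
 * `handlePictureInPictureEnabledChange()` above, assuming browser support for
 * the respective APIs:
 *
 *   const player = videojs('my-video', { enableDocumentPictureInPicture: true });
 *   player.disablePictureInPicture(false); // keep element Picture-in-Picture available too
 */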
/**
* @file fullscreen-toggle.js
*/
/** @import Player from './player' */
/**
* Toggle fullscreen video
*
* @extends Button
*/
class FullscreenToggle extends Button {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.setIcon('fullscreen-enter');
this.on(player, 'fullscreenchange', e => this.handleFullscreenChange(e));
if (document$1[player.fsApi_.fullscreenEnabled] === false) {
this.disable();
}
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-fullscreen-control ${super.buildCSSClass()}`;
}
/**
* Handles fullscreenchange on the player and change control text accordingly.
*
* @param {Event} [event]
* The {@link Player#fullscreenchange} event that caused this function to be
* called.
*
* @listens Player#fullscreenchange
*/
handleFullscreenChange(event) {
if (this.player_.isFullscreen()) {
this.controlText('Exit Fullscreen');
this.setIcon('fullscreen-exit');
} else {
this.controlText('Fullscreen');
this.setIcon('fullscreen-enter');
}
}
/**
* This gets called when a `FullscreenToggle` is "clicked". See
* {@link ClickableComponent} for more detailed information on what a click can be.
*
* @param {Event} [event]
* The `keydown`, `tap`, or `click` event that caused this function to be
* called.
*
* @listens tap
* @listens click
*/
handleClick(event) {
if (!this.player_.isFullscreen()) {
this.player_.requestFullscreen();
} else {
this.player_.exitFullscreen();
}
}
}
/**
* The text that should display over the `FullscreenToggle`s controls. Added for localization.
*
* @type {string}
* @protected
*/
FullscreenToggle.prototype.controlText_ = 'Fullscreen';
Component$1.registerComponent('FullscreenToggle', FullscreenToggle);
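/*
 * A minimal usage sketch of the same toggle logic `handleClick()` implements,
 * assuming a player instance and a user-gesture context:
 *
 *   if (!player.isFullscreen()) {
 *     player.requestFullscreen();
 *   } else {
 *     player.exitFullscreen();
 *   }
 */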
/** @import Component from '../../component' */
/** @import Player from '../../player' */
/**
* Check if volume control is supported and if it isn't hide the
* `Component` that was passed using the `vjs-hidden` class.
*
* @param {Component} self
* The component that should be hidden if volume is unsupported
*
* @param {Player} player
* A reference to the player
*
* @private
*/
const checkVolumeSupport = function (self, player) {
// hide volume controls when they're not supported by the current tech
if (player.tech_ && !player.tech_.featuresVolumeControl) {
self.addClass('vjs-hidden');
}
self.on(player, 'loadstart', function () {
if (!player.tech_.featuresVolumeControl) {
self.addClass('vjs-hidden');
} else {
self.removeClass('vjs-hidden');
}
});
};
/**
* @file volume-level.js
*/
/**
* Shows volume level
*
* @extends Component
*/
class VolumeLevel extends Component$1 {
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
const el = super.createEl('div', {
className: 'vjs-volume-level'
});
this.setIcon('circle', el);
el.appendChild(super.createEl('span', {
className: 'vjs-control-text'
}));
return el;
}
}
Component$1.registerComponent('VolumeLevel', VolumeLevel);
/**
* @file volume-level-tooltip.js
*/
/** @import Player from '../../player' */
/**
* Volume level tooltips display the volume level above or beside the volume bar.
*
* @extends Component
*/
class VolumeLevelTooltip extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The {@link Player} that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.update = throttle(bind_(this, this.update), UPDATE_REFRESH_INTERVAL);
}
/**
* Create the volume tooltip DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
return super.createEl('div', {
className: 'vjs-volume-tooltip'
}, {
'aria-hidden': 'true'
});
}
/**
* Updates the position of the tooltip relative to the `VolumeBar` and
* its content text.
*
* @param {Object} rangeBarRect
* The `ClientRect` for the {@link VolumeBar} element.
*
* @param {number} rangeBarPoint
* A number from 0 to 1, representing a horizontal/vertical reference point
* from the left edge of the {@link VolumeBar}
*
* @param {boolean} vertical
* Refers to the volume control's position
* in the control bar ({@link VolumeControl})
*
* @param {number|string} content
* The volume level to write to the tooltip, as a percentage value
*/
update(rangeBarRect, rangeBarPoint, vertical, content) {
if (!vertical) {
const tooltipRect = getBoundingClientRect(this.el_);
const playerRect = getBoundingClientRect(this.player_.el());
const volumeBarPointPx = rangeBarRect.width * rangeBarPoint;
if (!playerRect || !tooltipRect) {
return;
}
const spaceLeftOfPoint = rangeBarRect.left - playerRect.left + volumeBarPointPx;
const spaceRightOfPoint = rangeBarRect.width - volumeBarPointPx + (playerRect.right - rangeBarRect.right);
let pullTooltipBy = tooltipRect.width / 2;
if (spaceLeftOfPoint < pullTooltipBy) {
pullTooltipBy += pullTooltipBy - spaceLeftOfPoint;
} else if (spaceRightOfPoint < pullTooltipBy) {
pullTooltipBy = spaceRightOfPoint;
}
if (pullTooltipBy < 0) {
pullTooltipBy = 0;
} else if (pullTooltipBy > tooltipRect.width) {
pullTooltipBy = tooltipRect.width;
}
this.el_.style.right = `-${pullTooltipBy}px`;
}
this.write(`${content}%`);
}
/**
* Write the volume to the tooltip DOM element.
*
* @param {string} content
* The formatted volume for the tooltip.
*/
write(content) {
textContent(this.el_, content);
}
/**
* Updates the position of the volume tooltip relative to the `VolumeBar`.
*
* @param {Object} rangeBarRect
* The `ClientRect` for the {@link VolumeBar} element.
*
* @param {number} rangeBarPoint
* A number from 0 to 1, representing a horizontal/vertical reference point
* from the left edge of the {@link VolumeBar}
*
* @param {boolean} vertical
* Refers to the volume control's position
* in the control bar ({@link VolumeControl})
*
* @param {number} volume
* The volume level to update the tooltip to
*
* @param {Function} cb
* A function that will be called during the request animation frame
* for tooltips that need to do additional animations from the default
*/
updateVolume(rangeBarRect, rangeBarPoint, vertical, volume, cb) {
this.requestNamedAnimationFrame('VolumeLevelTooltip#updateVolume', () => {
this.update(rangeBarRect, rangeBarPoint, vertical, volume.toFixed(0));
if (cb) {
cb();
}
});
}
}
Component$1.registerComponent('VolumeLevelTooltip', VolumeLevelTooltip);
/**
* @file mouse-volume-level-display.js
*/
/**
* The {@link MouseVolumeLevelDisplay} component tracks mouse movement over the
* {@link VolumeControl}. It displays an indicator and a {@link VolumeLevelTooltip}
* indicating the volume level which is represented by a given point in the
* {@link VolumeBar}.
*
* @extends Component
*/
class MouseVolumeLevelDisplay extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The {@link Player} that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.update = throttle(bind_(this, this.update), UPDATE_REFRESH_INTERVAL);
}
/**
* Create the DOM element for this class.
*
* @return {Element}
* The element that was created.
*/
createEl() {
return super.createEl('div', {
className: 'vjs-mouse-display'
});
}
/**
* Enqueues updates to its own DOM as well as the DOM of its
* {@link VolumeLevelTooltip} child.
*
* @param {Object} rangeBarRect
* The `ClientRect` for the {@link VolumeBar} element.
*
* @param {number} rangeBarPoint
* A number from 0 to 1, representing a horizontal/vertical reference point
* from the left edge of the {@link VolumeBar}
*
* @param {boolean} vertical
* Refers to the volume control's position
* in the control bar ({@link VolumeControl})
*
*/
update(rangeBarRect, rangeBarPoint, vertical) {
const volume = 100 * rangeBarPoint;
this.getChild('volumeLevelTooltip').updateVolume(rangeBarRect, rangeBarPoint, vertical, volume, () => {
if (vertical) {
this.el_.style.bottom = `${rangeBarRect.height * rangeBarPoint}px`;
} else {
this.el_.style.left = `${rangeBarRect.width * rangeBarPoint}px`;
}
});
}
}
/**
* Default options for `MouseVolumeLevelDisplay`
*
* @type {Object}
* @private
*/
MouseVolumeLevelDisplay.prototype.options_ = {
children: ['volumeLevelTooltip']
};
Component$1.registerComponent('MouseVolumeLevelDisplay', MouseVolumeLevelDisplay);
/**
* @file volume-bar.js
*/
/**
* The bar that contains the volume level and can be clicked on to adjust the level
*
* @extends Slider
*/
class VolumeBar extends Slider {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.on('slideractive', e => this.updateLastVolume_(e));
this.on(player, 'volumechange', e => this.updateARIAAttributes(e));
player.ready(() => this.updateARIAAttributes());
}
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
return super.createEl('div', {
className: 'vjs-volume-bar vjs-slider-bar'
}, {
'aria-label': this.localize('Volume Level'),
'aria-live': 'polite'
});
}
/**
* Handle mouse down on volume bar
*
* @param {Event} event
* The `mousedown` event that caused this to run.
*
* @listens mousedown
*/
handleMouseDown(event) {
if (!isSingleLeftClick(event)) {
return;
}
super.handleMouseDown(event);
}
/**
* Handle movement events on the {@link VolumeMenuButton}.
*
* @param {Event} event
* The event that caused this function to run.
*
* @listens mousemove
*/
handleMouseMove(event) {
const mouseVolumeLevelDisplay = this.getChild('mouseVolumeLevelDisplay');
if (mouseVolumeLevelDisplay) {
const volumeBarEl = this.el();
const volumeBarRect = getBoundingClientRect(volumeBarEl);
const vertical = this.vertical();
let volumeBarPoint = getPointerPosition(volumeBarEl, event);
volumeBarPoint = vertical ? volumeBarPoint.y : volumeBarPoint.x;
// The default skin has a gap on either side of the `VolumeBar`. This means
// that it's possible to trigger this behavior outside the boundaries of
// the `VolumeBar`. This ensures we stay within it at all times.
volumeBarPoint = clamp(volumeBarPoint, 0, 1);
mouseVolumeLevelDisplay.update(volumeBarRect, volumeBarPoint, vertical);
}
if (!isSingleLeftClick(event)) {
return;
}
this.checkMuted();
this.player_.volume(this.calculateDistance(event));
}
/**
* If the player is muted unmute it.
*/
checkMuted() {
if (this.player_.muted()) {
this.player_.muted(false);
}
}
/**
* Get percent of volume level
*
* @return {number}
* Volume level percent as a decimal number.
*/
getPercent() {
if (this.player_.muted()) {
return 0;
}
return this.player_.volume();
}
/**
* Increase volume level for keyboard users
*/
stepForward() {
this.checkMuted();
this.player_.volume(this.player_.volume() + 0.1);
}
/**
* Decrease volume level for keyboard users
*/
stepBack() {
this.checkMuted();
this.player_.volume(this.player_.volume() - 0.1);
}
/**
* Update ARIA accessibility attributes
*
* @param {Event} [event]
* The `volumechange` event that caused this function to run.
*
* @listens Player#volumechange
*/
updateARIAAttributes(event) {
const ariaValue = this.player_.muted() ? 0 : this.volumeAsPercentage_();
this.el_.setAttribute('aria-valuenow', ariaValue);
this.el_.setAttribute('aria-valuetext', ariaValue + '%');
}
/**
* Returns the current value of the player volume as a percentage
*
* @private
*/
volumeAsPercentage_() {
return Math.round(this.player_.volume() * 100);
}
/**
* When user starts dragging the VolumeBar, store the volume and listen for
* the end of the drag. When the drag ends, if the volume was set to zero,
* set lastVolume to the stored volume.
*
* @listens slideractive
* @private
*/
updateLastVolume_() {
const volumeBeforeDrag = this.player_.volume();
this.one('sliderinactive', () => {
if (this.player_.volume() === 0) {
this.player_.lastVolume_(volumeBeforeDrag);
}
});
}
}
/**
* Default options for the `VolumeBar`
*
* @type {Object}
* @private
*/
VolumeBar.prototype.options_ = {
children: ['volumeLevel'],
barName: 'volumeLevel'
};
// MouseVolumeLevelDisplay tooltip should not be added to a player on mobile devices
if (!IS_IOS && !IS_ANDROID) {
VolumeBar.prototype.options_.children.splice(0, 0, 'mouseVolumeLevelDisplay');
}
/**
* Call the update event for this Slider when this event happens on the player.
*
* @type {string}
*/
VolumeBar.prototype.playerEvent = 'volumechange';
Component$1.registerComponent('VolumeBar', VolumeBar);
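/*
 * A minimal usage sketch mirroring the VolumeBar's `checkMuted()` behavior,
 * assuming a player instance: adjusting the level through the bar first clears
 * any muted state, so the programmatic equivalent is:
 *
 *   player.muted(false);
 *   player.volume(0.5); // a 0-1 fraction, matching getPercent() above
 */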
/**
* @file volume-control.js
*/
/**
* The component for controlling the volume level
*
* @extends Component
*/
class VolumeControl extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options={}]
* The key/value store of player options.
*/
constructor(player, options = {}) {
options.vertical = options.vertical || false;
// Pass the vertical option down to the VolumeBar if
// the VolumeBar is turned on.
if (typeof options.volumeBar === 'undefined' || isPlain(options.volumeBar)) {
options.volumeBar = options.volumeBar || {};
options.volumeBar.vertical = options.vertical;
}
super(player, options);
// hide this control if volume support is missing
checkVolumeSupport(this, player);
this.throttledHandleMouseMove = throttle(bind_(this, this.handleMouseMove), UPDATE_REFRESH_INTERVAL);
this.handleMouseUpHandler_ = e => this.handleMouseUp(e);
this.on('mousedown', e => this.handleMouseDown(e));
this.on('touchstart', e => this.handleMouseDown(e));
this.on('mousemove', e => this.handleMouseMove(e));
// while the slider is active (the mouse has been pressed down and
// is dragging) or in focus we do not want to hide the VolumeBar
this.on(this.volumeBar, ['focus', 'slideractive'], () => {
this.volumeBar.addClass('vjs-slider-active');
this.addClass('vjs-slider-active');
this.trigger('slideractive');
});
this.on(this.volumeBar, ['blur', 'sliderinactive'], () => {
this.volumeBar.removeClass('vjs-slider-active');
this.removeClass('vjs-slider-active');
this.trigger('sliderinactive');
});
}
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
let orientationClass = 'vjs-volume-horizontal';
if (this.options_.vertical) {
orientationClass = 'vjs-volume-vertical';
}
return super.createEl('div', {
className: `vjs-volume-control vjs-control ${orientationClass}`
});
}
/**
* Handle `mousedown` or `touchstart` events on the `VolumeControl`.
*
* @param {Event} event
* `mousedown` or `touchstart` event that triggered this function
*
* @listens mousedown
* @listens touchstart
*/
handleMouseDown(event) {
const doc = this.el_.ownerDocument;
this.on(doc, 'mousemove', this.throttledHandleMouseMove);
this.on(doc, 'touchmove', this.throttledHandleMouseMove);
this.on(doc, 'mouseup', this.handleMouseUpHandler_);
this.on(doc, 'touchend', this.handleMouseUpHandler_);
}
/**
* Handle `mouseup` or `touchend` events on the `VolumeControl`.
*
* @param {Event} event
* `mouseup` or `touchend` event that triggered this function.
*
* @listens touchend
* @listens mouseup
*/
handleMouseUp(event) {
const doc = this.el_.ownerDocument;
this.off(doc, 'mousemove', this.throttledHandleMouseMove);
this.off(doc, 'touchmove', this.throttledHandleMouseMove);
this.off(doc, 'mouseup', this.handleMouseUpHandler_);
this.off(doc, 'touchend', this.handleMouseUpHandler_);
}
/**
* Handle `mousemove` or `touchmove` events on the `VolumeControl`.
*
* @param {Event} event
* `mousemove` or `touchmove` event that triggered this function
*
* @listens mousemove
* @listens touchmove
*/
handleMouseMove(event) {
this.volumeBar.handleMouseMove(event);
}
}
/**
* Default options for the `VolumeControl`
*
* @type {Object}
* @private
*/
VolumeControl.prototype.options_ = {
children: ['volumeBar']
};
Component$1.registerComponent('VolumeControl', VolumeControl);
/** @import Component from '../../component' */
/** @import Player from '../../player' */
/**
* Check if muting volume is supported and if it isn't hide the mute toggle
* button.
*
* @param {Component} self
* A reference to the mute toggle button
*
* @param {Player} player
* A reference to the player
*
* @private
*/
const checkMuteSupport = function (self, player) {
// hide mute toggle button if it's not supported by the current tech
if (player.tech_ && !player.tech_.featuresMuteControl) {
self.addClass('vjs-hidden');
}
self.on(player, 'loadstart', function () {
if (!player.tech_.featuresMuteControl) {
self.addClass('vjs-hidden');
} else {
self.removeClass('vjs-hidden');
}
});
};
/**
* @file mute-toggle.js
*/
/** @import Player from './player' */
/**
* A button component for muting the audio.
*
* @extends Button
*/
class MuteToggle extends Button {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
// hide this control if volume support is missing
checkMuteSupport(this, player);
this.on(player, ['loadstart', 'volumechange'], e => this.update(e));
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-mute-control ${super.buildCSSClass()}`;
}
/**
* This gets called when a `MuteToggle` is "clicked". See
* {@link ClickableComponent} for more detailed information on what a click can be.
*
* @param {Event} [event]
* The `keydown`, `tap`, or `click` event that caused this function to be
* called.
*
* @listens tap
* @listens click
*/
handleClick(event) {
const vol = this.player_.volume();
const lastVolume = this.player_.lastVolume_();
if (vol === 0) {
const volumeToSet = lastVolume < 0.1 ? 0.1 : lastVolume;
this.player_.volume(volumeToSet);
this.player_.muted(false);
} else {
this.player_.muted(this.player_.muted() ? false : true);
}
}
/**
* Update the `MuteToggle` button based on the state of `volume` and `muted`
* on the player.
*
* @param {Event} [event]
* The {@link Player#loadstart} event if this function was called
* through an event.
*
* @listens Player#loadstart
* @listens Player#volumechange
*/
update(event) {
this.updateIcon_();
this.updateControlText_();
}
/**
* Update the appearance of the `MuteToggle` icon.
*
* Possible states (given `level` variable below):
* - 0: crossed out
* - 1: zero bars of volume
* - 2: one bar of volume
* - 3: two bars of volume
*
* @private
*/
updateIcon_() {
const vol = this.player_.volume();
let level = 3;
this.setIcon('volume-high');
// in iOS when a player is loaded with muted attribute
// and volume is changed with a native mute button
// we want to make sure muted state is updated
if (IS_IOS && this.player_.tech_ && this.player_.tech_.el_) {
this.player_.muted(this.player_.tech_.el_.muted);
}
if (vol === 0 || this.player_.muted()) {
this.setIcon('volume-mute');
level = 0;
} else if (vol < 0.33) {
this.setIcon('volume-low');
level = 1;
} else if (vol < 0.67) {
this.setIcon('volume-medium');
level = 2;
}
removeClass(this.el_, [0, 1, 2, 3].reduce((str, i) => str + `${i ? ' ' : ''}vjs-vol-${i}`, ''));
addClass(this.el_, `vjs-vol-${level}`);
}
/**
* If `muted` has changed on the player, update the control text
* (`title` attribute on `vjs-mute-control` element and content of
* `vjs-control-text` element).
*
* @private
*/
updateControlText_() {
const soundOff = this.player_.muted() || this.player_.volume() === 0;
const text = soundOff ? 'Unmute' : 'Mute';
if (this.controlText() !== text) {
this.controlText(text);
}
}
}
/**
* The text that should display over the `MuteToggle`s controls. Added for localization.
*
* @type {string}
* @protected
*/
MuteToggle.prototype.controlText_ = 'Mute';
Component$1.registerComponent('MuteToggle', MuteToggle);
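/*
 * A worked example of the `handleClick()` branches above, with assumed values:
 * if volume is 0 when the button is clicked, the control restores the stored
 * last volume (or 0.1 if that was below 0.1) and unmutes; otherwise it simply
 * flips `muted()`.
 *
 *   player.volume(0);     // slider dragged to zero
 *   // a click now behaves roughly like:
 *   // player.volume(Math.max(player.lastVolume_(), 0.1)); player.muted(false);
 */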
/**
* @file volume-control.js
*/
/**
* A Component to contain the MuteToggle and VolumeControl so that
* they can work together.
*
* @extends Component
*/
class VolumePanel extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options={}]
* The key/value store of player options.
*/
constructor(player, options = {}) {
options.inline = typeof options.inline !== 'undefined' ? options.inline : true;
// pass the inline option down to the VolumeControl as vertical if
// the VolumeControl is on.
if (typeof options.volumeControl === 'undefined' || isPlain(options.volumeControl)) {
options.volumeControl = options.volumeControl || {};
options.volumeControl.vertical = !options.inline;
}
super(player, options);
// this handler is used by mouse handler methods below
this.handleKeyPressHandler_ = e => this.handleKeyPress(e);
this.on(player, ['loadstart'], e => this.volumePanelState_(e));
this.on(this.muteToggle, 'keyup', e => this.handleKeyPress(e));
this.on(this.volumeControl, 'keyup', e => this.handleVolumeControlKeyUp(e));
this.on('keydown', e => this.handleKeyPress(e));
this.on('mouseover', e => this.handleMouseOver(e));
this.on('mouseout', e => this.handleMouseOut(e));
// while the slider is active (the mouse has been pressed down and
// is dragging) we do not want to hide the VolumeBar
this.on(this.volumeControl, ['slideractive'], this.sliderActive_);
this.on(this.volumeControl, ['sliderinactive'], this.sliderInactive_);
}
/**
* Add vjs-slider-active class to the VolumePanel
*
* @listens VolumeControl#slideractive
* @private
*/
sliderActive_() {
this.addClass('vjs-slider-active');
}
/**
* Removes the vjs-slider-active class from the VolumePanel
*
* @listens VolumeControl#sliderinactive
* @private
*/
sliderInactive_() {
this.removeClass('vjs-slider-active');
}
/**
* Adds vjs-hidden or vjs-mute-toggle-only to the VolumePanel
* depending on MuteToggle and VolumeControl state
*
* @listens Player#loadstart
* @private
*/
volumePanelState_() {
// hide the volume panel if neither the volume control
// nor the mute toggle is displayed
if (this.volumeControl.hasClass('vjs-hidden') && this.muteToggle.hasClass('vjs-hidden')) {
this.addClass('vjs-hidden');
}
// if only mute toggle is visible we don't want
// volume panel expanding when hovered or active
if (this.volumeControl.hasClass('vjs-hidden') && !this.muteToggle.hasClass('vjs-hidden')) {
this.addClass('vjs-mute-toggle-only');
}
}
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
let orientationClass = 'vjs-volume-panel-horizontal';
if (!this.options_.inline) {
orientationClass = 'vjs-volume-panel-vertical';
}
return super.createEl('div', {
className: `vjs-volume-panel vjs-control ${orientationClass}`
});
}
/**
* Dispose of the `volume-panel` and all child components.
*/
dispose() {
this.handleMouseOut();
super.dispose();
}
/**
* Handles `keyup` events on the `VolumeControl`, looking for ESC, which closes
* the volume panel and sets focus on `MuteToggle`.
*
* @param {Event} event
* The `keyup` event that caused this function to be called.
*
* @listens keyup
*/
handleVolumeControlKeyUp(event) {
if (event.key === 'Escape') {
this.muteToggle.focus();
}
}
/**
* This gets called when a `VolumePanel` gains hover via a `mouseover` event.
* Starts listening for `keyup` events on the document. When they happen it
* calls `this.handleKeyPress`.
*
* @param {Event} event
* The `mouseover` event that caused this function to be called.
*
* @listens mouseover
*/
handleMouseOver(event) {
this.addClass('vjs-hover');
on(document$1, 'keyup', this.handleKeyPressHandler_);
}
/**
* This gets called when a `VolumePanel` loses hover via a `mouseout` event.
* Stops listening for the `keyup` events on the document that were added by
* `this.handleMouseOver`.
*
* @param {Event} event
* The `mouseout` event that caused this function to be called.
*
* @listens mouseout
*/
handleMouseOut(event) {
this.removeClass('vjs-hover');
off(document$1, 'keyup', this.handleKeyPressHandler_);
}
/**
* Handles `keyup` event on the document or `keydown` event on the `VolumePanel`,
* looking for ESC, which hides the `VolumeControl`.
*
* @param {Event} event
* The keypress that triggered this event.
*
* @listens keydown | keyup
*/
handleKeyPress(event) {
if (event.key === 'Escape') {
this.handleMouseOut();
}
}
}
/**
* Default options for the `VolumeControl`
*
* @type {Object}
* @private
*/
VolumePanel.prototype.options_ = {
children: ['muteToggle', 'volumeControl']
};
Component$1.registerComponent('VolumePanel', VolumePanel);
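/*
 * A minimal configuration sketch, assuming the default control bar layout:
 * setting `inline: false` on the VolumePanel makes the constructor above pass
 * `vertical: true` down to its VolumeControl, producing a pop-up vertical bar.
 *
 *   const player = videojs('my-video', {
 *     controlBar: { volumePanel: { inline: false } }
 *   });
 */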
/**
* Button to skip forward a configurable amount of time
* through a video. Renders in the control bar.
*
* e.g. options: {controlBar: {skipButtons: {forward: 5}}}
*
* @extends Button
*/
class SkipForward extends Button {
constructor(player, options) {
super(player, options);
this.validOptions = [5, 10, 30];
this.skipTime = this.getSkipForwardTime();
if (this.skipTime && this.validOptions.includes(this.skipTime)) {
this.setIcon(`forward-${this.skipTime}`);
this.controlText(this.localize('Skip forward {1} seconds', [this.skipTime.toLocaleString(player.language())]));
this.show();
} else {
this.hide();
}
}
getSkipForwardTime() {
const playerOptions = this.options_.playerOptions;
return playerOptions.controlBar && playerOptions.controlBar.skipButtons && playerOptions.controlBar.skipButtons.forward;
}
buildCSSClass() {
return `vjs-skip-forward-${this.getSkipForwardTime()} ${super.buildCSSClass()}`;
}
/**
* On click, skips forward in the duration/seekable range by a configurable amount of seconds.
* If the time left in the duration/seekable range is less than the configured 'skip forward' time,
* skips to end of duration/seekable range.
*
* Handle a click on a `SkipForward` button
*
* @param {EventTarget~Event} event
* The `click` event that caused this function
* to be called
*/
handleClick(event) {
if (isNaN(this.player_.duration())) {
return;
}
const currentVideoTime = this.player_.currentTime();
const liveTracker = this.player_.liveTracker;
const duration = liveTracker && liveTracker.isLive() ? liveTracker.seekableEnd() : this.player_.duration();
let newTime;
if (currentVideoTime + this.skipTime <= duration) {
newTime = currentVideoTime + this.skipTime;
} else {
newTime = duration;
}
this.player_.currentTime(newTime);
}
/**
* Update control text on languagechange
*/
handleLanguagechange() {
this.controlText(this.localize('Skip forward {1} seconds', [this.skipTime]));
}
}
SkipForward.prototype.controlText_ = 'Skip Forward';
Component$1.registerComponent('SkipForward', SkipForward);
/**
* Button to skip backward a configurable amount of time
* through a video. Renders in the control bar.
*
* e.g. options: {controlBar: {skipButtons: {backward: 5}}}
*
* @extends Button
*/
class SkipBackward extends Button {
constructor(player, options) {
super(player, options);
this.validOptions = [5, 10, 30];
this.skipTime = this.getSkipBackwardTime();
if (this.skipTime && this.validOptions.includes(this.skipTime)) {
this.setIcon(`replay-${this.skipTime}`);
this.controlText(this.localize('Skip backward {1} seconds', [this.skipTime.toLocaleString(player.language())]));
this.show();
} else {
this.hide();
}
}
getSkipBackwardTime() {
const playerOptions = this.options_.playerOptions;
return playerOptions.controlBar && playerOptions.controlBar.skipButtons && playerOptions.controlBar.skipButtons.backward;
}
buildCSSClass() {
return `vjs-skip-backward-${this.getSkipBackwardTime()} ${super.buildCSSClass()}`;
}
/**
* On click, skips backward in the video by a configurable amount of seconds.
* If the current time in the video is less than the configured 'skip backward' time,
* skips to beginning of video or seekable range.
*
* Handle a click on a `SkipBackward` button
*
* @param {EventTarget~Event} event
* The `click` event that caused this function
* to be called
*/
handleClick(event) {
const currentVideoTime = this.player_.currentTime();
const liveTracker = this.player_.liveTracker;
const seekableStart = liveTracker && liveTracker.isLive() && liveTracker.seekableStart();
let newTime;
if (seekableStart && currentVideoTime - this.skipTime <= seekableStart) {
newTime = seekableStart;
} else if (currentVideoTime >= this.skipTime) {
newTime = currentVideoTime - this.skipTime;
} else {
newTime = 0;
}
this.player_.currentTime(newTime);
}
/**
* Update control text on languagechange
*/
handleLanguagechange() {
this.controlText(this.localize('Skip backward {1} seconds', [this.skipTime]));
}
}
SkipBackward.prototype.controlText_ = 'Skip Backward';
Component$1.registerComponent('SkipBackward', SkipBackward);
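/*
 * A minimal configuration sketch covering both skip buttons above; only the
 * values 5, 10 and 30 are accepted (see `validOptions`), any other value hides
 * the button.
 *
 *   const player = videojs('my-video', {
 *     controlBar: { skipButtons: { forward: 10, backward: 10 } }
 *   });
 */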
/**
* @file menu.js
*/
/** @import Player from '../player' */
/**
* The Menu component is used to build popup menus, including subtitle and
* captions selection menus.
*
* @extends Component
*/
class Menu extends Component$1 {
/**
* Create an instance of this class.
*
* @param {Player} player
* the player that this component should attach to
*
* @param {Object} [options]
* Object of option names and values
*
*/
constructor(player, options) {
super(player, options);
if (options) {
this.menuButton_ = options.menuButton;
}
this.focusedChild_ = -1;
this.on('keydown', e => this.handleKeyDown(e));
// All the menu item instances share the same blur handler provided by the menu container.
this.boundHandleBlur_ = e => this.handleBlur(e);
this.boundHandleTapClick_ = e => this.handleTapClick(e);
}
/**
* Add event listeners to the {@link MenuItem}.
*
* @param {Object} component
* The instance of the `MenuItem` to add listeners to.
*
*/
addEventListenerForItem(component) {
if (!(component instanceof Component$1)) {
return;
}
this.on(component, 'blur', this.boundHandleBlur_);
this.on(component, ['tap', 'click'], this.boundHandleTapClick_);
}
/**
* Remove event listeners from the {@link MenuItem}.
*
* @param {Object} component
* The instance of the `MenuItem` to remove listeners.
*
*/
removeEventListenerForItem(component) {
if (!(component instanceof Component$1)) {
return;
}
this.off(component, 'blur', this.boundHandleBlur_);
this.off(component, ['tap', 'click'], this.boundHandleTapClick_);
}
/**
* This method is called indirectly when a component that has already been
* added to another menu instance is added to this menu via `addItem`.
* In that case, the original menu instance removes the component
* by calling `removeChild`.
*
* @param {Object} component
* The instance of the `MenuItem`
*/
removeChild(component) {
if (typeof component === 'string') {
component = this.getChild(component);
}
this.removeEventListenerForItem(component);
super.removeChild(component);
}
/**
* Add a {@link MenuItem} to the menu.
*
* @param {Object|string} component
* The name or instance of the `MenuItem` to add.
*
*/
addItem(component) {
const childComponent = this.addChild(component);
if (childComponent) {
this.addEventListenerForItem(childComponent);
}
}
/**
* Create the `Menu`s DOM element.
*
* @return {Element}
* the element that was created
*/
createEl() {
const contentElType = this.options_.contentElType || 'ul';
this.contentEl_ = createEl(contentElType, {
className: 'vjs-menu-content'
});
this.contentEl_.setAttribute('role', 'menu');
const el = super.createEl('div', {
append: this.contentEl_,
className: 'vjs-menu'
});
el.appendChild(this.contentEl_);
// Prevent clicks from bubbling up. Needed for Menu Buttons,
// where a click on the parent is significant
on(el, 'click', function (event) {
event.preventDefault();
event.stopImmediatePropagation();
});
return el;
}
dispose() {
this.contentEl_ = null;
this.boundHandleBlur_ = null;
this.boundHandleTapClick_ = null;
super.dispose();
}
/**
* Called when a `MenuItem` loses focus.
*
* @param {Event} event
* The `blur` event that caused this function to be called.
*
* @listens blur
*/
handleBlur(event) {
const relatedTarget = event.relatedTarget || document$1.activeElement;
// Close menu popup when a user clicks outside the menu
if (!this.children().some(element => {
return element.el() === relatedTarget;
})) {
const btn = this.menuButton_;
if (btn && btn.buttonPressed_ && relatedTarget !== btn.el().firstChild) {
btn.unpressButton();
}
}
}
/**
* Called when a `MenuItem` gets clicked or tapped.
*
* @param {Event} event
* The `click` or `tap` event that caused this function to be called.
*
* @listens click,tap
*/
handleTapClick(event) {
// Unpress the associated MenuButton, and move focus back to it
if (this.menuButton_) {
this.menuButton_.unpressButton();
const childComponents = this.children();
if (!Array.isArray(childComponents)) {
return;
}
const foundComponent = childComponents.filter(component => component.el() === event.target)[0];
if (!foundComponent) {
return;
}
// don't focus menu button if item is a caption settings item
// because focus will move elsewhere
if (foundComponent.name() !== 'CaptionSettingsMenuItem') {
this.menuButton_.focus();
}
}
}
/**
* Handle a `keydown` event on this menu. This listener is added in the constructor.
*
* @param {KeyboardEvent} event
* A `keydown` event that happened on the menu.
*
* @listens keydown
*/
handleKeyDown(event) {
// Left and Down Arrows
if (event.key === 'ArrowLeft' || event.key === 'ArrowDown') {
event.preventDefault();
event.stopPropagation();
this.stepForward();
// Up and Right Arrows
} else if (event.key === 'ArrowRight' || event.key === 'ArrowUp') {
event.preventDefault();
event.stopPropagation();
this.stepBack();
}
}
/**
* Move to next (lower) menu item for keyboard users.
*/
stepForward() {
let stepChild = 0;
if (this.focusedChild_ !== undefined) {
stepChild = this.focusedChild_ + 1;
}
this.focus(stepChild);
}
/**
* Move to previous (higher) menu item for keyboard users.
*/
stepBack() {
let stepChild = 0;
if (this.focusedChild_ !== undefined) {
stepChild = this.focusedChild_ - 1;
}
this.focus(stepChild);
}
/**
* Set focus on a {@link MenuItem} in the `Menu`.
*
* @param {Object|string} [item=0]
* Index of child item set focus on.
*/
focus(item = 0) {
const children = this.children().slice();
const haveTitle = children.length && children[0].hasClass('vjs-menu-title');
if (haveTitle) {
children.shift();
}
if (children.length > 0) {
if (item < 0) {
item = 0;
} else if (item >= children.length) {
item = children.length - 1;
}
this.focusedChild_ = item;
children[item].el_.focus();
}
}
}
Component$1.registerComponent('Menu', Menu);
/**
* @file menu-button.js
*/
/** @import Player from '../player' */
/**
* A `MenuButton` class for any popup {@link Menu}.
*
* @extends Component
*/
class MenuButton extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options={}]
* The key/value store of player options.
*/
constructor(player, options = {}) {
super(player, options);
this.menuButton_ = new Button(player, options);
this.menuButton_.controlText(this.controlText_);
this.menuButton_.el_.setAttribute('aria-haspopup', 'true');
// Add buildCSSClass values to the button, not the wrapper
const buttonClass = Button.prototype.buildCSSClass();
this.menuButton_.el_.className = this.buildCSSClass() + ' ' + buttonClass;
this.menuButton_.removeClass('vjs-control');
this.addChild(this.menuButton_);
this.update();
this.enabled_ = true;
const handleClick = e => this.handleClick(e);
this.handleMenuKeyUp_ = e => this.handleMenuKeyUp(e);
this.on(this.menuButton_, 'tap', handleClick);
this.on(this.menuButton_, 'click', handleClick);
this.on(this.menuButton_, 'keydown', e => this.handleKeyDown(e));
this.on(this.menuButton_, 'mouseenter', () => {
this.addClass('vjs-hover');
this.menu.show();
on(document$1, 'keyup', this.handleMenuKeyUp_);
});
this.on('mouseleave', e => this.handleMouseLeave(e));
this.on('keydown', e => this.handleSubmenuKeyDown(e));
}
/**
* Update the menu based on the current state of its items.
*/
update() {
const menu = this.createMenu();
if (this.menu) {
this.menu.dispose();
this.removeChild(this.menu);
}
this.menu = menu;
this.addChild(menu);
/**
* Track the state of the menu button
*
* @type {Boolean}
* @private
*/
this.buttonPressed_ = false;
this.menuButton_.el_.setAttribute('aria-expanded', 'false');
if (this.items && this.items.length <= this.hideThreshold_) {
this.hide();
this.menu.contentEl_.removeAttribute('role');
} else {
this.show();
this.menu.contentEl_.setAttribute('role', 'menu');
}
}
/**
* Create the menu and add all items to it.
*
* @return {Menu}
* The constructed menu
*/
createMenu() {
const menu = new Menu(this.player_, {
menuButton: this
});
/**
* Hide the menu if the number of items is less than or equal to this threshold. This defaults
* to 0 and whenever we add items which can be hidden to the menu we'll increment it. We list
* it here because every time we run `createMenu` we need to reset the value.
*
* @protected
* @type {Number}
*/
this.hideThreshold_ = 0;
// Add a title list item to the top
if (this.options_.title) {
const titleEl = createEl('li', {
className: 'vjs-menu-title',
textContent: toTitleCase$1(this.options_.title),
tabIndex: -1
});
const titleComponent = new Component$1(this.player_, {
el: titleEl
});
menu.addItem(titleComponent);
}
this.items = this.createItems();
if (this.items) {
// Add menu items to the menu
for (let i = 0; i < this.items.length; i++) {
menu.addItem(this.items[i]);
}
}
return menu;
}
/**
* Create the list of menu items. Specific to each subclass.
*
* @abstract
*/
createItems() {}
/**
* Create the `MenuButton`'s DOM element.
*
* @return {Element}
* The element that gets created.
*/
createEl() {
return super.createEl('div', {
className: this.buildWrapperCSSClass()
}, {});
}
/**
* Overwrites the `setIcon` method from `Component`.
* In this case, we want the icon to be appended to the menuButton.
*
* @param {string} name
* The icon name to be added.
*/
setIcon(name) {
super.setIcon(name, this.menuButton_.el_);
}
/**
* Allow sub components to stack CSS class names for the wrapper element
*
* @return {string}
* The constructed wrapper DOM `className`
*/
buildWrapperCSSClass() {
let menuButtonClass = 'vjs-menu-button';
// If the inline option is passed, we want to use different styles altogether.
if (this.options_.inline === true) {
menuButtonClass += '-inline';
} else {
menuButtonClass += '-popup';
}
// TODO: Fix the CSS so that this isn't necessary
const buttonClass = Button.prototype.buildCSSClass();
return `vjs-menu-button ${menuButtonClass} ${buttonClass} ${super.buildCSSClass()}`;
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
let menuButtonClass = 'vjs-menu-button';
// If the inline option is passed, we want to use different styles altogether.
if (this.options_.inline === true) {
menuButtonClass += '-inline';
} else {
menuButtonClass += '-popup';
}
return `vjs-menu-button ${menuButtonClass} ${super.buildCSSClass()}`;
}
/**
* Get or set the localized control text that will be used for accessibility.
*
* > NOTE: This will come from the internal `menuButton_` element.
*
* @param {string} [text]
* Control text for element.
*
* @param {Element} [el=this.menuButton_.el()]
* Element to set the title on.
*
* @return {string}
* - The control text when getting
*/
controlText(text, el = this.menuButton_.el()) {
return this.menuButton_.controlText(text, el);
}
/**
* Dispose of the `menu-button` and all child components.
*/
dispose() {
this.handleMouseLeave();
super.dispose();
}
/**
* Handle a click on a `MenuButton`.
* See {@link ClickableComponent#handleClick} for instances where this is called.
*
* @param {Event} event
* The `keydown`, `tap`, or `click` event that caused this function to be
* called.
*
* @listens tap
* @listens click
*/
handleClick(event) {
if (this.buttonPressed_) {
this.unpressButton();
} else {
this.pressButton();
}
}
/**
* Handle `mouseleave` for `MenuButton`.
*
* @param {Event} event
* The `mouseleave` event that caused this function to be called.
*
* @listens mouseleave
*/
handleMouseLeave(event) {
this.removeClass('vjs-hover');
off(document$1, 'keyup', this.handleMenuKeyUp_);
}
/**
* Set the focus to the actual button, not to this element
*/
focus() {
this.menuButton_.focus();
}
/**
* Remove the focus from the actual button, not this element
*/
blur() {
this.menuButton_.blur();
}
/**
* Handle tab, escape, down arrow, and up arrow keys for `MenuButton`. See
* {@link ClickableComponent#handleKeyDown} for instances where this is called.
*
* @param {Event} event
* The `keydown` event that caused this function to be called.
*
* @listens keydown
*/
handleKeyDown(event) {
// Escape or Tab unpress the 'button'
if (event.key === 'Esc' || event.key === 'Tab') {
if (this.buttonPressed_) {
this.unpressButton();
}
// Don't preventDefault for Tab key - we still want to lose focus
if (event.key !== 'Tab') {
event.preventDefault();
// Set focus back to the menu button's button
this.menuButton_.focus();
}
// Up Arrow or Down Arrow also 'press' the button to open the menu
} else if (event.key === 'Up' || event.key === 'Down' && !(this.player_.options_.playerOptions.spatialNavigation && this.player_.options_.playerOptions.spatialNavigation.enabled)) {
if (!this.buttonPressed_) {
event.preventDefault();
this.pressButton();
}
}
}
/**
* Handle a `keyup` event on a `MenuButton`. The listener for this is added in
* the constructor.
*
* @param {Event} event
* Key press event
*
* @listens keyup
*/
handleMenuKeyUp(event) {
// Escape hides popup menu
if (event.key === 'Esc' || event.key === 'Tab') {
this.removeClass('vjs-hover');
}
}
/**
* This method name now delegates to `handleSubmenuKeyDown`. This means
* anyone calling `handleSubmenuKeyPress` will not see their method calls
* stop working.
*
* @param {Event} event
* The event that caused this function to be called.
*/
handleSubmenuKeyPress(event) {
this.handleSubmenuKeyDown(event);
}
/**
* Handle a `keydown` event on a sub-menu. The listener for this is added in
* the constructor.
*
* @param {Event} event
* Key press event
*
* @listens keydown
*/
handleSubmenuKeyDown(event) {
// Escape or Tab unpress the 'button'
if (event.key === 'Esc' || event.key === 'Tab') {
if (this.buttonPressed_) {
this.unpressButton();
}
// Don't preventDefault for Tab key - we still want to lose focus
if (event.key !== 'Tab') {
event.preventDefault();
// Set focus back to the menu button's button
this.menuButton_.focus();
}
}
}
/**
* Put the current `MenuButton` into a pressed state.
*/
pressButton() {
if (this.enabled_) {
this.buttonPressed_ = true;
this.menu.show();
this.menu.lockShowing();
this.menuButton_.el_.setAttribute('aria-expanded', 'true');
// set the focus into the submenu, except on iOS where it is resulting in
// undesired scrolling behavior when the player is in an iframe
if (IS_IOS && isInFrame()) {
// Return early so that the menu isn't focused
return;
}
this.menu.focus();
}
}
/**
* Take the current `MenuButton` out of a pressed state.
*/
unpressButton() {
if (this.enabled_) {
this.buttonPressed_ = false;
this.menu.unlockShowing();
this.menu.hide();
this.menuButton_.el_.setAttribute('aria-expanded', 'false');
}
}
/**
* Disable the `MenuButton`. Don't allow it to be clicked.
*/
disable() {
this.unpressButton();
this.enabled_ = false;
this.addClass('vjs-disabled');
this.menuButton_.disable();
}
/**
* Enable the `MenuButton`. Allow it to be clicked.
*/
enable() {
this.enabled_ = true;
this.removeClass('vjs-disabled');
this.menuButton_.enable();
}
}
Component$1.registerComponent('MenuButton', MenuButton);
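// Illustrative sketch (not part of the Video.js source): how MenuButton is
// typically extended. A subclass overrides createItems() to return MenuItem
// instances, and update() above rebuilds the menu from them. The names here
// (ExampleQualityButton, the quality labels) are hypothetical, and the factory
// is never invoked in this bundle, so it has no runtime effect.
function exampleCreateMenuButtonSubclass() {
  class ExampleQualityButton extends MenuButton {
    createItems() {
      // One selectable, non-multiSelectable item per hypothetical quality level.
      // MenuItem is defined later in this bundle; it is only referenced if this
      // factory is actually called, after the module has evaluated.
      return ['auto', '720p', '1080p'].map(label => new MenuItem(this.player_, {
        label,
        selectable: true,
        multiSelectable: false
      }));
    }
    buildCSSClass() {
      return `vjs-example-quality-button ${super.buildCSSClass()}`;
    }
  }
  return ExampleQualityButton;
}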
/**
* @file track-button.js
*/
/** @import Player from './player' */
/**
* The base class for buttons that toggle specific track types (e.g. subtitles).
*
* @extends MenuButton
*/
class TrackButton extends MenuButton {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
const tracks = options.tracks;
super(player, options);
if (this.items.length <= 1) {
this.hide();
}
if (!tracks) {
return;
}
const updateHandler = bind_(this, this.update);
tracks.addEventListener('removetrack', updateHandler);
tracks.addEventListener('addtrack', updateHandler);
tracks.addEventListener('labelchange', updateHandler);
this.player_.on('ready', updateHandler);
this.player_.on('dispose', function () {
tracks.removeEventListener('removetrack', updateHandler);
tracks.removeEventListener('addtrack', updateHandler);
tracks.removeEventListener('labelchange', updateHandler);
});
}
}
Component$1.registerComponent('TrackButton', TrackButton);
/**
* @file menu-item.js
*/
/** @import Player from '../player' */
/**
* The component for a menu item. `<li>`
*
* @extends ClickableComponent
*/
class MenuItem extends ClickableComponent {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options={}]
* The key/value store of player options.
*
*/
constructor(player, options) {
super(player, options);
this.selectable = options.selectable;
this.isSelected_ = options.selected || false;
this.multiSelectable = options.multiSelectable;
this.selected(this.isSelected_);
if (this.selectable) {
if (this.multiSelectable) {
this.el_.setAttribute('role', 'menuitemcheckbox');
} else {
this.el_.setAttribute('role', 'menuitemradio');
}
} else {
this.el_.setAttribute('role', 'menuitem');
}
}
/**
* Create the `MenuItem`'s DOM element
*
* @param {string} [type=li]
* Element's node type, not actually used, always set to `li`.
*
* @param {Object} [props={}]
* An object of properties that should be set on the element
*
* @param {Object} [attrs={}]
* An object of attributes that should be set on the element
*
* @return {Element}
* The element that gets created.
*/
createEl(type, props, attrs) {
// The control is textual, not just an icon
this.nonIconControl = true;
const el = super.createEl('li', Object.assign({
className: 'vjs-menu-item',
tabIndex: -1
}, props), attrs);
// swap icon with menu item text.
const menuItemEl = createEl('span', {
className: 'vjs-menu-item-text',
textContent: this.localize(this.options_.label)
});
// If using SVG icons, the element with vjs-icon-placeholder will be added separately.
if (this.player_.options_.experimentalSvgIcons) {
el.appendChild(menuItemEl);
} else {
el.replaceChild(menuItemEl, el.querySelector('.vjs-icon-placeholder'));
}
return el;
}
/**
* Ignore keys which are used by the menu, but pass any other ones up. See
* {@link ClickableComponent#handleKeyDown} for instances where this is called.
*
* @param {KeyboardEvent} event
* The `keydown` event that caused this function to be called.
*
* @listens keydown
*/
handleKeyDown(event) {
if (!['Tab', 'Escape', 'ArrowUp', 'ArrowLeft', 'ArrowRight', 'ArrowDown'].includes(event.key)) {
// Pass keydown handling up for unused keys
super.handleKeyDown(event);
}
}
/**
* Any click on a `MenuItem` puts it into the selected state.
* See {@link ClickableComponent#handleClick} for instances where this is called.
*
* @param {Event} event
* The `keydown`, `tap`, or `click` event that caused this function to be
* called.
*
* @listens tap
* @listens click
*/
handleClick(event) {
this.selected(true);
}
/**
* Set the state for this menu item as selected or not.
*
* @param {boolean} selected
* if the menu item is selected or not
*/
selected(selected) {
if (this.selectable) {
if (selected) {
this.addClass('vjs-selected');
this.el_.setAttribute('aria-checked', 'true');
// aria-checked isn't fully supported by browsers/screen readers,
// so indicate selected state to screen reader in the control text.
this.controlText(', selected');
this.isSelected_ = true;
} else {
this.removeClass('vjs-selected');
this.el_.setAttribute('aria-checked', 'false');
// Indicate un-selected state to screen reader
this.controlText('');
this.isSelected_ = false;
}
}
}
}
Component$1.registerComponent('MenuItem', MenuItem);
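// Illustrative sketch (not part of the Video.js source): the selectable /
// multiSelectable contract documented above. A radio-style item (the default
// behaviour) only ever selects itself on click, while a checkbox-style item can
// toggle. ExampleToggleMenuItem and examplePlayer are hypothetical names, and
// the function is never invoked in this bundle.
function exampleCreateToggleMenuItem(examplePlayer) {
  class ExampleToggleMenuItem extends MenuItem {
    constructor(player, options) {
      // multiSelectable: true gives the element a "menuitemcheckbox" role.
      super(player, Object.assign({
        selectable: true,
        multiSelectable: true
      }, options));
    }
    handleClick(event) {
      // Toggle rather than only select, unlike the radio-style default above.
      this.selected(!this.isSelected_);
    }
  }
  return new ExampleToggleMenuItem(examplePlayer, {
    label: 'Example toggle'
  });
}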
/**
* @file text-track-menu-item.js
*/
/** @import Player from '../../player' */
/**
* The specific menu item type for selecting a language within a text track kind
*
* @extends MenuItem
*/
class TextTrackMenuItem extends MenuItem {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
const track = options.track;
const tracks = player.textTracks();
// Modify options for parent MenuItem class's init.
options.label = track.label || track.language || 'Unknown';
options.selected = track.mode === 'showing';
super(player, options);
this.track = track;
// Determine the relevant kind(s) of tracks for this component and filter
// out empty kinds.
this.kinds = (options.kinds || [options.kind || this.track.kind]).filter(Boolean);
const changeHandler = (...args) => {
this.handleTracksChange.apply(this, args);
};
const selectedLanguageChangeHandler = (...args) => {
this.handleSelectedLanguageChange.apply(this, args);
};
player.on(['loadstart', 'texttrackchange'], changeHandler);
tracks.addEventListener('change', changeHandler);
tracks.addEventListener('selectedlanguagechange', selectedLanguageChangeHandler);
this.on('dispose', function () {
player.off(['loadstart', 'texttrackchange'], changeHandler);
tracks.removeEventListener('change', changeHandler);
tracks.removeEventListener('selectedlanguagechange', selectedLanguageChangeHandler);
});
// iOS7 doesn't dispatch change events to TextTrackLists when an
// associated track's mode changes. Without something like
// Object.observe() (also not present on iOS7), it's not
// possible to detect changes to the mode attribute and polyfill
// the change event. As a poor substitute, we manually dispatch
// change events whenever the controls modify the mode.
if (tracks.onchange === undefined) {
let event;
this.on(['tap', 'click'], function () {
if (typeof window$1.Event !== 'object') {
// Android 2.3 throws an Illegal Constructor error for window.Event
try {
event = new window$1.Event('change');
} catch (err) {
// continue regardless of error
}
}
if (!event) {
event = document$1.createEvent('Event');
event.initEvent('change', true, true);
}
tracks.dispatchEvent(event);
});
}
// set the default state based on current tracks
this.handleTracksChange();
}
/**
* This gets called when a `TextTrackMenuItem` is "clicked". See
* {@link ClickableComponent} for more detailed information on what a click can be.
*
* @param {Event} event
* The `keydown`, `tap`, or `click` event that caused this function to be
* called.
*
* @listens tap
* @listens click
*/
handleClick(event) {
const referenceTrack = this.track;
const tracks = this.player_.textTracks();
super.handleClick(event);
if (!tracks) {
return;
}
for (let i = 0; i < tracks.length; i++) {
const track = tracks[i];
// If the track from the text tracks list is not of the right kind,
// skip it. We do not want to affect tracks of incompatible kind(s).
if (this.kinds.indexOf(track.kind) === -1) {
continue;
}
// If this text track is the component's track and it is not showing,
// set it to showing.
if (track === referenceTrack) {
if (track.mode !== 'showing') {
track.mode = 'showing';
}
// If this text track is not the component's track and it is not
// disabled, set it to disabled.
} else if (track.mode !== 'disabled') {
track.mode = 'disabled';
}
}
}
/**
* Handle text track list change
*
* @param {Event} event
* The `change` event that caused this function to be called.
*
* @listens TextTrackList#change
*/
handleTracksChange(event) {
const shouldBeSelected = this.track.mode === 'showing';
// Prevent redundant selected() calls because they may cause
// screen readers to read the appended control text unnecessarily
if (shouldBeSelected !== this.isSelected_) {
this.selected(shouldBeSelected);
}
}
handleSelectedLanguageChange(event) {
if (this.track.mode === 'showing') {
const selectedLanguage = this.player_.cache_.selectedLanguage;
// Don't replace the kind of track across the same language
if (selectedLanguage && selectedLanguage.enabled && selectedLanguage.language === this.track.language && selectedLanguage.kind !== this.track.kind) {
return;
}
this.player_.cache_.selectedLanguage = {
enabled: true,
language: this.track.language,
kind: this.track.kind
};
}
}
dispose() {
// remove reference to track object on dispose
this.track = null;
super.dispose();
}
}
Component$1.registerComponent('TextTrackMenuItem', TextTrackMenuItem);
/**
* @file off-text-track-menu-item.js
*/
/** @import Player from '../../player' */
/**
* A special menu item for turning off a specific type of text track
*
* @extends TextTrackMenuItem
*/
class OffTextTrackMenuItem extends TextTrackMenuItem {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
// Create pseudo track info
// Requires options['kind']
options.track = {
player,
// it is no longer necessary to store `kind` or `kinds` on the track itself
// since they are now stored in the `kinds` property of all instances of
// TextTrackMenuItem, but this will remain for backwards compatibility
kind: options.kind,
kinds: options.kinds,
default: false,
mode: 'disabled'
};
if (!options.kinds) {
options.kinds = [options.kind];
}
if (options.label) {
options.track.label = options.label;
} else {
options.track.label = options.kinds.join(' and ') + ' off';
}
// MenuItem is selectable
options.selectable = true;
// MenuItem is NOT multiSelectable (i.e. only one can be marked "selected" at a time)
options.multiSelectable = false;
super(player, options);
}
/**
* Handle text track change
*
* @param {Event} event
* The event that caused this function to run
*/
handleTracksChange(event) {
const tracks = this.player().textTracks();
let shouldBeSelected = true;
for (let i = 0, l = tracks.length; i < l; i++) {
const track = tracks[i];
if (this.options_.kinds.indexOf(track.kind) > -1 && track.mode === 'showing') {
shouldBeSelected = false;
break;
}
}
// Prevent redundant selected() calls because they may cause
// screen readers to read the appended control text unnecessarily
if (shouldBeSelected !== this.isSelected_) {
this.selected(shouldBeSelected);
}
}
handleSelectedLanguageChange(event) {
const tracks = this.player().textTracks();
let allHidden = true;
for (let i = 0, l = tracks.length; i < l; i++) {
const track = tracks[i];
if (['captions', 'descriptions', 'subtitles'].indexOf(track.kind) > -1 && track.mode === 'showing') {
allHidden = false;
break;
}
}
if (allHidden) {
this.player_.cache_.selectedLanguage = {
enabled: false
};
}
}
/**
* Update control text and label on languagechange
*/
handleLanguagechange() {
this.$('.vjs-menu-item-text').textContent = this.player_.localize(this.options_.label);
super.handleLanguagechange();
}
}
Component$1.registerComponent('OffTextTrackMenuItem', OffTextTrackMenuItem);
/**
* @file text-track-button.js
*/
/** @import Player from '../../player' */
/**
* The base class for buttons that toggle specific text track types (e.g. subtitles)
*
* @extends MenuButton
*/
class TextTrackButton extends TrackButton {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options={}]
* The key/value store of player options.
*/
constructor(player, options = {}) {
options.tracks = player.textTracks();
super(player, options);
}
/**
* Create a menu item for each text track
*
* @param {TextTrackMenuItem[]} [items=[]]
* Existing array of items to use during creation
*
* @return {TextTrackMenuItem[]}
* Array of menu items that were created
*/
createItems(items = [], TrackMenuItem = TextTrackMenuItem) {
// Label is an override for the [track] off label
// Used to localise captions/subtitles
let label;
if (this.label_) {
label = `${this.label_} off`;
}
// Add an OFF menu item to turn all tracks off
items.push(new OffTextTrackMenuItem(this.player_, {
kinds: this.kinds_,
kind: this.kind_,
label
}));
this.hideThreshold_ += 1;
const tracks = this.player_.textTracks();
if (!Array.isArray(this.kinds_)) {
this.kinds_ = [this.kind_];
}
for (let i = 0; i < tracks.length; i++) {
const track = tracks[i];
// only add tracks that are of an appropriate kind and have a label
if (this.kinds_.indexOf(track.kind) > -1) {
const item = new TrackMenuItem(this.player_, {
track,
kinds: this.kinds_,
kind: this.kind_,
// MenuItem is selectable
selectable: true,
// MenuItem is NOT multiSelectable (i.e. only one can be marked "selected" at a time)
multiSelectable: false
});
item.addClass(`vjs-${track.kind}-menu-item`);
items.push(item);
}
}
return items;
}
}
Component$1.registerComponent('TextTrackButton', TextTrackButton);
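// Illustrative sketch (not part of the Video.js source): the data that
// createItems() above iterates over. Each text track whose kind matches the
// button becomes a TextTrackMenuItem, and clicking an item sets track.mode to
// 'showing' (see TextTrackMenuItem#handleClick). examplePlayer and the caption
// file URL are hypothetical; the function is never invoked in this bundle.
function exampleAddCaptionsTrack(examplePlayer) {
  examplePlayer.addRemoteTextTrack({
    kind: 'captions',
    src: 'captions.en.vtt',
    srclang: 'en',
    label: 'English'
  }, false);
}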
/**
* @file chapters-track-menu-item.js
*/
/** @import Player from '../../player' */
/**
* The chapter track menu item
*
* @extends MenuItem
*/
class ChaptersTrackMenuItem extends MenuItem {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
const track = options.track;
const cue = options.cue;
const currentTime = player.currentTime();
// Modify options for parent MenuItem class's init.
options.selectable = true;
options.multiSelectable = false;
options.label = cue.text;
options.selected = cue.startTime <= currentTime && currentTime < cue.endTime;
super(player, options);
this.track = track;
this.cue = cue;
}
/**
* This gets called when a `ChaptersTrackMenuItem` is "clicked". See
* {@link ClickableComponent} for more detailed information on what a click can be.
*
* @param {Event} [event]
* The `keydown`, `tap`, or `click` event that caused this function to be
* called.
*
* @listens tap
* @listens click
*/
handleClick(event) {
super.handleClick();
this.player_.currentTime(this.cue.startTime);
}
}
Component$1.registerComponent('ChaptersTrackMenuItem', ChaptersTrackMenuItem);
/**
* @file chapters-button.js
*/
/** @import Player from '../../player' */
/** @import Menu from '../../menu/menu' */
/** @import TextTrack from '../../tracks/text-track' */
/** @import TextTrackMenuItem from '../text-track-controls/text-track-menu-item' */
/**
* The button component for toggling and selecting chapters.
* Chapters behave quite differently from other text tracks:
* their cues are navigation points rather than alternative-language text.
*
* @extends TextTrackButton
*/
class ChaptersButton extends TextTrackButton {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {Function} [ready]
* The function to call when this function is ready.
*/
constructor(player, options, ready) {
super(player, options, ready);
this.setIcon('chapters');
this.selectCurrentItem_ = () => {
this.items.forEach(item => {
item.selected(this.track_.activeCues[0] === item.cue);
});
};
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-chapters-button ${super.buildCSSClass()}`;
}
buildWrapperCSSClass() {
return `vjs-chapters-button ${super.buildWrapperCSSClass()}`;
}
/**
* Update the menu based on the current state of its items.
*
* @param {Event} [event]
* An event that triggered this function to run.
*
* @listens TextTrackList#addtrack
* @listens TextTrackList#removetrack
* @listens TextTrackList#change
*/
update(event) {
if (event && event.track && event.track.kind !== 'chapters') {
return;
}
const track = this.findChaptersTrack();
if (track !== this.track_) {
this.setTrack(track);
super.update();
} else if (!this.items || track && track.cues && track.cues.length !== this.items.length) {
// Update the menu initially or if the number of cues has changed since set
super.update();
}
}
/**
* Set the currently selected track for the chapters button.
*
* @param {TextTrack} track
* The new track to select. Nothing will change if this is the currently selected
* track.
*/
setTrack(track) {
if (this.track_ === track) {
return;
}
if (!this.updateHandler_) {
this.updateHandler_ = this.update.bind(this);
}
// here this.track_ refers to the old track instance
if (this.track_) {
const remoteTextTrackEl = this.player_.remoteTextTrackEls().getTrackElementByTrack_(this.track_);
if (remoteTextTrackEl) {
remoteTextTrackEl.removeEventListener('load', this.updateHandler_);
}
this.track_.removeEventListener('cuechange', this.selectCurrentItem_);
this.track_ = null;
}
this.track_ = track;
// here this.track_ refers to the new track instance
if (this.track_) {
this.track_.mode = 'hidden';
const remoteTextTrackEl = this.player_.remoteTextTrackEls().getTrackElementByTrack_(this.track_);
if (remoteTextTrackEl) {
remoteTextTrackEl.addEventListener('load', this.updateHandler_);
}
this.track_.addEventListener('cuechange', this.selectCurrentItem_);
}
}
/**
* Find the track object that is currently in use by this ChaptersButton
*
* @return {TextTrack|undefined}
* The current track or undefined if none was found.
*/
findChaptersTrack() {
const tracks = this.player_.textTracks() || [];
for (let i = tracks.length - 1; i >= 0; i--) {
// We will always choose the last track as our chaptersTrack
const track = tracks[i];
if (track.kind === this.kind_) {
return track;
}
}
}
/**
* Get the caption for the ChaptersButton based on the track label. This will also
* use the current track's localized kind as a fallback if a label does not exist.
*
* @return {string}
* The track's current label or the localized track kind.
*/
getMenuCaption() {
if (this.track_ && this.track_.label) {
return this.track_.label;
}
return this.localize(toTitleCase$1(this.kind_));
}
/**
* Create menu from chapter track
*
* @return {Menu}
* New menu for the chapter buttons
*/
createMenu() {
this.options_.title = this.getMenuCaption();
return super.createMenu();
}
/**
* Create a menu item for each text track
*
* @return {TextTrackMenuItem[]}
* Array of menu items
*/
createItems() {
const items = [];
if (!this.track_) {
return items;
}
const cues = this.track_.cues;
if (!cues) {
return items;
}
for (let i = 0, l = cues.length; i < l; i++) {
const cue = cues[i];
const mi = new ChaptersTrackMenuItem(this.player_, {
track: this.track_,
cue
});
items.push(mi);
}
return items;
}
}
/**
* `kind` of TextTrack to look for to associate it with this menu.
*
* @type {string}
* @private
*/
ChaptersButton.prototype.kind_ = 'chapters';
/**
* The text that should display over the `ChaptersButton`s controls. Added for localization.
*
* @type {string}
* @protected
*/
ChaptersButton.prototype.controlText_ = 'Chapters';
Component$1.registerComponent('ChaptersButton', ChaptersButton);
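// Illustrative sketch (not part of the Video.js source): how a chapters menu is
// typically populated. A remote text track of kind "chapters" is added; once its
// cues load, the ChaptersButton above finds it via findChaptersTrack() and
// rebuilds the menu in update(). The WebVTT URL and examplePlayer are
// hypothetical; the function is never invoked in this bundle.
function exampleAddChaptersTrack(examplePlayer) {
  examplePlayer.addRemoteTextTrack({
    kind: 'chapters',
    src: 'chapters.vtt',
    srclang: 'en',
    label: 'Chapters',
    default: true
  }, false);
}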
/**
* @file descriptions-button.js
*/
/** @import Player from '../../player' */
/**
* The button component for toggling and selecting descriptions
*
* @extends TextTrackButton
*/
class DescriptionsButton extends TextTrackButton {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {Function} [ready]
* The function to call when this component is ready.
*/
constructor(player, options, ready) {
super(player, options, ready);
this.setIcon('audio-description');
const tracks = player.textTracks();
const changeHandler = bind_(this, this.handleTracksChange);
tracks.addEventListener('change', changeHandler);
this.on('dispose', function () {
tracks.removeEventListener('change', changeHandler);
});
}
/**
* Handle text track change
*
* @param {Event} event
* The event that caused this function to run
*
* @listens TextTrackList#change
*/
handleTracksChange(event) {
const tracks = this.player().textTracks();
let disabled = false;
// Check whether a track of a different kind is showing
for (let i = 0, l = tracks.length; i < l; i++) {
const track = tracks[i];
if (track.kind !== this.kind_ && track.mode === 'showing') {
disabled = true;
break;
}
}
// If another track is showing, disable this menu button
if (disabled) {
this.disable();
} else {
this.enable();
}
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-descriptions-button ${super.buildCSSClass()}`;
}
buildWrapperCSSClass() {
return `vjs-descriptions-button ${super.buildWrapperCSSClass()}`;
}
}
/**
* `kind` of TextTrack to look for to associate it with this menu.
*
* @type {string}
* @private
*/
DescriptionsButton.prototype.kind_ = 'descriptions';
/**
* The text that should display over the `DescriptionsButton`s controls. Added for localization.
*
* @type {string}
* @protected
*/
DescriptionsButton.prototype.controlText_ = 'Descriptions';
Component$1.registerComponent('DescriptionsButton', DescriptionsButton);
/**
* @file subtitles-button.js
*/
/** @import Player from '../../player' */
/**
* The button component for toggling and selecting subtitles
*
* @extends TextTrackButton
*/
class SubtitlesButton extends TextTrackButton {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {Function} [ready]
* The function to call when this component is ready.
*/
constructor(player, options, ready) {
super(player, options, ready);
this.setIcon('subtitles');
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-subtitles-button ${super.buildCSSClass()}`;
}
buildWrapperCSSClass() {
return `vjs-subtitles-button ${super.buildWrapperCSSClass()}`;
}
}
/**
* `kind` of TextTrack to look for to associate it with this menu.
*
* @type {string}
* @private
*/
SubtitlesButton.prototype.kind_ = 'subtitles';
/**
* The text that should display over the `SubtitlesButton`s controls. Added for localization.
*
* @type {string}
* @protected
*/
SubtitlesButton.prototype.controlText_ = 'Subtitles';
Component$1.registerComponent('SubtitlesButton', SubtitlesButton);
/**
* @file caption-settings-menu-item.js
*/
/** @import Player from '../../player' */
/**
* The menu item for caption track settings menu
*
* @extends TextTrackMenuItem
*/
class CaptionSettingsMenuItem extends TextTrackMenuItem {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
options.track = {
player,
kind: options.kind,
label: options.kind + ' settings',
selectable: false,
default: false,
mode: 'disabled'
};
// CaptionSettingsMenuItem has no concept of 'selected'
options.selectable = false;
options.name = 'CaptionSettingsMenuItem';
super(player, options);
this.addClass('vjs-texttrack-settings');
this.controlText(', opens ' + options.kind + ' settings dialog');
}
/**
* This gets called when a `CaptionSettingsMenuItem` is "clicked". See
* {@link ClickableComponent} for more detailed information on what a click can be.
*
* @param {Event} [event]
* The `keydown`, `tap`, or `click` event that caused this function to be
* called.
*
* @listens tap
* @listens click
*/
handleClick(event) {
this.player().getChild('textTrackSettings').open();
}
/**
* Update control text and label on languagechange
*/
handleLanguagechange() {
this.$('.vjs-menu-item-text').textContent = this.player_.localize(this.options_.kind + ' settings');
super.handleLanguagechange();
}
}
Component$1.registerComponent('CaptionSettingsMenuItem', CaptionSettingsMenuItem);
/**
* @file captions-button.js
*/
/** @import Player from '../../player' */
/**
* The button component for toggling and selecting captions
*
* @extends TextTrackButton
*/
class CaptionsButton extends TextTrackButton {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {Function} [ready]
* The function to call when this component is ready.
*/
constructor(player, options, ready) {
super(player, options, ready);
this.setIcon('captions');
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-captions-button ${super.buildCSSClass()}`;
}
buildWrapperCSSClass() {
return `vjs-captions-button ${super.buildWrapperCSSClass()}`;
}
/**
* Create caption menu items
*
* @return {CaptionSettingsMenuItem[]}
* The array of current menu items.
*/
createItems() {
const items = [];
if (!(this.player().tech_ && this.player().tech_.featuresNativeTextTracks) && this.player().getChild('textTrackSettings')) {
items.push(new CaptionSettingsMenuItem(this.player_, {
kind: this.kind_
}));
this.hideThreshold_ += 1;
}
return super.createItems(items);
}
}
/**
* `kind` of TextTrack to look for to associate it with this menu.
*
* @type {string}
* @private
*/
CaptionsButton.prototype.kind_ = 'captions';
/**
* The text that should display over the `CaptionsButton`s controls. Added for localization.
*
* @type {string}
* @protected
*/
CaptionsButton.prototype.controlText_ = 'Captions';
Component$1.registerComponent('CaptionsButton', CaptionsButton);
/**
* @file subs-caps-menu-item.js
*/
/**
* SubsCapsMenuItem has a [cc] icon to distinguish captions from subtitles
* in the SubsCapsMenu.
*
* @extends TextTrackMenuItem
*/
class SubsCapsMenuItem extends TextTrackMenuItem {
createEl(type, props, attrs) {
const el = super.createEl(type, props, attrs);
const parentSpan = el.querySelector('.vjs-menu-item-text');
if (this.options_.track.kind === 'captions') {
if (this.player_.options_.experimentalSvgIcons) {
this.setIcon('captions', el);
} else {
parentSpan.appendChild(createEl('span', {
className: 'vjs-icon-placeholder'
}, {
'aria-hidden': true
}));
}
parentSpan.appendChild(createEl('span', {
className: 'vjs-control-text',
// space added as the text will visually flow with the
// label
textContent: ` ${this.localize('Captions')}`
}));
}
return el;
}
}
Component$1.registerComponent('SubsCapsMenuItem', SubsCapsMenuItem);
/**
* @file sub-caps-button.js
*/
/** @import Player from '../../player' */
/**
* The button component for toggling and selecting captions and/or subtitles
*
* @extends TextTrackButton
*/
class SubsCapsButton extends TextTrackButton {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {Function} [ready]
* The function to call when this component is ready.
*/
constructor(player, options = {}) {
super(player, options);
// Although North America uses "captions" in most cases for
// "captions and subtitles" other locales use "subtitles"
this.label_ = 'subtitles';
this.setIcon('subtitles');
if (['en', 'en-us', 'en-ca', 'fr-ca'].indexOf(this.player_.language_) > -1) {
this.label_ = 'captions';
this.setIcon('captions');
}
this.menuButton_.controlText(toTitleCase$1(this.label_));
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-subs-caps-button ${super.buildCSSClass()}`;
}
buildWrapperCSSClass() {
return `vjs-subs-caps-button ${super.buildWrapperCSSClass()}`;
}
/**
* Create caption/subtitles menu items
*
* @return {CaptionSettingsMenuItem[]}
* The array of current menu items.
*/
createItems() {
let items = [];
if (!(this.player().tech_ && this.player().tech_.featuresNativeTextTracks) && this.player().getChild('textTrackSettings')) {
items.push(new CaptionSettingsMenuItem(this.player_, {
kind: this.label_
}));
this.hideThreshold_ += 1;
}
items = super.createItems(items, SubsCapsMenuItem);
return items;
}
}
/**
* `kind`s of TextTrack to look for to associate it with this menu.
*
* @type {array}
* @private
*/
SubsCapsButton.prototype.kinds_ = ['captions', 'subtitles'];
/**
* The text that should display over the `SubsCapsButton`s controls.
*
* @type {string}
* @protected
*/
SubsCapsButton.prototype.controlText_ = 'Subtitles';
Component$1.registerComponent('SubsCapsButton', SubsCapsButton);
/**
* @file audio-track-menu-item.js
*/
/** @import Player from '../../player' */
/**
* An {@link AudioTrack} {@link MenuItem}
*
* @extends MenuItem
*/
class AudioTrackMenuItem extends MenuItem {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
const track = options.track;
const tracks = player.audioTracks();
// Modify options for parent MenuItem class's init.
options.label = track.label || track.language || 'Unknown';
options.selected = track.enabled;
super(player, options);
this.track = track;
this.addClass(`vjs-${track.kind}-menu-item`);
const changeHandler = (...args) => {
this.handleTracksChange.apply(this, args);
};
tracks.addEventListener('change', changeHandler);
this.on('dispose', () => {
tracks.removeEventListener('change', changeHandler);
});
}
createEl(type, props, attrs) {
const el = super.createEl(type, props, attrs);
const parentSpan = el.querySelector('.vjs-menu-item-text');
if (['main-desc', 'descriptions'].indexOf(this.options_.track.kind) >= 0) {
parentSpan.appendChild(createEl('span', {
className: 'vjs-icon-placeholder'
}, {
'aria-hidden': true
}));
parentSpan.appendChild(createEl('span', {
className: 'vjs-control-text',
textContent: ' ' + this.localize('Descriptions')
}));
}
return el;
}
/**
* This gets called when an `AudioTrackMenuItem` is "clicked". See {@link ClickableComponent}
* for more detailed information on what a click can be.
*
* @param {Event} [event]
* The `keydown`, `tap`, or `click` event that caused this function to be
* called.
*
* @listens tap
* @listens click
*/
handleClick(event) {
super.handleClick(event);
// the audio track list will automatically toggle other tracks
// off for us.
this.track.enabled = true;
// when native audio tracks are used, we want to make sure that other tracks are turned off
if (this.player_.tech_.featuresNativeAudioTracks) {
const tracks = this.player_.audioTracks();
for (let i = 0; i < tracks.length; i++) {
const track = tracks[i];
// skip the current track since we enabled it above
if (track === this.track) {
continue;
}
track.enabled = track === this.track;
}
}
}
/**
* Handle any {@link AudioTrack} change.
*
* @param {Event} [event]
* The {@link AudioTrackList#change} event that caused this to run.
*
* @listens AudioTrackList#change
*/
handleTracksChange(event) {
this.selected(this.track.enabled);
}
}
Component$1.registerComponent('AudioTrackMenuItem', AudioTrackMenuItem);
/**
* @file audio-track-button.js
*/
/**
* The base class for buttons that toggle specific {@link AudioTrack} types.
*
* @extends TrackButton
*/
class AudioTrackButton extends TrackButton {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options={}]
* The key/value store of player options.
*/
constructor(player, options = {}) {
options.tracks = player.audioTracks();
super(player, options);
this.setIcon('audio');
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-audio-button ${super.buildCSSClass()}`;
}
buildWrapperCSSClass() {
return `vjs-audio-button ${super.buildWrapperCSSClass()}`;
}
/**
* Create a menu item for each audio track
*
* @param {AudioTrackMenuItem[]} [items=[]]
* An array of existing menu items to use.
*
* @return {AudioTrackMenuItem[]}
* An array of menu items
*/
createItems(items = []) {
// if there's only one audio track, there's no point in showing it
this.hideThreshold_ = 1;
const tracks = this.player_.audioTracks();
for (let i = 0; i < tracks.length; i++) {
const track = tracks[i];
items.push(new AudioTrackMenuItem(this.player_, {
track,
// MenuItem is selectable
selectable: true,
// MenuItem is NOT multiSelectable (i.e. only one can be marked "selected" at a time)
multiSelectable: false
}));
}
return items;
}
}
/**
* The text that should display over the `AudioTrackButton`s controls. Added for localization.
*
* @type {string}
* @protected
*/
AudioTrackButton.prototype.controlText_ = 'Audio Track';
Component$1.registerComponent('AudioTrackButton', AudioTrackButton);
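// Illustrative sketch (not part of the Video.js source): the list the
// AudioTrackButton above is built from. Enabling one AudioTrack fires `change`
// on the AudioTrackList, which AudioTrackMenuItem#handleTracksChange listens
// for to update the selected state. examplePlayer is hypothetical and the
// function is never invoked in this bundle.
function exampleSelectAudioTrackByLanguage(examplePlayer, language) {
  const tracks = examplePlayer.audioTracks();
  for (let i = 0; i < tracks.length; i++) {
    // Enabling a track disables the others and dispatches `change`,
    // exactly as a click on the corresponding AudioTrackMenuItem would.
    tracks[i].enabled = tracks[i].language === language;
  }
}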
/**
* @file playback-rate-menu-item.js
*/
/** @import Player from '../../player' */
/**
* The specific menu item type for selecting a playback rate.
*
* @extends MenuItem
*/
class PlaybackRateMenuItem extends MenuItem {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
const label = options.rate;
const rate = parseFloat(label, 10);
// Modify options for parent MenuItem class's init.
options.label = label;
options.selected = rate === player.playbackRate();
options.selectable = true;
options.multiSelectable = false;
super(player, options);
this.label = label;
this.rate = rate;
this.on(player, 'ratechange', e => this.update(e));
}
/**
* This gets called when a `PlaybackRateMenuItem` is "clicked". See
* {@link ClickableComponent} for more detailed information on what a click can be.
*
* @param {Event} [event]
* The `keydown`, `tap`, or `click` event that caused this function to be
* called.
*
* @listens tap
* @listens click
*/
handleClick(event) {
super.handleClick();
this.player().playbackRate(this.rate);
}
/**
* Update the PlaybackRateMenuItem when the playbackrate changes.
*
* @param {Event} [event]
* The `ratechange` event that caused this function to run.
*
* @listens Player#ratechange
*/
update(event) {
this.selected(this.player().playbackRate() === this.rate);
}
}
/**
* The element type used for the `PlaybackRateMenuItem`s content element.
*
* @type {string}
* @private
*/
PlaybackRateMenuItem.prototype.contentElType = 'button';
Component$1.registerComponent('PlaybackRateMenuItem', PlaybackRateMenuItem);
/**
* @file playback-rate-menu-button.js
*/
/** @import Player from '../../player' */
/**
* The component for controlling the playback rate.
*
* @extends MenuButton
*/
class PlaybackRateMenuButton extends MenuButton {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.menuButton_.el_.setAttribute('aria-describedby', this.labelElId_);
this.updateVisibility();
this.updateLabel();
this.on(player, 'loadstart', e => this.updateVisibility(e));
this.on(player, 'ratechange', e => this.updateLabel(e));
this.on(player, 'playbackrateschange', e => this.handlePlaybackRateschange(e));
}
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
const el = super.createEl();
this.labelElId_ = 'vjs-playback-rate-value-label-' + this.id_;
this.labelEl_ = createEl('div', {
className: 'vjs-playback-rate-value',
id: this.labelElId_,
textContent: '1x'
});
el.appendChild(this.labelEl_);
return el;
}
dispose() {
this.labelEl_ = null;
super.dispose();
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-playback-rate ${super.buildCSSClass()}`;
}
buildWrapperCSSClass() {
return `vjs-playback-rate ${super.buildWrapperCSSClass()}`;
}
/**
* Create the list of menu items. Specific to each subclass.
*
*/
createItems() {
const rates = this.playbackRates();
const items = [];
for (let i = rates.length - 1; i >= 0; i--) {
items.push(new PlaybackRateMenuItem(this.player(), {
rate: rates[i] + 'x'
}));
}
return items;
}
/**
* On playbackrateschange, update the menu to account for the new items.
*
* @listens Player#playbackrateschange
*/
handlePlaybackRateschange(event) {
this.update();
}
/**
* Get possible playback rates
*
* @return {Array}
* All possible playback rates
*/
playbackRates() {
const player = this.player();
return player.playbackRates && player.playbackRates() || [];
}
/**
* Get whether playback rates is supported by the tech
* and an array of playback rates exists
*
* @return {boolean}
* Whether changing playback rate is supported
*/
playbackRateSupported() {
return this.player().tech_ && this.player().tech_.featuresPlaybackRate && this.playbackRates() && this.playbackRates().length > 0;
}
/**
* Hide playback rate controls when there are no playback rate options to select
*
* @param {Event} [event]
* The event that caused this function to run.
*
* @listens Player#loadstart
*/
updateVisibility(event) {
if (this.playbackRateSupported()) {
this.removeClass('vjs-hidden');
} else {
this.addClass('vjs-hidden');
}
}
/**
* Update button label when rate changed
*
* @param {Event} [event]
* The event that caused this function to run.
*
* @listens Player#ratechange
*/
updateLabel(event) {
if (this.playbackRateSupported()) {
this.labelEl_.textContent = this.player().playbackRate() + 'x';
}
}
}
/**
* The text that should display over the `PlaybackRateMenuButton`s controls.
*
* Added for localization.
*
* @type {string}
* @protected
*/
PlaybackRateMenuButton.prototype.controlText_ = 'Playback Rate';
Component$1.registerComponent('PlaybackRateMenuButton', PlaybackRateMenuButton);
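// Illustrative sketch (not part of the Video.js source): how the
// PlaybackRateMenuButton above is typically driven. The menu items come from
// the player's playbackRates() getter/setter, and the `ratechange` event keeps
// the visible "1x" label in sync via updateLabel(). examplePlayer is a
// hypothetical, already-initialized player; the function is never invoked here.
function examplePlaybackRateSetup(examplePlayer) {
  // Rates offered in the menu; createItems() above builds one
  // PlaybackRateMenuItem per entry (iterating in reverse order).
  examplePlayer.playbackRates([0.5, 1, 1.5, 2]);
  // Selecting the "1.5x" item is equivalent to this call; the resulting
  // `ratechange` makes updateLabel() rewrite the label text.
  examplePlayer.playbackRate(1.5);
}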
/**
* @file spacer.js
*/
/**
* Just an empty spacer element that can be used as an append point for plugins, etc.
* Also can be used to create space between elements when necessary.
*
* @extends Component
*/
class Spacer extends Component$1 {
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-spacer ${super.buildCSSClass()}`;
}
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl(tag = 'div', props = {}, attributes = {}) {
if (!props.className) {
props.className = this.buildCSSClass();
}
return super.createEl(tag, props, attributes);
}
}
Component$1.registerComponent('Spacer', Spacer);
/**
* @file custom-control-spacer.js
*/
/**
* Spacer specifically meant to be used as an insertion point for new plugins, etc.
*
* @extends Spacer
*/
class CustomControlSpacer extends Spacer {
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*/
buildCSSClass() {
return `vjs-custom-control-spacer ${super.buildCSSClass()}`;
}
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
return super.createEl('div', {
className: this.buildCSSClass(),
// No-flex/table-cell mode requires there be some content
// in the cell to fill the remaining space of the table.
textContent: '\u00a0'
});
}
}
Component$1.registerComponent('CustomControlSpacer', CustomControlSpacer);
/**
* @file control-bar.js
*/
/**
* Container of main controls.
*
* @extends Component
*/
class ControlBar extends Component$1 {
/**
* Create the `Component`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
return super.createEl('div', {
className: 'vjs-control-bar',
dir: 'ltr'
});
}
}
/**
* Default options for `ControlBar`
*
* @type {Object}
* @private
*/
ControlBar.prototype.options_ = {
children: ['playToggle', 'skipBackward', 'skipForward', 'volumePanel', 'currentTimeDisplay', 'timeDivider', 'durationDisplay', 'progressControl', 'liveDisplay', 'seekToLive', 'remainingTimeDisplay', 'customControlSpacer', 'playbackRateMenuButton', 'chaptersButton', 'descriptionsButton', 'subsCapsButton', 'audioTrackButton', 'pictureInPictureToggle', 'fullscreenToggle']
};
Component$1.registerComponent('ControlBar', ControlBar);
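// Illustrative sketch (not part of the Video.js source): the default children
// listed above can be trimmed or reordered through the standard `controlBar`
// player option. exampleControlBarOptions is a hypothetical options object that
// would be passed when creating a player; nothing here executes in this bundle.
const exampleControlBarOptions = {
  controlBar: {
    // Keep only a minimal subset of the default ControlBar children.
    children: ['playToggle', 'progressControl', 'volumePanel', 'fullscreenToggle']
  }
};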
/**
* @file error-display.js
*/
/** @import Player from './player' */
/**
* A display that indicates an error has occurred. This means that the video
* is unplayable.
*
* @extends ModalDialog
*/
class ErrorDisplay extends ModalDialog {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
super(player, options);
this.on(player, 'error', e => {
this.open(e);
});
}
/**
* Builds the default DOM `className`.
*
* @return {string}
* The DOM `className` for this object.
*
* @deprecated Since version 5.
*/
buildCSSClass() {
return `vjs-error-display ${super.buildCSSClass()}`;
}
/**
* Gets the localized error message based on the `Player`s error.
*
* @return {string}
* The `Player`s error message localized or an empty string.
*/
content() {
const error = this.player().error();
return error ? this.localize(error.message) : '';
}
}
/**
* The default options for an `ErrorDisplay`.
*
* @private
*/
ErrorDisplay.prototype.options_ = Object.assign({}, ModalDialog.prototype.options_, {
pauseOnOpen: false,
fillAlways: true,
temporary: false,
uncloseable: true
});
Component$1.registerComponent('ErrorDisplay', ErrorDisplay);
/** @import Player from './player' */
/** @import { ContentDescriptor } from '../utils/dom' */
/**
* Creates DOM element of 'select' & its options.
*
* @extends Component
*/
class TextTrackSelect extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {ContentDescriptor} [options.content=undefined]
* Provide customized content for this modal.
*
* @param {string} [options.legendId]
* Text used as part of the string for the aria-labelledby attribute.
*
* @param {string} [options.id]
* Text used as part of the string for the aria-labelledby attribute.
*
* @param {Array} [options.SelectOptions]
* Array that contains the value & textContent for each of the
* option elements.
*/
constructor(player, options = {}) {
super(player, options);
this.el_.setAttribute('aria-labelledby', this.selectLabelledbyIds);
}
/**
* Create the `TextTrackSelect`'s DOM element
*
* @return {Element}
* The DOM element that gets created.
*/
createEl() {
this.selectLabelledbyIds = [this.options_.legendId, this.options_.labelId].join(' ').trim();
// Create select & inner options
const selectoptions = createEl('select', {
id: this.options_.id
}, {}, this.options_.SelectOptions.map(optionText => {
// Constructs an id for the <option>.
// For the colour settings that have two <select>s with a <label> each, generates an id based off the label value
// For font size/family and edge style with one <select> and no <label>, generates an id with a guid
const optionId = (this.options_.labelId ? this.options_.labelId : `vjs-track-option-${newGUID()}`) + '-' + optionText[1].replace(/\W+/g, '');
const option = createEl('option', {
id: optionId,
value: this.localize(optionText[0]),
textContent: optionText[1]
});
option.setAttribute('aria-labelledby', `${this.selectLabelledbyIds} ${optionId}`);
return option;
}));
return selectoptions;
}
}
Component$1.registerComponent('TextTrackSelect', TextTrackSelect);
/** @import Player from './player' */
/** @import { ContentDescriptor } from '../utils/dom' */
/**
* Creates a fieldset section of 'TextTrackSettings'.
* Manages two versions of fieldsets, one for the 'colors' type
* & the other for 'font'. The component adds different DOM elements
* to that fieldset depending on the type.
*
* @extends Component
*/
class TextTrackFieldset extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {ContentDescriptor} [options.content=undefined]
* Provide customized content for this modal.
*
* @param {string} [options.legendId]
* Text used as part of the string for the aria-labelledby attribute.
* It is passed to 'TextTrackSelect'.
*
* @param {string} [options.id]
* Text used as part of the string for the aria-labelledby attribute.
* It is passed to 'TextTrackSelect'.
*
* @param {string} [options.legendText]
* A text to use as the text content of the legend element.
*
* @param {Array} [options.selects]
* Array that contains the selects that are used to create 'select'
* components.
*
* @param {Array} [options.SelectOptions]
* Array that contains the value & textContent for each of the
* option elements; it is passed to 'TextTrackSelect'.
*
* @param {string} [options.type]
* Conditions if some DOM elements will be added to the fieldset
* component.
*
* @param {Object} [options.selectConfigs]
* Object with the following properties that are the select configurations:
* backgroundColor, backgroundOpacity, color, edgeStyle, fontFamily,
* fontPercent, textOpacity, windowColor, windowOpacity.
* These properties are used to configure the 'TextTrackSelect' Component.
*/
constructor(player, options = {}) {
super(player, options);
// Add Components & DOM Elements
const legendElement = createEl('legend', {
textContent: this.localize(this.options_.legendText),
id: this.options_.legendId
});
this.el().appendChild(legendElement);
const selects = this.options_.selects;
// Iterate array of selects to create 'selects' components
for (const i of selects) {
const selectConfig = this.options_.selectConfigs[i];
const selectClassName = selectConfig.className;
const id = selectConfig.id.replace('%s', this.options_.id_);
let span = null;
const guid = `vjs_select_${newGUID()}`;
// Conditionally create span to add on the component
if (this.options_.type === 'colors') {
span = createEl('span', {
className: selectClassName
});
const label = createEl('label', {
id,
className: 'vjs-label',
textContent: selectConfig.label
});
label.setAttribute('for', guid);
span.appendChild(label);
}
const textTrackSelect = new TextTrackSelect(player, {
SelectOptions: selectConfig.options,
legendId: this.options_.legendId,
id: guid,
labelId: id
});
this.addChild(textTrackSelect);
// Conditionally append to 'select' component to conditionally created span
if (this.options_.type === 'colors') {
span.appendChild(textTrackSelect.el());
this.el().appendChild(span);
}
}
}
/**
* Create the `TextTrackFieldset`'s DOM element
*
* @return {Element}
* The DOM element that gets created.
*/
createEl() {
const el = createEl('fieldset', {
// Prefixing classes of elements within a player with "vjs-"
// is a convention used in Video.js.
className: this.options_.className
});
return el;
}
}
Component$1.registerComponent('TextTrackFieldset', TextTrackFieldset);
/** @import Player from './player' */
/** @import { ContentDescriptor } from '../utils/dom' */
/**
* The component 'TextTrackSettingsColors' displays a set of 'fieldsets'
* using the component 'TextTrackFieldset'.
*
* @extends Component
*/
class TextTrackSettingsColors extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {ContentDescriptor} [options.content=undefined]
* Provide customized content for this modal.
*
* @param {Array} [options.fieldSets]
* Array that contains the configurations for the selects.
*
* @param {Object} [options.selectConfigs]
* Object with the following properties that are the select configurations:
* backgroundColor, backgroundOpacity, color, edgeStyle, fontFamily,
* fontPercent, textOpacity, windowColor, windowOpacity.
* It is passed to 'TextTrackFieldset'.
*/
constructor(player, options = {}) {
super(player, options);
const id_ = this.options_.textTrackComponentid;
// createElFgColor_
const ElFgColorFieldset = new TextTrackFieldset(player, {
id_,
legendId: `captions-text-legend-${id_}`,
legendText: this.localize('Text'),
className: 'vjs-fg vjs-track-setting',
selects: this.options_.fieldSets[0],
selectConfigs: this.options_.selectConfigs,
type: 'colors'
});
this.addChild(ElFgColorFieldset);
// createElBgColor_
const ElBgColorFieldset = new TextTrackFieldset(player, {
id_,
legendId: `captions-background-${id_}`,
legendText: this.localize('Text Background'),
className: 'vjs-bg vjs-track-setting',
selects: this.options_.fieldSets[1],
selectConfigs: this.options_.selectConfigs,
type: 'colors'
});
this.addChild(ElBgColorFieldset);
// createElWinColor_
const ElWinColorFieldset = new TextTrackFieldset(player, {
id_,
legendId: `captions-window-${id_}`,
legendText: this.localize('Caption Area Background'),
className: 'vjs-window vjs-track-setting',
selects: this.options_.fieldSets[2],
selectConfigs: this.options_.selectConfigs,
type: 'colors'
});
this.addChild(ElWinColorFieldset);
}
/**
* Create the `TextTrackSettingsColors`'s DOM element
*
* @return {Element}
* The DOM element that gets created.
*/
createEl() {
const el = createEl('div', {
className: 'vjs-track-settings-colors'
});
return el;
}
}
Component$1.registerComponent('TextTrackSettingsColors', TextTrackSettingsColors);
/** @import Player from './player' */
/** @import { ContentDescriptor } from '../utils/dom' */
/**
* The component 'TextTrackSettingsFont' displays a set of 'fieldsets'
* using the component 'TextTrackFieldset'.
*
* @extends Component
*/
class TextTrackSettingsFont extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {ContentDescriptor} [options.content=undefined]
* Provide customized content for this modal.
*
* @param {Array} [options.fieldSets]
* Array that contains the configurations for the selects.
*
* @param {Object} [options.selectConfigs]
* Object with the following properties that are the select configurations:
* backgroundColor, backgroundOpacity, color, edgeStyle, fontFamily,
* fontPercent, textOpacity, windowColor, windowOpacity.
* It is passed to 'TextTrackFieldset'.
*/
constructor(player, options = {}) {
super(player, options);
const id_ = this.options_.textTrackComponentid;
const ElFgColorFieldset = new TextTrackFieldset(player, {
id_,
legendId: `captions-font-size-${id_}`,
legendText: 'Font Size',
className: 'vjs-font-percent vjs-track-setting',
selects: this.options_.fieldSets[0],
selectConfigs: this.options_.selectConfigs,
type: 'font'
});
this.addChild(ElFgColorFieldset);
const ElBgColorFieldset = new TextTrackFieldset(player, {
id_,
legendId: `captions-edge-style-${id_}`,
legendText: this.localize('Text Edge Style'),
className: 'vjs-edge-style vjs-track-setting',
selects: this.options_.fieldSets[1],
selectConfigs: this.options_.selectConfigs,
type: 'font'
});
this.addChild(ElBgColorFieldset);
const ElWinColorFieldset = new TextTrackFieldset(player, {
id_,
legendId: `captions-font-family-${id_}`,
legendText: this.localize('Font Family'),
className: 'vjs-font-family vjs-track-setting',
selects: this.options_.fieldSets[2],
selectConfigs: this.options_.selectConfigs,
type: 'font'
});
this.addChild(ElWinColorFieldset);
}
/**
* Create the `TextTrackSettingsFont`'s DOM element
*
* @return {Element}
* The DOM element that gets created.
*/
createEl() {
const el = createEl('div', {
className: 'vjs-track-settings-font'
});
return el;
}
}
Component$1.registerComponent('TextTrackSettingsFont', TextTrackSettingsFont);
/**
* Reset & done buttons that the 'TextTrackSettings' modal
* uses as part of its content.
*
* 'Reset': Resets all settings on 'TextTrackSettings'.
* 'Done': Closes 'TextTrackSettings' modal.
*
* @extends Component
*/
class TrackSettingsControls extends Component$1 {
constructor(player, options = {}) {
super(player, options);
// Create DOM elements
const defaultsDescription = this.localize('restore all settings to the default values');
const resetButton = new Button(player, {
controlText: defaultsDescription,
className: 'vjs-default-button'
});
resetButton.el().classList.remove('vjs-control', 'vjs-button');
resetButton.el().textContent = this.localize('Reset');
this.addChild(resetButton);
const doneButton = new Button(player, {
controlText: defaultsDescription,
className: 'vjs-done-button'
});
// Remove unrequired style classes
doneButton.el().classList.remove('vjs-control', 'vjs-button');
doneButton.el().textContent = this.localize('Done');
this.addChild(doneButton);
}
/**
* Create the `TrackSettingsControls`'s DOM element
*
* @return {Element}
* The DOM element that gets created.
*/
createEl() {
const el = createEl('div', {
className: 'vjs-track-settings-controls'
});
return el;
}
}
Component$1.registerComponent('TrackSettingsControls', TrackSettingsControls);
/**
* @file text-track-settings.js
*/
/** @import Player from '../player' */
const LOCAL_STORAGE_KEY$1 = 'vjs-text-track-settings';
const COLOR_BLACK = ['#000', 'Black'];
const COLOR_BLUE = ['#00F', 'Blue'];
const COLOR_CYAN = ['#0FF', 'Cyan'];
const COLOR_GREEN = ['#0F0', 'Green'];
const COLOR_MAGENTA = ['#F0F', 'Magenta'];
const COLOR_RED = ['#F00', 'Red'];
const COLOR_WHITE = ['#FFF', 'White'];
const COLOR_YELLOW = ['#FF0', 'Yellow'];
const OPACITY_OPAQUE = ['1', 'Opaque'];
const OPACITY_SEMI = ['0.5', 'Semi-Transparent'];
const OPACITY_TRANS = ['0', 'Transparent'];
// Configuration for the various elements in the DOM of this component.
//
// Possible keys include:
//
// `default`:
// The default option index. Only needs to be provided if not zero.
// `parser`:
// A function which is used to parse the value from the selected option in
// a customized way.
// `selector`:
// The selector used to find the associated element.
const selectConfigs = {
backgroundColor: {
selector: '.vjs-bg-color > select',
id: 'captions-background-color-%s',
label: 'Color',
options: [COLOR_BLACK, COLOR_WHITE, COLOR_RED, COLOR_GREEN, COLOR_BLUE, COLOR_YELLOW, COLOR_MAGENTA, COLOR_CYAN],
className: 'vjs-bg-color'
},
backgroundOpacity: {
selector: '.vjs-bg-opacity > select',
id: 'captions-background-opacity-%s',
label: 'Opacity',
options: [OPACITY_OPAQUE, OPACITY_SEMI, OPACITY_TRANS],
className: 'vjs-bg-opacity vjs-opacity'
},
color: {
selector: '.vjs-text-color > select',
id: 'captions-foreground-color-%s',
label: 'Color',
options: [COLOR_WHITE, COLOR_BLACK, COLOR_RED, COLOR_GREEN, COLOR_BLUE, COLOR_YELLOW, COLOR_MAGENTA, COLOR_CYAN],
className: 'vjs-text-color'
},
edgeStyle: {
selector: '.vjs-edge-style > select',
id: '',
label: 'Text Edge Style',
options: [['none', 'None'], ['raised', 'Raised'], ['depressed', 'Depressed'], ['uniform', 'Uniform'], ['dropshadow', 'Drop shadow']]
},
fontFamily: {
selector: '.vjs-font-family > select',
id: '',
label: 'Font Family',
options: [['proportionalSansSerif', 'Proportional Sans-Serif'], ['monospaceSansSerif', 'Monospace Sans-Serif'], ['proportionalSerif', 'Proportional Serif'], ['monospaceSerif', 'Monospace Serif'], ['casual', 'Casual'], ['script', 'Script'], ['small-caps', 'Small Caps']]
},
fontPercent: {
selector: '.vjs-font-percent > select',
id: '',
label: 'Font Size',
options: [['0.50', '50%'], ['0.75', '75%'], ['1.00', '100%'], ['1.25', '125%'], ['1.50', '150%'], ['1.75', '175%'], ['2.00', '200%'], ['3.00', '300%'], ['4.00', '400%']],
default: 2,
parser: v => v === '1.00' ? null : Number(v)
},
textOpacity: {
selector: '.vjs-text-opacity > select',
id: 'captions-foreground-opacity-%s',
label: 'Opacity',
options: [OPACITY_OPAQUE, OPACITY_SEMI],
className: 'vjs-text-opacity vjs-opacity'
},
// Options for this object are defined below.
windowColor: {
selector: '.vjs-window-color > select',
id: 'captions-window-color-%s',
label: 'Color',
className: 'vjs-window-color'
},
// Options for this object are defined below.
windowOpacity: {
selector: '.vjs-window-opacity > select',
id: 'captions-window-opacity-%s',
label: 'Opacity',
options: [OPACITY_TRANS, OPACITY_SEMI, OPACITY_OPAQUE],
className: 'vjs-window-opacity vjs-opacity'
}
};
selectConfigs.windowColor.options = selectConfigs.backgroundColor.options;
/**
* Get the actual value of an option.
*
* @param {string} value
* The value to get
*
* @param {Function} [parser]
* Optional function to adjust the value.
*
* @return {*}
* - Will be `undefined` if no value exists
* - Will be `undefined` if the given value is "none".
* - Will be the actual value otherwise.
*
* @private
*/
function parseOptionValue(value, parser) {
if (parser) {
value = parser(value);
}
if (value && value !== 'none') {
return value;
}
}
/**
* Gets the value of the selected <option> element within a <select> element.
*
* @param {Element} el
* the <select> element to look in
*
* @param {Function} [parser]
* Optional function to adjust the value.
*
* @return {*}
* - Will be `undefined` if no value exists
* - Will be `undefined` if the given value is "none".
* - Will be the actual value otherwise.
*
* @private
*/
function getSelectedOptionValue(el, parser) {
const value = el.options[el.options.selectedIndex].value;
return parseOptionValue(value, parser);
}
/**
* Sets the selected <option> element within a <select> element
* based on a given value.
*
* @param {Element} el
* The <select> element to look in.
*
* @param {string} value
* the value to look for.
*
* @param {Function} [parser]
* Optional function to adjust the value before comparing.
*
* @private
*/
function setSelectedOption(el, value, parser) {
if (!value) {
return;
}
for (let i = 0; i < el.options.length; i++) {
if (parseOptionValue(el.options[i].value, parser) === value) {
el.selectedIndex = i;
break;
}
}
}
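/*
 * Illustration (not executed) of how the optional `parser` interacts with
 * these helpers, using the `fontPercent` parser defined in `selectConfigs` above:
 *
 *   const fontParser = v => v === '1.00' ? null : Number(v);
 *
 *   parseOptionValue('none');               // undefined ("none" is treated as unset)
 *   parseOptionValue('1.00', fontParser);   // undefined (the parser maps the default to null)
 *   parseOptionValue('1.25', fontParser);   // 1.25
 */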
/**
* Manipulate Text Tracks settings.
*
* @extends ModalDialog
*/
class TextTrackSettings extends ModalDialog {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*/
constructor(player, options) {
options.temporary = false;
super(player, options);
this.updateDisplay = this.updateDisplay.bind(this);
// fill the modal and pretend we have opened it
this.fill();
this.hasBeenOpened_ = this.hasBeenFilled_ = true;
this.renderModalComponents(player);
this.endDialog = createEl('p', {
className: 'vjs-control-text',
textContent: this.localize('End of dialog window.')
});
this.el().appendChild(this.endDialog);
this.setDefaults();
// Grab `persistTextTrackSettings` from the player options if not passed in child options
if (options.persistTextTrackSettings === undefined) {
this.options_.persistTextTrackSettings = this.options_.playerOptions.persistTextTrackSettings;
}
this.bindFunctionsToSelectsAndButtons();
if (this.options_.persistTextTrackSettings) {
this.restoreSettings();
}
}
renderModalComponents(player) {
const textTrackSettingsColors = new TextTrackSettingsColors(player, {
textTrackComponentid: this.id_,
selectConfigs,
fieldSets: [['color', 'textOpacity'], ['backgroundColor', 'backgroundOpacity'], ['windowColor', 'windowOpacity']]
});
this.addChild(textTrackSettingsColors);
const textTrackSettingsFont = new TextTrackSettingsFont(player, {
textTrackComponentid: this.id_,
selectConfigs,
fieldSets: [['fontPercent'], ['edgeStyle'], ['fontFamily']]
});
this.addChild(textTrackSettingsFont);
const trackSettingsControls = new TrackSettingsControls(player);
this.addChild(trackSettingsControls);
}
bindFunctionsToSelectsAndButtons() {
this.on(this.$('.vjs-done-button'), ['click', 'tap'], () => {
this.saveSettings();
this.close();
});
this.on(this.$('.vjs-default-button'), ['click', 'tap'], () => {
this.setDefaults();
this.updateDisplay();
});
each(selectConfigs, config => {
this.on(this.$(config.selector), 'change', this.updateDisplay);
});
}
dispose() {
this.endDialog = null;
super.dispose();
}
label() {
return this.localize('Caption Settings Dialog');
}
description() {
return this.localize('Beginning of dialog window. Escape will cancel and close the window.');
}
buildCSSClass() {
return super.buildCSSClass() + ' vjs-text-track-settings';
}
/**
* Gets an object of text track settings (or null).
*
* @return {Object}
* An object with config values parsed from the DOM or localStorage.
*/
getValues() {
return reduce(selectConfigs, (accum, config, key) => {
const value = getSelectedOptionValue(this.$(config.selector), config.parser);
if (value !== undefined) {
accum[key] = value;
}
return accum;
}, {});
}
/**
* Sets text track settings from an object of values.
*
* @param {Object} values
* An object with config values parsed from the DOM or localStorage.
*/
setValues(values) {
each(selectConfigs, (config, key) => {
setSelectedOption(this.$(config.selector), values[key], config.parser);
});
}
/**
* Sets all `<select>` elements to their default values.
*/
setDefaults() {
each(selectConfigs, config => {
const index = config.hasOwnProperty('default') ? config.default : 0;
this.$(config.selector).selectedIndex = index;
});
}
/**
* Restore texttrack settings from localStorage
*/
restoreSettings() {
let values;
try {
values = JSON.parse(window$1.localStorage.getItem(LOCAL_STORAGE_KEY$1));
} catch (err) {
log$1.warn(err);
}
if (values) {
this.setValues(values);
}
}
/**
* Save text track settings to localStorage
*/
saveSettings() {
if (!this.options_.persistTextTrackSettings) {
return;
}
const values = this.getValues();
try {
if (Object.keys(values).length) {
window$1.localStorage.setItem(LOCAL_STORAGE_KEY$1, JSON.stringify(values));
} else {
window$1.localStorage.removeItem(LOCAL_STORAGE_KEY$1);
}
} catch (err) {
log$1.warn(err);
}
}
/**
* Update display of text track settings
*/
updateDisplay() {
const ttDisplay = this.player_.getChild('textTrackDisplay');
if (ttDisplay) {
ttDisplay.updateDisplay();
}
}
/**
* Repopulate dialog with new localizations on languagechange
*/
handleLanguagechange() {
this.fill();
this.renderModalComponents(this.player_);
this.bindFunctionsToSelectsAndButtons();
}
}
Component$1.registerComponent('TextTrackSettings', TextTrackSettings);
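/*
 * Usage sketch (illustrative only; assumes an element with id "my-video" and a
 * standard videojs() setup): with `persistTextTrackSettings` enabled, the dialog
 * saves its values to localStorage under the 'vjs-text-track-settings' key.
 *
 *   const player = videojs('my-video', {
 *     persistTextTrackSettings: true
 *   });
 *
 *   // Later, the saved values (if any) can be inspected directly:
 *   JSON.parse(window.localStorage.getItem('vjs-text-track-settings'));
 */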
/**
* @file resize-manager.js
*/
/**
* A Resize Manager. It is in charge of triggering `playerresize` on the player in the right conditions.
*
* It'll either create an iframe and use a debounced resize handler on it or use the new {@link https://wicg.github.io/ResizeObserver/|ResizeObserver}.
*
* If the ResizeObserver is available natively, it will be used. A polyfill can be passed in as an option.
* If a `playerresize` event is not needed, the ResizeManager component can be removed from the player, see the example below.
*
* @example How to disable the resize manager
* const player = videojs('#vid', {
* resizeManager: false
* });
*
* @see {@link https://wicg.github.io/ResizeObserver/|ResizeObserver specification}
*
* @extends Component
*/
class ResizeManager extends Component$1 {
/**
* Create the ResizeManager.
*
* @param {Object} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of ResizeManager options.
*
* @param {Object} [options.ResizeObserver]
* A polyfill for ResizeObserver can be passed in here.
* If this is set to null it will ignore the native ResizeObserver and fall back to the iframe fallback.
*/
constructor(player, options) {
let RESIZE_OBSERVER_AVAILABLE = options.ResizeObserver || window$1.ResizeObserver;
// if `null` was passed, we want to disable the ResizeObserver
if (options.ResizeObserver === null) {
RESIZE_OBSERVER_AVAILABLE = false;
}
// Only create an element when ResizeObserver isn't available
const options_ = merge$1({
createEl: !RESIZE_OBSERVER_AVAILABLE,
reportTouchActivity: false
}, options);
super(player, options_);
this.ResizeObserver = options.ResizeObserver || window$1.ResizeObserver;
this.loadListener_ = null;
this.resizeObserver_ = null;
this.debouncedHandler_ = debounce(() => {
this.resizeHandler();
}, 100, false, this);
if (RESIZE_OBSERVER_AVAILABLE) {
this.resizeObserver_ = new this.ResizeObserver(this.debouncedHandler_);
this.resizeObserver_.observe(player.el());
} else {
this.loadListener_ = () => {
if (!this.el_ || !this.el_.contentWindow) {
return;
}
const debouncedHandler_ = this.debouncedHandler_;
let unloadListener_ = this.unloadListener_ = function () {
off(this, 'resize', debouncedHandler_);
off(this, 'unload', unloadListener_);
unloadListener_ = null;
};
// safari and edge can unload the iframe before resizemanager dispose
// we have to dispose of event handlers correctly before that happens
on(this.el_.contentWindow, 'unload', unloadListener_);
on(this.el_.contentWindow, 'resize', debouncedHandler_);
};
this.one('load', this.loadListener_);
}
}
createEl() {
return super.createEl('iframe', {
className: 'vjs-resize-manager',
tabIndex: -1,
title: this.localize('No content')
}, {
'aria-hidden': 'true'
});
}
/**
* Called when a resize is triggered on the iframe or a resize is observed via the ResizeObserver
*
* @fires Player#playerresize
*/
resizeHandler() {
/**
* Called when the player size has changed
*
* @event Player#playerresize
* @type {Event}
*/
// make sure player is still around to trigger
// prevents this from causing an error after dispose
if (!this.player_ || !this.player_.trigger) {
return;
}
this.player_.trigger('playerresize');
}
dispose() {
if (this.debouncedHandler_) {
this.debouncedHandler_.cancel();
}
if (this.resizeObserver_) {
if (this.player_.el()) {
this.resizeObserver_.unobserve(this.player_.el());
}
this.resizeObserver_.disconnect();
}
if (this.loadListener_) {
this.off('load', this.loadListener_);
}
if (this.el_ && this.el_.contentWindow && this.unloadListener_) {
this.unloadListener_.call(this.el_.contentWindow);
}
this.ResizeObserver = null;
this.resizeObserver = null;
this.debouncedHandler_ = null;
this.loadListener_ = null;
super.dispose();
}
}
Component$1.registerComponent('ResizeManager', ResizeManager);
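/*
 * Usage sketch (illustrative only; `MyResizeObserverPolyfill` is a hypothetical
 * polyfill constructor): the ResizeManager options shown here are the ones
 * documented above - a polyfill may be passed in, or `null` forces the iframe fallback.
 *
 *   const player = videojs('my-video', {
 *     resizeManager: {
 *       ResizeObserver: MyResizeObserverPolyfill // or null to force the iframe fallback
 *     }
 *   });
 *
 *   player.on('playerresize', () => {
 *     // react to the player changing size
 *   });
 */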
/** @import Player from './player' */
const defaults$1 = {
trackingThreshold: 20,
liveTolerance: 15
};
/*
track when we are at the live edge, and other helpers for live playback */
/**
* A class for checking live current time and determining when the player
* is at or behind the live edge.
*/
class LiveTracker extends Component$1 {
/**
* Creates an instance of this class.
*
* @param {Player} player
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {number} [options.trackingThreshold=20]
* Number of seconds of live window (seekableEnd - seekableStart) that
* media needs to have before the liveui will be shown.
*
* @param {number} [options.liveTolerance=15]
* Number of seconds behind live that we have to be
* before we will be considered non-live. Note that this will only
* be used when playing at the live edge. This allows large seekable end
* changes to not affect whether we are live or not.
*/
constructor(player, options) {
// LiveTracker does not need an element
const options_ = merge$1(defaults$1, options, {
createEl: false
});
super(player, options_);
this.trackLiveHandler_ = () => this.trackLive_();
this.handlePlay_ = e => this.handlePlay(e);
this.handleFirstTimeupdate_ = e => this.handleFirstTimeupdate(e);
this.handleSeeked_ = e => this.handleSeeked(e);
this.seekToLiveEdge_ = e => this.seekToLiveEdge(e);
this.reset_();
this.on(this.player_, 'durationchange', e => this.handleDurationchange(e));
// we should try to toggle tracking on canplay as native playback engines, like Safari
// may not have the proper values for things like seekableEnd until then
this.on(this.player_, 'canplay', () => this.toggleTracking());
}
/**
* all the functionality for tracking when seek end changes
* and for tracking how far past seek end we should be
*/
trackLive_() {
const seekable = this.player_.seekable();
// skip undefined seekable
if (!seekable || !seekable.length) {
return;
}
const newTime = Number(window$1.performance.now().toFixed(4));
const deltaTime = this.lastTime_ === -1 ? 0 : (newTime - this.lastTime_) / 1000;
this.lastTime_ = newTime;
this.pastSeekEnd_ = this.pastSeekEnd() + deltaTime;
const liveCurrentTime = this.liveCurrentTime();
const currentTime = this.player_.currentTime();
// we are behind live if any are true
// 1. the player is paused
// 2. the user seeked to a location more than 2 seconds behind live
// 3. the difference between live and current time is greater than
// liveTolerance, which defaults to 15s
let isBehind = this.player_.paused() || this.seekedBehindLive_ || Math.abs(liveCurrentTime - currentTime) > this.options_.liveTolerance;
// we cannot be behind if
// 1. we have not seen a timeupdate yet
// 2. liveCurrentTime is Infinity, which happens on Android and Native Safari
if (!this.timeupdateSeen_ || liveCurrentTime === Infinity) {
isBehind = false;
}
if (isBehind !== this.behindLiveEdge_) {
this.behindLiveEdge_ = isBehind;
this.trigger('liveedgechange');
}
}
/**
* handle a durationchange event on the player
* and start/stop tracking accordingly.
*/
handleDurationchange() {
this.toggleTracking();
}
/**
* start/stop tracking
*/
toggleTracking() {
if (this.player_.duration() === Infinity && this.liveWindow() >= this.options_.trackingThreshold) {
if (this.player_.options_.liveui) {
this.player_.addClass('vjs-liveui');
}
this.startTracking();
} else {
this.player_.removeClass('vjs-liveui');
this.stopTracking();
}
}
/**
* start tracking live playback
*/
startTracking() {
if (this.isTracking()) {
return;
}
// If we haven't seen a timeupdate, we need to check whether playback
// began before this component started tracking. This can happen commonly
// when using autoplay.
if (!this.timeupdateSeen_) {
this.timeupdateSeen_ = this.player_.hasStarted();
}
this.trackingInterval_ = this.setInterval(this.trackLiveHandler_, UPDATE_REFRESH_INTERVAL);
this.trackLive_();
this.on(this.player_, ['play', 'pause'], this.trackLiveHandler_);
if (!this.timeupdateSeen_) {
this.one(this.player_, 'play', this.handlePlay_);
this.one(this.player_, 'timeupdate', this.handleFirstTimeupdate_);
} else {
this.on(this.player_, 'seeked', this.handleSeeked_);
}
}
/**
* handle the first timeupdate on the player if it wasn't already playing
* when live tracker started tracking.
*/
handleFirstTimeupdate() {
this.timeupdateSeen_ = true;
this.on(this.player_, 'seeked', this.handleSeeked_);
}
/**
* Keep track of what time a seek starts, and listen for seeked
* to find where a seek ends.
*/
handleSeeked() {
const timeDiff = Math.abs(this.liveCurrentTime() - this.player_.currentTime());
this.seekedBehindLive_ = this.nextSeekedFromUser_ && timeDiff > 2;
this.nextSeekedFromUser_ = false;
this.trackLive_();
}
/**
* handle the first play on the player, and make sure that we seek
* right to the live edge.
*/
handlePlay() {
this.one(this.player_, 'timeupdate', this.seekToLiveEdge_);
}
/**
* Stop tracking, and set all internal variables to
* their initial value.
*/
reset_() {
this.lastTime_ = -1;
this.pastSeekEnd_ = 0;
this.lastSeekEnd_ = -1;
this.behindLiveEdge_ = true;
this.timeupdateSeen_ = false;
this.seekedBehindLive_ = false;
this.nextSeekedFromUser_ = false;
this.clearInterval(this.trackingInterval_);
this.trackingInterval_ = null;
this.off(this.player_, ['play', 'pause'], this.trackLiveHandler_);
this.off(this.player_, 'seeked', this.handleSeeked_);
this.off(this.player_, 'play', this.handlePlay_);
this.off(this.player_, 'timeupdate', this.handleFirstTimeupdate_);
this.off(this.player_, 'timeupdate', this.seekToLiveEdge_);
}
/**
* Mark the next seeked event as coming from the user, meaning that any seek
* more than 2s behind live will be treated as genuinely behind live and
* liveTolerance will be ignored.
*/
nextSeekedFromUser() {
this.nextSeekedFromUser_ = true;
}
/**
* stop tracking live playback
*/
stopTracking() {
if (!this.isTracking()) {
return;
}
this.reset_();
this.trigger('liveedgechange');
}
/**
* A helper to get the player seekable end
* so that we don't have to null check everywhere
*
* @return {number}
* The furthest seekable end or Infinity.
*/
seekableEnd() {
const seekable = this.player_.seekable();
const seekableEnds = [];
let i = seekable ? seekable.length : 0;
while (i--) {
seekableEnds.push(seekable.end(i));
}
// grab the furthest seekable end after sorting, or if there are none
// default to Infinity
return seekableEnds.length ? seekableEnds.sort()[seekableEnds.length - 1] : Infinity;
}
/**
* A helper to get the player seekable start
* so that we don't have to null check everywhere
*
* @return {number}
* The earliest seekable start or 0.
*/
seekableStart() {
const seekable = this.player_.seekable();
const seekableStarts = [];
let i = seekable ? seekable.length : 0;
while (i--) {
seekableStarts.push(seekable.start(i));
}
// grab the first seekable start after sorting, or if there are none
// default to 0
return seekableStarts.length ? seekableStarts.sort()[0] : 0;
}
/**
* Get the live time window aka
* the amount of time between seekable start and
* live current time.
*
* @return {number}
* The amount of seconds that are seekable in
* the live video.
*/
liveWindow() {
const liveCurrentTime = this.liveCurrentTime();
// if liveCurrentTime is Infinity then we don't have a liveWindow at all
if (liveCurrentTime === Infinity) {
return 0;
}
return liveCurrentTime - this.seekableStart();
}
/**
* Determines if the player is live, only checks if this component
* is tracking live playback or not
*
* @return {boolean}
* Whether liveTracker is tracking
*/
isLive() {
return this.isTracking();
}
/**
* Determines if currentTime is at the live edge and won't fall behind
* on each seekableendchange
*
* @return {boolean}
* Whether playback is at the live edge
*/
atLiveEdge() {
return !this.behindLiveEdge();
}
/**
* get what we expect the live current time to be
*
* @return {number}
* The expected live current time
*/
liveCurrentTime() {
return this.pastSeekEnd() + this.seekableEnd();
}
/**
* The number of seconds that have occurred after seekable end
* changed. This will be reset to 0 once seekable end changes.
*
* @return {number}
* Seconds past the current seekable end
*/
pastSeekEnd() {
const seekableEnd = this.seekableEnd();
if (this.lastSeekEnd_ !== -1 && seekableEnd !== this.lastSeekEnd_) {
this.pastSeekEnd_ = 0;
}
this.lastSeekEnd_ = seekableEnd;
return this.pastSeekEnd_;
}
/**
* If we are currently behind the live edge, aka currentTime will be
* behind on a seekableendchange
*
* @return {boolean}
* If we are behind the live edge
*/
behindLiveEdge() {
return this.behindLiveEdge_;
}
/**
* Whether live tracker is currently tracking or not.
*/
isTracking() {
return typeof this.trackingInterval_ === 'number';
}
/**
* Seek to the live edge if we are behind the live edge
*/
seekToLiveEdge() {
this.seekedBehindLive_ = false;
if (this.atLiveEdge()) {
return;
}
this.nextSeekedFromUser_ = false;
this.player_.currentTime(this.liveCurrentTime());
}
/**
* Dispose of liveTracker
*/
dispose() {
this.stopTracking();
super.dispose();
}
}
Component$1.registerComponent('LiveTracker', LiveTracker);
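/*
 * Usage sketch (illustrative only; assumes the player exposes its LiveTracker
 * child as `player.liveTracker`, which is the case when live content is playing):
 *
 *   const tracker = player.liveTracker;
 *
 *   tracker.on('liveedgechange', () => {
 *     if (tracker.behindLiveEdge()) {
 *       // e.g. show a "go to live" control
 *     }
 *   });
 *
 *   tracker.isLive();         // whether the tracker is currently tracking live playback
 *   tracker.liveWindow();     // seconds between seekable start and live current time
 *   tracker.seekToLiveEdge(); // jump back to the live edge if we are behind it
 */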
/**
* Displays an element over the player which contains an optional title and
* description for the current content.
*
* Much of the code for this component originated in the now obsolete
* videojs-dock plugin: https://github.com/brightcove/videojs-dock/
*
* @extends Component
*/
class TitleBar extends Component$1 {
constructor(player, options) {
super(player, options);
this.on('statechanged', e => this.updateDom_());
this.updateDom_();
}
/**
* Create the `TitleBar`'s DOM element
*
* @return {Element}
* The element that was created.
*/
createEl() {
this.els = {
title: createEl('div', {
className: 'vjs-title-bar-title',
id: `vjs-title-bar-title-${newGUID()}`
}),
description: createEl('div', {
className: 'vjs-title-bar-description',
id: `vjs-title-bar-description-${newGUID()}`
})
};
return createEl('div', {
className: 'vjs-title-bar'
}, {}, values(this.els));
}
/**
* Updates the DOM based on the component's state object.
*/
updateDom_() {
const tech = this.player_.tech_;
const techEl = tech && tech.el_;
const techAriaAttrs = {
title: 'aria-labelledby',
description: 'aria-describedby'
};
['title', 'description'].forEach(k => {
const value = this.state[k];
const el = this.els[k];
const techAriaAttr = techAriaAttrs[k];
emptyEl(el);
if (value) {
textContent(el, value);
}
// If there is a tech element available, update its ARIA attributes
// according to whether a title and/or description have been provided.
if (techEl) {
techEl.removeAttribute(techAriaAttr);
if (value) {
techEl.setAttribute(techAriaAttr, el.id);
}
}
});
if (this.state.title || this.state.description) {
this.show();
} else {
this.hide();
}
}
/**
* Update the contents of the title bar component with new title and
* description text.
*
* If both title and description are missing, the title bar will be hidden.
*
* If either title or description are present, the title bar will be visible.
*
* NOTE: Any previously set value will be preserved. To unset a previously
* set value, you must pass an empty string or null.
*
* For example:
*
* ```
* update({title: 'foo', description: 'bar'}) // title: 'foo', description: 'bar'
* update({description: 'bar2'}) // title: 'foo', description: 'bar2'
* update({title: ''}) // title: '', description: 'bar2'
* update({title: 'foo', description: null}) // title: 'foo', description: null
* ```
*
* @param {Object} [options={}]
* An options object. When empty, the title bar will be hidden.
*
* @param {string} [options.title]
* A title to display in the title bar.
*
* @param {string} [options.description]
* A description to display in the title bar.
*/
update(options) {
this.setState(options);
}
/**
* Dispose the component.
*/
dispose() {
const tech = this.player_.tech_;
const techEl = tech && tech.el_;
if (techEl) {
techEl.removeAttribute('aria-labelledby');
techEl.removeAttribute('aria-describedby');
}
super.dispose();
this.els = null;
}
}
Component$1.registerComponent('TitleBar', TitleBar);
/** @import Player from './player' */
/**
* @typedef {object} TransientButtonOptions
* @property {string} [controlText] Control text, usually visible for these buttons
* @property {number} [initialDisplay=4000] Time in ms that button should initially remain visible
* @property {Array<'top'|'neartop'|'bottom'|'left'|'right'>} [position] Array of position strings to add basic styles for positioning
* @property {string} [className] Class(es) to add
* @property {boolean} [takeFocus=false] Whether element should take focus when shown
* @property {Function} [clickHandler] Function called on button activation
*/
/** @type {TransientButtonOptions} */
const defaults = {
initialDisplay: 4000,
position: [],
takeFocus: false
};
/**
* A floating transient button.
* It's recommended to insert these buttons _before_ the control bar with the index (third) argument to `addChild`
* for a logical tab order.
*
* @example
* ```
* player.addChild(
* 'TransientButton',
* options,
* player.children().indexOf(player.getChild("ControlBar"))
* )
* ```
*
* @extends Button
*/
class TransientButton extends Button {
/**
* TransientButton constructor
*
* @param {Player} player The button's player
* @param {TransientButtonOptions} options Options for the transient button
*/
constructor(player, options) {
options = merge$1(defaults, options);
super(player, options);
this.controlText(options.controlText);
this.hide();
// When shown, the float button will be visible even if the user is inactive.
// Clear this if there is any interaction.
this.on(this.player_, ['useractive', 'userinactive'], e => {
this.removeClass('force-display');
});
}
/**
* Return CSS class including position classes
*
* @return {string} CSS class list
*/
buildCSSClass() {
return `vjs-transient-button focus-visible ${this.options_.position.map(c => `vjs-${c}`).join(' ')}`;
}
/**
* Create the button element
*
* @return {HTMLButtonElement} The button element
*/
createEl() {
/** @type HTMLButtonElement */
const el = createEl('button', {}, {
type: 'button',
class: this.buildCSSClass()
}, createEl('span'));
this.controlTextEl_ = el.querySelector('span');
return el;
}
/**
* Show the button. The button will remain visible for the `initialDisplay` time, default 4s,
* and when there is user activity.
*/
show() {
super.show();
this.addClass('force-display');
if (this.options_.takeFocus) {
this.el().focus({
preventScroll: true
});
}
this.forceDisplayTimeout = this.player_.setTimeout(() => {
this.removeClass('force-display');
}, this.options_.initialDisplay);
}
/**
* Hide the display, even if during the `initialDisplay` time.
*/
hide() {
this.removeClass('force-display');
super.hide();
}
/**
* Dispose the component
*/
dispose() {
this.player_.clearTimeout(this.forceDisplayTimeout);
super.dispose();
}
}
Component$1.registerComponent('TransientButton', TransientButton);
/** @import Html5 from './html5' */
/**
* This function is used to fire a sourceset when there is something
* similar to `mediaEl.load()` being called. It will try to find the source via
* the `src` attribute and then the `<source>` elements. It will then fire `sourceset`
* with the source that was found or empty string if we cannot know. If it cannot
* find a source then `sourceset` will not be fired.
*
* @param {Html5} tech
* The tech object that sourceset was setup on
*
* @return {boolean}
* returns false if the sourceset was not fired and true otherwise.
*/
const sourcesetLoad = tech => {
const el = tech.el();
// if `el.src` is set, that source will be loaded.
if (el.hasAttribute('src')) {
tech.triggerSourceset(el.src);
return true;
}
/**
* Since there isn't a src property on the media element, source elements will be used for
* implementing the source selection algorithm. This happens asynchronously and
* for most cases where there is more than one source we cannot tell what source will
* be loaded, without re-implementing the source selection algorithm. At this time we are not
* going to do that. There are three special cases that we do handle here though:
*
* 1. If there are no sources, do not fire `sourceset`.
* 2. If there is only one `<source>` with a `src` property/attribute, that is our `src`.
* 3. If there is more than one `<source>` but all of them have the same `src` url.
* That will be our src.
*/
const sources = tech.$$('source');
const srcUrls = [];
let src = '';
// if there are no sources, do not fire sourceset
if (!sources.length) {
return false;
}
// only count valid/non-duplicate source elements
for (let i = 0; i < sources.length; i++) {
const url = sources[i].src;
if (url && srcUrls.indexOf(url) === -1) {
srcUrls.push(url);
}
}
// there were no valid sources
if (!srcUrls.length) {
return false;
}
// there is only one valid source element url
// use that
if (srcUrls.length === 1) {
src = srcUrls[0];
}
tech.triggerSourceset(src);
return true;
};
/**
* our implementation of an `innerHTML` descriptor for browsers
* that do not have one.
*/
const innerHTMLDescriptorPolyfill = Object.defineProperty({}, 'innerHTML', {
get() {
return this.cloneNode(true).innerHTML;
},
set(v) {
// make a dummy node to use innerHTML on
const dummy = document$1.createElement(this.nodeName.toLowerCase());
// set innerHTML to the value provided
dummy.innerHTML = v;
// make a document fragment to hold the nodes from dummy
const docFrag = document$1.createDocumentFragment();
// copy all of the nodes created by the innerHTML on dummy
// to the document fragment
while (dummy.childNodes.length) {
docFrag.appendChild(dummy.childNodes[0]);
}
// remove content
this.innerText = '';
// now we add all of that html in one by appending the
// document fragment. This is how innerHTML does it.
window$1.Element.prototype.appendChild.call(this, docFrag);
// then return the result that innerHTML's setter would
return this.innerHTML;
}
});
/**
* Get a property descriptor given a list of priorities and the
* property to get.
*/
const getDescriptor = (priority, prop) => {
let descriptor = {};
for (let i = 0; i < priority.length; i++) {
descriptor = Object.getOwnPropertyDescriptor(priority[i], prop);
if (descriptor && descriptor.set && descriptor.get) {
break;
}
}
descriptor.enumerable = true;
descriptor.configurable = true;
return descriptor;
};
const getInnerHTMLDescriptor = tech => getDescriptor([tech.el(), window$1.HTMLMediaElement.prototype, window$1.Element.prototype, innerHTMLDescriptorPolyfill], 'innerHTML');
/**
* Patches browser internal functions so that we can tell synchronously
* if a `<source>` was appended to the media element. For some reason this
* causes a `sourceset` if the media element is ready and has no source.
* This happens when:
* - The page has just loaded and the media element does not have a source.
* - The media element was emptied of all sources, then `load()` was called.
*
* It does this by patching the following functions/properties when they are supported:
*
* - `append()` - can be used to add a `<source>` element to the media element
* - `appendChild()` - can be used to add a `<source>` element to the media element
* - `insertAdjacentHTML()` - can be used to add a `<source>` element to the media element
* - `innerHTML` - can be used to add a `<source>` element to the media element
*
* @param {Html5} tech
* The tech object that sourceset is being setup on.
*/
const firstSourceWatch = function (tech) {
const el = tech.el();
// make sure firstSourceWatch isn't setup twice.
if (el.resetSourceWatch_) {
return;
}
const old = {};
const innerDescriptor = getInnerHTMLDescriptor(tech);
const appendWrapper = appendFn => (...args) => {
const retval = appendFn.apply(el, args);
sourcesetLoad(tech);
return retval;
};
['append', 'appendChild', 'insertAdjacentHTML'].forEach(k => {
if (!el[k]) {
return;
}
// store the old function
old[k] = el[k];
// call the old function with a sourceset if a source
// was loaded
el[k] = appendWrapper(old[k]);
});
Object.defineProperty(el, 'innerHTML', merge$1(innerDescriptor, {
set: appendWrapper(innerDescriptor.set)
}));
el.resetSourceWatch_ = () => {
el.resetSourceWatch_ = null;
Object.keys(old).forEach(k => {
el[k] = old[k];
});
Object.defineProperty(el, 'innerHTML', innerDescriptor);
};
// on the first sourceset, we need to revert our changes
tech.one('sourceset', el.resetSourceWatch_);
};
/**
* our implementation of a `src` descriptor for browsers
* that do not have one
*/
const srcDescriptorPolyfill = Object.defineProperty({}, 'src', {
get() {
if (this.hasAttribute('src')) {
return getAbsoluteURL(window$1.Element.prototype.getAttribute.call(this, 'src'));
}
return '';
},
set(v) {
window$1.Element.prototype.setAttribute.call(this, 'src', v);
return v;
}
});
const getSrcDescriptor = tech => getDescriptor([tech.el(), window$1.HTMLMediaElement.prototype, srcDescriptorPolyfill], 'src');
/**
* setup `sourceset` handling on the `Html5` tech. This function
* patches the following element properties/functions:
*
* - `src` - to determine when `src` is set
* - `setAttribute()` - to determine when `src` is set
* - `load()` - this re-triggers the source selection algorithm, and can
* cause a sourceset.
*
* If there is no source when we are adding `sourceset` support or during a `load()`
* we also patch the functions listed in `firstSourceWatch`.
*
* @param {Html5} tech
* The tech to patch
*/
const setupSourceset = function (tech) {
if (!tech.featuresSourceset) {
return;
}
const el = tech.el();
// make sure sourceset isn't setup twice.
if (el.resetSourceset_) {
return;
}
const srcDescriptor = getSrcDescriptor(tech);
const oldSetAttribute = el.setAttribute;
const oldLoad = el.load;
Object.defineProperty(el, 'src', merge$1(srcDescriptor, {
set: v => {
const retval = srcDescriptor.set.call(el, v);
// we use the getter here to get the actual value set on src
tech.triggerSourceset(el.src);
return retval;
}
}));
el.setAttribute = (n, v) => {
const retval = oldSetAttribute.call(el, n, v);
if (/src/i.test(n)) {
tech.triggerSourceset(el.src);
}
return retval;
};
el.load = () => {
const retval = oldLoad.call(el);
// if load was called, but there was no source to fire
// sourceset on. We have to watch for a source append
// as that can trigger a `sourceset` when the media element
// has no source
if (!sourcesetLoad(tech)) {
tech.triggerSourceset('');
firstSourceWatch(tech);
}
return retval;
};
if (el.currentSrc) {
tech.triggerSourceset(el.currentSrc);
} else if (!sourcesetLoad(tech)) {
firstSourceWatch(tech);
}
el.resetSourceset_ = () => {
el.resetSourceset_ = null;
el.load = oldLoad;
el.setAttribute = oldSetAttribute;
Object.defineProperty(el, 'src', srcDescriptor);
if (el.resetSourceWatch_) {
el.resetSourceWatch_();
}
};
};
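/*
 * Usage sketch (illustrative only): once sourceset handling is set up on the
 * Html5 tech, source changes surface on the player as `sourceset` events. The
 * event is assumed to carry the new source URL as `src` (an empty string when
 * the source cannot be determined synchronously, as described above).
 *
 *   player.on('sourceset', (e) => {
 *     console.log('new source:', e.src);
 *   });
 */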
/**
* @file html5.js
*/
/**
* HTML5 Media Controller - Wrapper for HTML5 Media API
*
* @mixes Tech~SourceHandlerAdditions
* @extends Tech
*/
class Html5 extends Tech {
/**
* Create an instance of this Tech.
*
* @param {Object} [options]
* The key/value store of player options.
*
* @param {Function} [ready]
* Callback function to call when the `HTML5` Tech is ready.
*/
constructor(options, ready) {
super(options, ready);
const source = options.source;
let crossoriginTracks = false;
this.featuresVideoFrameCallback = this.featuresVideoFrameCallback && this.el_.tagName === 'VIDEO';
// Set the source if one is provided
// 1) Check if the source is new (if not, we want to keep the original so playback isn't interrupted)
// 2) Check to see if the network state of the tag was failed at init, and if so, reset the source
// anyway so the error gets fired.
if (source && (this.el_.currentSrc !== source.src || options.tag && options.tag.initNetworkState_ === 3)) {
this.setSource(source);
} else {
this.handleLateInit_(this.el_);
}
// setup sourceset after late sourceset/init
if (options.enableSourceset) {
this.setupSourcesetHandling_();
}
this.isScrubbing_ = false;
if (this.el_.hasChildNodes()) {
const nodes = this.el_.childNodes;
let nodesLength = nodes.length;
const removeNodes = [];
while (nodesLength--) {
const node = nodes[nodesLength];
const nodeName = node.nodeName.toLowerCase();
if (nodeName === 'track') {
if (!this.featuresNativeTextTracks) {
// Empty video tag tracks so the built-in player doesn't use them also.
// This may not be fast enough to stop HTML5 browsers from reading the tags
// so we'll need to turn off any default tracks if we're manually doing
// captions and subtitles. videoElement.textTracks
removeNodes.push(node);
} else {
// store HTMLTrackElement and TextTrack to remote list
this.remoteTextTrackEls().addTrackElement_(node);
this.remoteTextTracks().addTrack(node.track);
this.textTracks().addTrack(node.track);
if (!crossoriginTracks && !this.el_.hasAttribute('crossorigin') && isCrossOrigin(node.src)) {
crossoriginTracks = true;
}
}
}
}
for (let i = 0; i < removeNodes.length; i++) {
this.el_.removeChild(removeNodes[i]);
}
}
this.proxyNativeTracks_();
if (this.featuresNativeTextTracks && crossoriginTracks) {
log$1.warn('Text Tracks are being loaded from another origin but the crossorigin attribute isn\'t used.\n' + 'This may prevent text tracks from loading.');
}
// prevent iOS Safari from disabling metadata text tracks during native playback
this.restoreMetadataTracksInIOSNativePlayer_();
// Determine if native controls should be used
// Our goal should be to get the custom controls on mobile solid everywhere
// so we can remove this all together. Right now this will block custom
// controls on touch enabled laptops like the Chrome Pixel
if ((TOUCH_ENABLED || IS_IPHONE) && options.nativeControlsForTouch === true) {
this.setControls(true);
}
// on iOS, we want to proxy `webkitbeginfullscreen` and `webkitendfullscreen`
// into a `fullscreenchange` event
this.proxyWebkitFullscreen_();
this.triggerReady();
}
/**
* Dispose of `HTML5` media element and remove all tracks.
*/
dispose() {
if (this.el_ && this.el_.resetSourceset_) {
this.el_.resetSourceset_();
}
Html5.disposeMediaElement(this.el_);
this.options_ = null;
// tech will handle clearing of the emulated track list
super.dispose();
}
/**
* Modify the media element so that we can detect when
* the source is changed. Fires `sourceset` just after the source has changed
*/
setupSourcesetHandling_() {
setupSourceset(this);
}
/**
* When a captions track is enabled in the iOS Safari native player, all other
* tracks are disabled (including metadata tracks), which nulls all of their
* associated cue points. This will restore metadata tracks to their pre-fullscreen
* state in those cases so that cue points are not needlessly lost.
*
* @private
*/
restoreMetadataTracksInIOSNativePlayer_() {
const textTracks = this.textTracks();
let metadataTracksPreFullscreenState;
// captures a snapshot of every metadata track's current state
const takeMetadataTrackSnapshot = () => {
metadataTracksPreFullscreenState = [];
for (let i = 0; i < textTracks.length; i++) {
const track = textTracks[i];
if (track.kind === 'metadata') {
metadataTracksPreFullscreenState.push({
track,
storedMode: track.mode
});
}
}
};
// snapshot each metadata track's initial state, and update the snapshot
// each time there is a track 'change' event
takeMetadataTrackSnapshot();
textTracks.addEventListener('change', takeMetadataTrackSnapshot);
this.on('dispose', () => textTracks.removeEventListener('change', takeMetadataTrackSnapshot));
const restoreTrackMode = () => {
for (let i = 0; i < metadataTracksPreFullscreenState.length; i++) {
const storedTrack = metadataTracksPreFullscreenState[i];
if (storedTrack.track.mode === 'disabled' && storedTrack.track.mode !== storedTrack.storedMode) {
storedTrack.track.mode = storedTrack.storedMode;
}
}
// we only want this handler to be executed on the first 'change' event
textTracks.removeEventListener('change', restoreTrackMode);
};
// when we enter fullscreen playback, stop updating the snapshot and
// restore all track modes to their pre-fullscreen state
this.on('webkitbeginfullscreen', () => {
textTracks.removeEventListener('change', takeMetadataTrackSnapshot);
// remove the listener before adding it just in case it wasn't previously removed
textTracks.removeEventListener('change', restoreTrackMode);
textTracks.addEventListener('change', restoreTrackMode);
});
// start updating the snapshot again after leaving fullscreen
this.on('webkitendfullscreen', () => {
// remove the listener before adding it just in case it wasn't previously removed
textTracks.removeEventListener('change', takeMetadataTrackSnapshot);
textTracks.addEventListener('change', takeMetadataTrackSnapshot);
// remove the restoreTrackMode handler in case it wasn't triggered during fullscreen playback
textTracks.removeEventListener('change', restoreTrackMode);
});
}
/**
* Attempt to force override of tracks for the given type
*
* @param {string} type - Track type to override, possible values include 'Audio',
* 'Video', and 'Text'.
* @param {boolean} override - If set to true native audio/video will be overridden,
* otherwise native audio/video will potentially be used.
* @private
*/
overrideNative_(type, override) {
// If there is no behavioral change don't add/remove listeners
if (override !== this[`featuresNative${type}Tracks`]) {
return;
}
const lowerCaseType = type.toLowerCase();
if (this[`${lowerCaseType}TracksListeners_`]) {
Object.keys(this[`${lowerCaseType}TracksListeners_`]).forEach(eventName => {
const elTracks = this.el()[`${lowerCaseType}Tracks`];
elTracks.removeEventListener(eventName, this[`${lowerCaseType}TracksListeners_`][eventName]);
});
}
this[`featuresNative${type}Tracks`] = !override;
this[`${lowerCaseType}TracksListeners_`] = null;
this.proxyNativeTracksForType_(lowerCaseType);
}
/**
* Attempt to force override of native audio tracks.
*
* @param {boolean} override - If set to true native audio will be overridden,
* otherwise native audio will potentially be used.
*/
overrideNativeAudioTracks(override) {
this.overrideNative_('Audio', override);
}
/**
* Attempt to force override of native video tracks.
*
* @param {boolean} override - If set to true native video will be overridden,
* otherwise native video will potentially be used.
*/
overrideNativeVideoTracks(override) {
this.overrideNative_('Video', override);
}
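/*
 * Usage sketch (illustrative only; assumes direct access to the tech, e.g. via
 * `player.tech(true)`, which Video.js allows but discourages for plugin code):
 *
 *   const tech = player.tech(true);
 *   tech.overrideNativeAudioTracks(true); // override native audio tracks (use emulated tracks instead)
 *   tech.overrideNativeVideoTracks(true); // override native video tracks (use emulated tracks instead)
 */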
/**
* Proxy native track list events for the given type to our track
* lists if the browser we are playing in supports that type of track list.
*
* @param {string} name - Track type; values include 'audio', 'video', and 'text'
* @private
*/
proxyNativeTracksForType_(name) {
const props = NORMAL[name];
const elTracks = this.el()[props.getterName];
const techTracks = this[props.getterName]();
if (!this[`featuresNative${props.capitalName}Tracks`] || !elTracks || !elTracks.addEventListener) {
return;
}
const listeners = {
change: e => {
const event = {
type: 'change',
target: techTracks,
currentTarget: techTracks,
srcElement: techTracks
};
techTracks.trigger(event);
// if we are a text track change event, we should also notify the
// remote text track list. This can potentially cause a false positive
// if we were to get a change event on a non-remote track and
// we triggered the event on the remote text track list which doesn't
// contain that track. However, best practices mean looping through the
// list of tracks and searching for the appropriate mode value, so,
// this shouldn't pose an issue
if (name === 'text') {
this[REMOTE.remoteText.getterName]().trigger(event);
}
},
addtrack(e) {
techTracks.addTrack(e.track);
},
removetrack(e) {
techTracks.removeTrack(e.track);
}
};
const removeOldTracks = function () {
const removeTracks = [];
for (let i = 0; i < techTracks.length; i++) {
let found = false;
for (let j = 0; j < elTracks.length; j++) {
if (elTracks[j] === techTracks[i]) {
found = true;
break;
}
}
if (!found) {
removeTracks.push(techTracks[i]);
}
}
while (removeTracks.length) {
techTracks.removeTrack(removeTracks.shift());
}
};
this[props.getterName + 'Listeners_'] = listeners;
Object.keys(listeners).forEach(eventName => {
const listener = listeners[eventName];
elTracks.addEventListener(eventName, listener);
this.on('dispose', e => elTracks.removeEventListener(eventName, listener));
});
// Remove (native) tracks that are not used anymore
this.on('loadstart', removeOldTracks);
this.on('dispose', e => this.off('loadstart', removeOldTracks));
}
/**
* Proxy all native track list events to our track lists if the browser we are playing
* in supports that type of track list.
*
* @private
*/
proxyNativeTracks_() {
NORMAL.names.forEach(name => {
this.proxyNativeTracksForType_(name);
});
}
/**
* Create the `Html5` Tech's DOM element.
*
* @return {Element}
* The element that gets created.
*/
createEl() {
let el = this.options_.tag;
// Check if this browser supports moving the element into the box.
// On the iPhone video will break if you move the element,
// So we have to create a brand new element.
// If we ingested the player div, we do not need to move the media element.
if (!el || !(this.options_.playerElIngest || this.movingMediaElementInDOM)) {
// If the original tag is still there, clone and remove it.
if (el) {
const clone = el.cloneNode(true);
if (el.parentNode) {
el.parentNode.insertBefore(clone, el);
}
Html5.disposeMediaElement(el);
el = clone;
} else {
el = document$1.createElement('video');
// determine if native controls should be used
const tagAttributes = this.options_.tag && getAttributes(this.options_.tag);
const attributes = merge$1({}, tagAttributes);
if (!TOUCH_ENABLED || this.options_.nativeControlsForTouch !== true) {
delete attributes.controls;
}
setAttributes(el, Object.assign(attributes, {
id: this.options_.techId,
class: 'vjs-tech'
}));
}
el.playerId = this.options_.playerId;
}
if (typeof this.options_.preload !== 'undefined') {
setAttribute(el, 'preload', this.options_.preload);
}
if (this.options_.disablePictureInPicture !== undefined) {
el.disablePictureInPicture = this.options_.disablePictureInPicture;
}
// Update specific tag settings, in case they were overridden
// `autoplay` has to be *last* so that `muted` and `playsinline` are present
// when iOS/Safari or other browsers attempt to autoplay.
const settingsAttrs = ['loop', 'muted', 'playsinline', 'autoplay'];
for (let i = 0; i < settingsAttrs.length; i++) {
const attr = settingsAttrs[i];
const value = this.options_[attr];
if (typeof value !== 'undefined') {
if (value) {
setAttribute(el, attr, attr);
} else {
removeAttribute(el, attr);
}
el[attr] = value;
}
}
return el;
}
/**
* This will be triggered if the loadstart event has already fired, before videojs was
* ready. Two known examples of when this can happen are:
* 1. If we're loading the playback object after it has started loading
* 2. The media is already playing (often with autoplay on)
*
* This function will fire another loadstart so that videojs can catch up.
*
* @fires Tech#loadstart
*
* @return {undefined}
* returns nothing.
*/
handleLateInit_(el) {
if (el.networkState === 0 || el.networkState === 3) {
// The video element hasn't started loading the source yet
// or didn't find a source
return;
}
if (el.readyState === 0) {
// NetworkState is set synchronously BUT loadstart is fired at the
// end of the current stack, usually before setInterval(fn, 0).
// So at this point we know loadstart may have already fired or is
// about to fire, and either way the player hasn't seen it yet.
// We don't want to fire loadstart prematurely here and cause a
// double loadstart so we'll wait and see if it happens between now
// and the next loop, and fire it if not.
// HOWEVER, we also want to make sure it fires before loadedmetadata
// which could also happen between now and the next loop, so we'll
// watch for that also.
let loadstartFired = false;
const setLoadstartFired = function () {
loadstartFired = true;
};
this.on('loadstart', setLoadstartFired);
const triggerLoadstart = function () {
// We did miss the original loadstart. Make sure the player
// sees loadstart before loadedmetadata
if (!loadstartFired) {
this.trigger('loadstart');
}
};
this.on('loadedmetadata', triggerLoadstart);
this.ready(function () {
this.off('loadstart', setLoadstartFired);
this.off('loadedmetadata', triggerLoadstart);
if (!loadstartFired) {
// We did miss the original native loadstart. Fire it now.
this.trigger('loadstart');
}
});
return;
}
// From here on we know that loadstart already fired and we missed it.
// The other readyState events aren't as much of a problem if we double
// them, so not going to go to as much trouble as loadstart to prevent
// that unless we find reason to.
const eventsToTrigger = ['loadstart'];
// loadedmetadata: newly equal to HAVE_METADATA (1) or greater
eventsToTrigger.push('loadedmetadata');
// loadeddata: newly increased to HAVE_CURRENT_DATA (2) or greater
if (el.readyState >= 2) {
eventsToTrigger.push('loadeddata');
}
// canplay: newly increased to HAVE_FUTURE_DATA (3) or greater
if (el.readyState >= 3) {
eventsToTrigger.push('canplay');
}
// canplaythrough: newly equal to HAVE_ENOUGH_DATA (4)
if (el.readyState >= 4) {
eventsToTrigger.push('canplaythrough');
}
// We still need to give the player time to add event listeners
this.ready(function () {
eventsToTrigger.forEach(function (type) {
this.trigger(type);
}, this);
});
}
/**
* Set whether we are scrubbing or not.
* This is used to decide whether we should use `fastSeek` or not.
* `fastSeek` is used to provide trick play on Safari browsers.
*
* @param {boolean} isScrubbing
* - true if we are currently scrubbing
* - false if we are no longer scrubbing
*/
setScrubbing(isScrubbing) {
this.isScrubbing_ = isScrubbing;
}
/**
* Get whether we are scrubbing or not.
*
* @return {boolean} isScrubbing
* - true if we are currently scrubbing
* - false if we are no longer scrubbing
*/
scrubbing() {
return this.isScrubbing_;
}
/**
* Set current time for the `HTML5` tech.
*
* @param {number} seconds
* Set the current time of the media to this.
*/
setCurrentTime(seconds) {
try {
if (this.isScrubbing_ && this.el_.fastSeek && IS_ANY_SAFARI) {
this.el_.fastSeek(seconds);
} else {
this.el_.currentTime = seconds;
}
} catch (e) {
log$1(e, 'Video is not ready. (Video.js)');
// this.warning(VideoJS.warnings.videoNotReady);
}
}
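// Illustrative usage sketch (not executed by the library): a seek-bar style UI can
// flag scrubbing before seeking so the fastSeek path above is taken on Safari.
// `tech` is assumed to be an Html5 tech instance obtained from a player.
//
//   tech.setScrubbing(true);   // while dragging: allows fastSeek on Safari
//   tech.setCurrentTime(42.5); // uses el_.fastSeek(42.5) on Safari, el_.currentTime elsewhere
//   tech.setScrubbing(false);  // on release: subsequent seeks are exact again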
/**
* Get the current duration of the HTML5 media element.
*
* @return {number}
* The duration of the media or 0 if there is no duration.
*/
duration() {
// Android Chrome will report duration as Infinity for VOD HLS until after
// playback has started, which triggers the live display erroneously.
// Return NaN if playback has not started and trigger a durationchange once
// the duration can be reliably known.
if (this.el_.duration === Infinity && IS_ANDROID && IS_CHROME && this.el_.currentTime === 0) {
// Wait for the first `timeupdate` with currentTime > 0 - there may be
// several with 0
const checkProgress = () => {
if (this.el_.currentTime > 0) {
// Trigger durationchange for genuinely live video
if (this.el_.duration === Infinity) {
this.trigger('durationchange');
}
this.off('timeupdate', checkProgress);
}
};
this.on('timeupdate', checkProgress);
return NaN;
}
return this.el_.duration || NaN;
}
/**
* Get the current width of the HTML5 media element.
*
* @return {number}
* The width of the HTML5 media element.
*/
width() {
return this.el_.offsetWidth;
}
/**
* Get the current height of the HTML5 media element.
*
* @return {number}
* The height of the HTML5 media element.
*/
height() {
return this.el_.offsetHeight;
}
/**
* Proxy iOS `webkitbeginfullscreen` and `webkitendfullscreen` into
* `fullscreenchange` event.
*
* @private
* @fires fullscreenchange
* @listens webkitendfullscreen
* @listens webkitbeginfullscreen
*/
proxyWebkitFullscreen_() {
if (!('webkitDisplayingFullscreen' in this.el_)) {
return;
}
const endFn = function () {
this.trigger('fullscreenchange', {
isFullscreen: false
});
// Safari will sometimes set controls on the video element when exiting fullscreen.
if (this.el_.controls && !this.options_.nativeControlsForTouch && this.controls()) {
this.el_.controls = false;
}
};
const beginFn = function () {
if ('webkitPresentationMode' in this.el_ && this.el_.webkitPresentationMode !== 'picture-in-picture') {
this.one('webkitendfullscreen', endFn);
this.trigger('fullscreenchange', {
isFullscreen: true,
// set a flag in case another tech triggers fullscreenchange
nativeIOSFullscreen: true
});
}
};
this.on('webkitbeginfullscreen', beginFn);
this.on('dispose', () => {
this.off('webkitbeginfullscreen', beginFn);
this.off('webkitendfullscreen', endFn);
});
}
/**
* Check if fullscreen is supported on the video el.
*
* @return {boolean}
* - True if fullscreen is supported.
* - False if fullscreen is not supported.
*/
supportsFullScreen() {
return typeof this.el_.webkitEnterFullScreen === 'function';
}
/**
* Request that the `HTML5` Tech enter fullscreen.
*/
enterFullScreen() {
const video = this.el_;
if (video.paused && video.networkState <= video.HAVE_METADATA) {
// attempt to prime the video element for programmatic access
// this isn't necessary on the desktop but shouldn't hurt
silencePromise(this.el_.play());
// playing and pausing synchronously during the transition to fullscreen
// can get iOS ~6.1 devices into a play/pause loop
this.setTimeout(function () {
video.pause();
try {
video.webkitEnterFullScreen();
} catch (e) {
this.trigger('fullscreenerror', e);
}
}, 0);
} else {
try {
video.webkitEnterFullScreen();
} catch (e) {
this.trigger('fullscreenerror', e);
}
}
}
/**
* Request that the `HTML5` Tech exit fullscreen.
*/
exitFullScreen() {
if (!this.el_.webkitDisplayingFullscreen) {
this.trigger('fullscreenerror', new Error('The video is not fullscreen'));
return;
}
this.el_.webkitExitFullScreen();
}
/**
* Create a floating video window always on top of other windows so that users may
* continue consuming media while they interact with other content sites, or
* applications on their device.
*
* @see [Spec]{@link https://wicg.github.io/picture-in-picture}
*
* @return {Promise}
* A promise with a Picture-in-Picture window.
*/
requestPictureInPicture() {
return this.el_.requestPictureInPicture();
}
/**
* Native requestVideoFrameCallback if supported by browser/tech, or fallback
* Don't use rVFC on Safari when DRM is playing, as it doesn't fire
* Needs to be checked later than the constructor
* This will be a false positive for clear sources loaded after a Fairplay source
*
* @param {function} cb function to call
* @return {number} id of request
*/
requestVideoFrameCallback(cb) {
if (this.featuresVideoFrameCallback && !this.el_.webkitKeys) {
return this.el_.requestVideoFrameCallback(cb);
}
return super.requestVideoFrameCallback(cb);
}
/**
* Native or fallback cancelVideoFrameCallback
*
* @param {number} id request id to cancel
*/
cancelVideoFrameCallback(id) {
if (this.featuresVideoFrameCallback && !this.el_.webkitKeys) {
this.el_.cancelVideoFrameCallback(id);
} else {
super.cancelVideoFrameCallback(id);
}
}
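// Illustrative usage sketch (not executed by the library): scheduling per-frame work
// through the wrappers above, which fall back to the Tech's emulated callback when
// native rVFC is unavailable or FairPlay keys (webkitKeys) are detected.
//
//   const id = tech.requestVideoFrameCallback((now, metadata) => {
//     // metadata (e.g. mediaTime) is only provided on the native path
//   });
//   tech.cancelVideoFrameCallback(id); // when the per-frame work is no longer needed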
/**
* A getter/setter for the `Html5` Tech's source object.
* > Note: Please use {@link Html5#setSource}
*
* @param {Tech~SourceObject} [src]
* The source object you want to set on the `HTML5` techs element.
*
* @return {Tech~SourceObject|undefined}
* - The current source object when a source is not passed in.
* - undefined when setting
*
* @deprecated Since version 5.
*/
src(src) {
if (src === undefined) {
return this.el_.src;
}
// Setting src through `src` instead of `setSrc` will be deprecated
this.setSrc(src);
}
/**
* Reset the tech by removing all sources and then calling
* {@link Html5.resetMediaElement}.
*/
reset() {
Html5.resetMediaElement(this.el_);
}
/**
* Get the current source on the `HTML5` Tech. Falls back to returning the source from
* the HTML5 media element.
*
* @return {Tech~SourceObject}
* The current source object from the HTML5 tech, with a fallback to the
* element's source.
*/
currentSrc() {
if (this.currentSource_) {
return this.currentSource_.src;
}
return this.el_.currentSrc;
}
/**
* Set controls attribute for the HTML5 media Element.
*
* @param {boolean} val
* Value to set the controls attribute to
*/
setControls(val) {
this.el_.controls = !!val;
}
/**
* Creates and returns a remote {@link TextTrack} object.
*
* @param {string} kind
* `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata)
*
* @param {string} [label]
* Label to identify the text track
*
* @param {string} [language]
* Two letter language abbreviation
*
* @return {TextTrack}
* The TextTrack that gets created.
*/
addTextTrack(kind, label, language) {
if (!this.featuresNativeTextTracks) {
return super.addTextTrack(kind, label, language);
}
return this.el_.addTextTrack(kind, label, language);
}
/**
* Creates either native TextTrack or an emulated TextTrack depending
* on the value of `featuresNativeTextTracks`
*
* @param {Object} options
* The object should contain the options to initialize the TextTrack with.
*
* @param {string} [options.kind]
* `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata).
*
* @param {string} [options.label]
* Label to identify the text track
*
* @param {string} [options.language]
* Two letter language abbreviation.
*
* @param {boolean} [options.default]
* Default this track to on.
*
* @param {string} [options.id]
* The internal id to assign this track.
*
* @param {string} [options.src]
* A source url for the track.
*
* @return {HTMLTrackElement}
* The track element that gets created.
*/
createRemoteTextTrack(options) {
if (!this.featuresNativeTextTracks) {
return super.createRemoteTextTrack(options);
}
const htmlTrackElement = document$1.createElement('track');
if (options.kind) {
htmlTrackElement.kind = options.kind;
}
if (options.label) {
htmlTrackElement.label = options.label;
}
if (options.language || options.srclang) {
htmlTrackElement.srclang = options.language || options.srclang;
}
if (options.default) {
htmlTrackElement.default = options.default;
}
if (options.id) {
htmlTrackElement.id = options.id;
}
if (options.src) {
htmlTrackElement.src = options.src;
}
return htmlTrackElement;
}
/**
* Creates a remote text track object and returns an html track element.
*
* @param {Object} options The object should contain values for
* kind, language, label, and src (location of the WebVTT file)
* @param {boolean} [manualCleanup=false] if set to true, the TextTrack
* will not be removed from the TextTrackList and HtmlTrackElementList
* after a source change
* @return {HTMLTrackElement} An Html Track Element.
* This can be an emulated {@link HTMLTrackElement} or a native one.
*
*/
addRemoteTextTrack(options, manualCleanup) {
const htmlTrackElement = super.addRemoteTextTrack(options, manualCleanup);
if (this.featuresNativeTextTracks) {
this.el().appendChild(htmlTrackElement);
}
return htmlTrackElement;
}
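// Illustrative usage sketch (not executed by the library): adding a remote captions
// track through the player API, which delegates to the tech method above; the track
// URL is a placeholder.
//
//   player.addRemoteTextTrack({
//     kind: 'captions',
//     srclang: 'en',
//     label: 'English',
//     src: 'https://example.com/captions.vtt'
//   }, false);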
/**
* Remove remote `TextTrack` from `TextTrackList` object
*
* @param {TextTrack} track
* `TextTrack` object to remove
*/
removeRemoteTextTrack(track) {
super.removeRemoteTextTrack(track);
if (this.featuresNativeTextTracks) {
const tracks = this.$$('track');
let i = tracks.length;
while (i--) {
if (track === tracks[i] || track === tracks[i].track) {
this.el().removeChild(tracks[i]);
}
}
}
}
/**
* Gets available media playback quality metrics as specified by the W3C's Media
* Playback Quality API.
*
* @see [Spec]{@link https://wicg.github.io/media-playback-quality}
*
* @return {Object}
* An object with supported media playback quality metrics
*/
getVideoPlaybackQuality() {
if (typeof this.el().getVideoPlaybackQuality === 'function') {
return this.el().getVideoPlaybackQuality();
}
const videoPlaybackQuality = {};
if (typeof this.el().webkitDroppedFrameCount !== 'undefined' && typeof this.el().webkitDecodedFrameCount !== 'undefined') {
videoPlaybackQuality.droppedVideoFrames = this.el().webkitDroppedFrameCount;
videoPlaybackQuality.totalVideoFrames = this.el().webkitDecodedFrameCount;
}
if (window$1.performance) {
videoPlaybackQuality.creationTime = window$1.performance.now();
}
return videoPlaybackQuality;
}
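// Illustrative usage sketch (not executed by the library): reading dropped-frame
// metrics, whether they come from the standard API or the webkit fallback above.
//
//   const quality = tech.getVideoPlaybackQuality();
//   if (quality.totalVideoFrames) {
//     const droppedRatio = quality.droppedVideoFrames / quality.totalVideoFrames;
//   }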
}
/* HTML5 Support Testing ---------------------------------------------------- */
/**
* Element for testing browser HTML5 media capabilities
*
* @type {Element}
* @constant
* @private
*/
defineLazyProperty(Html5, 'TEST_VID', function () {
if (!isReal()) {
return;
}
const video = document$1.createElement('video');
const track = document$1.createElement('track');
track.kind = 'captions';
track.srclang = 'en';
track.label = 'English';
video.appendChild(track);
return video;
});
/**
* Check if HTML5 media is supported by this browser/device.
*
* @return {boolean}
* - True if HTML5 media is supported.
* - False if HTML5 media is not supported.
*/
Html5.isSupported = function () {
// IE with no Media Player is a LIAR! (#984)
try {
Html5.TEST_VID.volume = 0.5;
} catch (e) {
return false;
}
return !!(Html5.TEST_VID && Html5.TEST_VID.canPlayType);
};
/**
* Check if the tech can support the given type
*
* @param {string} type
* The mimetype to check
* @return {string} 'probably', 'maybe', or '' (empty string)
*/
Html5.canPlayType = function (type) {
return Html5.TEST_VID.canPlayType(type);
};
/**
* Check if the tech can support the given source
*
* @param {Object} srcObj
* The source object
* @param {Object} options
* The options passed to the tech
* @return {string} 'probably', 'maybe', or '' (empty string)
*/
Html5.canPlaySource = function (srcObj, options) {
return Html5.canPlayType(srcObj.type);
};
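// Illustrative usage sketch (not executed by the library), assuming the exported
// videojs global: asking the Html5 tech whether it can play a type or source.
// Like the native canPlayType, the answer is 'probably', 'maybe', or ''.
//
//   const Html5Tech = videojs.getTech('Html5');
//   Html5Tech.canPlayType('video/mp4; codecs="avc1.42E01E"'); // e.g. 'probably'
//   Html5Tech.canPlaySource({ src: 'movie.webm', type: 'video/webm' }); // e.g. 'maybe'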
/**
* Check if the volume can be changed in this browser/device.
* Volume cannot be changed in a lot of mobile devices.
* Specifically, it can't be changed from 1 on iOS.
*
* @return {boolean}
* - True if volume can be controlled
* - False otherwise
*/
Html5.canControlVolume = function () {
// IE will error if Windows Media Player not installed #3315
try {
const volume = Html5.TEST_VID.volume;
Html5.TEST_VID.volume = volume / 2 + 0.1;
const canControl = volume !== Html5.TEST_VID.volume;
// With the introduction of iOS 15, there are cases where the volume is read as
// changed but reverts back to its original state at the start of the next tick.
// To determine whether volume can be controlled on iOS,
// a timeout is set and the volume is checked asynchronously.
// Since `features` doesn't currently work asynchronously, the value is manually set.
if (canControl && IS_IOS) {
window$1.setTimeout(() => {
if (Html5 && Html5.prototype) {
Html5.prototype.featuresVolumeControl = volume !== Html5.TEST_VID.volume;
}
});
// default iOS to false, which will be updated in the timeout above.
return false;
}
return canControl;
} catch (e) {
return false;
}
};
/**
* Check if the volume can be muted in this browser/device.
* Some devices, e.g. iOS, don't allow changing volume
* but permit muting/unmuting.
*
* @return {boolean}
* - True if volume can be muted
* - False otherwise
*/
Html5.canMuteVolume = function () {
try {
const muted = Html5.TEST_VID.muted;
// in some versions of iOS muted property doesn't always
// work, so we want to set both property and attribute
Html5.TEST_VID.muted = !muted;
if (Html5.TEST_VID.muted) {
setAttribute(Html5.TEST_VID, 'muted', 'muted');
} else {
removeAttribute(Html5.TEST_VID, 'muted', 'muted');
}
return muted !== Html5.TEST_VID.muted;
} catch (e) {
return false;
}
};
/**
* Check if the playback rate can be changed in this browser/device.
*
* @return {boolean}
* - True if playback rate can be controlled
* - False otherwise
*/
Html5.canControlPlaybackRate = function () {
// Playback rate API is implemented in Android Chrome, but doesn't do anything
// https://github.com/videojs/video.js/issues/3180
if (IS_ANDROID && IS_CHROME && CHROME_VERSION < 58) {
return false;
}
// IE will error if Windows Media Player not installed #3315
try {
const playbackRate = Html5.TEST_VID.playbackRate;
Html5.TEST_VID.playbackRate = playbackRate / 2 + 0.1;
return playbackRate !== Html5.TEST_VID.playbackRate;
} catch (e) {
return false;
}
};
/**
* Check if we can override a video/audio element's attributes with
* Object.defineProperty.
*
* @return {boolean}
* - True if builtin attributes can be overridden
* - False otherwise
*/
Html5.canOverrideAttributes = function () {
// if we cannot overwrite the src/innerHTML property, there is no support
// iOS 7 safari for instance cannot do this.
try {
const noop = () => {};
Object.defineProperty(document$1.createElement('video'), 'src', {
get: noop,
set: noop
});
Object.defineProperty(document$1.createElement('audio'), 'src', {
get: noop,
set: noop
});
Object.defineProperty(document$1.createElement('video'), 'innerHTML', {
get: noop,
set: noop
});
Object.defineProperty(document$1.createElement('audio'), 'innerHTML', {
get: noop,
set: noop
});
} catch (e) {
return false;
}
return true;
};
/**
* Check to see if native `TextTrack`s are supported by this browser/device.
*
* @return {boolean}
* - True if native `TextTrack`s are supported.
* - False otherwise
*/
Html5.supportsNativeTextTracks = function () {
return IS_ANY_SAFARI || IS_IOS && IS_CHROME;
};
/**
* Check to see if native `VideoTrack`s are supported by this browser/device
*
* @return {boolean}
* - True if native `VideoTrack`s are supported.
* - False otherwise
*/
Html5.supportsNativeVideoTracks = function () {
return !!(Html5.TEST_VID && Html5.TEST_VID.videoTracks);
};
/**
* Check to see if native `AudioTrack`s are supported by this browser/device
*
* @return {boolean}
* - True if native `AudioTrack`s are supported.
* - False otherwise
*/
Html5.supportsNativeAudioTracks = function () {
return !!(Html5.TEST_VID && Html5.TEST_VID.audioTracks);
};
/**
* An array of events available on the Html5 tech.
*
* @private
* @type {Array}
*/
Html5.Events = ['loadstart', 'suspend', 'abort', 'error', 'emptied', 'stalled', 'loadedmetadata', 'loadeddata', 'canplay', 'canplaythrough', 'playing', 'waiting', 'seeking', 'seeked', 'ended', 'durationchange', 'timeupdate', 'progress', 'play', 'pause', 'ratechange', 'resize', 'volumechange'];
/**
* Boolean indicating whether the `Tech` supports volume control.
*
* @type {boolean}
* @default {@link Html5.canControlVolume}
*/
/**
* Boolean indicating whether the `Tech` supports muting volume.
*
* @type {boolean}
* @default {@link Html5.canMuteVolume}
*/
/**
* Boolean indicating whether the `Tech` supports changing the speed at which the media
* plays. Examples:
* - Set player to play 2x (twice) as fast
* - Set player to play 0.5x (half) as fast
*
* @type {boolean}
* @default {@link Html5.canControlPlaybackRate}
*/
/**
* Boolean indicating whether the `Tech` supports the `sourceset` event.
*
* @type {boolean}
* @default
*/
/**
* Boolean indicating whether the `HTML5` tech currently supports native `TextTrack`s.
*
* @type {boolean}
* @default {@link Html5.supportsNativeTextTracks}
*/
/**
* Boolean indicating whether the `HTML5` tech currently supports native `VideoTrack`s.
*
* @type {boolean}
* @default {@link Html5.supportsNativeVideoTracks}
*/
/**
* Boolean indicating whether the `HTML5` tech currently supports native `AudioTrack`s.
*
* @type {boolean}
* @default {@link Html5.supportsNativeAudioTracks}
*/
[['featuresMuteControl', 'canMuteVolume'], ['featuresPlaybackRate', 'canControlPlaybackRate'], ['featuresSourceset', 'canOverrideAttributes'], ['featuresNativeTextTracks', 'supportsNativeTextTracks'], ['featuresNativeVideoTracks', 'supportsNativeVideoTracks'], ['featuresNativeAudioTracks', 'supportsNativeAudioTracks']].forEach(function ([key, fn]) {
defineLazyProperty(Html5.prototype, key, () => Html5[fn](), true);
});
Html5.prototype.featuresVolumeControl = Html5.canControlVolume();
/**
* Boolean indicating whether the `HTML5` tech currently supports the media element
* moving in the DOM. iOS breaks if you move the media element, so this is set to
* false there. Everywhere else this should be true.
*
* @type {boolean}
* @default
*/
Html5.prototype.movingMediaElementInDOM = !IS_IOS;
// TODO: Previous comment: No longer appears to be used. Can probably be removed.
// Is this true?
/**
* Boolean indicating whether the `HTML5` tech currently supports automatic media resize
* when going into fullscreen.
*
* @type {boolean}
* @default
*/
Html5.prototype.featuresFullscreenResize = true;
/**
* Boolean indicating whether the `HTML5` tech currently supports the progress event.
* If this is false, manual `progress` events will be triggered instead.
*
* @type {boolean}
* @default
*/
Html5.prototype.featuresProgressEvents = true;
/**
* Boolean indicating whether the `HTML5` tech currently supports the timeupdate event.
* If this is false, manual `timeupdate` events will be triggered instead.
*
* @default
*/
Html5.prototype.featuresTimeupdateEvents = true;
/**
* Whether the HTML5 el supports `requestVideoFrameCallback`
*
* @type {boolean}
*/
Html5.prototype.featuresVideoFrameCallback = !!(Html5.TEST_VID && Html5.TEST_VID.requestVideoFrameCallback);
Html5.disposeMediaElement = function (el) {
if (!el) {
return;
}
if (el.parentNode) {
el.parentNode.removeChild(el);
}
// remove any child track or source nodes to prevent their loading
while (el.hasChildNodes()) {
el.removeChild(el.firstChild);
}
// remove any src reference. not setting `src=''` because that causes a warning
// in firefox
el.removeAttribute('src');
// force the media element to update its loading state by calling load()
// however IE on Windows 7N has a bug that throws an error so need a try/catch (#793)
if (typeof el.load === 'function') {
// wrapping in an iife so it's not deoptimized (#1060#discussion_r10324473)
(function () {
try {
el.load();
} catch (e) {
// not supported
}
})();
}
};
Html5.resetMediaElement = function (el) {
if (!el) {
return;
}
const sources = el.querySelectorAll('source');
let i = sources.length;
while (i--) {
el.removeChild(sources[i]);
}
// remove any src reference.
// not setting `src=''` because that throws an error
el.removeAttribute('src');
if (typeof el.load === 'function') {
// wrapping in an iife so it's not deoptimized (#1060#discussion_r10324473)
(function () {
try {
el.load();
} catch (e) {
// satisfy linter
}
})();
}
};
/* Native HTML5 element property wrapping ----------------------------------- */
// Wrap native boolean attributes with getters that check both property and attribute
// The list is as follows:
// muted, defaultMuted, autoplay, controls, loop, playsinline
[
/**
* Get the value of `muted` from the media element. `muted` indicates
* that the volume for the media should be set to silent. This does not actually change
* the `volume` attribute.
*
* @method Html5#muted
* @return {boolean}
* - True if the value of `volume` should be ignored and the audio set to silent.
* - False if the value of `volume` should be used.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-muted}
*/
'muted',
/**
* Get the value of `defaultMuted` from the media element. `defaultMuted` indicates
* whether the media should start muted or not. Only changes the default state of the
* media. `muted` and `defaultMuted` can have different values. {@link Html5#muted} indicates the
* current state.
*
* @method Html5#defaultMuted
* @return {boolean}
* - The value of `defaultMuted` from the media element.
* - True indicates that the media should start muted.
* - False indicates that the media should not start muted
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-defaultmuted}
*/
'defaultMuted',
/**
* Get the value of `autoplay` from the media element. `autoplay` indicates
* that the media should start to play as soon as the page is ready.
*
* @method Html5#autoplay
* @return {boolean}
* - The value of `autoplay` from the media element.
* - True indicates that the media should start as soon as the page loads.
* - False indicates that the media should not start as soon as the page loads.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-autoplay}
*/
'autoplay',
/**
* Get the value of `controls` from the media element. `controls` indicates
* whether the native media controls should be shown or hidden.
*
* @method Html5#controls
* @return {boolean}
* - The value of `controls` from the media element.
* - True indicates that native controls should be showing.
* - False indicates that native controls should be hidden.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-controls}
*/
'controls',
/**
* Get the value of `loop` from the media element. `loop` indicates
* that the media should return to the start of the media and continue playing once
* it reaches the end.
*
* @method Html5#loop
* @return {boolean}
* - The value of `loop` from the media element.
* - True indicates that playback should seek back to start once
* the end of a media is reached.
* - False indicates that playback should not loop back to the start when the
* end of the media is reached.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-loop}
*/
'loop',
/**
* Get the value of `playsinline` from the media element. `playsinline` indicates
* to the browser that non-fullscreen playback is preferred when fullscreen
* playback is the native default, such as in iOS Safari.
*
* @method Html5#playsinline
* @return {boolean}
* - The value of `playsinline` from the media element.
* - True indicates that the media should play inline.
* - False indicates that the media should not play inline.
*
* @see [Spec]{@link https://html.spec.whatwg.org/#attr-video-playsinline}
*/
'playsinline'].forEach(function (prop) {
Html5.prototype[prop] = function () {
return this.el_[prop] || this.el_.hasAttribute(prop);
};
});
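// Illustrative sketch (not executed by the library): because the getters above check
// both the property and the attribute, a tag authored as <video muted> reports
// muted() === true even if the property has not been reflected yet.
//
//   tech.muted();       // true if el_.muted is true OR the `muted` attribute is present
//   tech.playsinline(); // same property-or-attribute check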
// Wrap native boolean attributes with setters that set both property and attribute
// The list is as follows:
// setMuted, setDefaultMuted, setAutoplay, setLoop, setPlaysinline
// setControls is special-cased above
[
/**
* Set the value of `muted` on the media element. `muted` indicates that the current
* audio level should be silent.
*
* @method Html5#setMuted
* @param {boolean} muted
* - True if the audio should be set to silent
* - False otherwise
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-muted}
*/
'muted',
/**
* Set the value of `defaultMuted` on the media element. `defaultMuted` indicates that the current
* audio level should be silent, but will only affect the muted state on initial playback.
*
* @method Html5.prototype.setDefaultMuted
* @param {boolean} defaultMuted
* - True if the audio should be set to silent
* - False otherwise
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-defaultmuted}
*/
'defaultMuted',
/**
* Set the value of `autoplay` on the media element. `autoplay` indicates
* that the media should start to play as soon as the page is ready.
*
* @method Html5#setAutoplay
* @param {boolean} autoplay
* - True indicates that the media should start as soon as the page loads.
* - False indicates that the media should not start as soon as the page loads.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-autoplay}
*/
'autoplay',
/**
* Set the value of `loop` on the media element. `loop` indicates
* that the media should return to the start of the media and continue playing once
* it reaches the end.
*
* @method Html5#setLoop
* @param {boolean} loop
* - True indicates that playback should seek back to start once
* the end of a media is reached.
* - False indicates that playback should not loop back to the start when the
* end of the media is reached.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-loop}
*/
'loop',
/**
* Set the value of `playsinline` on the media element. `playsinline` indicates
* to the browser that non-fullscreen playback is preferred when fullscreen
* playback is the native default, such as in iOS Safari.
*
* @method Html5#setPlaysinline
* @param {boolean} playsinline
* - True indicates that the media should play inline.
* - False indicates that the media should not play inline.
*
* @see [Spec]{@link https://html.spec.whatwg.org/#attr-video-playsinline}
*/
'playsinline'].forEach(function (prop) {
Html5.prototype['set' + toTitleCase$1(prop)] = function (v) {
this.el_[prop] = v;
if (v) {
this.el_.setAttribute(prop, prop);
} else {
this.el_.removeAttribute(prop);
}
};
});
// Wrap native properties with a getter
// The list is as follows:
// paused, currentTime, buffered, volume, poster, preload, error, seeking
// seekable, ended, playbackRate, defaultPlaybackRate, disablePictureInPicture
// played, networkState, readyState, videoWidth, videoHeight, crossOrigin
[
/**
* Get the value of `paused` from the media element. `paused` indicates whether the media element
* is currently paused or not.
*
* @method Html5#paused
* @return {boolean}
* The value of `paused` from the media element.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-paused}
*/
'paused',
/**
* Get the value of `currentTime` from the media element. `currentTime` indicates
* the current second that the media is at in playback.
*
* @method Html5#currentTime
* @return {number}
* The value of `currentTime` from the media element.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-currenttime}
*/
'currentTime',
/**
* Get the value of `buffered` from the media element. `buffered` is a `TimeRange`
* object that represents the parts of the media that are already downloaded and
* available for playback.
*
* @method Html5#buffered
* @return {TimeRange}
* The value of `buffered` from the media element.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-buffered}
*/
'buffered',
/**
* Get the value of `volume` from the media element. `volume` indicates
* the current playback volume of audio for a media. `volume` will be a value from 0
* (silent) to 1 (loudest and default).
*
* @method Html5#volume
* @return {number}
* The value of `volume` from the media element. Value will be between 0-1.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-a-volume}
*/
'volume',
/**
* Get the value of `poster` from the media element. `poster` indicates
* the url of an image file that can/will be shown when no media data is available.
*
* @method Html5#poster
* @return {string}
* The value of `poster` from the media element. Value will be a url to an
* image.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-video-poster}
*/
'poster',
/**
* Get the value of `preload` from the media element. `preload` indicates
* what should download before the media is interacted with. It can have the following
* values:
* - none: nothing should be downloaded
* - metadata: poster and the first few frames of the media may be downloaded to get
* media dimensions and other metadata
* - auto: allow the media and metadata for the media to be downloaded before
* interaction
*
* @method Html5#preload
* @return {string}
* The value of `preload` from the media element. Will be 'none', 'metadata',
* or 'auto'.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-preload}
*/
'preload',
/**
* Get the value of the `error` from the media element. `error` indicates any
* MediaError that may have occurred during playback. If error returns null there is no
* current error.
*
* @method Html5#error
* @return {MediaError|null}
* The value of `error` from the media element. Will be `MediaError` if there
* is a current error and null otherwise.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-error}
*/
'error',
/**
* Get the value of `seeking` from the media element. `seeking` indicates whether the
* media is currently seeking to a new position or not.
*
* @method Html5#seeking
* @return {boolean}
* - The value of `seeking` from the media element.
* - True indicates that the media is currently seeking to a new position.
* - False indicates that the media is not seeking to a new position at this time.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-seeking}
*/
'seeking',
/**
* Get the value of `seekable` from the media element. `seekable` returns a
* `TimeRange` object indicating ranges of time that can currently be `seeked` to.
*
* @method Html5#seekable
* @return {TimeRange}
* The value of `seekable` from the media element. A `TimeRange` object
* indicating the current ranges of time that can be seeked to.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-seekable}
*/
'seekable',
/**
* Get the value of `ended` from the media element. `ended` indicates whether
* the media has reached the end or not.
*
* @method Html5#ended
* @return {boolean}
* - The value of `ended` from the media element.
* - True indicates that the media has ended.
* - False indicates that the media has not ended.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-ended}
*/
'ended',
/**
* Get the value of `playbackRate` from the media element. `playbackRate` indicates
* the rate at which the media is currently playing back. Examples:
* - if playbackRate is set to 2, media will play twice as fast.
* - if playbackRate is set to 0.5, media will play half as fast.
*
* @method Html5#playbackRate
* @return {number}
* The value of `playbackRate` from the media element. A number indicating
* the current playback speed of the media, where 1 is normal speed.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-playbackrate}
*/
'playbackRate',
/**
* Get the value of `defaultPlaybackRate` from the media element. `defaultPlaybackRate` indicates
* the rate at which the media is currently playing back. This value will not indicate the current
* `playbackRate` after playback has started, use {@link Html5#playbackRate} for that.
*
* Examples:
* - if defaultPlaybackRate is set to 2, media will play twice as fast.
* - if defaultPlaybackRate is set to 0.5, media will play half as fast.
*
* @method Html5.prototype.defaultPlaybackRate
* @return {number}
* The value of `defaultPlaybackRate` from the media element. A number indicating
* the current playback speed of the media, where 1 is normal speed.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-playbackrate}
*/
'defaultPlaybackRate',
/**
* Get the value of 'disablePictureInPicture' from the video element.
*
* @method Html5#disablePictureInPicture
* @return {boolean} value
* - The value of `disablePictureInPicture` from the video element.
* - True indicates that the video can't be played in Picture-In-Picture mode
* - False indicates that the video can be played in Picture-In-Picture mode
*
* @see [Spec]{@link https://w3c.github.io/picture-in-picture/#disable-pip}
*/
'disablePictureInPicture',
/**
* Get the value of `played` from the media element. `played` returns a `TimeRange`
* object representing points in the media timeline that have been played.
*
* @method Html5#played
* @return {TimeRange}
* The value of `played` from the media element. A `TimeRange` object indicating
* the ranges of time that have been played.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-played}
*/
'played',
/**
* Get the value of `networkState` from the media element. `networkState` indicates
* the current network state. It returns an enumeration from the following list:
* - 0: NETWORK_EMPTY
* - 1: NETWORK_IDLE
* - 2: NETWORK_LOADING
* - 3: NETWORK_NO_SOURCE
*
* @method Html5#networkState
* @return {number}
* The value of `networkState` from the media element. This will be a number
* from the list in the description.
*
* @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-networkstate}
*/
'networkState',
/**
* Get the value of `readyState` from the media element. `readyState` indicates
* the current state of the media element. It returns an enumeration from the
* following list:
* - 0: HAVE_NOTHING
* - 1: HAVE_METADATA
* - 2: HAVE_CURRENT_DATA
* - 3: HAVE_FUTURE_DATA
* - 4: HAVE_ENOUGH_DATA
*
* @method Html5#readyState
* @return {number}
* The value of `readyState` from the media element. This will be a number
* from the list in the description.
*
* @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#ready-states}
*/
'readyState',
/**
* Get the value of `videoWidth` from the video element. `videoWidth` indicates
* the current width of the video in css pixels.
*
* @method Html5#videoWidth
* @return {number}
* The value of `videoWidth` from the video element. This will be a number
* in css pixels.
*
* @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-video-videowidth}
*/
'videoWidth',
/**
* Get the value of `videoHeight` from the video element. `videoHeight` indicates
* the current height of the video in css pixels.
*
* @method Html5#videoHeight
* @return {number}
* The value of `videoHeight` from the video element. This will be a number
* in css pixels.
*
* @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-video-videowidth}
*/
'videoHeight',
/**
* Get the value of `crossOrigin` from the media element. `crossOrigin` indicates
* to the browser whether it should send cookies along with the requests for the
* different assets/playlists.
*
* @method Html5#crossOrigin
* @return {string}
* - anonymous indicates that the media should not send cookies.
* - use-credentials indicates that the media should send cookies along with the requests.
*
* @see [Spec]{@link https://html.spec.whatwg.org/#attr-media-crossorigin}
*/
'crossOrigin'].forEach(function (prop) {
Html5.prototype[prop] = function () {
return this.el_[prop];
};
});
// Wrap native properties with a setter in this format:
// set + toTitleCase(name)
// The list is as follows:
// setVolume, setSrc, setPoster, setPreload, setPlaybackRate, setDefaultPlaybackRate,
// setDisablePictureInPicture, setCrossOrigin
[
/**
* Set the value of `volume` on the media element. `volume` indicates the current
* audio level as a percentage in decimal form. This means that 1 is 100%, 0.5 is 50%, and
* so on.
*
* @method Html5#setVolume
* @param {number} percentAsDecimal
* The volume percent as a decimal. Valid range is from 0-1.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-a-volume}
*/
'volume',
/**
* Set the value of `src` on the media element. `src` indicates the current
* {@link Tech~SourceObject} for the media.
*
* @method Html5#setSrc
* @param {Tech~SourceObject} src
* The source object to set as the current source.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-src}
*/
'src',
/**
* Set the value of `poster` on the media element. `poster` is the url to
* an image file that can/will be shown when no media data is available.
*
* @method Html5#setPoster
* @param {string} poster
* The url to an image that should be used as the `poster` for the media
* element.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-poster}
*/
'poster',
/**
* Set the value of `preload` on the media element. `preload` indicates
* what should download before the media is interacted with. It can have the following
* values:
* - none: nothing should be downloaded
* - metadata: poster and the first few frames of the media may be downloaded to get
* media dimensions and other metadata
* - auto: allow the media and metadata for the media to be downloaded before
* interaction
*
* @method Html5#setPreload
* @param {string} preload
* The value of `preload` to set on the media element. Must be 'none', 'metadata',
* or 'auto'.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-preload}
*/
'preload',
/**
* Set the value of `playbackRate` on the media element. `playbackRate` indicates
* the rate at which the media should play back. Examples:
* - if playbackRate is set to 2, media will play twice as fast.
* - if playbackRate is set to 0.5, media will play half as fast.
*
* @method Html5#setPlaybackRate
* @param {number} playbackRate
* The value of `playbackRate` to set on the media element. A number indicating
* the desired playback speed of the media, where 1 is normal speed.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-playbackrate}
*/
'playbackRate',
/**
* Set the value of `defaultPlaybackRate` on the media element. `defaultPlaybackRate` indicates
* the rate at which the media should play back upon initial startup. Changing this value
* after a video has started will do nothing. Instead you should use {@link Html5#setPlaybackRate}.
*
* Example Values:
* - if playbackRate is set to 2, media will play twice as fast.
* - if playbackRate is set to 0.5, media will play half as fast.
*
* @method Html5.prototype.setDefaultPlaybackRate
* @param {number} defaultPlaybackRate
* The value of `defaultPlaybackRate` to set on the media element. A number indicating
* the desired default playback speed of the media, where 1 is normal speed.
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-defaultplaybackrate}
*/
'defaultPlaybackRate',
/**
* Prevents the browser from suggesting a Picture-in-Picture context menu
* or from requesting Picture-in-Picture automatically in some cases.
*
* @method Html5#setDisablePictureInPicture
* @param {boolean} value
* If true, Picture-in-Picture mode will be disabled.
*
* @see [Spec]{@link https://w3c.github.io/picture-in-picture/#disable-pip}
*/
'disablePictureInPicture',
/**
* Set the value of `crossOrigin` on the media element. `crossOrigin` indicates
* to the browser whether it should send cookies along with the requests for the
* different assets/playlists.
*
* @method Html5#setCrossOrigin
* @param {string} crossOrigin
* - anonymous indicates that the media should not send cookies.
* - use-credentials indicates that the media should send cookies along with the requests.
*
* @see [Spec]{@link https://html.spec.whatwg.org/#attr-media-crossorigin}
*/
'crossOrigin'].forEach(function (prop) {
Html5.prototype['set' + toTitleCase$1(prop)] = function (v) {
this.el_[prop] = v;
};
});
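// Illustrative usage sketch (not executed by the library): the generated setters
// assign straight through to the media element.
//
//   tech.setVolume(0.5);        // el_.volume = 0.5
//   tech.setPlaybackRate(1.25); // el_.playbackRate = 1.25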
// wrap native functions with a function
// The list is as follows:
// pause, load, play
[
/**
* A wrapper around the media element's `pause` function. This will call the `HTML5`
* media element's `pause` function.
*
* @method Html5#pause
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-pause}
*/
'pause',
/**
* A wrapper around the media element's `load` function. This will call the `HTML5`
* media element's `load` function.
*
* @method Html5#load
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-load}
*/
'load',
/**
* A wrapper around the media element's `play` function. This will call the `HTML5`
* media element's `play` function.
*
* @method Html5#play
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-play}
*/
'play'].forEach(function (prop) {
Html5.prototype[prop] = function () {
return this.el_[prop]();
};
});
Tech.withSourceHandlers(Html5);
/**
* Native source handler for Html5, simply passes the source to the media element.
*
* @property {Tech~SourceObject} source
* The source object
*
* @property {Html5} tech
* The instance of the HTML5 tech.
*/
Html5.nativeSourceHandler = {};
/**
* Check if the media element can play the given mime type.
*
* @param {string} type
* The mimetype to check
*
* @return {string}
* 'probably', 'maybe', or '' (empty string)
*/
Html5.nativeSourceHandler.canPlayType = function (type) {
// IE without MediaPlayer throws an error (#519)
try {
return Html5.TEST_VID.canPlayType(type);
} catch (e) {
return '';
}
};
/**
* Check if the media element can handle a source natively.
*
* @param {Tech~SourceObject} source
* The source object
*
* @param {Object} [options]
* Options to be passed to the tech.
*
* @return {string}
* 'probably', 'maybe', or '' (empty string).
*/
Html5.nativeSourceHandler.canHandleSource = function (source, options) {
// If a type was provided we should rely on that
if (source.type) {
return Html5.nativeSourceHandler.canPlayType(source.type);
// If no type, fall back to checking 'video/[EXTENSION]'
} else if (source.src) {
const ext = getFileExtension(source.src);
return Html5.nativeSourceHandler.canPlayType(`video/${ext}`);
}
return '';
};
/**
* Pass the source to the native media element.
*
* @param {Tech~SourceObject} source
* The source object
*
* @param {Html5} tech
* The instance of the Html5 tech
*
* @param {Object} [options]
* The options to pass to the source
*/
Html5.nativeSourceHandler.handleSource = function (source, tech, options) {
tech.setSrc(source.src);
};
/**
* A noop for the native dispose function, as cleanup is not needed.
*/
Html5.nativeSourceHandler.dispose = function () {};
// Register the native source handler
Html5.registerSourceHandler(Html5.nativeSourceHandler);
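// Illustrative sketch (not executed by the library): a custom source handler follows
// the same shape as nativeSourceHandler above and can be registered ahead of it.
// The 'video/x-custom' type is a placeholder.
//
//   Html5.registerSourceHandler({
//     canPlayType: (type) => type === 'video/x-custom' ? 'probably' : '',
//     canHandleSource: (source) => source.type === 'video/x-custom' ? 'probably' : '',
//     handleSource: (source, tech) => { /* set up playback for the source */ },
//     dispose: () => {}
//   }, 0); // index 0 places it before the native handler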
Tech.registerTech('Html5', Html5);
/**
* @file player.js
*/
/** @import { TimeRange } from './utils/time' */
/** @import HtmlTrackElement from './tracks/html-track-element' */
/**
* @callback PlayerReadyCallback
* @this {Player}
* @returns {void}
*/
// The following tech events are simply re-triggered
// on the player when they happen
const TECH_EVENTS_RETRIGGER = [
/**
* Fired while the user agent is downloading media data.
*
* @event Player#progress
* @type {Event}
*/
/**
* Retrigger the `progress` event that was triggered by the {@link Tech}.
*
* @private
* @method Player#handleTechProgress_
* @fires Player#progress
* @listens Tech#progress
*/
'progress',
/**
* Fires when the loading of an audio/video is aborted.
*
* @event Player#abort
* @type {Event}
*/
/**
* Retrigger the `abort` event that was triggered by the {@link Tech}.
*
* @private
* @method Player#handleTechAbort_
* @fires Player#abort
* @listens Tech#abort
*/
'abort',
/**
* Fires when the browser is intentionally not getting media data.
*
* @event Player#suspend
* @type {Event}
*/
/**
* Retrigger the `suspend` event that was triggered by the {@link Tech}.
*
* @private
* @method Player#handleTechSuspend_
* @fires Player#suspend
* @listens Tech#suspend
*/
'suspend',
/**
* Fires when the current playlist is empty.
*
* @event Player#emptied
* @type {Event}
*/
/**
* Retrigger the `emptied` event that was triggered by the {@link Tech}.
*
* @private
* @method Player#handleTechEmptied_
* @fires Player#emptied
* @listens Tech#emptied
*/
'emptied',
/**
* Fires when the browser is trying to get media data, but data is not available.
*
* @event Player#stalled
* @type {Event}
*/
/**
* Retrigger the `stalled` event that was triggered by the {@link Tech}.
*
* @private
* @method Player#handleTechStalled_
* @fires Player#stalled
* @listens Tech#stalled
*/
'stalled',
/**
* Fires when the browser has loaded meta data for the audio/video.
*
* @event Player#loadedmetadata
* @type {Event}
*/
/**
* Retrigger the `loadedmetadata` event that was triggered by the {@link Tech}.
*
* @private
* @method Player#handleTechLoadedmetadata_
* @fires Player#loadedmetadata
* @listens Tech#loadedmetadata
*/
'loadedmetadata',
/**
* Fires when the browser has loaded the current frame of the audio/video.
*
* @event Player#loadeddata
* @type {Event}
*/
/**
* Retrigger the `loadeddata` event that was triggered by the {@link Tech}.
*
* @private
* @method Player#handleTechLoadeddata_
* @fires Player#loadeddata
* @listens Tech#loadeddata
*/
'loadeddata',
/**
* Fires when the current playback position has changed.
*
* @event Player#timeupdate
* @type {Event}
*/
/**
* Retrigger the `timeupdate` event that was triggered by the {@link Tech}.
*
* @private
* @method Player#handleTechTimeUpdate_
* @fires Player#timeupdate
* @listens Tech#timeupdate
*/
'timeupdate',
/**
* Fires when the video's intrinsic dimensions change
*
* @event Player#resize
* @type {Event}
*/
/**
* Retrigger the `resize` event that was triggered by the {@link Tech}.
*
* @private
* @method Player#handleTechResize_
* @fires Player#resize
* @listens Tech#resize
*/
'resize',
/**
* Fires when the volume has been changed
*
* @event Player#volumechange
* @type {Event}
*/
/**
* Retrigger the `volumechange` event that was triggered by the {@link Tech}.
*
* @private
* @method Player#handleTechVolumechange_
* @fires Player#volumechange
* @listens Tech#volumechange
*/
'volumechange',
/**
* Fires when the text track has been changed
*
* @event Player#texttrackchange
* @type {Event}
*/
/**
* Retrigger the `texttrackchange` event that was triggered by the {@link Tech}.
*
* @private
* @method Player#handleTechTexttrackchange_
* @fires Player#texttrackchange
* @listens Tech#texttrackchange
*/
'texttrackchange'];
// events to queue when playback rate is zero
// this is a hash for the sole purpose of mapping non-camel-cased event names
// to camel-cased function names
const TECH_EVENTS_QUEUE = {
canplay: 'CanPlay',
canplaythrough: 'CanPlayThrough',
playing: 'Playing',
seeked: 'Seeked'
};
const BREAKPOINT_ORDER = ['tiny', 'xsmall', 'small', 'medium', 'large', 'xlarge', 'huge'];
const BREAKPOINT_CLASSES = {};
// grep: vjs-layout-tiny
// grep: vjs-layout-x-small
// grep: vjs-layout-small
// grep: vjs-layout-medium
// grep: vjs-layout-large
// grep: vjs-layout-x-large
// grep: vjs-layout-huge
BREAKPOINT_ORDER.forEach(k => {
const v = k.charAt(0) === 'x' ? `x-${k.substring(1)}` : k;
BREAKPOINT_CLASSES[k] = `vjs-layout-${v}`;
});
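// Resulting map, for reference (derived from the loop above):
//   { tiny: 'vjs-layout-tiny', xsmall: 'vjs-layout-x-small', small: 'vjs-layout-small',
//     medium: 'vjs-layout-medium', large: 'vjs-layout-large', xlarge: 'vjs-layout-x-large',
//     huge: 'vjs-layout-huge' }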
const DEFAULT_BREAKPOINTS = {
tiny: 210,
xsmall: 320,
small: 425,
medium: 768,
large: 1440,
xlarge: 2560,
huge: Infinity
};
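// Illustrative usage sketch (not executed by the library): custom maximum widths can
// be supplied via the `breakpoints` player option or player.breakpoints(), mirroring
// the shape of DEFAULT_BREAKPOINTS above.
//
//   player.breakpoints({ tiny: 300, xsmall: 400, small: 500, medium: 600,
//                        large: 700, xlarge: 800, huge: Infinity });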
/**
* An instance of the `Player` class is created when any of the Video.js setup methods
* are used to initialize a video.
*
* After an instance has been created it can be accessed globally in three ways:
* 1. By calling `videojs.getPlayer('example_video_1');`
* 2. By calling `videojs('example_video_1');` (not recommended)
* 3. By using it directly via `videojs.players.example_video_1;`
*
* @extends Component
* @global
*/
class Player extends Component$1 {
/**
* Create an instance of this class.
*
* @param {Element} tag
* The original video DOM element used for configuring options.
*
* @param {Object} [options]
* Object of option names and values.
*
* @param {PlayerReadyCallback} [ready]
* Ready callback function.
*/
constructor(tag, options, ready) {
// Make sure tag ID exists
// also here.. probably better
tag.id = tag.id || options.id || `vjs_video_${newGUID()}`;
// Set Options
// The options argument overrides options set in the video tag
// which overrides globally set options.
// This latter part coincides with the load order
// (tag must exist before Player)
options = Object.assign(Player.getTagSettings(tag), options);
// Delay the initialization of children because we need to set up
// player properties first, and can't use `this` before `super()`
options.initChildren = false;
// Same with creating the element
options.createEl = false;
// don't auto mixin the evented mixin
options.evented = false;
// we don't want the player to report touch activity on itself
// see enableTouchActivity in Component
options.reportTouchActivity = false;
// If language is not set, get the closest lang attribute
if (!options.language) {
const closest = tag.closest('[lang]');
if (closest) {
options.language = closest.getAttribute('lang');
}
}
// Run base component initializing with new options
super(null, options, ready);
// Create bound methods for document listeners.
this.boundDocumentFullscreenChange_ = e => this.documentFullscreenChange_(e);
this.boundFullWindowOnEscKey_ = e => this.fullWindowOnEscKey(e);
this.boundUpdateStyleEl_ = e => this.updateStyleEl_(e);
this.boundApplyInitTime_ = e => this.applyInitTime_(e);
this.boundUpdateCurrentBreakpoint_ = e => this.updateCurrentBreakpoint_(e);
this.boundHandleTechClick_ = e => this.handleTechClick_(e);
this.boundHandleTechDoubleClick_ = e => this.handleTechDoubleClick_(e);
this.boundHandleTechTouchStart_ = e => this.handleTechTouchStart_(e);
this.boundHandleTechTouchMove_ = e => this.handleTechTouchMove_(e);
this.boundHandleTechTouchEnd_ = e => this.handleTechTouchEnd_(e);
this.boundHandleTechTap_ = e => this.handleTechTap_(e);
this.boundUpdatePlayerHeightOnAudioOnlyMode_ = e => this.updatePlayerHeightOnAudioOnlyMode_(e);
// default isFullscreen_ to false
this.isFullscreen_ = false;
// create logger
this.log = createLogger(this.id_);
// Hold our own reference to fullscreen api so it can be mocked in tests
this.fsApi_ = FullscreenApi;
// Tracks when a tech changes the poster
this.isPosterFromTech_ = false;
// Holds callback info that gets queued when playback rate is zero
// and a seek is happening
this.queuedCallbacks_ = [];
// Turn off API access because we're loading a new tech that might load asynchronously
this.isReady_ = false;
// Init state hasStarted_
this.hasStarted_ = false;
// Init state userActive_
this.userActive_ = false;
// Init debugEnabled_
this.debugEnabled_ = false;
// Init state audioOnlyMode_
this.audioOnlyMode_ = false;
// Init state audioPosterMode_
this.audioPosterMode_ = false;
// Init state audioOnlyCache_
this.audioOnlyCache_ = {
controlBarHeight: null,
playerHeight: null,
hiddenChildren: []
};
// if the global option object was accidentally blown away by
// someone, bail early with an informative error
if (!this.options_ || !this.options_.techOrder || !this.options_.techOrder.length) {
throw new Error('No techOrder specified. Did you overwrite ' + 'videojs.options instead of just changing the ' + 'properties you want to override?');
}
// Store the original tag used to set options
this.tag = tag;
// Store the tag attributes used to restore html5 element
this.tagAttributes = tag && getAttributes(tag);
// Update current language
this.language(this.options_.language);
// Update Supported Languages
if (options.languages) {
// Normalise player option languages to lowercase
const languagesToLower = {};
Object.getOwnPropertyNames(options.languages).forEach(function (name) {
languagesToLower[name.toLowerCase()] = options.languages[name];
});
this.languages_ = languagesToLower;
} else {
this.languages_ = Player.prototype.options_.languages;
}
this.resetCache_();
// Set poster
/** @type string */
this.poster_ = options.poster || '';
// Set controls
/** @type {boolean} */
this.controls_ = !!options.controls;
// Original tag settings stored in options
// now remove immediately so native controls don't flash.
// May be turned back on by HTML5 tech if nativeControlsForTouch is true
tag.controls = false;
tag.removeAttribute('controls');
this.changingSrc_ = false;
this.playCallbacks_ = [];
this.playTerminatedQueue_ = [];
// the attribute overrides the option
if (tag.hasAttribute('autoplay')) {
this.autoplay(true);
} else {
// otherwise use the setter to validate and
// set the correct value.
this.autoplay(this.options_.autoplay);
}
// check plugins
if (options.plugins) {
Object.keys(options.plugins).forEach(name => {
if (typeof this[name] !== 'function') {
throw new Error(`plugin "${name}" does not exist`);
}
});
}
/*
* Store the internal state of scrubbing
*
* @private
* @return {Boolean} True if the user is scrubbing
*/
this.scrubbing_ = false;
this.el_ = this.createEl();
// Make this an evented object and use `el_` as its event bus.
evented(this, {
eventBusKey: 'el_'
});
// listen to document and player fullscreenchange handlers so we receive those events
// before a user can receive them so we can update isFullscreen appropriately.
// make sure that we listen to fullscreenchange events before everything else to make sure that
// our isFullscreen method is updated properly for internal components as well as external.
if (this.fsApi_.requestFullscreen) {
on(document$1, this.fsApi_.fullscreenchange, this.boundDocumentFullscreenChange_);
this.on(this.fsApi_.fullscreenchange, this.boundDocumentFullscreenChange_);
}
if (this.fluid_) {
this.on(['playerreset', 'resize'], this.boundUpdateStyleEl_);
}
// We also want to pass the original player options to each component and plugin
// as well so they don't need to reach back into the player for options later.
// We also need to do another copy of this.options_ so we don't end up with
// an infinite loop.
const playerOptionsCopy = merge$1(this.options_);
// Load plugins
if (options.plugins) {
Object.keys(options.plugins).forEach(name => {
this[name](options.plugins[name]);
});
}
// Enable debug mode to fire debugon event for all plugins.
if (options.debug) {
this.debug(true);
}
this.options_.playerOptions = playerOptionsCopy;
this.middleware_ = [];
this.playbackRates(options.playbackRates);
if (options.experimentalSvgIcons) {
// Add SVG Sprite to the DOM
const parser = new window$1.DOMParser();
const parsedSVG = parser.parseFromString(icons, 'image/svg+xml');
const errorNode = parsedSVG.querySelector('parsererror');
if (errorNode) {
log$1.warn('Failed to load SVG Icons. Falling back to Font Icons.');
this.options_.experimentalSvgIcons = null;
} else {
const sprite = parsedSVG.documentElement;
sprite.style.display = 'none';
this.el_.appendChild(sprite);
this.addClass('vjs-svg-icons-enabled');
}
}
this.initChildren();
// Set isAudio based on whether or not an audio tag was used
this.isAudio(tag.nodeName.toLowerCase() === 'audio');
// Update controls className. Can't do this when the controls are initially
// set because the element doesn't exist yet.
if (this.controls()) {
this.addClass('vjs-controls-enabled');
} else {
this.addClass('vjs-controls-disabled');
}
// Set ARIA label and region role depending on player type
this.el_.setAttribute('role', 'region');
if (this.isAudio()) {
this.el_.setAttribute('aria-label', this.localize('Audio Player'));
} else {
this.el_.setAttribute('aria-label', this.localize('Video Player'));
}
if (this.isAudio()) {
this.addClass('vjs-audio');
}
// Check if spatial navigation is enabled in the options.
// If enabled, instantiate the SpatialNavigation class.
if (options.spatialNavigation && options.spatialNavigation.enabled) {
this.spatialNavigation = new SpatialNavigation(this);
this.addClass('vjs-spatial-navigation-enabled');
}
// TODO: Make this smarter. Toggle user state between touching/mousing
// using events, since devices can have both touch and mouse events.
// TODO: Make this check be performed again when the window switches between monitors
// (See https://github.com/videojs/video.js/issues/5683)
if (TOUCH_ENABLED) {
this.addClass('vjs-touch-enabled');
}
// iOS Safari has broken hover handling
if (!IS_IOS) {
this.addClass('vjs-workinghover');
}
// Make player easily findable by ID
Player.players[this.id_] = this;
// Add a major version class to aid css in plugins
const majorVersion = version$6.split('.')[0];
this.addClass(`vjs-v${majorVersion}`);
// When the player is first initialized, trigger activity so components
// like the control bar show themselves if needed
this.userActive(true);
this.reportUserActivity();
this.one('play', e => this.listenForUserActivity_(e));
this.on('keydown', e => this.handleKeyDown(e));
this.on('languagechange', e => this.handleLanguagechange(e));
this.breakpoints(this.options_.breakpoints);
this.responsive(this.options_.responsive);
// Calling both the audio mode methods after the player is fully
// set up to be able to listen to the events triggered by them
this.on('ready', () => {
// Calling the audioPosterMode method first so that
// the audioOnlyMode can take precedence when both options are set to true
this.audioPosterMode(this.options_.audioPosterMode);
this.audioOnlyMode(this.options_.audioOnlyMode);
});
}
/**
* Destroys the video player and does any necessary cleanup.
*
* This is especially helpful if you are dynamically adding and removing videos
* to/from the DOM.
*
* @fires Player#dispose
*/
dispose() {
/**
* Called when the player is being disposed of.
*
* @event Player#dispose
* @type {Event}
*/
this.trigger('dispose');
// prevent dispose from being called twice
this.off('dispose');
// Make sure all player-specific document listeners are unbound.
off(document$1, this.fsApi_.fullscreenchange, this.boundDocumentFullscreenChange_);
off(document$1, 'keydown', this.boundFullWindowOnEscKey_);
if (this.styleEl_ && this.styleEl_.parentNode) {
this.styleEl_.parentNode.removeChild(this.styleEl_);
this.styleEl_ = null;
}
// Kill reference to this player
Player.players[this.id_] = null;
if (this.tag && this.tag.player) {
this.tag.player = null;
}
if (this.el_ && this.el_.player) {
this.el_.player = null;
}
if (this.tech_) {
this.tech_.dispose();
this.isPosterFromTech_ = false;
this.poster_ = '';
}
if (this.playerElIngest_) {
this.playerElIngest_ = null;
}
if (this.tag) {
this.tag = null;
}
clearCacheForPlayer(this);
// remove all event handlers for track lists
// all tracks and track listeners are removed on
// tech dispose
ALL.names.forEach(name => {
const props = ALL[name];
const list = this[props.getterName]();
// if it is not a native list
// we have to manually remove event listeners
if (list && list.off) {
list.off();
}
});
// the actual .el_ is removed here, or replaced if a restoreEl option was provided
super.dispose({
restoreEl: this.options_.restoreEl
});
}
/**
* Create the `Player`'s DOM element.
*
* @return {Element}
* The DOM element that gets created.
*/
createEl() {
let tag = this.tag;
let el;
let playerElIngest = this.playerElIngest_ = tag.parentNode && tag.parentNode.hasAttribute && tag.parentNode.hasAttribute('data-vjs-player');
const divEmbed = this.tag.tagName.toLowerCase() === 'video-js';
if (playerElIngest) {
el = this.el_ = tag.parentNode;
} else if (!divEmbed) {
el = this.el_ = super.createEl('div');
}
// Copy over all the attributes from the tag, including ID and class
// ID will now reference player box, not the video tag
const attrs = getAttributes(tag);
if (divEmbed) {
el = this.el_ = tag;
tag = this.tag = document$1.createElement('video');
while (el.children.length) {
tag.appendChild(el.firstChild);
}
if (!hasClass(el, 'video-js')) {
addClass(el, 'video-js');
}
el.appendChild(tag);
playerElIngest = this.playerElIngest_ = el;
// move properties over from our custom `video-js` element
// to our new `video` element. This will move things like
// `src` or `controls` that were set via js before the player
// was initialized.
Object.keys(el).forEach(k => {
try {
tag[k] = el[k];
} catch (e) {
// we got a property like outerHTML which we can't actually copy, ignore it
}
});
}
// set tabindex to -1 to remove the video element from the focus order
tag.setAttribute('tabindex', '-1');
attrs.tabindex = '-1';
// Workaround for #4583 on Chrome (on Windows) with JAWS.
// See https://github.com/FreedomScientific/VFO-standards-support/issues/78
// Note that we can't detect if JAWS is being used, but this ARIA attribute
// doesn't change behavior of Chrome if JAWS is not being used
if (IS_CHROME && IS_WINDOWS) {
tag.setAttribute('role', 'application');
attrs.role = 'application';
}
// Remove width/height attrs from tag so CSS can make it 100% width/height
tag.removeAttribute('width');
tag.removeAttribute('height');
if ('width' in attrs) {
delete attrs.width;
}
if ('height' in attrs) {
delete attrs.height;
}
Object.getOwnPropertyNames(attrs).forEach(function (attr) {
// don't copy over the class attribute to the player element when we're in a div embed
// the class is already set up properly in the divEmbed case
// and we want to make sure that the `video-js` class doesn't get lost
if (!(divEmbed && attr === 'class')) {
el.setAttribute(attr, attrs[attr]);
}
if (divEmbed) {
tag.setAttribute(attr, attrs[attr]);
}
});
// Update tag id/class for use as HTML5 playback tech
// Might think we should do this after embedding in container so .vjs-tech class
// doesn't flash 100% width/height, but class only applies with .video-js parent
tag.playerId = tag.id;
tag.id += '_html5_api';
tag.className = 'vjs-tech';
// Make player findable on elements
tag.player = el.player = this;
// Default state of video is paused
this.addClass('vjs-paused');
const deviceClassNames = ['IS_SMART_TV', 'IS_TIZEN', 'IS_WEBOS', 'IS_ANDROID', 'IS_IPAD', 'IS_IPHONE', 'IS_CHROMECAST_RECEIVER'].filter(key => browser[key]).map(key => {
return 'vjs-device-' + key.substring(3).toLowerCase().replace(/\_/g, '-');
});
this.addClass(...deviceClassNames);
// Add a style element in the player that we'll use to set the width/height
// of the player in a way that's still overridable by CSS, just like the
// video element
if (window$1.VIDEOJS_NO_DYNAMIC_STYLE !== true) {
this.styleEl_ = createStyleElement('vjs-styles-dimensions');
const defaultsStyleEl = $('.vjs-styles-defaults');
const head = $('head');
head.insertBefore(this.styleEl_, defaultsStyleEl ? defaultsStyleEl.nextSibling : head.firstChild);
}
this.fill_ = false;
this.fluid_ = false;
// Pass in the width/height/aspectRatio options which will update the style el
this.width(this.options_.width);
this.height(this.options_.height);
this.fill(this.options_.fill);
this.fluid(this.options_.fluid);
this.aspectRatio(this.options_.aspectRatio);
// support both crossOrigin and crossorigin to reduce confusion and issues around the name
this.crossOrigin(this.options_.crossOrigin || this.options_.crossorigin);
// Hide any links within the video/audio tag,
// because IE doesn't hide them completely from screen readers.
const links = tag.getElementsByTagName('a');
for (let i = 0; i < links.length; i++) {
const linkEl = links.item(i);
addClass(linkEl, 'vjs-hidden');
linkEl.setAttribute('hidden', 'hidden');
}
// insertElFirst seems to cause the networkState to flicker from 3 to 2, so
// keep track of the original for later so we can know if the source originally failed
tag.initNetworkState_ = tag.networkState;
// Wrap video tag in div (el/box) container
if (tag.parentNode && !playerElIngest) {
tag.parentNode.insertBefore(el, tag);
}
// insert the tag as the first child of the player element
// then manually add it to the children array so that this.addChild
// will work properly for other components
//
// Breaks iPhone, fixed in HTML5 setup.
prependTo(tag, el);
this.children_.unshift(tag);
// Set lang attr on player to ensure CSS :lang() is consistent with the player's language
// if it's been set to something different from the doc
this.el_.setAttribute('lang', this.language_);
this.el_.setAttribute('translate', 'no');
this.el_ = el;
return el;
}
/**
* Get or set the `Player`'s crossOrigin option. For the HTML5 player, this
* sets the `crossOrigin` property on the `<video>` tag to control the CORS
* behavior.
*
* @see [Video Element Attributes]{@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video#attr-crossorigin}
*
* @param {string|null} [value]
* The value to set the `Player`'s crossOrigin to. If an argument is
* given, it must be `null` or one of `'anonymous'` or `'use-credentials'`.
*
* @return {string|null|undefined}
* - The current crossOrigin value of the `Player` when getting.
* - undefined when setting
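*
* A minimal usage sketch (illustrative only; assumes a player created
* elsewhere with `videojs('some-player-id')`):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* // set the CORS mode on the underlying media element
* myPlayer.crossOrigin('anonymous');
* // read the current value back
* console.log(myPlayer.crossOrigin()); // 'anonymous' once set
* ```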
*/
crossOrigin(value) {
// `null` can be set to unset a value
if (typeof value === 'undefined') {
return this.techGet_('crossOrigin');
}
if (value !== null && value !== 'anonymous' && value !== 'use-credentials') {
log$1.warn(`crossOrigin must be null, "anonymous" or "use-credentials", given "${value}"`);
return;
}
this.techCall_('setCrossOrigin', value);
if (this.posterImage) {
this.posterImage.crossOrigin(value);
}
return;
}
/**
* A getter/setter for the `Player`'s width. Returns the player's configured value.
* To get the current width use `currentWidth()`.
*
* @param {number|string} [value]
* CSS value to set the `Player`'s width to.
*
* @return {number|undefined}
* - The current width of the `Player` when getting.
* - Nothing when setting
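*
* A short sketch of getting and setting the configured width (illustrative;
* assumes an existing player):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.width(640);           // set the configured width in pixels
* console.log(myPlayer.width()); // 640
* // for the rendered width, use currentWidth() instead
* ```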
*/
width(value) {
return this.dimension('width', value);
}
/**
* A getter/setter for the `Player`'s height. Returns the player's configured value.
* To get the current height use `currentHeight()`.
*
* @param {number|string} [value]
* CSS value to set the `Player`'s height to.
*
* @return {number|undefined}
* - The current height of the `Player` when getting.
* - Nothing when setting
*/
height(value) {
return this.dimension('height', value);
}
/**
* A getter/setter for the `Player`'s width & height.
*
* @param {string} dimension
* This string can be:
* - 'width'
* - 'height'
*
* @param {number|string} [value]
* Value for dimension specified in the first argument.
*
* @return {number}
* The dimension arguments value when getting (width/height).
*/
dimension(dimension, value) {
const privDimension = dimension + '_';
if (value === undefined) {
return this[privDimension] || 0;
}
if (value === '' || value === 'auto') {
// If an empty string is given, reset the dimension to be automatic
this[privDimension] = undefined;
this.updateStyleEl_();
return;
}
const parsedVal = parseFloat(value);
if (isNaN(parsedVal)) {
log$1.error(`Improper value "${value}" supplied for ${dimension}`);
return;
}
this[privDimension] = parsedVal;
this.updateStyleEl_();
}
/**
* A getter/setter/toggler for the vjs-fluid `className` on the `Player`.
*
* Turning this on will turn off fill mode.
*
* @param {boolean} [bool]
* - A value of true adds the class.
* - A value of false removes the class.
* - No value will be a getter.
*
* @return {boolean|undefined}
* - The value of fluid when getting.
* - `undefined` when setting.
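*
* A hedged sketch of toggling fluid mode (assumes an existing player):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.fluid(true);          // adds vjs-fluid and turns off fill mode
* console.log(myPlayer.fluid()); // true
* myPlayer.fluid(false);         // removes vjs-fluid
* ```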
*/
fluid(bool) {
if (bool === undefined) {
return !!this.fluid_;
}
this.fluid_ = !!bool;
if (isEvented(this)) {
this.off(['playerreset', 'resize'], this.boundUpdateStyleEl_);
}
if (bool) {
this.addClass('vjs-fluid');
this.fill(false);
addEventedCallback(this, () => {
this.on(['playerreset', 'resize'], this.boundUpdateStyleEl_);
});
} else {
this.removeClass('vjs-fluid');
}
this.updateStyleEl_();
}
/**
* A getter/setter/toggler for the vjs-fill `className` on the `Player`.
*
* Turning this on will turn off fluid mode.
*
* @param {boolean} [bool]
* - A value of true adds the class.
* - A value of false removes the class.
* - No value will be a getter.
*
* @return {boolean|undefined}
* - The value of fill when getting.
* - `undefined` when setting.
*/
fill(bool) {
if (bool === undefined) {
return !!this.fill_;
}
this.fill_ = !!bool;
if (bool) {
this.addClass('vjs-fill');
this.fluid(false);
} else {
this.removeClass('vjs-fill');
}
}
/**
* A getter/setter for the `Player`'s aspect ratio.
*
* @param {string} [ratio]
* The value to set the `Player`'s aspect ratio to.
*
* @return {string|undefined}
* - The current aspect ratio of the `Player` when getting.
* - undefined when setting
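*
* A minimal sketch (illustrative; assumes an existing player). Note that
* setting an aspect ratio also enables fluid mode:
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.aspectRatio('16:9');
* console.log(myPlayer.aspectRatio()); // '16:9'
* console.log(myPlayer.fluid());       // true
* ```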
*/
aspectRatio(ratio) {
if (ratio === undefined) {
return this.aspectRatio_;
}
// Check for width:height format
if (!/^\d+\:\d+$/.test(ratio)) {
throw new Error('Improper value supplied for aspect ratio. The format should be width:height, for example 16:9.');
}
this.aspectRatio_ = ratio;
// We're assuming if you set an aspect ratio you want fluid mode,
// because in fixed mode you could calculate width and height yourself.
this.fluid(true);
this.updateStyleEl_();
}
/**
* Update styles of the `Player` element (height, width and aspect ratio).
*
* @private
* @listens Tech#loadedmetadata
*/
updateStyleEl_() {
if (window$1.VIDEOJS_NO_DYNAMIC_STYLE === true) {
const width = typeof this.width_ === 'number' ? this.width_ : this.options_.width;
const height = typeof this.height_ === 'number' ? this.height_ : this.options_.height;
const techEl = this.tech_ && this.tech_.el();
if (techEl) {
if (width >= 0) {
techEl.width = width;
}
if (height >= 0) {
techEl.height = height;
}
}
return;
}
let width;
let height;
let aspectRatio;
let idClass;
// The aspect ratio is either used directly or to calculate width and height.
if (this.aspectRatio_ !== undefined && this.aspectRatio_ !== 'auto') {
// Use any aspectRatio that's been specifically set
aspectRatio = this.aspectRatio_;
} else if (this.videoWidth() > 0) {
// Otherwise try to get the aspect ratio from the video metadata
aspectRatio = this.videoWidth() + ':' + this.videoHeight();
} else {
// Or use a default. The video element's default is 2:1, but 16:9 is more common.
aspectRatio = '16:9';
}
// Get the ratio as a decimal we can use to calculate dimensions
const ratioParts = aspectRatio.split(':');
const ratioMultiplier = ratioParts[1] / ratioParts[0];
if (this.width_ !== undefined) {
// Use any width that's been specifically set
width = this.width_;
} else if (this.height_ !== undefined) {
// Or calculate the width from the aspect ratio if a height has been set
width = this.height_ / ratioMultiplier;
} else {
// Or use the video's metadata, or use the video el's default of 300
width = this.videoWidth() || 300;
}
if (this.height_ !== undefined) {
// Use any height that's been specifically set
height = this.height_;
} else {
// Otherwise calculate the height from the ratio and the width
height = width * ratioMultiplier;
}
// Ensure the CSS class is valid by starting with an alpha character
if (/^[^a-zA-Z]/.test(this.id())) {
idClass = 'dimensions-' + this.id();
} else {
idClass = this.id() + '-dimensions';
}
// Ensure the right class is still on the player for the style element
this.addClass(idClass);
setTextContent(this.styleEl_, `
.${idClass} {
width: ${width}px;
height: ${height}px;
}
.${idClass}.vjs-fluid:not(.vjs-audio-only-mode) {
padding-top: ${ratioMultiplier * 100}%;
}
`);
}
/**
* Load/Create an instance of playback {@link Tech} including element
* and API methods. Then append the `Tech` element in `Player` as a child.
*
* @param {string} techName
* name of the playback technology
*
* @param {string} source
* video source
*
* @private
*/
loadTech_(techName, source) {
// Pause and remove current playback technology
if (this.tech_) {
this.unloadTech_();
}
const titleTechName = toTitleCase$1(techName);
const camelTechName = techName.charAt(0).toLowerCase() + techName.slice(1);
// get rid of the HTML5 video tag as soon as we are using another tech
if (titleTechName !== 'Html5' && this.tag) {
Tech.getTech('Html5').disposeMediaElement(this.tag);
this.tag.player = null;
this.tag = null;
}
this.techName_ = titleTechName;
// Turn off API access because we're loading a new tech that might load asynchronously
this.isReady_ = false;
let autoplay = this.autoplay();
// if autoplay is a string (or `true` with normalizeAutoplay: true) we pass false to the tech
// because the player is going to handle autoplay on `loadstart`
if (typeof this.autoplay() === 'string' || this.autoplay() === true && this.options_.normalizeAutoplay) {
autoplay = false;
}
// Grab tech-specific options from player options and add source and parent element to use.
const techOptions = {
source,
autoplay,
'nativeControlsForTouch': this.options_.nativeControlsForTouch,
'playerId': this.id(),
'techId': `${this.id()}_${camelTechName}_api`,
'playsinline': this.options_.playsinline,
'preload': this.options_.preload,
'loop': this.options_.loop,
'disablePictureInPicture': this.options_.disablePictureInPicture,
'muted': this.options_.muted,
'poster': this.poster(),
'language': this.language(),
'playerElIngest': this.playerElIngest_ || false,
'vtt.js': this.options_['vtt.js'],
'canOverridePoster': !!this.options_.techCanOverridePoster,
'enableSourceset': this.options_.enableSourceset
};
ALL.names.forEach(name => {
const props = ALL[name];
techOptions[props.getterName] = this[props.privateName];
});
Object.assign(techOptions, this.options_[titleTechName]);
Object.assign(techOptions, this.options_[camelTechName]);
Object.assign(techOptions, this.options_[techName.toLowerCase()]);
if (this.tag) {
techOptions.tag = this.tag;
}
if (source && source.src === this.cache_.src && this.cache_.currentTime > 0) {
techOptions.startTime = this.cache_.currentTime;
}
// Initialize tech instance
const TechClass = Tech.getTech(techName);
if (!TechClass) {
throw new Error(`No Tech named '${titleTechName}' exists! '${titleTechName}' should be registered using videojs.registerTech()`);
}
this.tech_ = new TechClass(techOptions);
// player.triggerReady is always async, so don't need this to be async
this.tech_.ready(bind_(this, this.handleTechReady_), true);
textTrackConverter.jsonToTextTracks(this.textTracksJson_ || [], this.tech_);
// Listen to all HTML5-defined events and trigger them on the player
TECH_EVENTS_RETRIGGER.forEach(event => {
this.on(this.tech_, event, e => this[`handleTech${toTitleCase$1(event)}_`](e));
});
Object.keys(TECH_EVENTS_QUEUE).forEach(event => {
this.on(this.tech_, event, eventObj => {
if (this.tech_.playbackRate() === 0 && this.tech_.seeking()) {
this.queuedCallbacks_.push({
callback: this[`handleTech${TECH_EVENTS_QUEUE[event]}_`].bind(this),
event: eventObj
});
return;
}
this[`handleTech${TECH_EVENTS_QUEUE[event]}_`](eventObj);
});
});
this.on(this.tech_, 'loadstart', e => this.handleTechLoadStart_(e));
this.on(this.tech_, 'sourceset', e => this.handleTechSourceset_(e));
this.on(this.tech_, 'waiting', e => this.handleTechWaiting_(e));
this.on(this.tech_, 'ended', e => this.handleTechEnded_(e));
this.on(this.tech_, 'seeking', e => this.handleTechSeeking_(e));
this.on(this.tech_, 'play', e => this.handleTechPlay_(e));
this.on(this.tech_, 'pause', e => this.handleTechPause_(e));
this.on(this.tech_, 'durationchange', e => this.handleTechDurationChange_(e));
this.on(this.tech_, 'fullscreenchange', (e, data) => this.handleTechFullscreenChange_(e, data));
this.on(this.tech_, 'fullscreenerror', (e, err) => this.handleTechFullscreenError_(e, err));
this.on(this.tech_, 'enterpictureinpicture', e => this.handleTechEnterPictureInPicture_(e));
this.on(this.tech_, 'leavepictureinpicture', e => this.handleTechLeavePictureInPicture_(e));
this.on(this.tech_, 'error', e => this.handleTechError_(e));
this.on(this.tech_, 'posterchange', e => this.handleTechPosterChange_(e));
this.on(this.tech_, 'textdata', e => this.handleTechTextData_(e));
this.on(this.tech_, 'ratechange', e => this.handleTechRateChange_(e));
this.on(this.tech_, 'loadedmetadata', this.boundUpdateStyleEl_);
this.usingNativeControls(this.techGet_('controls'));
if (this.controls() && !this.usingNativeControls()) {
this.addTechControlsListeners_();
}
// Add the tech element in the DOM if it was not already there
// Make sure to not insert the original video element if using Html5
if (this.tech_.el().parentNode !== this.el() && (titleTechName !== 'Html5' || !this.tag)) {
prependTo(this.tech_.el(), this.el());
}
// Get rid of the original video tag reference after the first tech is loaded
if (this.tag) {
this.tag.player = null;
this.tag = null;
}
}
/**
* Unload and dispose of the current playback {@link Tech}.
*
* @private
*/
unloadTech_() {
// Save the current text tracks so that we can reuse the same text tracks with the next tech
ALL.names.forEach(name => {
const props = ALL[name];
this[props.privateName] = this[props.getterName]();
});
this.textTracksJson_ = textTrackConverter.textTracksToJson(this.tech_);
this.isReady_ = false;
this.tech_.dispose();
this.tech_ = false;
if (this.isPosterFromTech_) {
this.poster_ = '';
this.trigger('posterchange');
}
this.isPosterFromTech_ = false;
}
/**
* Return a reference to the current {@link Tech}.
* It will print a warning by default about the danger of using the tech directly
* but any argument that is passed in will silence the warning.
*
* @param {*} [safety]
* Anything passed in to silence the warning
*
* @return {Tech}
* The Tech
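*
* A hedged sketch (assumes an existing player); passing any argument
* silences the warning:
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* var tech = myPlayer.tech(true); // no warning is logged
* ```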
*/
tech(safety) {
if (safety === undefined) {
log$1.warn('Using the tech directly can be dangerous. I hope you know what you\'re doing.\n' + 'See https://github.com/videojs/video.js/issues/2617 for more info.\n');
}
return this.tech_;
}
/**
* An object that contains Video.js version.
*
* @typedef {Object} PlayerVersion
*
* @property {string} 'video.js' - Video.js version
*/
/**
* Returns an object with Video.js version.
*
* @return {PlayerVersion}
* An object with Video.js version.
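*
* A minimal sketch (assumes an existing player):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* console.log(myPlayer.version()['video.js']); // e.g. '8.17.4'
* ```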
*/
version() {
return {
'video.js': version$6
};
}
/**
* Set up click and touch listeners for the playback element
*
* - On desktops: a click on the video itself will toggle playback
* - On mobile devices: a click on the video toggles controls
* which is done by toggling the user state between active and
* inactive
* - A tap can signal that a user has become active or has become inactive
* e.g. a quick tap on an iPhone movie should reveal the controls. Another
* quick tap should hide them again (signaling the user is in an inactive
* viewing state)
* - In addition to this, we still want the user to be considered inactive after
* a few seconds of inactivity.
*
* > Note: the only part of iOS interaction we can't mimic with this setup
* is a touch and hold on the video element counting as activity in order to
* keep the controls showing, but that shouldn't be an issue. A touch and hold
* on any controls will still keep the user active
*
* @private
*/
addTechControlsListeners_() {
// Make sure to remove all the previous listeners in case we are called multiple times.
this.removeTechControlsListeners_();
this.on(this.tech_, 'click', this.boundHandleTechClick_);
this.on(this.tech_, 'dblclick', this.boundHandleTechDoubleClick_);
// If the controls were hidden we don't want that to change without a tap event
// so we'll check if the controls were already showing before reporting user
// activity
this.on(this.tech_, 'touchstart', this.boundHandleTechTouchStart_);
this.on(this.tech_, 'touchmove', this.boundHandleTechTouchMove_);
this.on(this.tech_, 'touchend', this.boundHandleTechTouchEnd_);
// The tap listener needs to come after the touchend listener because the tap
// listener cancels out any reportedUserActivity when setting userActive(false)
this.on(this.tech_, 'tap', this.boundHandleTechTap_);
}
/**
* Remove the listeners used for click and tap controls. This is needed for
* toggling to controls disabled, where a tap/touch should do nothing.
*
* @private
*/
removeTechControlsListeners_() {
// We don't want to just use `this.off()` because there might be other needed
// listeners added by techs that extend this.
this.off(this.tech_, 'tap', this.boundHandleTechTap_);
this.off(this.tech_, 'touchstart', this.boundHandleTechTouchStart_);
this.off(this.tech_, 'touchmove', this.boundHandleTechTouchMove_);
this.off(this.tech_, 'touchend', this.boundHandleTechTouchEnd_);
this.off(this.tech_, 'click', this.boundHandleTechClick_);
this.off(this.tech_, 'dblclick', this.boundHandleTechDoubleClick_);
}
/**
* Player waits for the tech to be ready
*
* @private
*/
handleTechReady_() {
this.triggerReady();
// Keep the same volume as before
if (this.cache_.volume) {
this.techCall_('setVolume', this.cache_.volume);
}
// Look if the tech found a higher resolution poster while loading
this.handleTechPosterChange_();
// Update the duration if available
this.handleTechDurationChange_();
}
/**
* Retrigger the `loadstart` event that was triggered by the {@link Tech}.
*
* @fires Player#loadstart
* @listens Tech#loadstart
* @private
*/
handleTechLoadStart_() {
// TODO: Update to use `emptied` event instead. See #1277.
this.removeClass('vjs-ended', 'vjs-seeking');
// reset the error state
this.error(null);
// Update the duration
this.handleTechDurationChange_();
if (!this.paused()) {
/**
* Fired when the user agent begins looking for media data
*
* @event Player#loadstart
* @type {Event}
*/
this.trigger('loadstart');
} else {
// reset the hasStarted state
this.hasStarted(false);
this.trigger('loadstart');
}
// autoplay happens after loadstart for the browser,
// so we mimic that behavior
this.manualAutoplay_(this.autoplay() === true && this.options_.normalizeAutoplay ? 'play' : this.autoplay());
}
/**
* Handle autoplay string values, rather than the typical boolean
* values that should be handled by the tech. Note that this is not
* part of any specification. Valid values and what they do can be
* found on the autoplay getter at Player#autoplay()
*/
manualAutoplay_(type) {
if (!this.tech_ || typeof type !== 'string') {
return;
}
// Save original muted() value, set muted to true, and attempt to play().
// On promise rejection, restore muted from saved value
const resolveMuted = () => {
const previouslyMuted = this.muted();
this.muted(true);
const restoreMuted = () => {
this.muted(previouslyMuted);
};
// restore muted on play termination
this.playTerminatedQueue_.push(restoreMuted);
const mutedPromise = this.play();
if (!isPromise(mutedPromise)) {
return;
}
return mutedPromise.catch(err => {
restoreMuted();
throw new Error(`Rejection at manualAutoplay. Restoring muted value. ${err ? err : ''}`);
});
};
let promise;
// if muted defaults to true
// the only thing we can do is call play
if (type === 'any' && !this.muted()) {
promise = this.play();
if (isPromise(promise)) {
promise = promise.catch(resolveMuted);
}
} else if (type === 'muted' && !this.muted()) {
promise = resolveMuted();
} else {
promise = this.play();
}
if (!isPromise(promise)) {
return;
}
return promise.then(() => {
this.trigger({
type: 'autoplay-success',
autoplay: type
});
}).catch(() => {
this.trigger({
type: 'autoplay-failure',
autoplay: type
});
});
}
/**
* Update the internal source caches so that we return the correct source from
* `src()`, `currentSource()`, and `currentSources()`.
*
* > Note: `currentSources` will not be updated if the source that is passed in exists
* in the current `currentSources` cache.
*
*
* @param {Tech~SourceObject} srcObj
* A string or object source to update our caches to.
*/
updateSourceCaches_(srcObj = '') {
let src = srcObj;
let type = '';
if (typeof src !== 'string') {
src = srcObj.src;
type = srcObj.type;
}
// make sure all the caches are set to default values
// to prevent null checking
this.cache_.source = this.cache_.source || {};
this.cache_.sources = this.cache_.sources || [];
// try to get the type of the src that was passed in
if (src && !type) {
type = findMimetype(this, src);
}
// update `currentSource` cache always
this.cache_.source = merge$1({}, srcObj, {
src,
type
});
const matchingSources = this.cache_.sources.filter(s => s.src && s.src === src);
const sourceElSources = [];
const sourceEls = this.$$('source');
const matchingSourceEls = [];
for (let i = 0; i < sourceEls.length; i++) {
const sourceObj = getAttributes(sourceEls[i]);
sourceElSources.push(sourceObj);
if (sourceObj.src && sourceObj.src === src) {
matchingSourceEls.push(sourceObj.src);
}
}
// if we have matching source els but not matching sources
// the current source cache is not up to date
if (matchingSourceEls.length && !matchingSources.length) {
this.cache_.sources = sourceElSources;
// if we don't have matching source or source els set the
// sources cache to the `currentSource` cache
} else if (!matchingSources.length) {
this.cache_.sources = [this.cache_.source];
}
// update the tech `src` cache
this.cache_.src = src;
}
/**
* *EXPERIMENTAL* Fired when the source is set or changed on the {@link Tech}
* causing the media element to reload.
*
* It will fire for the initial source and each subsequent source.
* This event is a custom event from Video.js and is triggered by the {@link Tech}.
*
* The event object for this event contains a `src` property that will contain the source
* that was available when the event was triggered. This is generally only necessary if Video.js
* is switching techs while the source is being changed.
*
* It is also fired when `load` is called on the player (or media element)
* because the {@link https://html.spec.whatwg.org/multipage/media.html#dom-media-load|specification for `load`}
* says that the resource selection algorithm needs to be aborted and restarted.
* In this case, it is very likely that the `src` property will be set to the
* empty string `""` to indicate we do not know what the source will be but
* that it is changing.
*
* *This event is currently still experimental and may change in minor releases.*
* __To use this, pass `enableSourceset` option to the player.__
*
* @event Player#sourceset
* @type {Event}
* @prop {string} src
* The source url available when the `sourceset` was triggered.
* It will be an empty string if we cannot know what the source is
* but know that the source will change.
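*
* A hedged sketch of listening for this experimental event (assumes the
* player was created with the `enableSourceset` option):
*
* ```js
* var myPlayer = videojs('some-player-id', {enableSourceset: true});
*
* myPlayer.on('sourceset', function(e) {
*   // e.src may be an empty string if the new source is not yet known
*   console.log('source set to:', e.src);
* });
* ```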
*/
/**
* Retrigger the `sourceset` event that was triggered by the {@link Tech}.
*
* @fires Player#sourceset
* @listens Tech#sourceset
* @private
*/
handleTechSourceset_(event) {
// only update the source cache when the source
// was not updated using the player api
if (!this.changingSrc_) {
let updateSourceCaches = src => this.updateSourceCaches_(src);
const playerSrc = this.currentSource().src;
const eventSrc = event.src;
// if we have a playerSrc that is not a blob, and a tech src that is a blob
if (playerSrc && !/^blob:/.test(playerSrc) && /^blob:/.test(eventSrc)) {
// if both the tech source and the player source were updated we assume
// something like @videojs/http-streaming did the sourceset and skip updating the source cache.
if (!this.lastSource_ || this.lastSource_.tech !== eventSrc && this.lastSource_.player !== playerSrc) {
updateSourceCaches = () => {};
}
}
// update the source to the initial source right away
// in some cases this will be empty string
updateSourceCaches(eventSrc);
// if the `sourceset` `src` was an empty string
// wait for a `loadstart` to update the cache to `currentSrc`.
// If a sourceset happens before a `loadstart`, we reset the state
if (!event.src) {
this.tech_.any(['sourceset', 'loadstart'], e => {
// if a sourceset happens before a `loadstart` there
// is nothing to do as this `handleTechSourceset_`
// will be called again and this will be handled there.
if (e.type === 'sourceset') {
return;
}
const techSrc = this.techGet_('currentSrc');
this.lastSource_.tech = techSrc;
this.updateSourceCaches_(techSrc);
});
}
}
this.lastSource_ = {
player: this.currentSource().src,
tech: event.src
};
this.trigger({
src: event.src,
type: 'sourceset'
});
}
/**
* Add/remove the vjs-has-started class
*
*
* @param {boolean} request
* - true: adds the class
* - false: remove the class
*
* @return {boolean}
* the boolean value of hasStarted_
*/
hasStarted(request) {
if (request === undefined) {
// act as getter, if we have no request to change
return this.hasStarted_;
}
if (request === this.hasStarted_) {
return;
}
this.hasStarted_ = request;
if (this.hasStarted_) {
this.addClass('vjs-has-started');
} else {
this.removeClass('vjs-has-started');
}
}
/**
* Fired whenever the media begins or resumes playback
*
* @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-play}
* @fires Player#play
* @listens Tech#play
* @private
*/
handleTechPlay_() {
this.removeClass('vjs-ended', 'vjs-paused');
this.addClass('vjs-playing');
// hide the poster when the user hits play
this.hasStarted(true);
/**
* Triggered whenever an {@link Tech#play} event happens. Indicates that
* playback has started or resumed.
*
* @event Player#play
* @type {Event}
*/
this.trigger('play');
}
/**
* Retrigger the `ratechange` event that was triggered by the {@link Tech}.
*
* If there were any events queued while the playback rate was zero, fire
* those events now.
*
* @private
* @method Player#handleTechRateChange_
* @fires Player#ratechange
* @listens Tech#ratechange
*/
handleTechRateChange_() {
if (this.tech_.playbackRate() > 0 && this.cache_.lastPlaybackRate === 0) {
this.queuedCallbacks_.forEach(queued => queued.callback(queued.event));
this.queuedCallbacks_ = [];
}
this.cache_.lastPlaybackRate = this.tech_.playbackRate();
/**
* Fires when the playing speed of the audio/video is changed
*
* @event Player#ratechange
* @type {Event}
*/
this.trigger('ratechange');
}
/**
* Retrigger the `waiting` event that was triggered by the {@link Tech}.
*
* @fires Player#waiting
* @listens Tech#waiting
* @private
*/
handleTechWaiting_() {
this.addClass('vjs-waiting');
/**
* A readyState change on the DOM element has caused playback to stop.
*
* @event Player#waiting
* @type {Event}
*/
this.trigger('waiting');
// Browsers may emit a timeupdate event after a waiting event. In order to prevent
// premature removal of the waiting class, wait for the time to change.
const timeWhenWaiting = this.currentTime();
const timeUpdateListener = () => {
if (timeWhenWaiting !== this.currentTime()) {
this.removeClass('vjs-waiting');
this.off('timeupdate', timeUpdateListener);
}
};
this.on('timeupdate', timeUpdateListener);
}
/**
* Retrigger the `canplay` event that was triggered by the {@link Tech}.
* > Note: This is not consistent between browsers. See #1351
*
* @fires Player#canplay
* @listens Tech#canplay
* @private
*/
handleTechCanPlay_() {
this.removeClass('vjs-waiting');
/**
* The media has a readyState of HAVE_FUTURE_DATA or greater.
*
* @event Player#canplay
* @type {Event}
*/
this.trigger('canplay');
}
/**
* Retrigger the `canplaythrough` event that was triggered by the {@link Tech}.
*
* @fires Player#canplaythrough
* @listens Tech#canplaythrough
* @private
*/
handleTechCanPlayThrough_() {
this.removeClass('vjs-waiting');
/**
* The media has a readyState of HAVE_ENOUGH_DATA or greater. This means that the
* entire media file can be played without buffering.
*
* @event Player#canplaythrough
* @type {Event}
*/
this.trigger('canplaythrough');
}
/**
* Retrigger the `playing` event that was triggered by the {@link Tech}.
*
* @fires Player#playing
* @listens Tech#playing
* @private
*/
handleTechPlaying_() {
this.removeClass('vjs-waiting');
/**
* The media is no longer blocked from playback, and has started playing.
*
* @event Player#playing
* @type {Event}
*/
this.trigger('playing');
}
/**
* Retrigger the `seeking` event that was triggered by the {@link Tech}.
*
* @fires Player#seeking
* @listens Tech#seeking
* @private
*/
handleTechSeeking_() {
this.addClass('vjs-seeking');
/**
* Fired whenever the player is jumping to a new time
*
* @event Player#seeking
* @type {Event}
*/
this.trigger('seeking');
}
/**
* Retrigger the `seeked` event that was triggered by the {@link Tech}.
*
* @fires Player#seeked
* @listens Tech#seeked
* @private
*/
handleTechSeeked_() {
this.removeClass('vjs-seeking', 'vjs-ended');
/**
* Fired when the player has finished jumping to a new time
*
* @event Player#seeked
* @type {Event}
*/
this.trigger('seeked');
}
/**
* Retrigger the `pause` event that was triggered by the {@link Tech}.
*
* @fires Player#pause
* @listens Tech#pause
* @private
*/
handleTechPause_() {
this.removeClass('vjs-playing');
this.addClass('vjs-paused');
/**
* Fired whenever the media has been paused
*
* @event Player#pause
* @type {Event}
*/
this.trigger('pause');
}
/**
* Retrigger the `ended` event that was triggered by the {@link Tech}.
*
* @fires Player#ended
* @listens Tech#ended
* @private
*/
handleTechEnded_() {
this.addClass('vjs-ended');
this.removeClass('vjs-waiting');
if (this.options_.loop) {
this.currentTime(0);
this.play();
} else if (!this.paused()) {
this.pause();
}
/**
* Fired when the end of the media resource is reached (currentTime == duration)
*
* @event Player#ended
* @type {Event}
*/
this.trigger('ended');
}
/**
* Fired when the duration of the media resource is first known or changed
*
* @listens Tech#durationchange
* @private
*/
handleTechDurationChange_() {
this.duration(this.techGet_('duration'));
}
/**
* Handle a click on the media element to play/pause
*
* @param {Event} event
* the event that caused this function to trigger
*
* @listens Tech#click
* @private
*/
handleTechClick_(event) {
// When controls are disabled a click should not toggle playback because
// the click is considered a control
if (!this.controls_) {
return;
}
if (this.options_ === undefined || this.options_.userActions === undefined || this.options_.userActions.click === undefined || this.options_.userActions.click !== false) {
if (this.options_ !== undefined && this.options_.userActions !== undefined && typeof this.options_.userActions.click === 'function') {
this.options_.userActions.click.call(this, event);
} else if (this.paused()) {
silencePromise(this.play());
} else {
this.pause();
}
}
}
/**
* Handle a double-click on the media element to enter/exit fullscreen
*
* @param {Event} event
* the event that caused this function to trigger
*
* @listens Tech#dblclick
* @private
*/
handleTechDoubleClick_(event) {
if (!this.controls_) {
return;
}
// we do not want to toggle fullscreen state
// when double-clicking inside a control bar or a modal
const inAllowedEls = Array.prototype.some.call(this.$$('.vjs-control-bar, .vjs-modal-dialog'), el => el.contains(event.target));
if (!inAllowedEls) {
/*
* options.userActions.doubleClick
*
* If `undefined` or `true`, double-click toggles fullscreen if controls are present
* Set to `false` to disable double-click handling
* Set to a function to substitute an external double-click handler
*/
if (this.options_ === undefined || this.options_.userActions === undefined || this.options_.userActions.doubleClick === undefined || this.options_.userActions.doubleClick !== false) {
if (this.options_ !== undefined && this.options_.userActions !== undefined && typeof this.options_.userActions.doubleClick === 'function') {
this.options_.userActions.doubleClick.call(this, event);
} else if (this.isFullscreen()) {
this.exitFullscreen();
} else {
this.requestFullscreen();
}
}
}
}
/**
* Handle a tap on the media element. It will toggle the user
* activity state, which hides and shows the controls.
*
* @listens Tech#tap
* @private
*/
handleTechTap_() {
this.userActive(!this.userActive());
}
/**
* Handle touch to start
*
* @listens Tech#touchstart
* @private
*/
handleTechTouchStart_() {
this.userWasActive = this.userActive();
}
/**
* Handle touch to move
*
* @listens Tech#touchmove
* @private
*/
handleTechTouchMove_() {
if (this.userWasActive) {
this.reportUserActivity();
}
}
/**
* Handle touch to end
*
* @param {Event} event
* the touchend event that triggered
* this function
*
* @listens Tech#touchend
* @private
*/
handleTechTouchEnd_(event) {
// Stop the mouse events from also happening
if (event.cancelable) {
event.preventDefault();
}
}
/**
* @private
*/
toggleFullscreenClass_() {
if (this.isFullscreen()) {
this.addClass('vjs-fullscreen');
} else {
this.removeClass('vjs-fullscreen');
}
}
/**
* when the document fschange event triggers it calls this
*/
documentFullscreenChange_(e) {
const targetPlayer = e.target.player;
// if another player was fullscreen
// do a null check for targetPlayer because older firefox's would put document as e.target
if (targetPlayer && targetPlayer !== this) {
return;
}
const el = this.el();
let isFs = document$1[this.fsApi_.fullscreenElement] === el;
if (!isFs && el.matches) {
isFs = el.matches(':' + this.fsApi_.fullscreen);
}
this.isFullscreen(isFs);
}
/**
* Handle Tech Fullscreen Change
*
* @param {Event} event
* the fullscreenchange event that triggered this function
*
* @param {Object} data
* the data that was sent with the event
*
* @private
* @listens Tech#fullscreenchange
* @fires Player#fullscreenchange
*/
handleTechFullscreenChange_(event, data) {
if (data) {
if (data.nativeIOSFullscreen) {
this.addClass('vjs-ios-native-fs');
this.tech_.one('webkitendfullscreen', () => {
this.removeClass('vjs-ios-native-fs');
});
}
this.isFullscreen(data.isFullscreen);
}
}
handleTechFullscreenError_(event, err) {
this.trigger('fullscreenerror', err);
}
/**
* @private
*/
togglePictureInPictureClass_() {
if (this.isInPictureInPicture()) {
this.addClass('vjs-picture-in-picture');
} else {
this.removeClass('vjs-picture-in-picture');
}
}
/**
* Handle Tech Enter Picture-in-Picture.
*
* @param {Event} event
* the enterpictureinpicture event that triggered this function
*
* @private
* @listens Tech#enterpictureinpicture
*/
handleTechEnterPictureInPicture_(event) {
this.isInPictureInPicture(true);
}
/**
* Handle Tech Leave Picture-in-Picture.
*
* @param {Event} event
* the leavepictureinpicture event that triggered this function
*
* @private
* @listens Tech#leavepictureinpicture
*/
handleTechLeavePictureInPicture_(event) {
this.isInPictureInPicture(false);
}
/**
* Fires when an error occurred during the loading of an audio/video.
*
* @private
* @listens Tech#error
*/
handleTechError_() {
const error = this.tech_.error();
if (error) {
this.error(error);
}
}
/**
* Retrigger the `textdata` event that was triggered by the {@link Tech}.
*
* @fires Player#textdata
* @listens Tech#textdata
* @private
*/
handleTechTextData_() {
let data = null;
if (arguments.length > 1) {
data = arguments[1];
}
/**
* Fires when we get a textdata event from tech
*
* @event Player#textdata
* @type {Event}
*/
this.trigger('textdata', data);
}
/**
* Get object for cached values.
*
* @return {Object}
* get the current object cache
*/
getCache() {
return this.cache_;
}
/**
* Resets the internal cache object.
*
* Using this function outside the player constructor or reset method may
* have unintended side-effects.
*
* @private
*/
resetCache_() {
this.cache_ = {
// Right now, the currentTime is not _really_ cached because it is always
// retrieved from the tech (see: currentTime). However, for completeness,
// we set it to zero here to ensure that if we do start actually caching
// it, we reset it along with everything else.
currentTime: 0,
initTime: 0,
inactivityTimeout: this.options_.inactivityTimeout,
duration: NaN,
lastVolume: 1,
lastPlaybackRate: this.defaultPlaybackRate(),
media: null,
src: '',
source: {},
sources: [],
playbackRates: [],
volume: 1
};
}
/**
* Pass values to the playback tech
*
* @param {string} [method]
* the method to call
*
* @param {Object} [arg]
* the argument to pass
*
* @private
*/
techCall_(method, arg) {
// If it's not ready yet, call method when it is
this.ready(function () {
if (method in allowedSetters) {
return set(this.middleware_, this.tech_, method, arg);
} else if (method in allowedMediators) {
return mediate(this.middleware_, this.tech_, method, arg);
}
try {
if (this.tech_) {
this.tech_[method](arg);
}
} catch (e) {
log$1(e);
throw e;
}
}, true);
}
/**
* Mediate attempt to call playback tech method
* and return the value of the method called.
*
* @param {string} method
* Tech method
*
* @return {*}
* Value returned by the tech method called, undefined if tech
* is not ready or tech method is not present
*
* @private
*/
techGet_(method) {
if (!this.tech_ || !this.tech_.isReady_) {
return;
}
if (method in allowedGetters) {
return get(this.middleware_, this.tech_, method);
} else if (method in allowedMediators) {
return mediate(this.middleware_, this.tech_, method);
}
// Log error when playback tech object is present but method
// is undefined or unavailable
try {
return this.tech_[method]();
} catch (e) {
// When building additional tech libs, an expected method may not be defined yet
if (this.tech_[method] === undefined) {
log$1(`Video.js: ${method} method not defined for ${this.techName_} playback technology.`, e);
throw e;
}
// When a method isn't available on the object it throws a TypeError
if (e.name === 'TypeError') {
log$1(`Video.js: ${method} unavailable on ${this.techName_} playback technology element.`, e);
this.tech_.isReady_ = false;
throw e;
}
// If error unknown, just log and throw
log$1(e);
throw e;
}
}
/**
* Attempt to begin playback at the first opportunity.
*
* @return {Promise|undefined}
* Returns a promise if the browser supports Promises (or one
* was passed in as an option). This promise will be resolved with the
* return value of play. If that value is undefined, it fulfills the
* promise chain; otherwise the promise chain will be fulfilled when
* the promise from play is fulfilled.
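*
* A hedged sketch of handling the returned promise (assumes an existing
* player and a browser that returns a play promise):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.play().then(function() {
*   // playback started
* }).catch(function(err) {
*   // autoplay was likely prevented; err is browser dependent
* });
* ```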
*/
play() {
return new Promise(resolve => {
this.play_(resolve);
});
}
/**
* The actual logic for play. Takes a callback that will be called with the
* return value of play. This allows us to resolve to the play promise if there
* is one on modern browsers.
*
* @private
* @param {Function} [callback]
* The callback that should be called when the tech's play is actually called
*/
play_(callback = silencePromise) {
this.playCallbacks_.push(callback);
const isSrcReady = Boolean(!this.changingSrc_ && (this.src() || this.currentSrc()));
const isSafariOrIOS = Boolean(IS_ANY_SAFARI || IS_IOS);
// treat calls to play_ somewhat like the `one` event function
if (this.waitToPlay_) {
this.off(['ready', 'loadstart'], this.waitToPlay_);
this.waitToPlay_ = null;
}
// if the player/tech is not ready or the src itself is not ready
// queue up a call to play on `ready` or `loadstart`
if (!this.isReady_ || !isSrcReady) {
this.waitToPlay_ = e => {
this.play_();
};
this.one(['ready', 'loadstart'], this.waitToPlay_);
// if we are in Safari, there is a high chance that loadstart will trigger after the gesture time period
// in that case, we need to prime the video element by calling load so it'll be ready in time
if (!isSrcReady && isSafariOrIOS) {
this.load();
}
return;
}
// If the player/tech is ready and we have a source, we can attempt playback.
const val = this.techGet_('play');
// For native playback, reset the progress bar if we get a play call from a replay.
const isNativeReplay = isSafariOrIOS && this.hasClass('vjs-ended');
if (isNativeReplay) {
this.resetProgressBar_();
}
// play was terminated if the returned value is null
if (val === null) {
this.runPlayTerminatedQueue_();
} else {
this.runPlayCallbacks_(val);
}
}
/**
* These functions will be run if play is terminated. If
* runPlayCallbacks_ is run instead, these functions will not be run. This allows us
* to differentiate between a terminated play and an actual call to play.
*/
runPlayTerminatedQueue_() {
const queue = this.playTerminatedQueue_.slice(0);
this.playTerminatedQueue_ = [];
queue.forEach(function (q) {
q();
});
}
/**
* When a callback to play is delayed we have to run these
* callbacks when play is actually called on the tech. This function
* runs the callbacks that were delayed and accepts the return value
* from the tech.
*
* @param {undefined|Promise} val
* The return value from the tech.
*/
runPlayCallbacks_(val) {
const callbacks = this.playCallbacks_.slice(0);
this.playCallbacks_ = [];
// clear play terminatedQueue since we finished a real play
this.playTerminatedQueue_ = [];
callbacks.forEach(function (cb) {
cb(val);
});
}
/**
* Pause the video playback
*/
pause() {
this.techCall_('pause');
}
/**
* Check if the player is paused or has yet to play
*
* @return {boolean}
* - false: if the media is currently playing
* - true: if media is not currently playing
*/
paused() {
// The initial state of paused should be true (in Safari it's actually false)
return this.techGet_('paused') === false ? false : true;
}
/**
* Get a TimeRange object representing the current ranges of time that the user
* has played.
*
* @return {TimeRange}
* A time range object that represents all the increments of time that have
* been played.
*/
played() {
return this.techGet_('played') || createTimeRanges$1(0, 0);
}
/**
* Sets or returns whether or not the user is "scrubbing". Scrubbing is
* when the user has clicked the progress bar handle and is
* dragging it along the progress bar.
*
* @param {boolean} [isScrubbing]
* whether the user is or is not scrubbing
*
* @return {boolean|undefined}
* - The value of scrubbing when getting
* - Nothing when setting
*/
scrubbing(isScrubbing) {
if (typeof isScrubbing === 'undefined') {
return this.scrubbing_;
}
this.scrubbing_ = !!isScrubbing;
this.techCall_('setScrubbing', this.scrubbing_);
if (isScrubbing) {
this.addClass('vjs-scrubbing');
} else {
this.removeClass('vjs-scrubbing');
}
}
/**
* Get or set the current time (in seconds)
*
* @param {number|string} [seconds]
* The time to seek to in seconds
*
* @return {number|undefined}
* - the current time in seconds when getting
* - Nothing when setting
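*
* A minimal sketch (assumes an existing player with a loaded source):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.currentTime(30);            // seek to 30 seconds
* console.log(myPlayer.currentTime()); // current playhead in seconds
* ```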
*/
currentTime(seconds) {
if (seconds === undefined) {
// cache last currentTime and return. default to 0 seconds
//
// Caching the currentTime is meant to prevent a massive amount of reads on the tech's
// currentTime when scrubbing, but may not provide much performance benefit after all.
// Should be tested. Also something has to read the actual current time or the cache will
// never get updated.
this.cache_.currentTime = this.techGet_('currentTime') || 0;
return this.cache_.currentTime;
}
if (seconds < 0) {
seconds = 0;
}
if (!this.isReady_ || this.changingSrc_ || !this.tech_ || !this.tech_.isReady_) {
this.cache_.initTime = seconds;
this.off('canplay', this.boundApplyInitTime_);
this.one('canplay', this.boundApplyInitTime_);
return;
}
this.techCall_('setCurrentTime', seconds);
this.cache_.initTime = 0;
if (isFinite(seconds)) {
this.cache_.currentTime = Number(seconds);
}
}
/**
* Apply the value of initTime stored in cache as currentTime.
*
* @private
*/
applyInitTime_() {
this.currentTime(this.cache_.initTime);
}
/**
* Normally gets the length in time of the video in seconds;
* in all but the rarest use cases an argument will NOT be passed to the method
*
* > **NOTE**: The video must have started loading before the duration can be
* known, and depending on preload behaviour may not be known until the video starts
* playing.
*
* @fires Player#durationchange
*
* @param {number} [seconds]
* The duration of the video to set in seconds
*
* @return {number|undefined}
* - The duration of the video in seconds when getting
* - Nothing when setting
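*
* A hedged sketch of reading the duration once it is known (assumes an
* existing player):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.one('loadedmetadata', function() {
*   console.log(myPlayer.duration()); // duration in seconds, NaN before this
* });
* ```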
*/
duration(seconds) {
if (seconds === undefined) {
// return NaN if the duration is not known
return this.cache_.duration !== undefined ? this.cache_.duration : NaN;
}
seconds = parseFloat(seconds);
// Standardize on Infinity for signaling video is live
if (seconds < 0) {
seconds = Infinity;
}
if (seconds !== this.cache_.duration) {
// Cache the last set value for optimized scrubbing
this.cache_.duration = seconds;
if (seconds === Infinity) {
this.addClass('vjs-live');
} else {
this.removeClass('vjs-live');
}
if (!isNaN(seconds)) {
// Do not fire durationchange unless the duration value is known.
// @see [Spec]{@link https://www.w3.org/TR/2011/WD-html5-20110113/video.html#media-element-load-algorithm}
/**
* @event Player#durationchange
* @type {Event}
*/
this.trigger('durationchange');
}
}
}
/**
* Calculates how much time is left in the video. Not part
* of the native video API.
*
* @return {number}
* The time remaining in seconds
*/
remainingTime() {
return this.duration() - this.currentTime();
}
/**
* A remaining time function that is intended to be used when
* the time is to be displayed directly to the user.
*
* @return {number}
* The rounded time remaining in seconds
*/
remainingTimeDisplay() {
return Math.floor(this.duration()) - Math.floor(this.currentTime());
}
//
// Kind of like an array of portions of the video that have been downloaded.
/**
* Get a TimeRange object with an array of the times of the video
* that have been downloaded. If you just want the percent of the
* video that's been downloaded, use bufferedPercent.
*
* @see [Buffered Spec]{@link http://dev.w3.org/html5/spec/video.html#dom-media-buffered}
*
* @return {TimeRange}
* A mock {@link TimeRanges} object (following HTML spec)
*/
buffered() {
let buffered = this.techGet_('buffered');
if (!buffered || !buffered.length) {
buffered = createTimeRanges$1(0, 0);
}
return buffered;
}
/**
* Get the TimeRanges of the media that are currently available
* for seeking to.
*
* @see [Seekable Spec]{@link https://html.spec.whatwg.org/multipage/media.html#dom-media-seekable}
*
* @return {TimeRange}
* A mock {@link TimeRanges} object (following HTML spec)
*/
seekable() {
let seekable = this.techGet_('seekable');
if (!seekable || !seekable.length) {
seekable = createTimeRanges$1(0, 0);
}
return seekable;
}
/**
* Returns whether the player is in the "seeking" state.
*
* @return {boolean} True if the player is in the seeking state, false if not.
*/
seeking() {
return this.techGet_('seeking');
}
/**
* Returns whether the player is in the "ended" state.
*
* @return {boolean} True if the player is in the ended state, false if not.
*/
ended() {
return this.techGet_('ended');
}
/**
* Returns the current state of network activity for the element, from
* the codes in the list below.
* - NETWORK_EMPTY (numeric value 0)
* The element has not yet been initialised. All attributes are in
* their initial states.
* - NETWORK_IDLE (numeric value 1)
* The element's resource selection algorithm is active and has
* selected a resource, but it is not actually using the network at
* this time.
* - NETWORK_LOADING (numeric value 2)
* The user agent is actively trying to download data.
* - NETWORK_NO_SOURCE (numeric value 3)
* The element's resource selection algorithm is active, but it has
* not yet found a resource to use.
*
* @see https://html.spec.whatwg.org/multipage/embedded-content.html#network-states
* @return {number} the current network activity state
*/
networkState() {
return this.techGet_('networkState');
}
/**
* Returns a value that expresses the current state of the element
* with respect to rendering the current playback position, from the
* codes in the list below.
* - HAVE_NOTHING (numeric value 0)
* No information regarding the media resource is available.
* - HAVE_METADATA (numeric value 1)
* Enough of the resource has been obtained that the duration of the
* resource is available.
* - HAVE_CURRENT_DATA (numeric value 2)
* Data for the immediate current playback position is available.
* - HAVE_FUTURE_DATA (numeric value 3)
* Data for the immediate current playback position is available, as
* well as enough data for the user agent to advance the current
* playback position in the direction of playback.
* - HAVE_ENOUGH_DATA (numeric value 4)
* The user agent estimates that enough data is available for
* playback to proceed uninterrupted.
*
* @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-readystate
* @return {number} the current playback rendering state
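*
* A minimal sketch using the numeric codes above (assumes an existing player):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* if (myPlayer.readyState() >= 3) {
*   // HAVE_FUTURE_DATA or better, playback can likely continue
* }
* ```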
*/
readyState() {
return this.techGet_('readyState');
}
/**
* Get the percent (as a decimal) of the video that's been downloaded.
* This method is not a part of the native HTML video API.
*
* @return {number}
* A decimal between 0 and 1 representing the percent
* that is buffered, 0 being 0% and 1 being 100%
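*
* A minimal sketch converting the decimal to a display percentage (assumes
* an existing player):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* // e.g. 0.25 becomes '25%'
* var percentBuffered = Math.round(myPlayer.bufferedPercent() * 100) + '%';
* ```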
*/
bufferedPercent() {
return bufferedPercent(this.buffered(), this.duration());
}
/**
* Get the ending time of the last buffered time range
* This is used in the progress bar to encapsulate all time ranges.
*
* @return {number}
* The end of the last buffered time range
*/
bufferedEnd() {
const buffered = this.buffered();
const duration = this.duration();
let end = buffered.end(buffered.length - 1);
if (end > duration) {
end = duration;
}
return end;
}
/**
* Get or set the current volume of the media
*
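* For example, a minimal sketch (the player id is a placeholder):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* // set the volume to half
* myPlayer.volume(0.5);
* // get the current volume
* console.log(myPlayer.volume()); // 0.5
* ```
*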
* @param {number} [percentAsDecimal]
* The new volume as a decimal percent:
* - 0 is muted/0%/off
* - 1.0 is 100%/full
* - 0.5 is half volume or 50%
*
* @return {number|undefined}
* The current volume as a percent when getting
*/
volume(percentAsDecimal) {
let vol;
if (percentAsDecimal !== undefined) {
// Force value to between 0 and 1
vol = Math.max(0, Math.min(1, percentAsDecimal));
this.cache_.volume = vol;
this.techCall_('setVolume', vol);
if (vol > 0) {
this.lastVolume_(vol);
}
return;
}
// Default to 1 when returning current volume.
vol = parseFloat(this.techGet_('volume'));
return isNaN(vol) ? 1 : vol;
}
/**
* Get the current muted state, or turn mute on or off
*
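* For example, a minimal sketch (the player id is a placeholder):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* // mute the player
* myPlayer.muted(true);
* // get the current muted state
* console.log(myPlayer.muted()); // true
* ```
*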
* @param {boolean} [muted]
* - true to mute
* - false to unmute
*
* @return {boolean|undefined}
* - true if mute is on and getting
* - false if mute is off and getting
* - nothing if setting
*/
muted(muted) {
if (muted !== undefined) {
this.techCall_('setMuted', muted);
return;
}
return this.techGet_('muted') || false;
}
/**
* Get the current defaultMuted state, or turn defaultMuted on or off. defaultMuted
* indicates the state of muted on initial playback.
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.src("http://www.example.com/path/to/video.mp4");
*
* // get, should be false
* console.log(myPlayer.defaultMuted());
* // set to true
* myPlayer.defaultMuted(true);
* // get should be true
* console.log(myPlayer.defaultMuted());
* ```
*
* @param {boolean} [defaultMuted]
* - true to mute
* - false to unmute
*
* @return {boolean|undefined}
* - true if defaultMuted is on and getting
* - false if defaultMuted is off and getting
* - Nothing when setting
*/
defaultMuted(defaultMuted) {
if (defaultMuted !== undefined) {
this.techCall_('setDefaultMuted', defaultMuted);
}
return this.techGet_('defaultMuted') || false;
}
/**
* Get the last volume, or set it
*
* @param {number} [percentAsDecimal]
* The new last volume as a decimal percent:
* - 0 is muted/0%/off
* - 1.0 is 100%/full
* - 0.5 is half volume or 50%
*
* @return {number|undefined}
* - The current value of lastVolume as a percent when getting
* - Nothing when setting
*
* @private
*/
lastVolume_(percentAsDecimal) {
if (percentAsDecimal !== undefined && percentAsDecimal !== 0) {
this.cache_.lastVolume = percentAsDecimal;
return;
}
return this.cache_.lastVolume;
}
/**
* Check if current tech can support native fullscreen
* (e.g. with built in controls like iOS)
*
* @return {boolean}
* if native fullscreen is supported
*/
supportsFullScreen() {
return this.techGet_('supportsFullScreen') || false;
}
/**
* Check if the player is in fullscreen mode or tell the player that it
* is or is not in fullscreen mode.
*
* > NOTE: As of the latest HTML5 spec, isFullscreen is no longer an official
* property and instead document.fullscreenElement is used. But isFullscreen is
* still a valuable property for internal player workings.
*
* @param {boolean} [isFS]
* Set the player's current fullscreen state
*
* @return {boolean|undefined}
* - true if fullscreen is on and getting
* - false if fullscreen is off and getting
* - Nothing when setting
*/
isFullscreen(isFS) {
if (isFS !== undefined) {
const oldValue = this.isFullscreen_;
this.isFullscreen_ = Boolean(isFS);
// if we changed fullscreen state and we're in prefixed mode, trigger fullscreenchange
// this is the only place where we trigger fullscreenchange events for older browsers
// fullWindow mode is treated as a prefixed event and will get a fullscreenchange event as well
if (this.isFullscreen_ !== oldValue && this.fsApi_.prefixed) {
/**
* @event Player#fullscreenchange
* @type {Event}
*/
this.trigger('fullscreenchange');
}
this.toggleFullscreenClass_();
return;
}
return this.isFullscreen_;
}
/**
* Increase the size of the video to full screen
* In some browsers, full screen is not supported natively, so it enters
* "full window mode", where the video fills the browser window.
* In browsers and devices that support native full screen, sometimes the
* browser's default controls will be shown, and not the Video.js custom skin.
* This includes most mobile devices (iOS, Android) and older versions of
* Safari.
*
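* A minimal sketch (the player id is a placeholder; browsers typically require
* this to be called in response to a user gesture):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.requestFullscreen().then(function() {
*   // the player is now fullscreen
* }).catch(function(err) {
*   // fullscreen was blocked or failed
* });
* ```
*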
* @param {Object} [fullscreenOptions]
* Override the player fullscreen options
*
* @fires Player#fullscreenchange
*
* @return {Promise}
* A promise that resolves once fullscreen has been entered and rejects on error
*/
requestFullscreen(fullscreenOptions) {
if (this.isInPictureInPicture()) {
this.exitPictureInPicture();
}
const self = this;
return new Promise((resolve, reject) => {
function offHandler() {
self.off('fullscreenerror', errorHandler);
self.off('fullscreenchange', changeHandler);
}
function changeHandler() {
offHandler();
resolve();
}
function errorHandler(e, err) {
offHandler();
reject(err);
}
self.one('fullscreenchange', changeHandler);
self.one('fullscreenerror', errorHandler);
const promise = self.requestFullscreenHelper_(fullscreenOptions);
if (promise) {
promise.then(offHandler, offHandler);
promise.then(resolve, reject);
}
});
}
requestFullscreenHelper_(fullscreenOptions) {
let fsOptions;
// Only pass fullscreen options to requestFullscreen in spec-compliant browsers.
// Use defaults or player configured option unless passed directly to this method.
if (!this.fsApi_.prefixed) {
fsOptions = this.options_.fullscreen && this.options_.fullscreen.options || {};
if (fullscreenOptions !== undefined) {
fsOptions = fullscreenOptions;
}
}
// This method works as follows:
// 1. if a fullscreen api is available, use it
//    a. call requestFullscreen with potential options
//    b. if we got a promise from above, use it to update isFullscreen()
// 2. otherwise, if the tech supports fullscreen, call `enterFullScreen` on it.
//    This is particularly used for iPhone, older iPads, and non-Safari browsers on iOS.
// 3. otherwise, use "fullWindow" mode
if (this.fsApi_.requestFullscreen) {
const promise = this.el_[this.fsApi_.requestFullscreen](fsOptions);
// Even on browsers with promise support this may not return a promise
if (promise) {
promise.then(() => this.isFullscreen(true), () => this.isFullscreen(false));
}
return promise;
} else if (this.tech_.supportsFullScreen() && this.options_.preferFullWindow !== true) {
// we can't take the video.js controls fullscreen but we can go fullscreen
// with native controls
this.techCall_('enterFullScreen');
} else {
// fullscreen isn't supported so we'll just stretch the video element to
// fill the viewport
this.enterFullWindow();
}
}
/**
* Return the video to its normal size after having been in full screen mode
*
* @fires Player#fullscreenchange
*
* @return {Promise}
* A promise that resolves once fullscreen has been exited and rejects on error
*/
exitFullscreen() {
const self = this;
return new Promise((resolve, reject) => {
function offHandler() {
self.off('fullscreenerror', errorHandler);
self.off('fullscreenchange', changeHandler);
}
function changeHandler() {
offHandler();
resolve();
}
function errorHandler(e, err) {
offHandler();
reject(err);
}
self.one('fullscreenchange', changeHandler);
self.one('fullscreenerror', errorHandler);
const promise = self.exitFullscreenHelper_();
if (promise) {
promise.then(offHandler, offHandler);
// map the promise to our resolve/reject methods
promise.then(resolve, reject);
}
});
}
exitFullscreenHelper_() {
if (this.fsApi_.requestFullscreen) {
const promise = document$1[this.fsApi_.exitFullscreen]();
// Even on browsers with promise support this may not return a promise
if (promise) {
// we're splitting the promise here, so, we want to catch the
// potential error so that this chain doesn't have unhandled errors
silencePromise(promise.then(() => this.isFullscreen(false)));
}
return promise;
} else if (this.tech_.supportsFullScreen() && this.options_.preferFullWindow !== true) {
this.techCall_('exitFullScreen');
} else {
this.exitFullWindow();
}
}
/**
* When fullscreen isn't supported we can stretch the
* video container to as wide as the browser will let us.
*
* @fires Player#enterFullWindow
*/
enterFullWindow() {
this.isFullscreen(true);
this.isFullWindow = true;
// Storing original doc overflow value to return to when fullscreen is off
this.docOrigOverflow = document$1.documentElement.style.overflow;
// Add listener for esc key to exit fullscreen
on(document$1, 'keydown', this.boundFullWindowOnEscKey_);
// Hide any scroll bars
document$1.documentElement.style.overflow = 'hidden';
// Apply fullscreen styles
addClass(document$1.body, 'vjs-full-window');
/**
* @event Player#enterFullWindow
* @type {Event}
*/
this.trigger('enterFullWindow');
}
/**
* Exit full window or fullscreen mode, whichever is currently active,
* when the ESC key is pressed.
*
* @param {KeyboardEvent} event
* The keydown event to check for the Escape key
*/
fullWindowOnEscKey(event) {
if (event.key === 'Escape') {
if (this.isFullscreen() === true) {
if (!this.isFullWindow) {
this.exitFullscreen();
} else {
this.exitFullWindow();
}
}
}
}
/**
* Exit full window
*
* @fires Player#exitFullWindow
*/
exitFullWindow() {
this.isFullscreen(false);
this.isFullWindow = false;
off(document$1, 'keydown', this.boundFullWindowOnEscKey_);
// Unhide scroll bars.
document$1.documentElement.style.overflow = this.docOrigOverflow;
// Remove fullscreen styles
removeClass(document$1.body, 'vjs-full-window');
// Resize the box, controller, and poster to original sizes
// this.positionAll();
/**
* @event Player#exitFullWindow
* @type {Event}
*/
this.trigger('exitFullWindow');
}
/**
* Get or set whether Picture-in-Picture mode is disabled.
*
* @param {boolean} [value]
* - true will disable Picture-in-Picture mode
* - false will enable Picture-in-Picture mode
*
* @return {boolean|undefined}
* - The current value of disablePictureInPicture when getting
* - Nothing when setting
*/
disablePictureInPicture(value) {
if (value === undefined) {
return this.techGet_('disablePictureInPicture');
}
this.techCall_('setDisablePictureInPicture', value);
this.options_.disablePictureInPicture = value;
this.trigger('disablepictureinpicturechanged');
}
/**
* Check if the player is in Picture-in-Picture mode or tell the player that it
* is or is not in Picture-in-Picture mode.
*
* @param {boolean} [isPiP]
* Set the player's current Picture-in-Picture state
*
* @return {boolean|undefined}
* - true if Picture-in-Picture is on and getting
* - false if Picture-in-Picture is off and getting
* - nothing if setting
*/
isInPictureInPicture(isPiP) {
if (isPiP !== undefined) {
this.isInPictureInPicture_ = !!isPiP;
this.togglePictureInPictureClass_();
return;
}
return !!this.isInPictureInPicture_;
}
/**
* Create a floating video window always on top of other windows so that users may
* continue consuming media while they interact with other content, sites, or
* applications on their device.
*
* This can use document picture-in-picture or element picture in picture
*
* Set `enableDocumentPictureInPicture` to `true` to use docPiP on a supported browser
* Otherwise, leave `disablePictureInPicture` as `false` (the default) to allow element PiP on a supported browser
*
*
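* A minimal sketch (the player id is a placeholder; availability depends on the
* browser and the options described above):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.requestPictureInPicture().then(function(pipWindow) {
*   // Picture-in-Picture was entered
* }).catch(function(err) {
*   // Picture-in-Picture is unavailable or was blocked
* });
* ```
*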
* @see [Spec]{@link https://w3c.github.io/picture-in-picture/}
* @see [Spec]{@link https://wicg.github.io/document-picture-in-picture/}
*
* @fires Player#enterpictureinpicture
*
* @return {Promise}
* A promise with a Picture-in-Picture window.
*/
requestPictureInPicture() {
if (this.options_.enableDocumentPictureInPicture && window$1.documentPictureInPicture) {
const pipContainer = document$1.createElement(this.el().tagName);
pipContainer.classList = this.el().classList;
pipContainer.classList.add('vjs-pip-container');
if (this.posterImage) {
pipContainer.appendChild(this.posterImage.el().cloneNode(true));
}
if (this.titleBar) {
pipContainer.appendChild(this.titleBar.el().cloneNode(true));
}
pipContainer.appendChild(createEl('p', {
className: 'vjs-pip-text'
}, {}, this.localize('Playing in picture-in-picture')));
return window$1.documentPictureInPicture.requestWindow({
// The aspect ratio won't be correct, Chrome bug https://crbug.com/1407629
width: this.videoWidth(),
height: this.videoHeight()
}).then(pipWindow => {
copyStyleSheetsToWindow(pipWindow);
this.el_.parentNode.insertBefore(pipContainer, this.el_);
pipWindow.document.body.appendChild(this.el_);
pipWindow.document.body.classList.add('vjs-pip-window');
this.player_.isInPictureInPicture(true);
this.player_.trigger({
type: 'enterpictureinpicture',
pipWindow
});
// Listen for the PiP closing event to move the video back.
pipWindow.addEventListener('pagehide', event => {
const pipVideo = event.target.querySelector('.video-js');
pipContainer.parentNode.replaceChild(pipVideo, pipContainer);
this.player_.isInPictureInPicture(false);
this.player_.trigger('leavepictureinpicture');
});
return pipWindow;
});
}
if ('pictureInPictureEnabled' in document$1 && this.disablePictureInPicture() === false) {
/**
* This event fires when the player enters picture in picture mode
*
* @event Player#enterpictureinpicture
* @type {Event}
*/
return this.techGet_('requestPictureInPicture');
}
return Promise.reject('No PiP mode is available');
}
/**
* Exit Picture-in-Picture mode.
*
* @see [Spec]{@link https://wicg.github.io/picture-in-picture}
*
* @fires Player#leavepictureinpicture
*
* @return {Promise}
* A promise.
*/
exitPictureInPicture() {
if (window$1.documentPictureInPicture && window$1.documentPictureInPicture.window) {
// With documentPictureInPicture, Player#leavepictureinpicture is fired in the pagehide handler
window$1.documentPictureInPicture.window.close();
return Promise.resolve();
}
if ('pictureInPictureEnabled' in document$1) {
/**
* This event fires when the player leaves picture in picture mode
*
* @event Player#leavepictureinpicture
* @type {Event}
*/
return document$1.exitPictureInPicture();
}
}
/**
* Called when this Player has focus and a key gets pressed down, or when
* any Component of this player receives a key press that it doesn't handle.
* This allows player-wide hotkeys (either as defined below, or optionally
* by an external function).
*
* @param {KeyboardEvent} event
* The `keydown` event that caused this function to be called.
*
* @listens keydown
*/
handleKeyDown(event) {
const {
userActions
} = this.options_;
// Bail out if hotkeys are not configured.
if (!userActions || !userActions.hotkeys) {
return;
}
// Function that determines whether or not to exclude an element from
// hotkeys handling.
const excludeElement = el => {
const tagName = el.tagName.toLowerCase();
// The first and easiest test is for `contenteditable` elements.
if (el.isContentEditable) {
return true;
}
// Inputs matching these types will still trigger hotkey handling as
// they are not text inputs.
const allowedInputTypes = ['button', 'checkbox', 'hidden', 'radio', 'reset', 'submit'];
if (tagName === 'input') {
return allowedInputTypes.indexOf(el.type) === -1;
}
// The final test is by tag name. These tags will be excluded entirely.
const excludedTags = ['textarea'];
return excludedTags.indexOf(tagName) !== -1;
};
// Bail out if the user is focused on an interactive form element.
if (excludeElement(this.el_.ownerDocument.activeElement)) {
return;
}
if (typeof userActions.hotkeys === 'function') {
userActions.hotkeys.call(this, event);
} else {
this.handleHotkeys(event);
}
}
/**
* Called when this Player receives a hotkey keydown event.
* Supported player-wide hotkeys are:
*
* f - toggle fullscreen
* m - toggle mute
* k or Space - toggle play/pause
*
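* Hotkeys can be customized through the `userActions.hotkeys` player option. A
* minimal sketch (the player id and the chosen key are placeholders):
*
* ```js
* var myPlayer = videojs('some-player-id', {
*   userActions: {
*     hotkeys: {
*       muteKey: function(event) {
*         // use "s" instead of the default "m" to toggle mute
*         return event.key.toLowerCase() === 's';
*       }
*     }
*   }
* });
* ```
*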
* @param {Event} event
* The `keydown` event that caused this function to be called.
*/
handleHotkeys(event) {
const hotkeys = this.options_.userActions ? this.options_.userActions.hotkeys : {};
// set fullscreenKey, muteKey, playPauseKey from `hotkeys`, use defaults if not set
const {
fullscreenKey = keydownEvent => event.key.toLowerCase() === 'f',
muteKey = keydownEvent => event.key.toLowerCase() === 'm',
playPauseKey = keydownEvent => event.key.toLowerCase() === 'k' || event.key.toLowerCase() === ' '
} = hotkeys;
if (fullscreenKey.call(this, event)) {
event.preventDefault();
event.stopPropagation();
const FSToggle = Component$1.getComponent('FullscreenToggle');
if (document$1[this.fsApi_.fullscreenEnabled] !== false) {
FSToggle.prototype.handleClick.call(this, event);
}
} else if (muteKey.call(this, event)) {
event.preventDefault();
event.stopPropagation();
const MuteToggle = Component$1.getComponent('MuteToggle');
MuteToggle.prototype.handleClick.call(this, event);
} else if (playPauseKey.call(this, event)) {
event.preventDefault();
event.stopPropagation();
const PlayToggle = Component$1.getComponent('PlayToggle');
PlayToggle.prototype.handleClick.call(this, event);
}
}
/**
* Check whether the player can play a given mimetype
*
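* For example, a minimal sketch (the player id and MIME type are placeholders):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* console.log(myPlayer.canPlayType('video/mp4')); // 'probably', 'maybe' or ''
* ```
*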
* @see https://www.w3.org/TR/2011/WD-html5-20110113/video.html#dom-navigator-canplaytype
*
* @param {string} type
* The mimetype to check
*
* @return {string}
* 'probably', 'maybe', or '' (empty string)
*/
canPlayType(type) {
let can;
// Loop through each playback technology in the options order
for (let i = 0, techOrder = this.options_.techOrder; i < techOrder.length; i++) {
const techName = techOrder[i];
let tech = Tech.getTech(techName);
// Support old behavior of techs being registered as components.
// Remove once that deprecated behavior is removed.
if (!tech) {
tech = Component$1.getComponent(techName);
}
// Check if the current tech is defined before continuing
if (!tech) {
log$1.error(`The "${techName}" tech is undefined. Skipped browser support check for that tech.`);
continue;
}
// Check if the browser supports this technology
if (tech.isSupported()) {
can = tech.canPlayType(type);
if (can) {
return can;
}
}
}
return '';
}
/**
* Select source based on tech-order or source-order
* Uses source-order selection if `options.sourceOrder` is truthy. Otherwise,
* defaults to tech-order selection
*
* @param {Array} sources
* The sources for a media asset
*
* @return {Object|boolean}
* Object of source and tech order or false
*/
selectSource(sources) {
// Get only the techs specified in `techOrder` that exist and are supported by the
// current platform
const techs = this.options_.techOrder.map(techName => {
return [techName, Tech.getTech(techName)];
}).filter(([techName, tech]) => {
// Check if the current tech is defined before continuing
if (tech) {
// Check if the browser supports this technology
return tech.isSupported();
}
log$1.error(`The "${techName}" tech is undefined. Skipped browser support check for that tech.`);
return false;
});
// Iterate over each `innerArray` element once per `outerArray` element and execute
// `tester` with both. If `tester` returns a non-falsy value, exit early and return
// that value.
const findFirstPassingTechSourcePair = function (outerArray, innerArray, tester) {
let found;
outerArray.some(outerChoice => {
return innerArray.some(innerChoice => {
found = tester(outerChoice, innerChoice);
if (found) {
return true;
}
});
});
return found;
};
let foundSourceAndTech;
const flip = fn => (a, b) => fn(b, a);
const finder = ([techName, tech], source) => {
if (tech.canPlaySource(source, this.options_[techName.toLowerCase()])) {
return {
source,
tech: techName
};
}
};
// Depending on the truthiness of `options.sourceOrder`, we swap the order of techs and sources
// to select from them based on their priority.
if (this.options_.sourceOrder) {
// Source-first ordering
foundSourceAndTech = findFirstPassingTechSourcePair(sources, techs, flip(finder));
} else {
// Tech-first ordering
foundSourceAndTech = findFirstPassingTechSourcePair(techs, sources, finder);
}
return foundSourceAndTech || false;
}
/**
* Executes source setting and getting logic
*
* @param {Tech~SourceObject|Tech~SourceObject[]|string} [source]
* A SourceObject, an array of SourceObjects, or a string referencing
* a URL to a media source. It is _highly recommended_ that an object
* or array of objects is used here, so that source selection
* algorithms can take the `type` into account.
*
* If not provided, this method acts as a getter.
* @param {boolean} [isRetry]
* Indicates whether this is being called internally as a result of a retry
*
* @return {string|undefined}
* If the `source` argument is missing, returns the current source
* URL. Otherwise, returns nothing/undefined.
*/
handleSrc_(source, isRetry) {
// getter usage
if (typeof source === 'undefined') {
return this.cache_.src || '';
}
// Reset retry behavior for new source
if (this.resetRetryOnError_) {
this.resetRetryOnError_();
}
// filter out invalid sources and turn our source into
// an array of source objects
const sources = filterSource(source);
// if the filtered list is empty, the source that was passed in
// was invalid, so we have to show an error
if (!sources.length) {
this.setTimeout(function () {
this.error({
code: 4,
message: this.options_.notSupportedMessage
});
}, 0);
return;
}
// initial sources
this.changingSrc_ = true;
// Only update the cached source list if we are not retrying a new source after error,
// since in that case we want to include the failed source(s) in the cache
if (!isRetry) {
this.cache_.sources = sources;
}
this.updateSourceCaches_(sources[0]);
// middlewareSource is the source after it has been changed by middleware
setSource(this, sources[0], (middlewareSource, mws) => {
this.middleware_ = mws;
// since sourceSet is async we have to update the cache again after we select a source since
// the source that is selected could be out of order from the cache update above this callback.
if (!isRetry) {
this.cache_.sources = sources;
}
this.updateSourceCaches_(middlewareSource);
const err = this.src_(middlewareSource);
if (err) {
if (sources.length > 1) {
return this.handleSrc_(sources.slice(1));
}
this.changingSrc_ = false;
// We need to wrap this in a timeout to give folks a chance to add error event handlers
this.setTimeout(function () {
this.error({
code: 4,
message: this.options_.notSupportedMessage
});
}, 0);
// we could not find an appropriate tech, but let's still notify the delegate that this is it
// this needs a better comment about why this is needed
this.triggerReady();
return;
}
setTech(mws, this.tech_);
});
// Try another available source if this one fails before playback.
if (sources.length > 1) {
const retry = () => {
// Remove the error modal
this.error(null);
this.handleSrc_(sources.slice(1), true);
};
const stopListeningForErrors = () => {
this.off('error', retry);
};
this.one('error', retry);
this.one('playing', stopListeningForErrors);
this.resetRetryOnError_ = () => {
this.off('error', retry);
this.off('playing', stopListeningForErrors);
};
}
}
/**
* Get or set the video source.
*
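* For example, a minimal sketch (the player id and URLs are placeholders):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* // set a single, typed source
* myPlayer.src({type: 'video/mp4', src: 'http://www.example.com/path/to/video.mp4'});
*
* // or provide an array of sources to try in order
* myPlayer.src([
*   {type: 'application/x-mpegURL', src: 'http://www.example.com/path/to/stream.m3u8'},
*   {type: 'video/mp4', src: 'http://www.example.com/path/to/video.mp4'}
* ]);
*
* // get the current source URL
* console.log(myPlayer.currentSrc());
* ```
*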
* @param {Tech~SourceObject|Tech~SourceObject[]|string} [source]
* A SourceObject, an array of SourceObjects, or a string referencing
* a URL to a media source. It is _highly recommended_ that an object
* or array of objects is used here, so that source selection
* algorithms can take the `type` into account.
*
* If not provided, this method acts as a getter.
*
* @return {string|undefined}
* If the `source` argument is missing, returns the current source
* URL. Otherwise, returns nothing/undefined.
*/
src(source) {
return this.handleSrc_(source, false);
}
/**
* Set the source object on the tech, returns a boolean that indicates whether
* there is a tech that can play the source or not
*
* @param {Tech~SourceObject} source
* The source object to set on the Tech
*
* @return {boolean}
* - True if there is no Tech that can play back this source
* - False otherwise
*
* @private
*/
src_(source) {
const sourceTech = this.selectSource([source]);
if (!sourceTech) {
return true;
}
if (!titleCaseEquals(sourceTech.tech, this.techName_)) {
this.changingSrc_ = true;
// load this technology with the chosen source
this.loadTech_(sourceTech.tech, sourceTech.source);
this.tech_.ready(() => {
this.changingSrc_ = false;
});
return false;
}
// wait until the tech is ready to set the source
// and set it synchronously if possible (#2326)
this.ready(function () {
// The setSource tech method was added with source handlers
// so older techs won't support it
// We need to check the direct prototype for the case where subclasses
// of the tech do not support source handlers
if (this.tech_.constructor.prototype.hasOwnProperty('setSource')) {
this.techCall_('setSource', source);
} else {
this.techCall_('src', source.src);
}
this.changingSrc_ = false;
}, true);
return false;
}
/**
* Begin loading the src data.
*/
load() {
// Workaround to use the load method with the VHS.
// Does not cover the case when the load method is called directly from the mediaElement.
if (this.tech_ && this.tech_.vhs) {
this.src(this.currentSource());
return;
}
this.techCall_('load');
}
/**
* Reset the player. Loads the first tech in the techOrder,
* removes all the text tracks in the existing `tech`,
* and calls `reset` on the `tech`.
*/
reset() {
if (this.paused()) {
this.doReset_();
} else {
const playPromise = this.play();
silencePromise(playPromise.then(() => this.doReset_()));
}
}
doReset_() {
if (this.tech_) {
this.tech_.clearTracks('text');
}
this.removeClass('vjs-playing');
this.addClass('vjs-paused');
this.resetCache_();
this.poster('');
this.loadTech_(this.options_.techOrder[0], null);
this.techCall_('reset');
this.resetControlBarUI_();
this.error(null);
if (this.titleBar) {
this.titleBar.update({
title: undefined,
description: undefined
});
}
if (isEvented(this)) {
this.trigger('playerreset');
}
}
/**
* Reset Control Bar's UI by calling sub-methods that reset
* all of Control Bar's components
*/
resetControlBarUI_() {
this.resetProgressBar_();
this.resetPlaybackRate_();
this.resetVolumeBar_();
}
/**
* Reset tech's progress so progress bar is reset in the UI
*/
resetProgressBar_() {
this.currentTime(0);
const {
currentTimeDisplay,
durationDisplay,
progressControl,
remainingTimeDisplay
} = this.controlBar || {};
const {
seekBar
} = progressControl || {};
if (currentTimeDisplay) {
currentTimeDisplay.updateContent();
}
if (durationDisplay) {
durationDisplay.updateContent();
}
if (remainingTimeDisplay) {
remainingTimeDisplay.updateContent();
}
if (seekBar) {
seekBar.update();
if (seekBar.loadProgressBar) {
seekBar.loadProgressBar.update();
}
}
}
/**
* Reset Playback rate
*/
resetPlaybackRate_() {
this.playbackRate(this.defaultPlaybackRate());
this.handleTechRateChange_();
}
/**
* Reset Volume bar
*/
resetVolumeBar_() {
this.volume(1.0);
this.trigger('volumechange');
}
/**
* Returns all of the current source objects.
*
* @return {Tech~SourceObject[]}
* The current source objects
*/
currentSources() {
const source = this.currentSource();
const sources = [];
// assume `{}` or `{ src }`
if (Object.keys(source).length !== 0) {
sources.push(source);
}
return this.cache_.sources || sources;
}
/**
* Returns the current source object.
*
* @return {Tech~SourceObject}
* The current source object
*/
currentSource() {
return this.cache_.source || {};
}
/**
* Returns the fully qualified URL of the current source value e.g. http://mysite.com/video.mp4
* Can be used in conjunction with `currentType` to assist in rebuilding the current source object.
*
* @return {string}
* The current source
*/
currentSrc() {
return this.currentSource() && this.currentSource().src || '';
}
/**
* Get the current source type e.g. video/mp4
* This allows you to rebuild the current source object so that you can load the same
* source and tech later
*
* @return {string}
* The source MIME type
*/
currentType() {
return this.currentSource() && this.currentSource().type || '';
}
/**
* Get or set the preload attribute
*
* @param {'none'|'auto'|'metadata'} [value]
* Preload mode to pass to tech
*
* @return {string|undefined}
* - The preload attribute value when getting
* - Nothing when setting
*/
preload(value) {
if (value !== undefined) {
this.techCall_('setPreload', value);
this.options_.preload = value;
return;
}
return this.techGet_('preload');
}
/**
* Get or set the autoplay option. When this is a boolean it will
* modify the attribute on the tech. When this is a string the attribute on
* the tech will be removed and `Player` will handle autoplay on loadstarts.
*
* @param {boolean|'play'|'muted'|'any'} [value]
* - true: autoplay using the browser behavior
* - false: do not autoplay
* - 'play': call play() on every loadstart
* - 'muted': call muted() then play() on every loadstart
* - 'any': call play() on every loadstart. If that fails, call muted() then play().
* - *: any other value will set `autoplay` to true
*
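* For example, a minimal sketch (the player id is a placeholder):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* // mute, then attempt playback, on every loadstart
* myPlayer.autoplay('muted');
* // get the current autoplay value
* console.log(myPlayer.autoplay()); // 'muted'
* ```
*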
* @return {boolean|string|undefined}
* - The current value of autoplay when getting
* - Nothing when setting
*/
autoplay(value) {
// getter usage
if (value === undefined) {
return this.options_.autoplay || false;
}
let techAutoplay;
// if the value is a valid string set it to that, or normalize `true` to 'play', if need be
if (typeof value === 'string' && /(any|play|muted)/.test(value) || value === true && this.options_.normalizeAutoplay) {
this.options_.autoplay = value;
this.manualAutoplay_(typeof value === 'string' ? value : 'play');
techAutoplay = false;
// any falsy value sets autoplay to false in the browser,
// lets do the same
} else if (!value) {
this.options_.autoplay = false;
// any other value (ie truthy) sets autoplay to true
} else {
this.options_.autoplay = true;
}
techAutoplay = typeof techAutoplay === 'undefined' ? this.options_.autoplay : techAutoplay;
// if we don't have a tech then we do not queue up
// a setAutoplay call on tech ready. We do this because the
// autoplay option will be passed in the constructor and we
// do not need to set it twice
if (this.tech_) {
this.techCall_('setAutoplay', techAutoplay);
}
}
/**
* Set or unset the playsinline attribute.
* Playsinline tells the browser that non-fullscreen playback is preferred.
*
* @param {boolean} [value]
* - true means that we should try to play inline by default
* - false means that we should use the browser's default playback mode,
* which in most cases is inline. iOS Safari is a notable exception
* and plays fullscreen by default.
*
* @return {string|undefined}
* - the current value of playsinline
* - Nothing when setting
*
* @see [Spec]{@link https://html.spec.whatwg.org/#attr-video-playsinline}
*/
playsinline(value) {
if (value !== undefined) {
this.techCall_('setPlaysinline', value);
this.options_.playsinline = value;
}
return this.techGet_('playsinline');
}
/**
* Get or set the loop attribute on the video element.
*
* @param {boolean} [value]
* - true means that we should loop the video
* - false means that we should not loop the video
*
* @return {boolean|undefined}
* - The current value of loop when getting
* - Nothing when setting
*/
loop(value) {
if (value !== undefined) {
this.techCall_('setLoop', value);
this.options_.loop = value;
return;
}
return this.techGet_('loop');
}
/**
* Get or set the poster image source url
*
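* For example, a minimal sketch (the player id and URL are placeholders):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.poster('http://www.example.com/path/to/poster.jpg');
* console.log(myPlayer.poster());
* ```
*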
* @fires Player#posterchange
*
* @param {string} [src]
* Poster image source URL
*
* @return {string|undefined}
* - The current value of poster when getting
* - Nothing when setting
*/
poster(src) {
if (src === undefined) {
return this.poster_;
}
// The correct way to remove a poster is to set as an empty string
// other falsey values will throw errors
if (!src) {
src = '';
}
if (src === this.poster_) {
return;
}
// update the internal poster variable
this.poster_ = src;
// update the tech's poster
this.techCall_('setPoster', src);
this.isPosterFromTech_ = false;
// alert components that the poster has been set
/**
* This event fires when the poster image is changed on the player.
*
* @event Player#posterchange
* @type {Event}
*/
this.trigger('posterchange');
}
/**
* Some techs (e.g. YouTube) can provide a poster source in an
* asynchronous way. We want the poster component to use this
* poster source so that it covers up the tech's controls.
* (YouTube's play button). However, we only want to use this
* source if the user hasn't set a poster through
* the normal APIs.
*
* @fires Player#posterchange
* @listens Tech#posterchange
* @private
*/
handleTechPosterChange_() {
if ((!this.poster_ || this.options_.techCanOverridePoster) && this.tech_ && this.tech_.poster) {
const newPoster = this.tech_.poster() || '';
if (newPoster !== this.poster_) {
this.poster_ = newPoster;
this.isPosterFromTech_ = true;
// Let components know the poster has changed
this.trigger('posterchange');
}
}
}
/**
* Get or set whether or not the controls are showing.
*
* @fires Player#controlsenabled
*
* @param {boolean} [bool]
* - true to turn controls on
* - false to turn controls off
*
* @return {boolean|undefined}
* - The current value of controls when getting
* - Nothing when setting
*/
controls(bool) {
if (bool === undefined) {
return !!this.controls_;
}
bool = !!bool;
// Don't trigger a change event unless it actually changed
if (this.controls_ === bool) {
return;
}
this.controls_ = bool;
if (this.usingNativeControls()) {
this.techCall_('setControls', bool);
}
if (this.controls_) {
this.removeClass('vjs-controls-disabled');
this.addClass('vjs-controls-enabled');
/**
* @event Player#controlsenabled
* @type {Event}
*/
this.trigger('controlsenabled');
if (!this.usingNativeControls()) {
this.addTechControlsListeners_();
}
} else {
this.removeClass('vjs-controls-enabled');
this.addClass('vjs-controls-disabled');
/**
* @event Player#controlsdisabled
* @type {Event}
*/
this.trigger('controlsdisabled');
if (!this.usingNativeControls()) {
this.removeTechControlsListeners_();
}
}
}
/**
* Toggle native controls on/off. Native controls are the controls built into
* devices (e.g. default iPhone controls) or other techs
* (e.g. Vimeo Controls)
* **This should only be set by the current tech, because only the tech knows
* if it can support native controls**
*
* @fires Player#usingnativecontrols
* @fires Player#usingcustomcontrols
*
* @param {boolean} [bool]
* - true to turn native controls on
* - false to turn native controls off
*
* @return {boolean|undefined}
* - The current value of native controls when getting
* - Nothing when setting
*/
usingNativeControls(bool) {
if (bool === undefined) {
return !!this.usingNativeControls_;
}
bool = !!bool;
// Don't trigger a change event unless it actually changed
if (this.usingNativeControls_ === bool) {
return;
}
this.usingNativeControls_ = bool;
if (this.usingNativeControls_) {
this.addClass('vjs-using-native-controls');
/**
* player is using the native device controls
*
* @event Player#usingnativecontrols
* @type {Event}
*/
this.trigger('usingnativecontrols');
} else {
this.removeClass('vjs-using-native-controls');
/**
* player is using the custom HTML controls
*
* @event Player#usingcustomcontrols
* @type {Event}
*/
this.trigger('usingcustomcontrols');
}
}
/**
* Set or get the current MediaError
*
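* For example, a minimal sketch (the player id and message are placeholders):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* // set an error (code 4 is MEDIA_ERR_SRC_NOT_SUPPORTED)
* myPlayer.error({code: 4, message: 'No compatible source was found'});
* // get the current error
* console.log(myPlayer.error().code); // 4
* // clear the error
* myPlayer.error(null);
* ```
*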
* @fires Player#error
*
* @param {MediaError|string|number} [err]
* A MediaError or a string/number to be turned
* into a MediaError
*
* @return {MediaError|null|undefined}
* - The current MediaError when getting (or null)
* - Nothing when setting
*/
error(err) {
if (err === undefined) {
return this.error_ || null;
}
// allow hooks to modify error object
hooks('beforeerror').forEach(hookFunction => {
const newErr = hookFunction(this, err);
if (!(isObject(newErr) && !Array.isArray(newErr) || typeof newErr === 'string' || typeof newErr === 'number' || newErr === null)) {
this.log.error('please return a value that MediaError expects in beforeerror hooks');
return;
}
err = newErr;
});
// Suppress the first error message for no compatible source until
// user interaction
if (this.options_.suppressNotSupportedError && err && err.code === 4) {
const triggerSuppressedError = function () {
this.error(err);
};
this.options_.suppressNotSupportedError = false;
this.any(['click', 'touchstart'], triggerSuppressedError);
this.one('loadstart', function () {
this.off(['click', 'touchstart'], triggerSuppressedError);
});
return;
}
// restoring to default
if (err === null) {
this.error_ = null;
this.removeClass('vjs-error');
if (this.errorDisplay) {
this.errorDisplay.close();
}
return;
}
this.error_ = new MediaError(err);
// add the vjs-error classname to the player
this.addClass('vjs-error');
// log the name of the error type and any message
// IE11 logs "[object object]" and requires you to expand the message to see the error object
log$1.error(`(CODE:${this.error_.code} ${MediaError.errorTypes[this.error_.code]})`, this.error_.message, this.error_);
/**
* @event Player#error
* @type {Event}
*/
this.trigger('error');
// notify hooks of the per player error
hooks('error').forEach(hookFunction => hookFunction(this, this.error_));
return;
}
/**
* Report user activity
*
* @param {Object} event
* Event object
*/
reportUserActivity(event) {
this.userActivity_ = true;
}
/**
* Get/set if user is active
*
* @fires Player#useractive
* @fires Player#userinactive
*
* @param {boolean} [bool]
* - true if the user is active
* - false if the user is inactive
*
* @return {boolean|undefined}
* - The current value of userActive when getting
* - Nothing when setting
*/
userActive(bool) {
if (bool === undefined) {
return this.userActive_;
}
bool = !!bool;
if (bool === this.userActive_) {
return;
}
this.userActive_ = bool;
if (this.userActive_) {
this.userActivity_ = true;
this.removeClass('vjs-user-inactive');
this.addClass('vjs-user-active');
/**
* @event Player#useractive
* @type {Event}
*/
this.trigger('useractive');
return;
}
// Chrome/Safari/IE have bugs where when you change the cursor it can
// trigger a mousemove event. This causes an issue when you're hiding
// the cursor when the user is inactive, and a mousemove signals user
// activity. Making it impossible to go into inactive mode. Specifically
// this happens in fullscreen when we really need to hide the cursor.
//
// When this gets resolved in ALL browsers it can be removed
// https://code.google.com/p/chromium/issues/detail?id=103041
if (this.tech_) {
this.tech_.one('mousemove', function (e) {
e.stopPropagation();
e.preventDefault();
});
}
this.userActivity_ = false;
this.removeClass('vjs-user-active');
this.addClass('vjs-user-inactive');
/**
* @event Player#userinactive
* @type {Event}
*/
this.trigger('userinactive');
}
/**
* Listen for user activity based on timeout value
*
* @private
*/
listenForUserActivity_() {
let mouseInProgress;
let lastMoveX;
let lastMoveY;
const handleActivity = bind_(this, this.reportUserActivity);
const handleMouseMove = function (e) {
// #1068 - Prevent mousemove spamming
// Chrome Bug: https://code.google.com/p/chromium/issues/detail?id=366970
if (e.screenX !== lastMoveX || e.screenY !== lastMoveY) {
lastMoveX = e.screenX;
lastMoveY = e.screenY;
handleActivity();
}
};
const handleMouseDown = function () {
handleActivity();
// For as long as they are touching the device or have their mouse down,
// we consider them active even if they're not moving their finger or mouse.
// So we want to continue to update that they are active
this.clearInterval(mouseInProgress);
// Setting userActivity=true now and setting the interval to the same time
// as the activityCheck interval (250) should ensure we never miss the
// next activityCheck
mouseInProgress = this.setInterval(handleActivity, 250);
};
const handleMouseUpAndMouseLeave = function (event) {
handleActivity();
// Stop the interval that maintains activity if the mouse/touch is down
this.clearInterval(mouseInProgress);
};
// Any mouse movement will be considered user activity
this.on('mousedown', handleMouseDown);
this.on('mousemove', handleMouseMove);
this.on('mouseup', handleMouseUpAndMouseLeave);
this.on('mouseleave', handleMouseUpAndMouseLeave);
const controlBar = this.getChild('controlBar');
// Fixes bug on Android & iOS where when tapping progressBar (when control bar is displayed)
// controlBar would no longer be hidden by default timeout.
if (controlBar && !IS_IOS && !IS_ANDROID) {
controlBar.on('mouseenter', function (event) {
if (this.player().options_.inactivityTimeout !== 0) {
this.player().cache_.inactivityTimeout = this.player().options_.inactivityTimeout;
}
this.player().options_.inactivityTimeout = 0;
});
controlBar.on('mouseleave', function (event) {
this.player().options_.inactivityTimeout = this.player().cache_.inactivityTimeout;
});
}
// Listen for keyboard navigation
// Shouldn't need to use inProgress interval because of key repeat
this.on('keydown', handleActivity);
this.on('keyup', handleActivity);
// Run an interval every 250 milliseconds instead of stuffing everything into
// the mousemove/touchmove function itself, to prevent performance degradation.
// `this.reportUserActivity` simply sets this.userActivity_ to true, which
// then gets picked up by this loop
// http://ejohn.org/blog/learning-from-twitter/
let inactivityTimeout;
/** @this Player */
const activityCheck = function () {
// Check to see if mouse/touch activity has happened
if (!this.userActivity_) {
return;
}
// Reset the activity tracker
this.userActivity_ = false;
// If the user state was inactive, set the state to active
this.userActive(true);
// Clear any existing inactivity timeout to start the timer over
this.clearTimeout(inactivityTimeout);
const timeout = this.options_.inactivityTimeout;
if (timeout <= 0) {
return;
}
// In milliseconds, if no more activity has occurred the
// user will be considered inactive
inactivityTimeout = this.setTimeout(function () {
// Protect against the case where the inactivityTimeout can trigger just
// before the next user activity is picked up by the activity check loop
// causing a flicker
if (!this.userActivity_) {
this.userActive(false);
}
}, timeout);
};
this.setInterval(activityCheck, 250);
}
/**
* Gets or sets the current playback rate. A playback rate of
* 1.0 represents normal speed and 0.5 would indicate half-speed
* playback, for instance.
*
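* For example, a minimal sketch (the player id is a placeholder):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* // play at double speed
* myPlayer.playbackRate(2);
* // get the current rate
* console.log(myPlayer.playbackRate()); // 2
* ```
*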
* @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-playbackrate
*
* @param {number} [rate]
* New playback rate to set.
*
* @return {number|undefined}
* - The current playback rate when getting or 1.0
* - Nothing when setting
*/
playbackRate(rate) {
if (rate !== undefined) {
// NOTE: this.cache_.lastPlaybackRate is set from the tech handler
// that is registered above
this.techCall_('setPlaybackRate', rate);
return;
}
if (this.tech_ && this.tech_.featuresPlaybackRate) {
return this.cache_.lastPlaybackRate || this.techGet_('playbackRate');
}
return 1.0;
}
/**
* Gets or sets the current default playback rate. A default playback rate of
* 1.0 represents normal speed and 0.5 would indicate half-speed playback, for instance.
* defaultPlaybackRate will only represent what the initial playbackRate of a video was,
* not the current playbackRate.
*
* @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-defaultplaybackrate
*
* @param {number} [rate]
* New default playback rate to set.
*
* @return {number|undefined}
* - The default playback rate when getting or 1.0
* - Nothing when setting
*/
defaultPlaybackRate(rate) {
if (rate !== undefined) {
return this.techCall_('setDefaultPlaybackRate', rate);
}
if (this.tech_ && this.tech_.featuresPlaybackRate) {
return this.techGet_('defaultPlaybackRate');
}
return 1.0;
}
/**
* Gets or sets the audio flag
*
* @param {boolean} [bool]
* - true signals that this is an audio player
* - false signals that this is not an audio player
*
* @return {boolean|undefined}
* - The current value of isAudio when getting
* - Nothing when setting
*/
isAudio(bool) {
if (bool !== undefined) {
this.isAudio_ = !!bool;
return;
}
return !!this.isAudio_;
}
updatePlayerHeightOnAudioOnlyMode_() {
const controlBar = this.getChild('ControlBar');
if (!controlBar || this.audioOnlyCache_.controlBarHeight === controlBar.currentHeight()) {
return;
}
this.audioOnlyCache_.controlBarHeight = controlBar.currentHeight();
this.height(this.audioOnlyCache_.controlBarHeight);
}
enableAudioOnlyUI_() {
// Update styling immediately to show the control bar so we can get its height
this.addClass('vjs-audio-only-mode');
const playerChildren = this.children();
const controlBar = this.getChild('ControlBar');
const controlBarHeight = controlBar && controlBar.currentHeight();
// Hide all player components except the control bar. Control bar components
// needed only for video are hidden with CSS
playerChildren.forEach(child => {
if (child === controlBar) {
return;
}
if (child.el_ && !child.hasClass('vjs-hidden')) {
child.hide();
this.audioOnlyCache_.hiddenChildren.push(child);
}
});
this.audioOnlyCache_.playerHeight = this.currentHeight();
this.audioOnlyCache_.controlBarHeight = controlBarHeight;
this.on('playerresize', this.boundUpdatePlayerHeightOnAudioOnlyMode_);
// Set the player height the same as the control bar
this.height(controlBarHeight);
this.trigger('audioonlymodechange');
}
disableAudioOnlyUI_() {
this.removeClass('vjs-audio-only-mode');
this.off('playerresize', this.boundUpdatePlayerHeightOnAudioOnlyMode_);
// Show player components that were previously hidden
this.audioOnlyCache_.hiddenChildren.forEach(child => child.show());
// Reset player height
this.height(this.audioOnlyCache_.playerHeight);
this.trigger('audioonlymodechange');
}
/**
* Get the current audioOnlyMode state or set audioOnlyMode to true or false.
*
* Setting this to `true` will hide all player components except the control bar,
* as well as control bar components needed only for video.
*
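* A minimal sketch (the player id is a placeholder):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.audioOnlyMode(true).then(function() {
*   // the audio-only UI is now active
* });
* ```
*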
* @param {boolean} [value]
* The value to set audioOnlyMode to.
*
* @return {Promise|boolean}
* A Promise is returned when setting the state, and a boolean when getting
* the present state
*/
audioOnlyMode(value) {
if (typeof value !== 'boolean' || value === this.audioOnlyMode_) {
return this.audioOnlyMode_;
}
this.audioOnlyMode_ = value;
// Enable Audio Only Mode
if (value) {
const exitPromises = [];
// Fullscreen and PiP are not supported in audioOnlyMode, so exit if we need to.
if (this.isInPictureInPicture()) {
exitPromises.push(this.exitPictureInPicture());
}
if (this.isFullscreen()) {
exitPromises.push(this.exitFullscreen());
}
if (this.audioPosterMode()) {
exitPromises.push(this.audioPosterMode(false));
}
return Promise.all(exitPromises).then(() => this.enableAudioOnlyUI_());
}
// Disable Audio Only Mode
return Promise.resolve().then(() => this.disableAudioOnlyUI_());
}
enablePosterModeUI_() {
// Hide the video element and show the poster image to enable posterModeUI
const tech = this.tech_;
tech.hide();
this.addClass('vjs-audio-poster-mode');
this.trigger('audiopostermodechange');
}
disablePosterModeUI_() {
// Show the video element and hide the poster image to disable posterModeUI
const tech = this.tech_;
tech.show();
this.removeClass('vjs-audio-poster-mode');
this.trigger('audiopostermodechange');
}
/**
* Get the current audioPosterMode state or set audioPosterMode to true or false
*
* @param {boolean} [value]
* The value to set audioPosterMode to.
*
* @return {Promise|boolean}
* A Promise is returned when setting the state, and a boolean when getting
* the present state
*/
audioPosterMode(value) {
if (typeof value !== 'boolean' || value === this.audioPosterMode_) {
return this.audioPosterMode_;
}
this.audioPosterMode_ = value;
if (value) {
if (this.audioOnlyMode()) {
const audioOnlyModePromise = this.audioOnlyMode(false);
return audioOnlyModePromise.then(() => {
// enable audio poster mode after audio only mode is disabled
this.enablePosterModeUI_();
});
}
return Promise.resolve().then(() => {
// enable audio poster mode
this.enablePosterModeUI_();
});
}
return Promise.resolve().then(() => {
// disable audio poster mode
this.disablePosterModeUI_();
});
}
/**
* A helper method for adding a {@link TextTrack} to our
* {@link TextTrackList}.
*
* In addition to the W3C settings we allow adding additional info through options.
*
* @see http://www.w3.org/html/wg/drafts/html/master/embedded-content-0.html#dom-media-addtexttrack
*
* @param {string} [kind]
* the kind of TextTrack you are adding
*
* @param {string} [label]
* the label to give the TextTrack
*
* @param {string} [language]
* the language to set on the TextTrack
*
* @return {TextTrack|undefined}
* the TextTrack that was added or undefined
* if there is no tech
*/
addTextTrack(kind, label, language) {
if (this.tech_) {
return this.tech_.addTextTrack(kind, label, language);
}
}
/**
* Create a remote {@link TextTrack} and an {@link HTMLTrackElement}.
*
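* A minimal sketch (the player id and track URL are placeholders):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.addRemoteTextTrack({
*   kind: 'captions',
*   srclang: 'en',
*   label: 'English',
*   src: 'http://www.example.com/path/to/captions.vtt'
* }, false); // false: clean the track up automatically on source changes
* ```
*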
* @param {Object} options
* Options to pass to {@link HTMLTrackElement} during creation. See
* {@link HTMLTrackElement} for object properties that you should use.
*
* @param {boolean} [manualCleanup=false] if set to true, the TextTrack will not be removed
* from the TextTrackList and HtmlTrackElementList
* after a source change
*
* @return {HtmlTrackElement}
* the HTMLTrackElement that was created and added
* to the HtmlTrackElementList and the remote
* TextTrackList
*
*/
addRemoteTextTrack(options, manualCleanup) {
if (this.tech_) {
return this.tech_.addRemoteTextTrack(options, manualCleanup);
}
}
/**
* Remove a remote {@link TextTrack} from the respective
* {@link TextTrackList} and {@link HtmlTrackElementList}.
*
* @param {Object} track
* Remote {@link TextTrack} to remove
*
* @return {undefined}
* does not return anything
*/
removeRemoteTextTrack(obj = {}) {
// destructure the input into an object with a track argument, defaulting to arguments[0]
// default the whole argument to an empty object if nothing was passed in
let {
track
} = obj;
if (!track) {
track = obj;
}
if (this.tech_) {
return this.tech_.removeRemoteTextTrack(track);
}
}
/**
* Gets available media playback quality metrics as specified by the W3C's Media
* Playback Quality API.
*
* @see [Spec]{@link https://wicg.github.io/media-playback-quality}
*
* @return {Object|undefined}
* An object with supported media playback quality metrics or undefined if there
* is no tech or the tech does not support it.
*/
getVideoPlaybackQuality() {
return this.techGet_('getVideoPlaybackQuality');
}
/**
* Get video width
*
* @return {number}
* current video width
*/
videoWidth() {
return this.tech_ && this.tech_.videoWidth && this.tech_.videoWidth() || 0;
}
/**
* Get video height
*
* @return {number}
* current video height
*/
videoHeight() {
return this.tech_ && this.tech_.videoHeight && this.tech_.videoHeight() || 0;
}
/**
* Set or get the player's language code.
*
* Changing the language will trigger
* [languagechange]{@link Player#event:languagechange}
* which Components can use to update control text.
* ClickableComponent will update its control text by default on
* [languagechange]{@link Player#event:languagechange}.
*
* @fires Player#languagechange
*
* @param {string} [code]
* the language code to set the player to
*
* @return {string|undefined}
* - The current language code when getting
* - Nothing when setting
*/
language(code) {
if (code === undefined) {
return this.language_;
}
if (this.language_ !== String(code).toLowerCase()) {
this.language_ = String(code).toLowerCase();
// during first init, it's possible some things won't be evented
if (isEvented(this)) {
/**
* fires when the player language changes
*
* @event Player#languagechange
* @type {Event}
*/
this.trigger('languagechange');
}
}
}
/**
* Get the player's language dictionary
* Merge every time, because a newly added plugin might call videojs.addLanguage() at any time
* Languages specified directly in the player options have precedence
*
* @return {Object}
* An object of supported language dictionaries
*/
languages() {
return merge$1(Player.prototype.options_.languages, this.languages_);
}
/**
* returns a JavaScript object representing the current track
* information. **DOES not return it as JSON**
*
* @return {Object}
* Object representing the current track info
*/
toJSON() {
const options = merge$1(this.options_);
const tracks = options.tracks;
options.tracks = [];
for (let i = 0; i < tracks.length; i++) {
let track = tracks[i];
// deep merge tracks and null out player so no circular references
track = merge$1(track);
track.player = undefined;
options.tracks[i] = track;
}
return options;
}
/**
* Creates a simple modal dialog (an instance of the {@link ModalDialog}
* component) that immediately overlays the player with arbitrary
* content and removes itself when closed.
*
* @param {string|Function|Element|Array|null} content
* Same as {@link ModalDialog#content}'s param of the same name.
* The most straight-forward usage is to provide a string or DOM
* element.
*
* @param {Object} [options]
* Extra options which will be passed on to the {@link ModalDialog}.
*
* @return {ModalDialog}
* the {@link ModalDialog} that was created
*/
createModal(content, options) {
options = options || {};
options.content = content || '';
const modal = new ModalDialog(this, options);
this.addChild(modal);
modal.on('dispose', () => {
this.removeChild(modal);
});
modal.open();
return modal;
}
/**
* Change breakpoint classes when the player resizes.
*
* @private
*/
updateCurrentBreakpoint_() {
if (!this.responsive()) {
return;
}
const currentBreakpoint = this.currentBreakpoint();
const currentWidth = this.currentWidth();
for (let i = 0; i < BREAKPOINT_ORDER.length; i++) {
const candidateBreakpoint = BREAKPOINT_ORDER[i];
const maxWidth = this.breakpoints_[candidateBreakpoint];
if (currentWidth <= maxWidth) {
// The current breakpoint did not change, nothing to do.
if (currentBreakpoint === candidateBreakpoint) {
return;
}
// Only remove a class if there is a current breakpoint.
if (currentBreakpoint) {
this.removeClass(BREAKPOINT_CLASSES[currentBreakpoint]);
}
this.addClass(BREAKPOINT_CLASSES[candidateBreakpoint]);
this.breakpoint_ = candidateBreakpoint;
break;
}
}
}
/**
* Removes the current breakpoint.
*
* @private
*/
removeCurrentBreakpoint_() {
const className = this.currentBreakpointClass();
this.breakpoint_ = '';
if (className) {
this.removeClass(className);
}
}
/**
* Get or set breakpoints on the player.
*
* Calling this method with an object or `true` will remove any previous
* custom breakpoints and start from the defaults again.
*
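* A minimal sketch (the player id and pixel values are placeholders):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* // reset to the default breakpoints
* myPlayer.breakpoints(true);
* // override a subset of breakpoints, keeping defaults for the rest
* myPlayer.breakpoints({tiny: 300, small: 600});
* // read the current breakpoints back
* console.log(myPlayer.breakpoints());
* ```
*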
* @param {Object|boolean} [breakpoints]
* If an object is given, it can be used to provide custom
* breakpoints. If `true` is given, will set default breakpoints.
* If this argument is not given, will simply return the current
* breakpoints.
*
* @param {number} [breakpoints.tiny]
* The maximum width for the "vjs-layout-tiny" class.
*
* @param {number} [breakpoints.xsmall]
* The maximum width for the "vjs-layout-x-small" class.
*
* @param {number} [breakpoints.small]
* The maximum width for the "vjs-layout-small" class.
*
* @param {number} [breakpoints.medium]
* The maximum width for the "vjs-layout-medium" class.
*
* @param {number} [breakpoints.large]
* The maximum width for the "vjs-layout-large" class.
*
* @param {number} [breakpoints.xlarge]
* The maximum width for the "vjs-layout-x-large" class.
*
* @param {number} [breakpoints.huge]
* The maximum width for the "vjs-layout-huge" class.
*
* @return {Object}
* An object mapping breakpoint names to maximum width values.
*/
breakpoints(breakpoints) {
// Used as a getter.
if (breakpoints === undefined) {
return Object.assign({}, this.breakpoints_);
}
this.breakpoint_ = '';
this.breakpoints_ = Object.assign({}, DEFAULT_BREAKPOINTS, breakpoints);
// When breakpoint definitions change, we need to update the currently
// selected breakpoint.
this.updateCurrentBreakpoint_();
// Clone the breakpoints before returning.
return Object.assign({}, this.breakpoints_);
}
/**
* Get or set a flag indicating whether or not this player should adjust
* its UI based on its dimensions.
*
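* A minimal sketch (the player id is a placeholder):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* // opt the player into breakpoint-based layout classes
* myPlayer.responsive(true);
* ```
*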
* @param {boolean} [value]
* Should be `true` if the player should adjust its UI based on its
* dimensions; otherwise, should be `false`.
*
* @return {boolean|undefined}
* Will be `true` if this player should adjust its UI based on its
* dimensions; otherwise, will be `false`.
* Nothing if setting
*/
responsive(value) {
// Used as a getter.
if (value === undefined) {
return this.responsive_;
}
value = Boolean(value);
const current = this.responsive_;
// Nothing changed.
if (value === current) {
return;
}
// The value actually changed, set it.
this.responsive_ = value;
// Start listening for breakpoints and set the initial breakpoint if the
// player is now responsive.
if (value) {
this.on('playerresize', this.boundUpdateCurrentBreakpoint_);
this.updateCurrentBreakpoint_();
// Stop listening for breakpoints if the player is no longer responsive.
} else {
this.off('playerresize', this.boundUpdateCurrentBreakpoint_);
this.removeCurrentBreakpoint_();
}
return value;
}
/**
* Get current breakpoint name, if any.
*
* @return {string}
* If there is currently a breakpoint set, returns the key from the
* breakpoints object matching it. Otherwise, returns an empty string.
*/
currentBreakpoint() {
return this.breakpoint_;
}
/**
* Get the current breakpoint class name.
*
* @return {string}
* The matching class name (e.g. `"vjs-layout-tiny"` or
* `"vjs-layout-large"`) for the current breakpoint. Empty string if
* there is no current breakpoint.
*/
currentBreakpointClass() {
return BREAKPOINT_CLASSES[this.breakpoint_] || '';
}
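/**
* A brief usage sketch of the responsive/breakpoint API above. The player id
* "my-video" and the breakpoint values are illustrative assumptions;
* unspecified keys fall back to the defaults.
*
* @example
* const player = videojs.getPlayer('my-video');
* player.responsive(true); // start listening for "playerresize"
* player.breakpoints({ tiny: 300, small: 600, large: 1000 });
* player.on('playerresize', () => {
*   videojs.log(player.currentBreakpoint(), player.currentBreakpointClass());
* });
*/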
/**
* An object that describes a single piece of media.
*
* Properties that are not part of this type description will be retained; so,
* this can be viewed as a generic metadata storage mechanism as well.
*
* @see {@link https://wicg.github.io/mediasession/#the-mediametadata-interface}
* @typedef {Object} Player~MediaObject
*
* @property {string} [album]
* Unused, except if this object is passed to the `MediaSession`
* API.
*
* @property {string} [artist]
* Unused, except if this object is passed to the `MediaSession`
* API.
*
* @property {Object[]} [artwork]
* Unused, except if this object is passed to the `MediaSession`
* API. If not specified, will be populated via the `poster`, if
* available.
*
* @property {string} [poster]
* URL to an image that will display before playback.
*
* @property {Tech~SourceObject|Tech~SourceObject[]|string} [src]
* A single source object, an array of source objects, or a string
* referencing a URL to a media source. It is _highly recommended_
* that an object or array of objects is used here, so that source
* selection algorithms can take the `type` into account.
*
* @property {string} [title]
* Unused, except if this object is passed to the `MediaSession`
* API.
*
* @property {Object[]} [textTracks]
* An array of objects to be used to create text tracks, following
* the {@link https://www.w3.org/TR/html50/embedded-content-0.html#the-track-element|native track element format}.
* For ease of removal, these will be created as "remote" text
* tracks and set to automatically clean up on source changes.
*
* These objects may have properties like `src`, `kind`, `label`,
* and `language`, see {@link Tech#createRemoteTextTrack}.
*/
/**
* Populate the player using a {@link Player~MediaObject|MediaObject}.
*
* @param {Player~MediaObject} media
* A media object.
*
* @param {Function} ready
* A callback to be called when the player is ready.
*/
loadMedia(media, ready) {
if (!media || typeof media !== 'object') {
return;
}
const crossOrigin = this.crossOrigin();
this.reset();
// Clone the media object so it cannot be mutated from outside.
this.cache_.media = merge$1(media);
const {
artist,
artwork,
description,
poster,
src,
textTracks,
title
} = this.cache_.media;
// If `artwork` is not given, create it using `poster`.
if (!artwork && poster) {
this.cache_.media.artwork = [{
src: poster,
type: getMimetype(poster)
}];
}
if (crossOrigin) {
this.crossOrigin(crossOrigin);
}
if (src) {
this.src(src);
}
if (poster) {
this.poster(poster);
}
if (Array.isArray(textTracks)) {
textTracks.forEach(tt => this.addRemoteTextTrack(tt, false));
}
if (this.titleBar) {
this.titleBar.update({
title,
description: description || artist || ''
});
}
this.ready(ready);
}
/**
* Get a clone of the current {@link Player~MediaObject} for this player.
*
* If the `loadMedia` method has not been used, will attempt to return a
* {@link Player~MediaObject} based on the current state of the player.
*
* @return {Player~MediaObject}
*/
getMedia() {
if (!this.cache_.media) {
const poster = this.poster();
const src = this.currentSources();
const textTracks = Array.prototype.map.call(this.remoteTextTracks(), tt => ({
kind: tt.kind,
label: tt.label,
language: tt.language,
src: tt.src
}));
const media = {
src,
textTracks
};
if (poster) {
media.poster = poster;
media.artwork = [{
src: media.poster,
type: getMimetype(media.poster)
}];
}
return media;
}
return merge$1(this.cache_.media);
}
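/**
* A usage sketch for loadMedia()/getMedia() above. The URLs, title/artist
* strings and the player id are illustrative assumptions.
*
* @example
* const player = videojs.getPlayer('my-video');
* player.loadMedia({
*   title: 'Example Title',
*   artist: 'Example Artist',
*   poster: 'https://example.com/poster.jpg',
*   src: [{ src: 'https://example.com/video.mp4', type: 'video/mp4' }],
*   textTracks: [{ src: 'https://example.com/captions.vtt', kind: 'captions', label: 'English', language: 'en' }]
* }, function onMediaReady() {
*   // getMedia() now returns a clone of the object passed above.
*   videojs.log(player.getMedia().title);
* });
*/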
/**
* Gets tag settings
*
* @param {Element} tag
* The player tag
*
* @return {Object}
* An object containing all of the settings
* for a player tag
*/
static getTagSettings(tag) {
const baseOptions = {
sources: [],
tracks: []
};
const tagOptions = getAttributes(tag);
const dataSetup = tagOptions['data-setup'];
if (hasClass(tag, 'vjs-fill')) {
tagOptions.fill = true;
}
if (hasClass(tag, 'vjs-fluid')) {
tagOptions.fluid = true;
}
// Check if data-setup attr exists.
if (dataSetup !== null) {
// Parse options JSON
try {
// If empty string, make it a parsable json object.
Object.assign(tagOptions, JSON.parse(dataSetup || '{}'));
} catch (e) {
log$1.error('data-setup', e);
}
}
Object.assign(baseOptions, tagOptions);
// Get tag children settings
if (tag.hasChildNodes()) {
const children = tag.childNodes;
for (let i = 0, j = children.length; i < j; i++) {
const child = children[i];
// Change case needed: http://ejohn.org/blog/nodename-case-sensitivity/
const childName = child.nodeName.toLowerCase();
if (childName === 'source') {
baseOptions.sources.push(getAttributes(child));
} else if (childName === 'track') {
baseOptions.tracks.push(getAttributes(child));
}
}
}
return baseOptions;
}
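/**
* A sketch of what getTagSettings() produces. The element is built with DOM
* APIs so the snippet stays self-contained; attribute values are illustrative
* assumptions.
*
* @example
* const tag = document.createElement('video');
* tag.className = 'video-js';
* tag.setAttribute('data-setup', '{"fluid": true}');
* const source = document.createElement('source');
* source.setAttribute('src', 'https://example.com/video.mp4');
* source.setAttribute('type', 'video/mp4');
* tag.appendChild(source);
* const Player = videojs.getComponent('Player');
* // -> roughly { sources: [{ src: 'https://example.com/video.mp4', type: 'video/mp4' }], tracks: [], fluid: true, ... }
* //    (plus the raw tag attributes such as `class` and `data-setup`)
* Player.getTagSettings(tag);
*/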
/**
* Set debug mode to enable/disable logs at info level.
*
* @param {boolean} [enabled]
* Pass `true` to enable debug-level logging, `false` to restore the
* previous log level; omit to read the current state.
* @fires Player#debugon
* @fires Player#debugoff
* @return {boolean|undefined}
* The current debug state when used as a getter; `undefined` when setting.
*/
debug(enabled) {
if (enabled === undefined) {
return this.debugEnabled_;
}
if (enabled) {
this.trigger('debugon');
this.previousLogLevel_ = this.log.level;
this.log.level('debug');
this.debugEnabled_ = true;
} else {
this.trigger('debugoff');
this.log.level(this.previousLogLevel_);
this.previousLogLevel_ = undefined;
this.debugEnabled_ = false;
}
}
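/**
* Usage sketch for debug(), assuming `player` is an existing player instance.
*
* @example
* player.debug(true);  // switch this player's logger to the "debug" level
* player.debug();      // -> true
* player.debug(false); // restore the previous log level
*/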
/**
* Set or get current playback rates.
* Takes an array and updates the playback rates menu with the new items.
* Pass in an empty array to hide the menu.
* Values other than arrays are ignored.
*
* @fires Player#playbackrateschange
* @param {number[]} [newRates]
* The new rates that the playback rates menu should update to.
* An empty array will hide the menu
* @return {number[]} When used as a getter, returns the current playback rates.
*/
playbackRates(newRates) {
if (newRates === undefined) {
return this.cache_.playbackRates;
}
// ignore any value that isn't an array
if (!Array.isArray(newRates)) {
return;
}
// ignore any arrays that don't only contain numbers
if (!newRates.every(rate => typeof rate === 'number')) {
return;
}
this.cache_.playbackRates = newRates;
/**
* fires when the playback rates in a player are changed
*
* @event Player#playbackrateschange
* @type {Event}
*/
this.trigger('playbackrateschange');
}
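/**
* Usage sketch for playbackRates(), assuming `player` is an existing player
* instance; the rate values are arbitrary.
*
* @example
* player.playbackRates([0.5, 1, 1.5, 2]); // populate the playback rate menu
* player.on('playbackrateschange', () => {
*   videojs.log('rates are now', player.playbackRates());
* });
* player.playbackRates([]); // hide the menu again
*/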
/**
* Reports whether or not a player has a plugin available.
*
* This does not report whether or not the plugin has ever been initialized
* on this player. For that, see [usingPlugin]{@link Player#usingPlugin}.
*
* @method hasPlugin
* @param {string} name
* The name of a plugin.
*
* @return {boolean}
* Whether or not this player has the requested plugin available.
*/
/**
* Reports whether or not a player is using a plugin by name.
*
* For basic plugins, this only reports whether the plugin has _ever_ been
* initialized on this player.
*
* @method Player#usingPlugin
* @param {string} name
* The name of a plugin.
*
* @return {boolean}
* Whether or not this player is using the requested plugin.
*/
}
/**
* Get the {@link VideoTrackList}
*
* @link https://html.spec.whatwg.org/multipage/embedded-content.html#videotracklist
*
* @return {VideoTrackList}
* the current video track list
*
* @method Player.prototype.videoTracks
*/
/**
* Get the {@link AudioTrackList}
*
* @link https://html.spec.whatwg.org/multipage/embedded-content.html#audiotracklist
*
* @return {AudioTrackList}
* the current audio track list
*
* @method Player.prototype.audioTracks
*/
/**
* Get the {@link TextTrackList}
*
* @link http://www.w3.org/html/wg/drafts/html/master/embedded-content-0.html#dom-media-texttracks
*
* @return {TextTrackList}
* the current text track list
*
* @method Player.prototype.textTracks
*/
/**
* Get the remote {@link TextTrackList}
*
* @return {TextTrackList}
* The current remote text track list
*
* @method Player.prototype.remoteTextTracks
*/
/**
* Get the remote {@link HtmlTrackElementList} tracks.
*
* @return {HtmlTrackElementList}
* The current remote text track element list
*
* @method Player.prototype.remoteTextTrackEls
*/
ALL.names.forEach(function (name) {
const props = ALL[name];
Player.prototype[props.getterName] = function () {
if (this.tech_) {
return this.tech_[props.getterName]();
}
// if we have not yet run loadTech_, create {video,audio,text}Tracks_
// these will be passed to the tech during loading
this[props.privateName] = this[props.privateName] || new props.ListClass();
return this[props.privateName];
};
});
/**
* Get or set the `Player`'s crossorigin option. For the HTML5 player, this
* sets the `crossOrigin` property on the `<video>` tag to control the CORS
* behavior.
*
* @see [Video Element Attributes]{@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video#attr-crossorigin}
*
* @param {string} [value]
* The value to set the `Player`'s crossorigin to. If an argument is
* given, must be one of `anonymous` or `use-credentials`.
*
* @return {string|undefined}
* - The current crossorigin value of the `Player` when getting.
* - undefined when setting
*/
Player.prototype.crossorigin = Player.prototype.crossOrigin;
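/**
* Usage sketch for crossOrigin(), assuming `player` is an existing player
* instance backed by the HTML5 tech.
*
* @example
* player.crossOrigin('anonymous'); // request media without sending credentials
* player.crossOrigin();            // -> 'anonymous'
*/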
/**
* Global enumeration of players.
*
* The keys are the player IDs and the values are either the {@link Player}
* instance or `null` for disposed players.
*
* @type {Object}
*/
Player.players = {};
const navigator = window$1.navigator;
/*
* Player instance options, surfaced using options
* options = Player.prototype.options_
* Make changes in options, not here.
*
* @type {Object}
* @private
*/
Player.prototype.options_ = {
// Default order of fallback technology
techOrder: Tech.defaultTechOrder_,
html5: {},
// enable sourceset by default
enableSourceset: true,
// default inactivity timeout
inactivityTimeout: 2000,
// default playback rates
playbackRates: [],
// Add playback rate selection by adding rates
// 'playbackRates': [0.5, 1, 1.5, 2],
liveui: false,
// Included control sets
children: ['mediaLoader', 'posterImage', 'titleBar', 'textTrackDisplay', 'loadingSpinner', 'bigPlayButton', 'liveTracker', 'controlBar', 'errorDisplay', 'textTrackSettings', 'resizeManager'],
language: navigator && (navigator.languages && navigator.languages[0] || navigator.userLanguage || navigator.language) || 'en',
// locales and their language translations
languages: {},
// Default message to show when a video cannot be played.
notSupportedMessage: 'No compatible source was found for this media.',
normalizeAutoplay: false,
fullscreen: {
options: {
navigationUI: 'hide'
}
},
breakpoints: {},
responsive: false,
audioOnlyMode: false,
audioPosterMode: false,
spatialNavigation: {
enabled: false,
horizontalSeek: false
},
// Default smooth seeking to false
enableSmoothSeeking: false
};
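/**
* A sketch of how these defaults are typically overridden when creating a
* player; the element id and every option value below are illustrative
* assumptions, not recommended settings.
*
* @example
* const player = videojs('my-video', {
*   playbackRates: [0.5, 1, 1.5, 2],
*   responsive: true,
*   inactivityTimeout: 4000,
*   languages: { es: { Play: 'Reproducir' } }
* });
*/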
TECH_EVENTS_RETRIGGER.forEach(function (event) {
Player.prototype[`handleTech${toTitleCase$1(event)}_`] = function () {
return this.trigger(event);
};
});
/**
* Fired when the player has initial duration and dimension information
*
* @event Player#loadedmetadata
* @type {Event}
*/
/**
* Fired when the player has downloaded data at the current playback position
*
* @event Player#loadeddata
* @type {Event}
*/
/**
* Fired when the current playback position has changed.
* During playback this is fired every 15-250 milliseconds, depending on the
* playback technology in use.
*
* @event Player#timeupdate
* @type {Event}
*/
/**
* Fired when the volume changes
*
* @event Player#volumechange
* @type {Event}
*/
Component$1.registerComponent('Player', Player);
/**
* @file plugin.js
*/
/**
* The base plugin name.
*
* @private
* @constant
* @type {string}
*/
const BASE_PLUGIN_NAME = 'plugin';
/**
* The key on which a player's active plugins cache is stored.
*
* @private
* @constant
* @type {string}
*/
const PLUGIN_CACHE_KEY = 'activePlugins_';
/**
* Stores registered plugins in a private space.
*
* @private
* @type {Object}
*/
const pluginStorage = {};
/**
* Reports whether or not a plugin has been registered.
*
* @private
* @param {string} name
* The name of a plugin.
*
* @return {boolean}
* Whether or not the plugin has been registered.
*/
const pluginExists = name => pluginStorage.hasOwnProperty(name);
/**
* Get a single registered plugin by name.
*
* @private
* @param {string} name
* The name of a plugin.
*
* @return {typeof Plugin|Function|undefined}
* The plugin (or undefined).
*/
const getPlugin = name => pluginExists(name) ? pluginStorage[name] : undefined;
/**
* Marks a plugin as "active" on a player.
*
* Also, ensures that the player has an object for tracking active plugins.
*
* @private
* @param {Player} player
* A Video.js player instance.
*
* @param {string} name
* The name of a plugin.
*/
const markPluginAsActive = (player, name) => {
player[PLUGIN_CACHE_KEY] = player[PLUGIN_CACHE_KEY] || {};
player[PLUGIN_CACHE_KEY][name] = true;
};
/**
* Triggers a pair of plugin setup events.
*
* @private
* @param {Player} player
* A Video.js player instance.
*
* @param {PluginEventHash} hash
* A plugin event hash.
*
* @param {boolean} [before]
* If true, prefixes the event name with "before". In other words,
* use this to trigger "beforepluginsetup" instead of "pluginsetup".
*/
const triggerSetupEvent = (player, hash, before) => {
const eventName = (before ? 'before' : '') + 'pluginsetup';
player.trigger(eventName, hash);
player.trigger(eventName + ':' + hash.name, hash);
};
/**
* Takes a basic plugin function and returns a wrapper function which marks
* on the player that the plugin has been activated.
*
* @private
* @param {string} name
* The name of the plugin.
*
* @param {Function} plugin
* The basic plugin.
*
* @return {Function}
* A wrapper function for the given plugin.
*/
const createBasicPlugin = function (name, plugin) {
const basicPluginWrapper = function () {
// We trigger the "beforepluginsetup" and "pluginsetup" events on the player
// regardless, but we want the hash to be consistent with the hash provided
// for advanced plugins.
//
// The only potentially counter-intuitive thing here is the `instance` in
// the "pluginsetup" event is the value returned by the `plugin` function.
triggerSetupEvent(this, {
name,
plugin,
instance: null
}, true);
const instance = plugin.apply(this, arguments);
markPluginAsActive(this, name);
triggerSetupEvent(this, {
name,
plugin,
instance
});
return instance;
};
Object.keys(plugin).forEach(function (prop) {
basicPluginWrapper[prop] = plugin[prop];
});
return basicPluginWrapper;
};
/**
* Takes a plugin sub-class and returns a factory function for generating
* instances of it.
*
* This factory function will replace itself with a function that returns the
* created instance of the requested sub-class of Plugin.
*
* @private
* @param {string} name
* The name of the plugin.
*
* @param {Plugin} PluginSubClass
* The advanced plugin.
*
* @return {Function}
*/
const createPluginFactory = (name, PluginSubClass) => {
// Add a `name` property to the plugin prototype so that each plugin can
// refer to itself by name.
PluginSubClass.prototype.name = name;
return function (...args) {
triggerSetupEvent(this, {
name,
plugin: PluginSubClass,
instance: null
}, true);
const instance = new PluginSubClass(...[this, ...args]);
// The plugin is replaced by a function that returns the current instance.
this[name] = () => instance;
triggerSetupEvent(this, instance.getEventHash());
return instance;
};
};
/**
* Parent class for all advanced plugins.
*
* @mixes module:evented~EventedMixin
* @mixes module:stateful~StatefulMixin
* @fires Player#beforepluginsetup
* @fires Player#beforepluginsetup:$name
* @fires Player#pluginsetup
* @fires Player#pluginsetup:$name
* @listens Player#dispose
* @throws {Error}
* If attempting to instantiate the base {@link Plugin} class
* directly instead of via a sub-class.
*/
class Plugin {
/**
* Creates an instance of this class.
*
* Sub-classes should call `super` to ensure plugins are properly initialized.
*
* @param {Player} player
* A Video.js player instance.
*/
constructor(player) {
if (this.constructor === Plugin) {
throw new Error('Plugin must be sub-classed; not directly instantiated.');
}
this.player = player;
if (!this.log) {
this.log = this.player.log.createLogger(this.name);
}
// Make this object evented, but remove the added `trigger` method so we
// use the prototype version instead.
evented(this);
delete this.trigger;
stateful(this, this.constructor.defaultState);
markPluginAsActive(player, this.name);
// Auto-bind the dispose method so we can use it as a listener and unbind
// it later easily.
this.dispose = this.dispose.bind(this);
// If the player is disposed, dispose the plugin.
player.on('dispose', this.dispose);
}
/**
* Get the version of the plugin, as set on the constructor's static `VERSION` property.
*/
version() {
return this.constructor.VERSION;
}
/**
* Each event triggered by plugins includes a hash of additional data with
* conventional properties.
*
* This returns that object or mutates an existing hash.
*
* @param {Object} [hash={}]
* An object to be used as an event hash.
*
* @return {PluginEventHash}
* An event hash object with provided properties mixed-in.
*/
getEventHash(hash = {}) {
hash.name = this.name;
hash.plugin = this.constructor;
hash.instance = this;
return hash;
}
/**
* Triggers an event on the plugin object and overrides
* {@link module:evented~EventedMixin.trigger|EventedMixin.trigger}.
*
* @param {string|Object} event
* An event type or an object with a type property.
*
* @param {Object} [hash={}]
* Additional data hash to merge with a
* {@link PluginEventHash|PluginEventHash}.
*
* @return {boolean}
* Whether or not default was prevented.
*/
trigger(event, hash = {}) {
return trigger(this.eventBusEl_, event, this.getEventHash(hash));
}
/**
* Handles "statechanged" events on the plugin. No-op by default, override by
* subclassing.
*
* @abstract
* @param {Event} e
* An event object provided by a "statechanged" event.
*
* @param {Object} e.changes
* An object describing changes that occurred with the "statechanged"
* event.
*/
handleStateChanged(e) {}
/**
* Disposes a plugin.
*
* Subclasses can override this if they want, but for the sake of safety,
* it's probably best to subscribe to the "dispose" event.
*
* @fires Plugin#dispose
*/
dispose() {
const {
name,
player
} = this;
/**
* Signals that an advanced plugin is about to be disposed.
*
* @event Plugin#dispose
* @type {Event}
*/
this.trigger('dispose');
this.off();
player.off('dispose', this.dispose);
// Eliminate any possible sources of leaking memory by clearing up
// references between the player and the plugin instance and nulling out
// the plugin's state.
player[PLUGIN_CACHE_KEY][name] = false;
this.player = this.state = null;
// Finally, replace the plugin name on the player with a new factory
// function, so that the plugin is ready to be set up again.
player[name] = createPluginFactory(name, pluginStorage[name]);
}
/**
* Determines if a plugin is a basic plugin (i.e. not a sub-class of `Plugin`).
*
* @param {string|Function} plugin
* If a string, matches the name of a plugin. If a function, will be
* tested directly.
*
* @return {boolean}
* Whether or not a plugin is a basic plugin.
*/
static isBasic(plugin) {
const p = typeof plugin === 'string' ? getPlugin(plugin) : plugin;
return typeof p === 'function' && !Plugin.prototype.isPrototypeOf(p.prototype);
}
/**
* Register a Video.js plugin.
*
* @param {string} name
* The name of the plugin to be registered. Must be a string and
* must not match an existing plugin or a method on the `Player`
* prototype.
*
* @param {typeof Plugin|Function} plugin
* A sub-class of `Plugin` or a function for basic plugins.
*
* @return {typeof Plugin|Function}
* For advanced plugins, a factory function for that plugin. For
* basic plugins, a wrapper function that initializes the plugin.
*/
static registerPlugin(name, plugin) {
if (typeof name !== 'string') {
throw new Error(`Illegal plugin name, "${name}", must be a string, was ${typeof name}.`);
}
if (pluginExists(name)) {
log$1.warn(`A plugin named "${name}" already exists. You may want to avoid re-registering plugins!`);
} else if (Player.prototype.hasOwnProperty(name)) {
throw new Error(`Illegal plugin name, "${name}", cannot share a name with an existing player method!`);
}
if (typeof plugin !== 'function') {
throw new Error(`Illegal plugin for "${name}", must be a function, was ${typeof plugin}.`);
}
pluginStorage[name] = plugin;
// Add a player prototype method for all sub-classed plugins (but not for
// the base Plugin class).
if (name !== BASE_PLUGIN_NAME) {
if (Plugin.isBasic(plugin)) {
Player.prototype[name] = createBasicPlugin(name, plugin);
} else {
Player.prototype[name] = createPluginFactory(name, plugin);
}
}
return plugin;
}
/**
* De-register a Video.js plugin.
*
* @param {string} name
* The name of the plugin to be de-registered. Must be a string that
* matches an existing plugin.
*
* @throws {Error}
* If an attempt is made to de-register the base plugin.
*/
static deregisterPlugin(name) {
if (name === BASE_PLUGIN_NAME) {
throw new Error('Cannot de-register base plugin.');
}
if (pluginExists(name)) {
delete pluginStorage[name];
delete Player.prototype[name];
}
}
/**
* Gets an object containing multiple Video.js plugins.
*
* @param {Array} [names]
* If provided, should be an array of plugin names. Defaults to _all_
* plugin names.
*
* @return {Object|undefined}
* An object containing plugin(s) associated with their name(s) or
* `undefined` if no matching plugins exist.
*/
static getPlugins(names = Object.keys(pluginStorage)) {
let result;
names.forEach(name => {
const plugin = getPlugin(name);
if (plugin) {
result = result || {};
result[name] = plugin;
}
});
return result;
}
/**
* Gets a plugin's version, if available
*
* @param {string} name
* The name of a plugin.
*
* @return {string}
* The plugin's version or an empty string.
*/
static getPluginVersion(name) {
const plugin = getPlugin(name);
return plugin && plugin.VERSION || '';
}
}
/**
* Gets a plugin by name if it exists.
*
* @static
* @method getPlugin
* @memberOf Plugin
* @param {string} name
* The name of a plugin.
*
* @returns {typeof Plugin|Function|undefined}
* The plugin (or `undefined`).
*/
Plugin.getPlugin = getPlugin;
/**
* The name of the base plugin class as it is registered.
*
* @type {string}
*/
Plugin.BASE_PLUGIN_NAME = BASE_PLUGIN_NAME;
Plugin.registerPlugin(BASE_PLUGIN_NAME, Plugin);
/**
* Documented in player.js
*
* @ignore
*/
Player.prototype.usingPlugin = function (name) {
return !!this[PLUGIN_CACHE_KEY] && this[PLUGIN_CACHE_KEY][name] === true;
};
/**
* Documented in player.js
*
* @ignore
*/
Player.prototype.hasPlugin = function (name) {
return !!pluginExists(name);
};
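/**
* Usage sketches for both plugin flavors registered through
* Plugin.registerPlugin()/videojs.registerPlugin(). The plugin names, class
* and behavior are illustrative assumptions.
*
* @example
* // Basic plugin: a plain function, invoked with the player as `this`.
* videojs.registerPlugin('logGreeting', function (options) {
*   this.log('hello from', options && options.name);
* });
*
* // Advanced plugin: a sub-class of the base Plugin class.
* const BasePlugin = videojs.getPlugin('plugin');
* class Watermark extends BasePlugin {
*   constructor(player, options) {
*     super(player);
*     this.player.addClass('vjs-watermark');
*   }
* }
* videojs.registerPlugin('watermark', Watermark);
*
* // On an existing player instance:
* // player.logGreeting({ name: 'world' });
* // player.watermark();
* // player.usingPlugin('watermark'); // -> true after the call above
* // player.hasPlugin('watermark');   // -> true as soon as it is registered
*/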
/**
* Signals that a plugin is about to be set up on a player.
*
* @event Player#beforepluginsetup
* @type {PluginEventHash}
*/
/**
* Signals that a plugin is about to be set up on a player - by name. The name
* is the name of the plugin.
*
* @event Player#beforepluginsetup:$name
* @type {PluginEventHash}
*/
/**
* Signals that a plugin has just been set up on a player.
*
* @event Player#pluginsetup
* @type {PluginEventHash}
*/
/**
* Signals that a plugin has just been set up on a player - by name. The name
* is the name of the plugin.
*
* @event Player#pluginsetup:$name
* @type {PluginEventHash}
*/
/**
* @typedef {Object} PluginEventHash
*
* @property {Object} instance
* For basic plugins, the return value of the plugin function. For
* advanced plugins, the plugin instance on which the event is fired.
*
* @property {string} name
* The name of the plugin.
*
* @property {typeof Plugin|Function} plugin
* For basic plugins, the plugin function. For advanced plugins, the
* plugin class/constructor.
*/
/**
* @file deprecate.js
* @module deprecate
*/
/**
* Decorate a function with a deprecation message the first time it is called.
*
* @param {string} message
* A deprecation message to log the first time the returned function
* is called.
*
* @param {Function} fn
* The function to be deprecated.
*
* @return {Function}
* A wrapper function that will log a deprecation warning the first
* time it is called. The return value will be the return value of
* the wrapped function.
*/
function deprecate(message, fn) {
let warned = false;
return function (...args) {
if (!warned) {
log$1.warn(message);
}
warned = true;
return fn.apply(this, args);
};
}
/**
* Internal function used to mark a function as deprecated in the next major
* version with consistent messaging.
*
* @param {number} major The major version where it will be removed
* @param {string} oldName The old function name
* @param {string} newName The new function name
* @param {Function} fn The function to deprecate
* @return {Function} The decorated function
*/
function deprecateForMajor(major, oldName, newName, fn) {
return deprecate(`${oldName} is deprecated and will be removed in ${major}.0; please use ${newName} instead.`, fn);
}
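/**
* A sketch of how these helpers are used later in this file; they are internal
* and not exported. `legacyThing` is a hypothetical function.
*
* @example
* const legacyThing = () => 42;
* const wrapped = deprecateForMajor(9, 'videojs.legacyThing', 'videojs.obj.legacyThing', legacyThing);
* wrapped(); // logs the deprecation warning once, then returns 42
* wrapped(); // returns 42 without logging again
*/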
var VjsErrors = {
NetworkBadStatus: 'networkbadstatus',
NetworkRequestFailed: 'networkrequestfailed',
NetworkRequestAborted: 'networkrequestaborted',
NetworkRequestTimeout: 'networkrequesttimeout',
NetworkBodyParserFailed: 'networkbodyparserfailed',
StreamingHlsPlaylistParserError: 'streaminghlsplaylistparsererror',
StreamingDashManifestParserError: 'streamingdashmanifestparsererror',
StreamingContentSteeringParserError: 'streamingcontentsteeringparsererror',
StreamingVttParserError: 'streamingvttparsererror',
StreamingFailedToSelectNextSegment: 'streamingfailedtoselectnextsegment',
StreamingFailedToDecryptSegment: 'streamingfailedtodecryptsegment',
StreamingFailedToTransmuxSegment: 'streamingfailedtotransmuxsegment',
StreamingFailedToAppendSegment: 'streamingfailedtoappendsegment',
StreamingCodecsChangeError: 'streamingcodecschangeerror'
};
/**
* @file video.js
* @module videojs
*/
/** @import { PlayerReadyCallback } from './player' */
/**
* Normalize an `id` value by trimming off a leading `#`
*
* @private
* @param {string} id
* A string, maybe with a leading `#`.
*
* @return {string}
* The string, without any leading `#`.
*/
const normalizeId = id => id.indexOf('#') === 0 ? id.slice(1) : id;
/**
* The `videojs()` function doubles as the main function for users to create a
* {@link Player} instance as well as the main library namespace.
*
* It can also be used as a getter for a pre-existing {@link Player} instance.
* However, we _strongly_ recommend using `videojs.getPlayer()` for this
* purpose because it avoids any potential for unintended initialization.
*
* Due to [limitations](https://github.com/jsdoc3/jsdoc/issues/955#issuecomment-313829149)
* of our JSDoc template, we cannot properly document this as both a function
* and a namespace, so its function signature is documented here.
*
* #### Arguments
* ##### id
* string|Element, **required**
*
* Video element or video element ID.
*
* ##### options
* Object, optional
*
* Options object for providing settings.
* See: [Options Guide](https://docs.videojs.com/tutorial-options.html).
*
* ##### ready
* {@link Component~ReadyCallback}, optional
*
* A function to be called when the {@link Player} and {@link Tech} are ready.
*
* #### Return Value
*
* The `videojs()` function returns a {@link Player} instance.
*
* @namespace
*
* @borrows AudioTrack as AudioTrack
* @borrows Component.getComponent as getComponent
* @borrows module:events.on as on
* @borrows module:events.one as one
* @borrows module:events.off as off
* @borrows module:events.trigger as trigger
* @borrows EventTarget as EventTarget
* @borrows module:middleware.use as use
* @borrows Player.players as players
* @borrows Plugin.registerPlugin as registerPlugin
* @borrows Plugin.deregisterPlugin as deregisterPlugin
* @borrows Plugin.getPlugins as getPlugins
* @borrows Plugin.getPlugin as getPlugin
* @borrows Plugin.getPluginVersion as getPluginVersion
* @borrows Tech.getTech as getTech
* @borrows Tech.registerTech as registerTech
* @borrows TextTrack as TextTrack
* @borrows VideoTrack as VideoTrack
*
* @param {string|Element} id
* Video element or video element ID.
*
* @param {Object} [options]
* Options object for providing settings.
* See: [Options Guide](https://docs.videojs.com/tutorial-options.html).
*
* @param {PlayerReadyCallback} [ready]
* A function to be called when the {@link Player} and {@link Tech} are
* ready.
*
* @return {Player}
* The `videojs()` function returns a {@link Player|Player} instance.
*/
function videojs(id, options, ready) {
let player = videojs.getPlayer(id);
if (player) {
if (options) {
log$1.warn(`Player "${id}" is already initialised. Options will not be applied.`);
}
if (ready) {
player.ready(ready);
}
return player;
}
const el = typeof id === 'string' ? $('#' + normalizeId(id)) : id;
if (!isEl(el)) {
throw new TypeError('The element or ID supplied is not valid. (videojs)');
}
// document.body.contains(el) will only check if el is contained within that one document.
// This causes problems for elements in iframes.
// Instead, use the element's ownerDocument instead of the global document.
// This will make sure that the element is indeed in the dom of that document.
// Additionally, check that the document in question has a default view.
// If the document is no longer attached to the dom, the defaultView of the document will be null.
// If the element is inside Shadow DOM (e.g. it is part of a custom element), ownerDocument.body.contains(el)
// always returns false. Instead, use the Shadow DOM root.
const inShadowDom = 'getRootNode' in el ? el.getRootNode() instanceof window$1.ShadowRoot : false;
const rootNode = inShadowDom ? el.getRootNode() : el.ownerDocument.body;
if (!el.ownerDocument.defaultView || !rootNode.contains(el)) {
log$1.warn('The element supplied is not included in the DOM');
}
options = options || {};
// Store a copy of the el before modification, if it is to be restored in destroy()
// If div ingest, store the parent div
if (options.restoreEl === true) {
options.restoreEl = (el.parentNode && el.parentNode.hasAttribute && el.parentNode.hasAttribute('data-vjs-player') ? el.parentNode : el).cloneNode(true);
}
hooks('beforesetup').forEach(hookFunction => {
const opts = hookFunction(el, merge$1(options));
if (!isObject(opts) || Array.isArray(opts)) {
log$1.error('please return an object in beforesetup hooks');
return;
}
options = merge$1(options, opts);
});
// We get the current "Player" component here in case an integration has
// replaced it with a custom player.
const PlayerComponent = Component$1.getComponent('Player');
player = new PlayerComponent(el, options, ready);
hooks('setup').forEach(hookFunction => hookFunction(player));
return player;
}
videojs.hooks_ = hooks_;
videojs.hooks = hooks;
videojs.hook = hook;
videojs.hookOnce = hookOnce;
videojs.removeHook = removeHook;
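/**
* A usage sketch for the lifecycle hooks and the videojs() factory above. The
* element id and the option values are illustrative assumptions.
*
* @example
* videojs.hook('beforesetup', (el, options) => {
*   // beforesetup hooks must return an options object; it is merged in.
*   return { liveui: true };
* });
* videojs.hook('setup', (player) => {
*   player.addClass('vjs-hooked'); // runs for every player created afterwards
* });
* const player = videojs('my-video', { controls: true }, function onReady() {
*   this.play();
* });
*/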
// Add default styles
if (window$1.VIDEOJS_NO_DYNAMIC_STYLE !== true && isReal()) {
let style = $('.vjs-styles-defaults');
if (!style) {
style = createStyleElement('vjs-styles-defaults');
const head = $('head');
if (head) {
head.insertBefore(style, head.firstChild);
}
setTextContent(style, `
.video-js {
width: 300px;
height: 150px;
}
.vjs-fluid:not(.vjs-audio-only-mode) {
padding-top: 56.25%
}
`);
}
}
// Run Auto-load players
// You have to wait at least once in case this script is loaded after your
// video in the DOM (weird behavior only with minified version)
autoSetupTimeout(1, videojs);
/**
* Current Video.js version. Follows [semantic versioning](https://semver.org/).
*
* @type {string}
*/
videojs.VERSION = version$6;
/**
* The global options object. These are the settings that take effect
* if no overrides are specified when the player is created.
*
* @type {Object}
*/
videojs.options = Player.prototype.options_;
/**
* Get an object with the currently created players, keyed by player ID
*
* @return {Object}
* The created players
*/
videojs.getPlayers = () => Player.players;
/**
* Get a single player based on an ID or DOM element.
*
* This is useful if you want to check if an element or ID has an associated
* Video.js player, but not create one if it doesn't.
*
* @param {string|Element} id
* An HTML element - `<video>`, `<audio>`, or `<video-js>` -
* or a string matching the `id` of such an element.
*
* @return {Player|undefined}
* A player instance or `undefined` if there is no player instance
* matching the argument.
*/
videojs.getPlayer = id => {
const players = Player.players;
let tag;
if (typeof id === 'string') {
const nId = normalizeId(id);
const player = players[nId];
if (player) {
return player;
}
tag = $('#' + nId);
} else {
tag = id;
}
if (isEl(tag)) {
const {
player,
playerId
} = tag;
// Element may have a `player` property referring to an already created
// player instance. If so, return that.
if (player || players[playerId]) {
return player || players[playerId];
}
}
};
/**
* Returns an array of all current players.
*
* @return {Array}
* An array of all players. The array will be in the order that
* `Object.keys` provides, which could potentially vary between
* JavaScript engines.
*
*/
videojs.getAllPlayers = () =>
// Disposed players leave a key with a `null` value, so we need to make sure
// we filter those out.
Object.keys(Player.players).map(k => Player.players[k]).filter(Boolean);
videojs.players = Player.players;
videojs.getComponent = Component$1.getComponent;
/**
* Register a component so it can be referred to by name. Used when adding to other
* components, either through addChild `component.addChild('myComponent')` or through
* default children options `{ children: ['myComponent'] }`.
*
* > NOTE: You could also just initialize the component before adding.
* `component.addChild(new MyComponent());`
*
* @param {string} name
* The class name of the component
*
* @param {typeof Component} comp
* The component class
*
* @return {typeof Component}
* The newly registered component
*/
videojs.registerComponent = (name, comp) => {
if (Tech.isTech(comp)) {
log$1.warn(`The ${name} tech was registered as a component. It should instead be registered using videojs.registerTech(name, tech)`);
}
return Component$1.registerComponent.call(Component$1, name, comp);
};
videojs.getTech = Tech.getTech;
videojs.registerTech = Tech.registerTech;
videojs.use = use;
/**
* An object that can be returned by a middleware to signify
* that the middleware is being terminated.
*
* @type {object}
* @property {object} middleware.TERMINATOR
*/
Object.defineProperty(videojs, 'middleware', {
value: {},
writable: false,
enumerable: true
});
Object.defineProperty(videojs.middleware, 'TERMINATOR', {
value: TERMINATOR,
writable: false,
enumerable: true
});
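/**
* A minimal middleware sketch for videojs.use(). Registering with '*' applies
* it to every source type; the behavior shown (pass the source through
* unchanged) is purely illustrative.
*
* @example
* videojs.use('*', function (player) {
*   return {
*     setSource(srcObj, next) {
*       // Hand the (possibly modified) source to the next middleware or tech.
*       next(null, srcObj);
*     }
*   };
* });
*/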
/**
* A reference to the {@link module:browser|browser utility module} as an object.
*
* @type {Object}
* @see {@link module:browser|browser}
*/
videojs.browser = browser;
/**
* A reference to the {@link module:obj|obj utility module} as an object.
*
* @type {Object}
* @see {@link module:obj|obj}
*/
videojs.obj = Obj;
/**
* Deprecated reference to the {@link module:obj.merge|merge function}
*
* @type {Function}
* @see {@link module:obj.merge|merge}
* @deprecated Deprecated and will be removed in 9.0. Please use videojs.obj.merge instead.
*/
videojs.mergeOptions = deprecateForMajor(9, 'videojs.mergeOptions', 'videojs.obj.merge', merge$1);
/**
* Deprecated reference to the {@link module:obj.defineLazyProperty|defineLazyProperty function}
*
* @type {Function}
* @see {@link module:obj.defineLazyProperty|defineLazyProperty}
* @deprecated Deprecated and will be removed in 9.0. Please use videojs.obj.defineLazyProperty instead.
*/
videojs.defineLazyProperty = deprecateForMajor(9, 'videojs.defineLazyProperty', 'videojs.obj.defineLazyProperty', defineLazyProperty);
/**
* Deprecated reference to the {@link module:fn.bind_|fn.bind_ function}
*
* @type {Function}
* @see {@link module:fn.bind_|fn.bind_}
* @deprecated Deprecated and will be removed in 9.0. Please use native Function.prototype.bind instead.
*/
videojs.bind = deprecateForMajor(9, 'videojs.bind', 'native Function.prototype.bind', bind_);
videojs.registerPlugin = Plugin.registerPlugin;
videojs.deregisterPlugin = Plugin.deregisterPlugin;
/**
* Deprecated method to register a plugin with Video.js
*
* @deprecated Deprecated and will be removed in 9.0. Use videojs.registerPlugin() instead.
*
* @param {string} name
* The plugin name
*
* @param {typeof Plugin|Function} plugin
* The plugin sub-class or function
*
* @return {typeof Plugin|Function}
*/
videojs.plugin = (name, plugin) => {
log$1.warn('videojs.plugin() is deprecated; use videojs.registerPlugin() instead');
return Plugin.registerPlugin(name, plugin);
};
videojs.getPlugins = Plugin.getPlugins;
videojs.getPlugin = Plugin.getPlugin;
videojs.getPluginVersion = Plugin.getPluginVersion;
/**
* Adding languages so that they're available to all players.
* Example: `videojs.addLanguage('es', { 'Hello': 'Hola' });`
*
* @param {string} code
* The language code or dictionary property
*
* @param {Object} data
* The data values to be translated
*
* @return {Object}
* The resulting language dictionary object
*/
videojs.addLanguage = function (code, data) {
code = ('' + code).toLowerCase();
videojs.options.languages = merge$1(videojs.options.languages, {
[code]: data
});
return videojs.options.languages[code];
};
/**
* A reference to the {@link module:log|log utility module} as an object.
*
* @type {Function}
* @see {@link module:log|log}
*/
videojs.log = log$1;
videojs.createLogger = createLogger;
/**
* A reference to the {@link module:time|time utility module} as an object.
*
* @type {Object}
* @see {@link module:time|time}
*/
videojs.time = Time;
/**
* Deprecated reference to the {@link module:time.createTimeRanges|createTimeRanges function}
*
* @type {Function}
* @see {@link module:time.createTimeRanges|createTimeRanges}
* @deprecated Deprecated and will be removed in 9.0. Please use videojs.time.createTimeRanges instead.
*/
videojs.createTimeRange = deprecateForMajor(9, 'videojs.createTimeRange', 'videojs.time.createTimeRanges', createTimeRanges$1);
/**
* Deprecated reference to the {@link module:time.createTimeRanges|createTimeRanges function}
*
* @type {Function}
* @see {@link module:time.createTimeRanges|createTimeRanges}
* @deprecated Deprecated and will be removed in 9.0. Please use videojs.time.createTimeRanges instead.
*/
videojs.createTimeRanges = deprecateForMajor(9, 'videojs.createTimeRanges', 'videojs.time.createTimeRanges', createTimeRanges$1);
/**
* Deprecated reference to the {@link module:time.formatTime|formatTime function}
*
* @type {Function}
* @see {@link module:time.formatTime|formatTime}
* @deprecated Deprecated and will be removed in 9.0. Please use videojs.time.format instead.
*/
videojs.formatTime = deprecateForMajor(9, 'videojs.formatTime', 'videojs.time.formatTime', formatTime);
/**
* Deprecated reference to the {@link module:time.setFormatTime|setFormatTime function}
*
* @type {Function}
* @see {@link module:time.setFormatTime|setFormatTime}
* @deprecated Deprecated and will be removed in 9.0. Please use videojs.time.setFormat instead.
*/
videojs.setFormatTime = deprecateForMajor(9, 'videojs.setFormatTime', 'videojs.time.setFormatTime', setFormatTime);
/**
* Deprecated reference to the {@link module:time.resetFormatTime|resetFormatTime function}
*
* @type {Function}
* @see {@link module:time.resetFormatTime|resetFormatTime}
* @deprecated Deprecated and will be removed in 9.0. Please use videojs.time.resetFormat instead.
*/
videojs.resetFormatTime = deprecateForMajor(9, 'videojs.resetFormatTime', 'videojs.time.resetFormatTime', resetFormatTime);
/**
* Deprecated reference to the {@link module:url.parseUrl|Url.parseUrl function}
*
* @type {Function}
* @see {@link module:url.parseUrl|parseUrl}
* @deprecated Deprecated and will be removed in 9.0. Please use videojs.url.parseUrl instead.
*/
videojs.parseUrl = deprecateForMajor(9, 'videojs.parseUrl', 'videojs.url.parseUrl', parseUrl);
/**
* Deprecated reference to the {@link module:url.isCrossOrigin|Url.isCrossOrigin function}
*
* @type {Function}
* @see {@link module:url.isCrossOrigin|isCrossOrigin}
* @deprecated Deprecated and will be removed in 9.0. Please use videojs.url.isCrossOrigin instead.
*/
videojs.isCrossOrigin = deprecateForMajor(9, 'videojs.isCrossOrigin', 'videojs.url.isCrossOrigin', isCrossOrigin);
videojs.EventTarget = EventTarget$2;
videojs.any = any;
videojs.on = on;
videojs.one = one;
videojs.off = off;
videojs.trigger = trigger;
/**
* A cross-browser XMLHttpRequest wrapper.
*
* @function
* @param {Object} options
* Settings for the request.
*
* @return {XMLHttpRequest|XDomainRequest}
* The request object.
*
* @see https://github.com/Raynos/xhr
*/
videojs.xhr = XHR;
videojs.TextTrack = TextTrack;
videojs.AudioTrack = AudioTrack;
videojs.VideoTrack = VideoTrack;
['isEl', 'isTextNode', 'createEl', 'hasClass', 'addClass', 'removeClass', 'toggleClass', 'setAttributes', 'getAttributes', 'emptyEl', 'appendContent', 'insertContent'].forEach(k => {
videojs[k] = function () {
log$1.warn(`videojs.${k}() is deprecated; use videojs.dom.${k}() instead`);
return Dom[k].apply(null, arguments);
};
});
videojs.computedStyle = deprecateForMajor(9, 'videojs.computedStyle', 'videojs.dom.computedStyle', computedStyle);
/**
* A reference to the {@link module:dom|DOM utility module} as an object.
*
* @type {Object}
* @see {@link module:dom|dom}
*/
videojs.dom = Dom;
/**
* A reference to the {@link module:fn|fn utility module} as an object.
*
* @type {Object}
* @see {@link module:fn|fn}
*/
videojs.fn = Fn;
/**
* A reference to the {@link module:num|num utility module} as an object.
*
* @type {Object}
* @see {@link module:num|num}
*/
videojs.num = Num;
/**
* A reference to the {@link module:str|str utility module} as an object.
*
* @type {Object}
* @see {@link module:str|str}
*/
videojs.str = Str;
/**
* A reference to the {@link module:url|URL utility module} as an object.
*
* @type {Object}
* @see {@link module:url|url}
*/
videojs.url = Url;
// The list of possible error types to occur in video.js
videojs.Error = VjsErrors;
/*! @name videojs-contrib-quality-levels @version 4.1.0 @license Apache-2.0 */
/**
* A single QualityLevel.
*
* interface QualityLevel {
* readonly attribute DOMString id;
* attribute DOMString label;
* readonly attribute long width;
* readonly attribute long height;
* readonly attribute long bitrate;
* attribute boolean enabled;
* };
*
* @class QualityLevel
*/
class QualityLevel {
/**
* Creates a QualityLevel
*
* @param {Representation|Object} representation The representation of the quality level
* @param {string} representation.id Unique id of the QualityLevel
* @param {number=} representation.width Resolution width of the QualityLevel
* @param {number=} representation.height Resolution height of the QualityLevel
* @param {number} representation.bandwidth Bitrate of the QualityLevel
* @param {number=} representation.frameRate Frame-rate of the QualityLevel
* @param {Function} representation.enabled Callback to enable/disable QualityLevel
*/
constructor(representation) {
let level = this; // eslint-disable-line
level.id = representation.id;
level.label = level.id;
level.width = representation.width;
level.height = representation.height;
level.bitrate = representation.bandwidth;
level.frameRate = representation.frameRate;
level.enabled_ = representation.enabled;
Object.defineProperty(level, 'enabled', {
/**
* Get whether the QualityLevel is enabled.
*
* @return {boolean} True if the QualityLevel is enabled.
*/
get() {
return level.enabled_();
},
/**
* Enable or disable the QualityLevel.
*
* @param {boolean} enable true to enable QualityLevel, false to disable.
*/
set(enable) {
level.enabled_(enable);
}
});
return level;
}
}
/**
* A list of QualityLevels.
*
* interface QualityLevelList : EventTarget {
* getter QualityLevel (unsigned long index);
* readonly attribute unsigned long length;
* readonly attribute long selectedIndex;
*
* void addQualityLevel(QualityLevel qualityLevel)
* void removeQualityLevel(QualityLevel remove)
* QualityLevel? getQualityLevelById(DOMString id);
*
* attribute EventHandler onchange;
* attribute EventHandler onaddqualitylevel;
* attribute EventHandler onremovequalitylevel;
* };
*
* @extends videojs.EventTarget
* @class QualityLevelList
*/
class QualityLevelList extends videojs.EventTarget {
/**
* Creates a QualityLevelList.
*/
constructor() {
super();
let list = this; // eslint-disable-line
list.levels_ = [];
list.selectedIndex_ = -1;
/**
* Get the index of the currently selected QualityLevel.
*
* @returns {number} The index of the selected QualityLevel. -1 if none selected.
* @readonly
*/
Object.defineProperty(list, 'selectedIndex', {
get() {
return list.selectedIndex_;
}
});
/**
* Get the length of the list of QualityLevels.
*
* @returns {number} The length of the list.
* @readonly
*/
Object.defineProperty(list, 'length', {
get() {
return list.levels_.length;
}
});
list[Symbol.iterator] = () => list.levels_.values();
return list;
}
/**
* Adds a quality level to the list.
*
* @param {Representation|Object} representation The representation of the quality level
* @param {string} representation.id Unique id of the QualityLevel
* @param {number=} representation.width Resolution width of the QualityLevel
* @param {number=} representation.height Resolution height of the QualityLevel
* @param {number} representation.bandwidth Bitrate of the QualityLevel
* @param {number=} representation.frameRate Frame-rate of the QualityLevel
* @param {Function} representation.enabled Callback to enable/disable QualityLevel
* @return {QualityLevel} the QualityLevel added to the list
* @method addQualityLevel
*/
addQualityLevel(representation) {
let qualityLevel = this.getQualityLevelById(representation.id);
// Do not add duplicate quality levels
if (qualityLevel) {
return qualityLevel;
}
const index = this.levels_.length;
qualityLevel = new QualityLevel(representation);
if (!('' + index in this)) {
Object.defineProperty(this, index, {
get() {
return this.levels_[index];
}
});
}
this.levels_.push(qualityLevel);
this.trigger({
qualityLevel,
type: 'addqualitylevel'
});
return qualityLevel;
}
/**
* Removes a quality level from the list.
*
* @param {QualityLevel} qualityLevel The QualityLevel to remove from the list.
* @return {QualityLevel|null} the QualityLevel removed or null if nothing removed
* @method removeQualityLevel
*/
removeQualityLevel(qualityLevel) {
let removed = null;
for (let i = 0, l = this.length; i < l; i++) {
if (this[i] === qualityLevel) {
removed = this.levels_.splice(i, 1)[0];
if (this.selectedIndex_ === i) {
this.selectedIndex_ = -1;
} else if (this.selectedIndex_ > i) {
this.selectedIndex_--;
}
break;
}
}
if (removed) {
this.trigger({
qualityLevel,
type: 'removequalitylevel'
});
}
return removed;
}
/**
* Searches for a QualityLevel with the given id.
*
* @param {string} id The id of the QualityLevel to find.
* @return {QualityLevel|null} The QualityLevel with id, or null if not found.
* @method getQualityLevelById
*/
getQualityLevelById(id) {
for (let i = 0, l = this.length; i < l; i++) {
const level = this[i];
if (level.id === id) {
return level;
}
}
return null;
}
/**
* Resets the list of QualityLevels to empty
*
* @method dispose
*/
dispose() {
this.selectedIndex_ = -1;
this.levels_.length = 0;
}
}
/**
* change - The selected QualityLevel has changed.
* addqualitylevel - A QualityLevel has been added to the QualityLevelList.
* removequalitylevel - A QualityLevel has been removed from the QualityLevelList.
*/
QualityLevelList.prototype.allowedEvents_ = {
change: 'change',
addqualitylevel: 'addqualitylevel',
removequalitylevel: 'removequalitylevel'
};
// emulate attribute EventHandler support to allow for feature detection
for (const event in QualityLevelList.prototype.allowedEvents_) {
QualityLevelList.prototype['on' + event] = null;
}
var version$5 = "4.1.0";
/**
* Initialization function for the qualityLevels plugin. Sets up the QualityLevelList and
* event handlers.
*
* @param {Player} player Player object.
* @param {Object} options Plugin options object.
* @return {QualityLevelList} a list of QualityLevels
*/
const initPlugin$1 = function (player, options) {
const originalPluginFn = player.qualityLevels;
const qualityLevelList = new QualityLevelList();
const disposeHandler = function () {
qualityLevelList.dispose();
player.qualityLevels = originalPluginFn;
player.off('dispose', disposeHandler);
};
player.on('dispose', disposeHandler);
player.qualityLevels = () => qualityLevelList;
player.qualityLevels.VERSION = version$5;
return qualityLevelList;
};
/**
* A video.js plugin.
*
* In the plugin function, the value of `this` is a video.js `Player`
* instance. You cannot rely on the player being in a "ready" state here,
* depending on how the plugin is invoked. This may or may not be important
* to you; if not, remove the wait for "ready"!
*
* @param {Object} options Plugin options object
* @return {QualityLevelList} a list of QualityLevels
*/
const qualityLevels = function (options) {
return initPlugin$1(this, videojs.obj.merge({}, options));
};
// Register the plugin with video.js.
videojs.registerPlugin('qualityLevels', qualityLevels);
// Include the version number.
qualityLevels.VERSION = version$5;
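/**
* A usage sketch for the qualityLevels plugin registered above. The player id
* and the 720p selection policy are illustrative assumptions.
*
* @example
* const player = videojs.getPlayer('my-video');
* const levels = player.qualityLevels();
* levels.on('addqualitylevel', ({ qualityLevel }) => {
*   // Only keep renditions of 720p and below enabled.
*   qualityLevel.enabled = qualityLevel.height <= 720;
* });
* levels.on('change', () => {
*   videojs.log('selected quality index is now', levels.selectedIndex);
* });
*/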
/*! @name @videojs/http-streaming @version 3.13.3 @license Apache-2.0 */
/**
* @file resolve-url.js - Handling how URLs are resolved and manipulated
*/
const resolveUrl = _resolveUrl;
/**
* If the xhr request was redirected, return the responseURL, otherwise,
* return the original url.
*
* @api private
*
* @param {string} url - a URL being requested
* @param {XMLHttpRequest} req - xhr request result
*
* @return {string}
*/
const resolveManifestRedirect = (url, req) => {
// To understand how the responseURL below is set and generated:
// - https://fetch.spec.whatwg.org/#concept-response-url
// - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
if (req && req.responseURL && url !== req.responseURL) {
return req.responseURL;
}
return url;
};
const logger = source => {
if (videojs.log.debug) {
return videojs.log.debug.bind(videojs, 'VHS:', `${source} >`);
}
return function () {};
};
/**
* Provides a compatibility layer between Video.js 7 and 8 API changes for VHS.
*/
/**
* Delegates to videojs.obj.merge (Video.js 8) or
* videojs.mergeOptions (Video.js 7).
*/
function merge(...args) {
const context = videojs.obj || videojs;
const fn = context.merge || context.mergeOptions;
return fn.apply(context, args);
}
/**
* Delegates to videojs.time.createTimeRanges (Video.js 8) or
* videojs.createTimeRanges (Video.js 7).
*/
function createTimeRanges(...args) {
const context = videojs.time || videojs;
const fn = context.createTimeRanges;
return fn.apply(context, args);
}
/**
* Converts provided buffered ranges to a descriptive string
*
* @param {TimeRanges} buffered - received buffered time ranges
*
* @return {string} - descriptive string
*/
function bufferedRangesToString(buffered) {
if (buffered.length === 0) {
return 'Buffered Ranges are empty';
}
let bufferedRangesStr = 'Buffered Ranges: \n';
for (let i = 0; i < buffered.length; i++) {
const start = buffered.start(i);
const end = buffered.end(i);
bufferedRangesStr += `${start} --> ${end}. Duration (${end - start})\n`;
}
return bufferedRangesStr;
}
/**
* ranges
*
* Utilities for working with TimeRanges.
*
*/
const TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
// can be misleading because of precision differences or when the current media has poorly
// aligned audio and video, which can cause values to be slightly off from what you would
// expect. This value is what we consider to be safe to use in such comparisons to account
// for these scenarios.
const SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
const filterRanges = function (timeRanges, predicate) {
const results = [];
let i;
if (timeRanges && timeRanges.length) {
// Search for ranges that match the predicate
for (i = 0; i < timeRanges.length; i++) {
if (predicate(timeRanges.start(i), timeRanges.end(i))) {
results.push([timeRanges.start(i), timeRanges.end(i)]);
}
}
}
return createTimeRanges(results);
};
/**
* Attempts to find the buffered TimeRange that contains the specified
* time.
*
* @param {TimeRanges} buffered - the TimeRanges object to query
* @param {number} time - the time to filter on.
* @return {TimeRanges} a new TimeRanges object
*/
const findRange = function (buffered, time) {
return filterRanges(buffered, function (start, end) {
return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
});
};
/**
* Returns the TimeRanges that begin later than the specified time.
*
* @param {TimeRanges} timeRanges - the TimeRanges object to query
* @param {number} time - the time to filter on.
* @return {TimeRanges} a new TimeRanges object.
*/
const findNextRange = function (timeRanges, time) {
return filterRanges(timeRanges, function (start) {
return start - TIME_FUDGE_FACTOR >= time;
});
};
/**
* Returns gaps within a list of TimeRanges
*
* @param {TimeRanges} buffered - the TimeRanges object
* @return {TimeRanges} a TimeRanges object of gaps
*/
const findGaps = function (buffered) {
if (buffered.length < 2) {
return createTimeRanges();
}
const ranges = [];
for (let i = 1; i < buffered.length; i++) {
const start = buffered.end(i - 1);
const end = buffered.start(i);
ranges.push([start, end]);
}
return createTimeRanges(ranges);
};
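/**
* A standalone sketch of the gap computation above using plain [start, end]
* pairs instead of TimeRanges objects (findGaps itself is internal to VHS and
* not exported). Assumes the ranges are sorted and non-overlapping.
*
* @example
* const gapsBetween = (ranges) => {
*   const gaps = [];
*   for (let i = 1; i < ranges.length; i++) {
*     gaps.push([ranges[i - 1][1], ranges[i][0]]); // [end of previous, start of next]
*   }
*   return gaps;
* };
* gapsBetween([[0, 10], [15, 30]]); // -> [[10, 15]]
*/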
/**
* Calculate the intersection of two TimeRanges
*
* @param {TimeRanges} bufferA
* @param {TimeRanges} bufferB
* @return {TimeRanges} The intersection of `bufferA` with `bufferB`
*/
const bufferIntersection = function (bufferA, bufferB) {
let start = null;
let end = null;
let arity = 0;
const extents = [];
const ranges = [];
if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
return createTimeRanges();
} // Handle the case where we have both buffers and create an
// intersection of the two
let count = bufferA.length; // A) Gather up all start and end times
while (count--) {
extents.push({
time: bufferA.start(count),
type: 'start'
});
extents.push({
time: bufferA.end(count),
type: 'end'
});
}
count = bufferB.length;
while (count--) {
extents.push({
time: bufferB.start(count),
type: 'start'
});
extents.push({
time: bufferB.end(count),
type: 'end'
});
} // B) Sort them by time
extents.sort(function (a, b) {
return a.time - b.time;
}); // C) Go along one by one incrementing arity for start and decrementing
// arity for ends
for (count = 0; count < extents.length; count++) {
if (extents[count].type === 'start') {
arity++; // D) If arity is ever incremented to 2 we are entering an
// overlapping range
if (arity === 2) {
start = extents[count].time;
}
} else if (extents[count].type === 'end') {
arity--; // E) If arity is ever decremented to 1 we are leaving an
// overlapping range
if (arity === 1) {
end = extents[count].time;
}
} // F) Record overlapping ranges
if (start !== null && end !== null) {
ranges.push([start, end]);
start = null;
end = null;
}
}
return createTimeRanges(ranges);
};
/**
* Gets a human readable string for a TimeRange
*
* @param {TimeRange} range
* @return {string} a human readable string
*/
const printableRange = range => {
const strArr = [];
if (!range || !range.length) {
return '';
}
for (let i = 0; i < range.length; i++) {
strArr.push(range.start(i) + ' => ' + range.end(i));
}
return strArr.join(', ');
};
/**
* Calculates the amount of time left in seconds until the player hits the end of the
* buffer and causes a rebuffer
*
* @param {TimeRange} buffered
* The state of the buffer
* @param {number} currentTime
* The current time of the player
* @param {number} playbackRate
* The current playback rate of the player. Defaults to 1.
* @return {number}
* Time until the player has to start rebuffering in seconds.
* @function timeUntilRebuffer
*/
const timeUntilRebuffer = function (buffered, currentTime, playbackRate = 1) {
const bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
return (bufferedEnd - currentTime) / playbackRate;
};
/**
* Converts a TimeRanges object into an array representation
*
* @param {TimeRanges} timeRanges
* @return {Array}
*/
const timeRangesToArray = timeRanges => {
const timeRangesList = [];
for (let i = 0; i < timeRanges.length; i++) {
timeRangesList.push({
start: timeRanges.start(i),
end: timeRanges.end(i)
});
}
return timeRangesList;
};
/**
* Determines if two time range objects are different.
*
* @param {TimeRange} a
* the first time range object to check
*
* @param {TimeRange} b
* the second time range object to check
*
* @return {Boolean}
* Whether the time range objects differ
*/
const isRangeDifferent = function (a, b) {
// same object
if (a === b) {
return false;
} // one or the other is undefined
if (!a && b || !b && a) {
return true;
} // length is different
if (a.length !== b.length) {
return true;
} // see if any start/end pair is different
for (let i = 0; i < a.length; i++) {
if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
return true;
}
} // if the length and every pair is the same
// this is the same time range
return false;
};
const lastBufferedEnd = function (a) {
if (!a || !a.length || !a.end) {
return;
}
return a.end(a.length - 1);
};
/**
* A utility function to add up the amount of time in a timeRange
* after a specified startTime.
* e.g. [[0, 10], [20, 40], [50, 60]] with a startTime of 0
* would return 40, as there are 40 seconds after 0 in the timeRange
*
* @param {TimeRange} range
* The range to check against
* @param {number} startTime
* The time in the time range that you should start counting from
*
* @return {number}
* The number of seconds in the buffer past the specified time.
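* @example <caption>Worked sketch using the createTimeRanges helper from this file</caption>
* // only the parts of each range after 25s are counted: (40 - 25) + (60 - 50) === 25
* timeAheadOf(createTimeRanges([[0, 10], [20, 40], [50, 60]]), 25); // 25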
*/
const timeAheadOf = function (range, startTime) {
let time = 0;
if (!range || !range.length) {
return time;
}
for (let i = 0; i < range.length; i++) {
const start = range.start(i);
const end = range.end(i); // startTime is after this range entirely
if (startTime > end) {
continue;
} // startTime is within this range
if (startTime > start && startTime <= end) {
time += end - startTime;
continue;
} // startTime is before this range.
time += end - start;
}
return time;
};
/**
* @file playlist.js
*
* Playlist related utilities.
*/
/**
* Get the duration of a segment, with special cases for
* llhls segments that do not have a duration yet.
*
* @param {Object} playlist
* the playlist that the segment belongs to.
* @param {Object} segment
* the segment to get a duration for.
*
* @return {number}
* the segment duration
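* @example <caption>Sketch for an LL-HLS preload segment; the objects are hand-built assumptions</caption>
* segmentDurationWithParts(
*   { partTargetDuration: 1 },
*   { preload: true, parts: [{ duration: 1 }, { duration: 1 }], preloadHints: [{ type: 'PART' }] }
* ); // 2 known parts + 1 hinted part === 3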
*/
const segmentDurationWithParts = (playlist, segment) => {
// if this isn't a preload segment
// then we will have a segment duration that is accurate.
if (!segment.preload) {
return segment.duration;
} // otherwise we have to add up parts and preload hints
// to get an up to date duration.
let result = 0;
(segment.parts || []).forEach(function (p) {
result += p.duration;
}); // for preload hints we have to use partTargetDuration
// as they won't even have a duration yet.
(segment.preloadHints || []).forEach(function (p) {
if (p.type === 'PART') {
result += playlist.partTargetDuration;
}
});
return result;
};
/**
* A function to get a combined list of parts and segments with durations
* and indexes.
*
* @param {Playlist} playlist the playlist to get the list for.
*
* @return {Array} The part/segment list.
*/
const getPartsAndSegments = playlist => (playlist.segments || []).reduce((acc, segment, si) => {
if (segment.parts) {
segment.parts.forEach(function (part, pi) {
acc.push({
duration: part.duration,
segmentIndex: si,
partIndex: pi,
part,
segment
});
});
} else {
acc.push({
duration: segment.duration,
segmentIndex: si,
partIndex: null,
segment,
part: null
});
}
return acc;
}, []);
const getLastParts = media => {
const lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
return lastSegment && lastSegment.parts || [];
};
const getKnownPartCount = ({
preloadSegment
}) => {
if (!preloadSegment) {
return;
}
const {
parts,
preloadHints
} = preloadSegment;
let partCount = (preloadHints || []).reduce((count, hint) => count + (hint.type === 'PART' ? 1 : 0), 0);
partCount += parts && parts.length ? parts.length : 0;
return partCount;
};
/**
* Get the number of seconds to delay from the end of a
* live playlist.
*
* @param {Playlist} main the main playlist
* @param {Playlist} media the media playlist
* @return {number} the hold back in seconds.
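* @example <caption>Sketch of the delay precedence; the playlist objects are hand-built assumptions</caption>
* // an LL-HLS playlist with parts and an explicit PART-HOLD-BACK wins:
* liveEdgeDelay(null, { serverControl: { partHoldBack: 1.5 }, segments: [{ parts: [{ duration: 0.5 }] }] }); // 1.5
* // a plain live playlist falls back to three target durations:
* liveEdgeDelay(null, { targetDuration: 6, segments: [] }); // 18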
*/
const liveEdgeDelay = (main, media) => {
if (media.endList) {
return 0;
} // DASH suggestedPresentationDelay trumps everything
if (main && main.suggestedPresentationDelay) {
return main.suggestedPresentationDelay;
}
const hasParts = getLastParts(media).length > 0; // look for "part" delays from ll-hls first
if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
return media.serverControl.partHoldBack;
} else if (hasParts && media.partTargetDuration) {
return media.partTargetDuration * 3; // finally look for full segment delays
} else if (media.serverControl && media.serverControl.holdBack) {
return media.serverControl.holdBack;
} else if (media.targetDuration) {
return media.targetDuration * 3;
}
return 0;
};
/**
* walk backward until we find a duration we can use
* or return a failure
*
* @param {Playlist} playlist the playlist to walk through
* @param {Number} endSequence the mediaSequence to stop walking on
*/
const backwardDuration = function (playlist, endSequence) {
let result = 0;
let i = endSequence - playlist.mediaSequence; // if a start time is available for segment immediately following
// the interval, use it
let segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
// information that is earlier than endSequence
if (segment) {
if (typeof segment.start !== 'undefined') {
return {
result: segment.start,
precise: true
};
}
if (typeof segment.end !== 'undefined') {
return {
result: segment.end - segment.duration,
precise: true
};
}
}
while (i--) {
segment = playlist.segments[i];
if (typeof segment.end !== 'undefined') {
return {
result: result + segment.end,
precise: true
};
}
result += segmentDurationWithParts(playlist, segment);
if (typeof segment.start !== 'undefined') {
return {
result: result + segment.start,
precise: true
};
}
}
return {
result,
precise: false
};
};
/**
* walk forward until we find a duration we can use
* or return a failure
*
* @param {Playlist} playlist the playlist to walk through
* @param {number} endSequence the mediaSequence to stop walking on
*/
const forwardDuration = function (playlist, endSequence) {
let result = 0;
let segment;
let i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
// information
for (; i < playlist.segments.length; i++) {
segment = playlist.segments[i];
if (typeof segment.start !== 'undefined') {
return {
result: segment.start - result,
precise: true
};
}
result += segmentDurationWithParts(playlist, segment);
if (typeof segment.end !== 'undefined') {
return {
result: segment.end - result,
precise: true
};
}
} // indicate we didn't find a useful duration estimate
return {
result: -1,
precise: false
};
};
/**
* Calculate the media duration from the segments associated with a
* playlist. The duration of a subinterval of the available segments
* may be calculated by specifying an end index.
*
* @param {Object} playlist a media playlist object
* @param {number=} endSequence an exclusive upper boundary
* for the playlist. Defaults to the playlist's media sequence plus its length.
* @param {number} expired the amount of time that has dropped
* off the front of the playlist in a live scenario
* @return {number} the duration between the first available segment
* and end index.
*/
const intervalDuration = function (playlist, endSequence, expired) {
if (typeof endSequence === 'undefined') {
endSequence = playlist.mediaSequence + playlist.segments.length;
}
if (endSequence < playlist.mediaSequence) {
return 0;
} // do a backward walk to estimate the duration
const backward = backwardDuration(playlist, endSequence);
if (backward.precise) {
// if we were able to base our duration estimate on timing
// information provided directly from the Media Source, return
// it
return backward.result;
} // walk forward to see if a precise duration estimate can be made
// that way
const forward = forwardDuration(playlist, endSequence);
if (forward.precise) {
// we found a segment that has been buffered and so its
// position is known precisely
return forward.result;
} // return the less-precise, playlist-based duration estimate
return backward.result + expired;
};
/**
* Calculates the duration of a playlist. If a start and end index
* are specified, the duration will be for the subset of the media
* timeline between those two indices. The total duration for live
* playlists is always Infinity.
*
* @param {Object} playlist a media playlist object
* @param {number=} endSequence an exclusive upper
* boundary for the playlist. Defaults to the playlist media
* sequence number plus its length.
* @param {number=} expired the amount of time that has
* dropped off the front of the playlist in a live scenario
* @return {number} the duration between the start index and end
* index.
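* @example <caption>Sketch with a tiny hand-built VOD playlist</caption>
* duration({ endList: true, mediaSequence: 0, segments: [{ duration: 10 }, { duration: 10 }] }); // 20
* // live playlists (no endList) report Infinity when no endSequence is given:
* duration({ mediaSequence: 0, segments: [{ duration: 10 }] }); // Infinity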
*/
const duration = function (playlist, endSequence, expired) {
if (!playlist) {
return 0;
}
if (typeof expired !== 'number') {
expired = 0;
} // if a slice of the total duration is not requested, use
// playlist-level duration indicators when they're present
if (typeof endSequence === 'undefined') {
// if present, use the duration specified in the playlist
if (playlist.totalDuration) {
return playlist.totalDuration;
} // duration should be Infinity for live playlists
if (!playlist.endList) {
return window$1.Infinity;
}
} // calculate the total duration based on the segment durations
return intervalDuration(playlist, endSequence, expired);
};
/**
* Calculate the time between two indexes in the current playlist.
* Neither the start index nor the end index needs to be within the
* current playlist; in that case, the provided defaultDuration
* (typically the playlist's targetDuration) is used to approximate
* the durations of the segments that fall outside it.
*
* @param {Array} options.durationList list to iterate over for durations.
* @param {number} options.defaultDuration duration to use for elements before or after the durationList
* @param {number} options.startIndex partsAndSegments index to start
* @param {number} options.endIndex partsAndSegments index to end.
* @return {number} the number of seconds between startIndex and endIndex
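* @example <caption>Worked sketch; indexes below zero fall back to defaultDuration</caption>
* sumDurations({
*   defaultDuration: 10,
*   durationList: [{ duration: 4 }, { duration: 5 }, { duration: 6 }],
*   startIndex: -2,
*   endIndex: 2
* }); // 10 + 10 (two indexes before the list) + 4 + 5 === 29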
*/
const sumDurations = function ({
defaultDuration,
durationList,
startIndex,
endIndex
}) {
let durations = 0;
if (startIndex > endIndex) {
[startIndex, endIndex] = [endIndex, startIndex];
}
if (startIndex < 0) {
for (let i = startIndex; i < Math.min(0, endIndex); i++) {
durations += defaultDuration;
}
startIndex = 0;
}
for (let i = startIndex; i < endIndex; i++) {
durations += durationList[i].duration;
}
return durations;
};
/**
* Calculates the playlist end time
*
* @param {Object} playlist a media playlist object
* @param {number=} expired the amount of time that has
* dropped off the front of the playlist in a live scenario
* @param {boolean|false} useSafeLiveEnd a boolean value indicating whether or not the
* playlist end calculation should consider the safe live end
* (truncate the playlist end by three segments). This is normally
* used for calculating the end of the playlist's seekable range.
* This takes into account the value of liveEdgePadding.
* Setting liveEdgePadding to 0 is equivalent to setting this to false.
* @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
* If this is provided, it is used in the safe live end calculation.
* Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
* Corresponds to suggestedPresentationDelay in DASH manifests.
* @return {number} the end time of playlist
* @function playlistEnd
*/
const playlistEnd = function (playlist, expired, useSafeLiveEnd, liveEdgePadding) {
if (!playlist || !playlist.segments) {
return null;
}
if (playlist.endList) {
return duration(playlist);
}
if (expired === null) {
return null;
}
expired = expired || 0;
let lastSegmentEndTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);
if (useSafeLiveEnd) {
liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
lastSegmentEndTime -= liveEdgePadding;
} // don't return a time less than zero
return Math.max(0, lastSegmentEndTime);
};
/**
* Calculates the interval of time that is currently seekable in a
* playlist. The returned time ranges are relative to the earliest
* moment in the specified playlist that is still available. A full
* seekable implementation for live streams would need to offset
* these values by the duration of content that has expired from the
* stream.
*
* @param {Object} playlist a media playlist object
* @param {number=} expired the amount of time that has
* dropped off the front of the playlist in a live scenario
* @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
* Corresponds to suggestedPresentationDelay in DASH manifests.
* @return {TimeRanges} the periods of time that are valid targets
* for seeking
*/
const seekable = function (playlist, expired, liveEdgePadding) {
const useSafeLiveEnd = true;
const seekableStart = expired || 0;
let seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
if (seekableEnd === null) {
return createTimeRanges();
} // Clamp seekable end since it can not be less than the seekable start
if (seekableEnd < seekableStart) {
seekableEnd = seekableStart;
}
return createTimeRanges(seekableStart, seekableEnd);
};
/**
* Determine the index and estimated starting time of the segment that
* contains a specified playback position in a media playlist.
*
* @param {Object} options.playlist the media playlist to query
* @param {number} options.currentTime The number of seconds since the earliest
* possible position to determine the containing segment for
* @param {number} options.startTime the time when the segment/part starts
* @param {number} options.startingSegmentIndex the segment index to start looking at.
* @param {number?} [options.startingPartIndex] the part index to look at within the segment.
*
* @return {Object} an object with partIndex, segmentIndex, and startTime.
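* @example <caption>Sketch with a hand-built playlist of three 10 second segments</caption>
* getMediaInfoForTime({
*   playlist: { targetDuration: 10, segments: [{ duration: 10 }, { duration: 10 }, { duration: 10 }] },
*   currentTime: 25,
*   startingSegmentIndex: 0,
*   startTime: 0
* }); // { partIndex: null, segmentIndex: 2, startTime: 20 }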
*/
const getMediaInfoForTime = function ({
playlist,
currentTime,
startingSegmentIndex,
startingPartIndex,
startTime,
exactManifestTimings
}) {
let time = currentTime - startTime;
const partsAndSegments = getPartsAndSegments(playlist);
let startIndex = 0;
for (let i = 0; i < partsAndSegments.length; i++) {
const partAndSegment = partsAndSegments[i];
if (startingSegmentIndex !== partAndSegment.segmentIndex) {
continue;
} // skip this if part index does not match.
if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
continue;
}
startIndex = i;
break;
}
if (time < 0) {
// Walk backward from startIndex in the playlist, adding durations
// until we find a segment that contains `time` and return it
if (startIndex > 0) {
for (let i = startIndex - 1; i >= 0; i--) {
const partAndSegment = partsAndSegments[i];
time += partAndSegment.duration;
if (exactManifestTimings) {
if (time < 0) {
continue;
}
} else if (time + TIME_FUDGE_FACTOR <= 0) {
continue;
}
return {
partIndex: partAndSegment.partIndex,
segmentIndex: partAndSegment.segmentIndex,
startTime: startTime - sumDurations({
defaultDuration: playlist.targetDuration,
durationList: partsAndSegments,
startIndex,
endIndex: i
})
};
}
} // We were unable to find a good segment within the playlist
// so select the first segment
return {
partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
startTime: currentTime
};
} // When startIndex is negative, we first walk forward to first segment
// adding target durations. If we "run out of time" before getting to
// the first segment, return the first segment
if (startIndex < 0) {
for (let i = startIndex; i < 0; i++) {
time -= playlist.targetDuration;
if (time < 0) {
return {
partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
startTime: currentTime
};
}
}
startIndex = 0;
} // Walk forward from startIndex in the playlist, subtracting durations
// until we find a segment that contains `time` and return it
for (let i = startIndex; i < partsAndSegments.length; i++) {
const partAndSegment = partsAndSegments[i];
time -= partAndSegment.duration;
const canUseFudgeFactor = partAndSegment.duration > TIME_FUDGE_FACTOR;
const isExactlyAtTheEnd = time === 0;
const isExtremelyCloseToTheEnd = canUseFudgeFactor && time + TIME_FUDGE_FACTOR >= 0;
if (isExactlyAtTheEnd || isExtremelyCloseToTheEnd) {
// 1) We are exactly at the end of the current segment.
// 2) We are extremely close to the end of the current segment (The difference is less than 1 / 30).
// We may encounter this situation when
// we don't have exact match between segment duration info in the manifest and the actual duration of the segment
// For example:
// We appended 3 segments 10 seconds each, meaning we should have 30 sec buffered,
// but the actual buffered is 29.99999
//
// In both cases:
// if we passed current time -> it means that we already played current segment
// if we passed buffered.end -> it means that this segment is already loaded and buffered
// we should select the next segment if we have one:
if (i !== partsAndSegments.length - 1) {
continue;
}
}
if (exactManifestTimings) {
if (time > 0) {
continue;
}
} else if (time - TIME_FUDGE_FACTOR >= 0) {
continue;
}
return {
partIndex: partAndSegment.partIndex,
segmentIndex: partAndSegment.segmentIndex,
startTime: startTime + sumDurations({
defaultDuration: playlist.targetDuration,
durationList: partsAndSegments,
startIndex,
endIndex: i
})
};
} // We are out of possible candidates so load the last one...
return {
segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
startTime: currentTime
};
};
/**
* Check whether the playlist is excluded or not.
*
* @param {Object} playlist the media playlist object
* @return {boolean} whether the playlist is excluded or not
* @function isExcluded
*/
const isExcluded = function (playlist) {
return playlist.excludeUntil && playlist.excludeUntil > Date.now();
};
/**
* Check whether the playlist is compatible with current playback configuration or has
* been excluded permanently for being incompatible.
*
* @param {Object} playlist the media playlist object
* @return {boolean} whether the playlist is incompatible or not
* @function isIncompatible
*/
const isIncompatible = function (playlist) {
return playlist.excludeUntil && playlist.excludeUntil === Infinity;
};
/**
* Check whether the playlist is enabled or not.
*
* @param {Object} playlist the media playlist object
* @return {boolean} whether the playlist is enabled or not
* @function isEnabled
*/
const isEnabled = function (playlist) {
const excluded = isExcluded(playlist);
return !playlist.disabled && !excluded;
};
/**
* Check whether the playlist has been manually disabled through the representations api.
*
* @param {Object} playlist the media playlist object
* @return {boolean} whether the playlist is disabled manually or not
* @function isDisabled
*/
const isDisabled = function (playlist) {
return playlist.disabled;
};
/**
* Returns whether the current playlist is an AES encrypted HLS stream
*
* @return {boolean} true if it's an AES encrypted HLS stream
*/
const isAes = function (media) {
for (let i = 0; i < media.segments.length; i++) {
if (media.segments[i].key) {
return true;
}
}
return false;
};
/**
* Checks if the playlist has a value for the specified attribute
*
* @param {string} attr
* Attribute to check for
* @param {Object} playlist
* The media playlist object
* @return {boolean}
* Whether the playlist contains a value for the attribute or not
* @function hasAttribute
*/
const hasAttribute = function (attr, playlist) {
return playlist.attributes && playlist.attributes[attr];
};
/**
* Estimates the time required to complete a segment download from the specified playlist
*
* @param {number} segmentDuration
* Duration of requested segment
* @param {number} bandwidth
* Current measured bandwidth of the player
* @param {Object} playlist
* The media playlist object
* @param {number=} bytesReceived
* Number of bytes already received for the request. Defaults to 0
* @return {number|NaN}
* The estimated time to request the segment. NaN if bandwidth information for
* the given playlist is unavailable
* @function estimateSegmentRequestTime
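* @example <caption>Worked sketch; the BANDWIDTH attribute and the measured bandwidth are in bits per second</caption>
* // a 6 second segment from a 2 Mbps rendition over a 4 Mbps connection:
* estimateSegmentRequestTime(6, 4e6, { attributes: { BANDWIDTH: 2e6 } }); // (6 * 2e6) / 4e6 === 3 seconds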
*/
const estimateSegmentRequestTime = function (segmentDuration, bandwidth, playlist, bytesReceived = 0) {
if (!hasAttribute('BANDWIDTH', playlist)) {
return NaN;
}
const size = segmentDuration * playlist.attributes.BANDWIDTH;
return (size - bytesReceived * 8) / bandwidth;
};
/*
* Returns whether the current playlist is the lowest rendition
*
* @return {Boolean} true if on lowest rendition
*/
const isLowestEnabledRendition = (main, media) => {
if (main.playlists.length === 1) {
return true;
}
const currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
return main.playlists.filter(playlist => {
if (!isEnabled(playlist)) {
return false;
}
return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
}).length === 0;
};
const playlistMatch = (a, b) => {
// both playlists are null
// or only one playlist is non-null
// no match
if (!a && !b || !a && b || a && !b) {
return false;
} // playlist objects are the same, match
if (a === b) {
return true;
} // first try to use id as it should be the most
// accurate
if (a.id && b.id && a.id === b.id) {
return true;
} // next try to use resolvedUri as it should be the
// second most accurate.
if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
return true;
} // finally try to use uri as it should be accurate
// but might miss a few cases for relative uris
if (a.uri && b.uri && a.uri === b.uri) {
return true;
}
return false;
};
const someAudioVariant = function (main, callback) {
const AUDIO = main && main.mediaGroups && main.mediaGroups.AUDIO || {};
let found = false;
for (const groupName in AUDIO) {
for (const label in AUDIO[groupName]) {
found = callback(AUDIO[groupName][label]);
if (found) {
break;
}
}
if (found) {
break;
}
}
return !!found;
};
const isAudioOnly = main => {
// we are audio only if we have no main playlists but do
// have media group playlists.
if (!main || !main.playlists || !main.playlists.length) {
// without audio variants or playlists this
// is not an audio only main.
const found = someAudioVariant(main, variant => variant.playlists && variant.playlists.length || variant.uri);
return found;
} // if every playlist has only an audio codec it is audio only
for (let i = 0; i < main.playlists.length; i++) {
const playlist = main.playlists[i];
const CODECS = playlist.attributes && playlist.attributes.CODECS; // all codecs are audio, this is an audio playlist.
if (CODECS && CODECS.split(',').every(c => isAudioCodec(c))) {
continue;
} // if the playlist is in an audio group, it is audio only
const found = someAudioVariant(main, variant => playlistMatch(playlist, variant));
if (found) {
continue;
} // if we make it here this playlist isn't audio and we
// are not audio only
return false;
} // if we make it past every playlist without returning, then
// this is an audio only playlist.
return true;
}; // exports
var Playlist = {
liveEdgeDelay,
duration,
seekable,
getMediaInfoForTime,
isEnabled,
isDisabled,
isExcluded,
isIncompatible,
playlistEnd,
isAes,
hasAttribute,
estimateSegmentRequestTime,
isLowestEnabledRendition,
isAudioOnly,
playlistMatch,
segmentDurationWithParts
};
const {
log
} = videojs;
const createPlaylistID = (index, uri) => {
return `${index}-${uri}`;
}; // default function for creating a group id
const groupID = (type, group, label) => {
return `placeholder-uri-${type}-${group}-${label}`;
};
/**
* Parses a given m3u8 playlist
*
* @param {Function} [onwarn]
* a function to call when the parser triggers a warning event.
* @param {Function} [oninfo]
* a function to call when the parser triggers an info event.
* @param {string} manifestString
* The downloaded manifest string
* @param {Object[]} [customTagParsers]
* An array of custom tag parsers for the m3u8-parser instance
* @param {Object[]} [customTagMappers]
* An array of custom tag mappers for the m3u8-parser instance
* @param {boolean} [llhls]
* Whether to keep ll-hls features in the manifest after parsing.
* @return {Object}
* The manifest object
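* @example <caption>A minimal sketch; the manifest string is a hand-written assumption</caption>
* const manifest = parseManifest({
*   manifestString: '#EXTM3U\n#EXT-X-TARGETDURATION:6\n#EXTINF:6,\nsegment0.ts\n#EXT-X-ENDLIST',
*   onwarn: ({ message }) => console.warn(message)
* });
* // manifest.targetDuration === 6 and manifest.segments[0].duration === 6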
*/
const parseManifest = ({
onwarn,
oninfo,
manifestString,
customTagParsers = [],
customTagMappers = [],
llhls
}) => {
const parser = new Parser();
if (onwarn) {
parser.on('warn', onwarn);
}
if (oninfo) {
parser.on('info', oninfo);
}
customTagParsers.forEach(customParser => parser.addParser(customParser));
customTagMappers.forEach(mapper => parser.addTagMapper(mapper));
parser.push(manifestString);
parser.end();
const manifest = parser.manifest; // remove llhls features from the parsed manifest
// if we don't want llhls support.
if (!llhls) {
['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
if (manifest.hasOwnProperty(k)) {
delete manifest[k];
}
});
if (manifest.segments) {
manifest.segments.forEach(function (segment) {
['parts', 'preloadHints'].forEach(function (k) {
if (segment.hasOwnProperty(k)) {
delete segment[k];
}
});
});
}
}
if (!manifest.targetDuration) {
let targetDuration = 10;
if (manifest.segments && manifest.segments.length) {
targetDuration = manifest.segments.reduce((acc, s) => Math.max(acc, s.duration), 0);
}
if (onwarn) {
onwarn({
message: `manifest has no targetDuration defaulting to ${targetDuration}`
});
}
manifest.targetDuration = targetDuration;
}
const parts = getLastParts(manifest);
if (parts.length && !manifest.partTargetDuration) {
const partTargetDuration = parts.reduce((acc, p) => Math.max(acc, p.duration), 0);
if (onwarn) {
onwarn({
message: `manifest has no partTargetDuration defaulting to ${partTargetDuration}`
});
log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
}
manifest.partTargetDuration = partTargetDuration;
}
return manifest;
};
/**
* Loops through all supported media groups in main and calls the provided
* callback for each group
*
* @param {Object} main
* The parsed main manifest object
* @param {Function} callback
* Callback to call for each media group
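* @example <caption>Sketch; `main` is an assumed parsed main manifest</caption>
* forEachMediaGroup(main, (properties, mediaType, groupKey, labelKey) => {
*   console.log(`${mediaType} group "${groupKey}", label "${labelKey}"`, properties.uri);
* });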
*/
const forEachMediaGroup = (main, callback) => {
if (!main.mediaGroups) {
return;
}
['AUDIO', 'SUBTITLES'].forEach(mediaType => {
if (!main.mediaGroups[mediaType]) {
return;
}
for (const groupKey in main.mediaGroups[mediaType]) {
for (const labelKey in main.mediaGroups[mediaType][groupKey]) {
const mediaProperties = main.mediaGroups[mediaType][groupKey][labelKey];
callback(mediaProperties, mediaType, groupKey, labelKey);
}
}
});
};
/**
* Adds properties and attributes to the playlist to keep consistent functionality for
* playlists throughout VHS.
*
* @param {Object} config
* Arguments object
* @param {Object} config.playlist
* The media playlist
* @param {string} [config.uri]
* The uri to the media playlist (if media playlist is not from within a main
* playlist)
* @param {string} config.id
* ID to use for the playlist
*/
const setupMediaPlaylist = ({
playlist,
uri,
id
}) => {
playlist.id = id;
playlist.playlistErrors_ = 0;
if (uri) {
// For media playlists, m3u8-parser does not have access to a URI, as HLS media
// playlists do not contain their own source URI, but one is needed for consistency in
// VHS.
playlist.uri = uri;
} // For HLS main playlists, even though certain attributes MUST be defined, the
// stream may still be played without them.
// For HLS media playlists, m3u8-parser does not attach an attributes object to the
// manifest.
//
// To avoid undefined reference errors through the project, and make the code easier
// to write/read, add an empty attributes object for these cases.
playlist.attributes = playlist.attributes || {};
};
/**
* Adds ID, resolvedUri, and attributes properties to each playlist of the main, where
* necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
* playlist references to the playlists array.
*
* @param {Object} main
* The main playlist
*/
const setupMediaPlaylists = main => {
let i = main.playlists.length;
while (i--) {
const playlist = main.playlists[i];
setupMediaPlaylist({
playlist,
id: createPlaylistID(i, playlist.uri)
});
playlist.resolvedUri = resolveUrl(main.uri, playlist.uri);
main.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility
main.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
// the stream can be played without it. Although an attributes property may have been
// added to the playlist to prevent undefined references, issue a warning to fix the
// manifest.
if (!playlist.attributes.BANDWIDTH) {
log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
}
}
};
/**
* Adds resolvedUri properties to each media group.
*
* @param {Object} main
* The main playlist
*/
const resolveMediaGroupUris = main => {
forEachMediaGroup(main, properties => {
if (properties.uri) {
properties.resolvedUri = resolveUrl(main.uri, properties.uri);
}
});
};
/**
* Creates a main playlist wrapper to insert a sole media playlist into.
*
* @param {Object} media
* Media playlist
* @param {string} uri
* The media URI
*
* @return {Object}
* main playlist
*/
const mainForMedia = (media, uri) => {
const id = createPlaylistID(0, uri);
const main = {
mediaGroups: {
'AUDIO': {},
'VIDEO': {},
'CLOSED-CAPTIONS': {},
'SUBTITLES': {}
},
uri: window$1.location.href,
resolvedUri: window$1.location.href,
playlists: [{
uri,
id,
resolvedUri: uri,
// m3u8-parser does not attach an attributes property to media playlists so make
// sure that the property is attached to avoid undefined reference errors
attributes: {}
}]
}; // set up ID reference
main.playlists[id] = main.playlists[0]; // URI reference added for backwards compatibility
main.playlists[uri] = main.playlists[0];
return main;
};
/**
* Does an in-place update of the main manifest to add updated playlist URI references
* as well as other properties needed by VHS that aren't included by the parser.
*
* @param {Object} main
* main manifest object
* @param {string} uri
* The source URI
* @param {function} createGroupID
* A function to determine how to create the groupID for mediaGroups
*/
const addPropertiesToMain = (main, uri, createGroupID = groupID) => {
main.uri = uri;
for (let i = 0; i < main.playlists.length; i++) {
if (!main.playlists[i].uri) {
// Set up phony URIs for the playlists since playlists are referenced by their URIs
// throughout VHS, but some formats (e.g., DASH) don't have external URIs
// TODO: consider adding dummy URIs in mpd-parser
const phonyUri = `placeholder-uri-${i}`;
main.playlists[i].uri = phonyUri;
}
}
const audioOnlyMain = isAudioOnly(main);
forEachMediaGroup(main, (properties, mediaType, groupKey, labelKey) => {
// add a playlist array under properties
if (!properties.playlists || !properties.playlists.length) {
// If the manifest is audio only and this media group does not have a uri, check
// if the media group is located in the main list of playlists. If it is, don't add
// placeholder properties as it shouldn't be considered an alternate audio track.
if (audioOnlyMain && mediaType === 'AUDIO' && !properties.uri) {
for (let i = 0; i < main.playlists.length; i++) {
const p = main.playlists[i];
if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
return;
}
}
}
properties.playlists = [_extends({}, properties)];
}
properties.playlists.forEach(function (p, i) {
const groupId = createGroupID(mediaType, groupKey, labelKey, p);
const id = createPlaylistID(i, groupId);
if (p.uri) {
p.resolvedUri = p.resolvedUri || resolveUrl(main.uri, p.uri);
} else {
// DEPRECATED: this has been kept to prevent a breaking change.
// Previously we only ever had a single media group playlist, so
// we mark the first playlist uri without prepending the index, as we used to.
// Ideally we would handle all of the playlists the same way.
p.uri = i === 0 ? groupId : id; // don't resolve a placeholder uri to an absolute url, just use
// the placeholder again
p.resolvedUri = p.uri;
}
p.id = p.id || id; // add an empty attributes object, all playlists are
// expected to have this.
p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)
main.playlists[p.id] = p;
main.playlists[p.uri] = p;
});
});
setupMediaPlaylists(main);
resolveMediaGroupUris(main);
};
class DateRangesStorage {
constructor() {
this.offset_ = null;
this.pendingDateRanges_ = new Map();
this.processedDateRanges_ = new Map();
}
setOffset(segments = []) {
// already set
if (this.offset_ !== null) {
return;
} // no segment to process
if (!segments.length) {
return;
}
const [firstSegment] = segments; // no program date time
if (firstSegment.programDateTime === undefined) {
return;
} // Set offset as ProgramDateTime for the very first segment of the very first playlist load:
this.offset_ = firstSegment.programDateTime / 1000;
}
setPendingDateRanges(dateRanges = []) {
if (!dateRanges.length) {
return;
}
const [dateRange] = dateRanges;
const startTime = dateRange.startDate.getTime();
this.trimProcessedDateRanges_(startTime);
this.pendingDateRanges_ = dateRanges.reduce((map, pendingDateRange) => {
map.set(pendingDateRange.id, pendingDateRange);
return map;
}, new Map());
}
processDateRange(dateRange) {
this.pendingDateRanges_.delete(dateRange.id);
this.processedDateRanges_.set(dateRange.id, dateRange);
}
getDateRangesToProcess() {
if (this.offset_ === null) {
return [];
}
const dateRangeClasses = {};
const dateRangesToProcess = [];
this.pendingDateRanges_.forEach((dateRange, id) => {
if (this.processedDateRanges_.has(id)) {
return;
}
dateRange.startTime = dateRange.startDate.getTime() / 1000 - this.offset_;
dateRange.processDateRange = () => this.processDateRange(dateRange);
dateRangesToProcess.push(dateRange);
if (!dateRange.class) {
return;
}
if (dateRangeClasses[dateRange.class]) {
const length = dateRangeClasses[dateRange.class].push(dateRange);
dateRange.classListIndex = length - 1;
} else {
dateRangeClasses[dateRange.class] = [dateRange];
dateRange.classListIndex = 0;
}
});
for (const dateRange of dateRangesToProcess) {
const classList = dateRangeClasses[dateRange.class] || [];
if (dateRange.endDate) {
dateRange.endTime = dateRange.endDate.getTime() / 1000 - this.offset_;
} else if (dateRange.endOnNext && classList[dateRange.classListIndex + 1]) {
dateRange.endTime = classList[dateRange.classListIndex + 1].startTime;
} else if (dateRange.duration) {
dateRange.endTime = dateRange.startTime + dateRange.duration;
} else if (dateRange.plannedDuration) {
dateRange.endTime = dateRange.startTime + dateRange.plannedDuration;
} else {
dateRange.endTime = dateRange.startTime;
}
}
return dateRangesToProcess;
}
trimProcessedDateRanges_(startTime) {
const copy = new Map(this.processedDateRanges_);
copy.forEach((dateRange, id) => {
if (dateRange.startDate.getTime() < startTime) {
this.processedDateRanges_.delete(id);
}
});
}
}
const QUOTA_EXCEEDED_ERR = 22;
const getStreamingNetworkErrorMetadata = ({
requestType,
request,
error,
parseFailure
}) => {
const isBadStatus = request.status < 200 || request.status > 299;
const isFailure = request.status >= 400 && request.status <= 499;
const errorMetadata = {
uri: request.uri,
requestType
};
const isBadStatusOrParseFailure = isBadStatus && !isFailure || parseFailure;
if (error && isFailure) {
// copy original error and add to the metadata.
errorMetadata.error = _extends({}, error);
errorMetadata.errorType = videojs.Error.NetworkRequestFailed;
} else if (request.aborted) {
errorMetadata.errorType = videojs.Error.NetworkRequestAborted;
} else if (request.timedout) {
errorMetadata.errorType = videojs.Error.NetworkRequestTimeout;
} else if (isBadStatusOrParseFailure) {
const errorType = parseFailure ? videojs.Error.NetworkBodyParserFailed : videojs.Error.NetworkBadStatus;
errorMetadata.errorType = errorType;
errorMetadata.status = request.status;
errorMetadata.headers = request.headers;
}
return errorMetadata;
};
const {
EventTarget: EventTarget$1
} = videojs;
const addLLHLSQueryDirectives = (uri, media) => {
if (media.endList || !media.serverControl) {
return uri;
}
const parameters = {};
if (media.serverControl.canBlockReload) {
const {
preloadSegment
} = media; // next msn is a zero based value, length is not.
let nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
// that we are going to request a part of that preload segment.
// the logic below is used to determine that.
if (preloadSegment) {
const parts = preloadSegment.parts || []; // _HLS_part is a zero based index
const nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to just the
// length of parts, then we know we had part preload hints
// and we need to add the _HLS_part= query
if (nextPart > -1 && nextPart !== parts.length - 1) {
// add existing parts to our preload hints
// eslint-disable-next-line
parameters._HLS_part = nextPart;
} // this if statement makes sure that we request the msn
// of the preload segment if:
// 1. the preload segment had parts (and was not yet a full segment)
// but was added to our segments array
// 2. the preload segment had preload hints for parts that are not in
// the manifest yet.
// in all other cases we want the segment after the preload segment
// which will be given by using media.segments.length because it is 1 based
// rather than 0 based.
if (nextPart > -1 || parts.length) {
nextMSN--;
}
} // add _HLS_msn= in front of any _HLS_part query
// eslint-disable-next-line
parameters._HLS_msn = nextMSN;
}
if (media.serverControl && media.serverControl.canSkipUntil) {
// add _HLS_skip= in front of all other queries.
// eslint-disable-next-line
parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
}
if (Object.keys(parameters).length) {
const parsedUri = new window$1.URL(uri);
['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
if (!parameters.hasOwnProperty(name)) {
return;
}
parsedUri.searchParams.set(name, parameters[name]);
});
uri = parsedUri.toString();
}
return uri;
};
/**
* Returns a new segment object with properties and
* the parts array merged.
*
* @param {Object} a the old segment
* @param {Object} b the new segment
*
* @return {Object} the merged segment
*/
const updateSegment = (a, b) => {
if (!a) {
return b;
}
const result = merge(a, b); // if only the old segment has preload hints
// and the new one does not, remove preload hints.
if (a.preloadHints && !b.preloadHints) {
delete result.preloadHints;
} // if only the old segment has parts
// then the parts are no longer valid
if (a.parts && !b.parts) {
delete result.parts; // if both segments have parts
// copy part properties from the old segment
// to the new one.
} else if (a.parts && b.parts) {
for (let i = 0; i < b.parts.length; i++) {
if (a.parts && a.parts[i]) {
result.parts[i] = merge(a.parts[i], b.parts[i]);
}
}
} // set skipped to false for segments that
// have had information merged from the old segment.
if (!a.skipped && b.skipped) {
result.skipped = false;
} // set preload to false for segments that have
// had information added in the new segment.
if (a.preload && !b.preload) {
result.preload = false;
}
return result;
};
/**
* Returns a new array of segments that is the result of merging
* properties from an older list of segments onto an updated
* list. No properties on the updated playlist will be overwritten.
*
* @param {Array} original the outdated list of segments
* @param {Array} update the updated list of segments
* @param {number=} offset the index of the first update
* segment in the original segment list. For non-live playlists,
* this should always be zero and does not need to be
* specified. For live playlists, it should be the difference
* between the media sequence numbers in the original and updated
* playlists.
* @return {Array} a list of merged segment objects
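* @example <caption>Sketch; timing info from the old list is carried onto matching updated segments</caption>
* updateSegments(
*   [{ uri: '0.ts', start: 0 }, { uri: '1.ts', start: 6 }],
*   [{ uri: '1.ts' }, { uri: '2.ts' }],
*   1
* ); // [{ uri: '1.ts', start: 6 }, { uri: '2.ts' }]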
*/
const updateSegments = (original, update, offset) => {
const oldSegments = original.slice();
const newSegments = update.slice();
offset = offset || 0;
const result = [];
let currentMap;
for (let newIndex = 0; newIndex < newSegments.length; newIndex++) {
const oldSegment = oldSegments[newIndex + offset];
const newSegment = newSegments[newIndex];
if (oldSegment) {
currentMap = oldSegment.map || currentMap;
result.push(updateSegment(oldSegment, newSegment));
} else {
// carry over map to new segment if it is missing
if (currentMap && !newSegment.map) {
newSegment.map = currentMap;
}
result.push(newSegment);
}
}
return result;
};
const resolveSegmentUris = (segment, baseUri) => {
// preloadSegment will not have a uri at all
// as the segment isn't actually in the manifest yet, only parts
if (!segment.resolvedUri && segment.uri) {
segment.resolvedUri = resolveUrl(baseUri, segment.uri);
}
if (segment.key && !segment.key.resolvedUri) {
segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
}
if (segment.map && !segment.map.resolvedUri) {
segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
}
if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
}
if (segment.parts && segment.parts.length) {
segment.parts.forEach(p => {
if (p.resolvedUri) {
return;
}
p.resolvedUri = resolveUrl(baseUri, p.uri);
});
}
if (segment.preloadHints && segment.preloadHints.length) {
segment.preloadHints.forEach(p => {
if (p.resolvedUri) {
return;
}
p.resolvedUri = resolveUrl(baseUri, p.uri);
});
}
};
const getAllSegments = function (media) {
const segments = media.segments || [];
const preloadSegment = media.preloadSegment; // a preloadSegment with only preloadHints is not currently
// a usable segment, only include a preloadSegment that has
// parts.
if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
// if preloadHints has a MAP that means that the
// init segment is going to change. We cannot use any of the parts
// from this preload segment.
if (preloadSegment.preloadHints) {
for (let i = 0; i < preloadSegment.preloadHints.length; i++) {
if (preloadSegment.preloadHints[i].type === 'MAP') {
return segments;
}
}
} // set the duration for our preload segment to target duration.
preloadSegment.duration = media.targetDuration;
preloadSegment.preload = true;
segments.push(preloadSegment);
}
return segments;
}; // consider the playlist unchanged if the playlist object is the same or
// the number of segments is equal, the media sequence number is unchanged,
// and this playlist hasn't become the end of the playlist
const isPlaylistUnchanged = (a, b) => a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence && a.preloadSegment === b.preloadSegment;
/**
* Returns a new main playlist that is the result of merging an
* updated media playlist into the original version. If the
* updated media playlist does not match any of the playlist
* entries in the original main playlist, null is returned.
*
* @param {Object} main a parsed main M3U8 object
* @param {Object} media a parsed media M3U8 object
* @return {Object} a new object that represents the original
* main playlist with the updated media playlist merged in, or
* null if the merge produced no change.
*/
const updateMain$1 = (main, newMedia, unchangedCheck = isPlaylistUnchanged) => {
const result = merge(main, {});
const oldMedia = result.playlists[newMedia.id];
if (!oldMedia) {
return null;
}
if (unchangedCheck(oldMedia, newMedia)) {
return null;
}
newMedia.segments = getAllSegments(newMedia);
const mergedPlaylist = merge(oldMedia, newMedia); // always use the new media's preload segment
if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
delete mergedPlaylist.preloadSegment;
} // if the update could overlap existing segment information, merge the two segment lists
if (oldMedia.segments) {
if (newMedia.skip) {
newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
// old properties into the new segments
for (let i = 0; i < newMedia.skip.skippedSegments; i++) {
newMedia.segments.unshift({
skipped: true
});
}
}
mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
} // resolve any segment URIs to prevent us from having to do it later
mergedPlaylist.segments.forEach(segment => {
resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
}); // TODO Right now in the playlists array there are two references to each playlist, one
// that is referenced by index, and one by URI. The index reference may no longer be
// necessary.
for (let i = 0; i < result.playlists.length; i++) {
if (result.playlists[i].id === newMedia.id) {
result.playlists[i] = mergedPlaylist;
}
}
result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility
result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.
forEachMediaGroup(main, (properties, mediaType, groupKey, labelKey) => {
if (!properties.playlists) {
return;
}
for (let i = 0; i < properties.playlists.length; i++) {
if (newMedia.id === properties.playlists[i].id) {
properties.playlists[i] = mergedPlaylist;
}
}
});
return result;
};
/**
* Calculates the time to wait before refreshing a live playlist
*
* @param {Object} media
* The current media
* @param {boolean} update
* True if there were any updates from the last refresh, false otherwise
* @return {number}
* The time in ms to wait before refreshing the live playlist
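* @example <caption>Sketch with a hand-built live media playlist</caption>
* const media = { targetDuration: 6, segments: [{ duration: 6 }] };
* refreshDelay(media, true);  // 6000ms, the duration of the last segment
* refreshDelay(media, false); // 3000ms, half the target duration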
*/
const refreshDelay = (media, update) => {
const segments = media.segments || [];
const lastSegment = segments[segments.length - 1];
const lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
const lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
if (update && lastDuration) {
return lastDuration * 1000;
} // if the playlist is unchanged since the last reload or last segment duration
// cannot be determined, try again after half the target duration
return (media.partTargetDuration || media.targetDuration || 10) * 500;
};
const playlistMetadataPayload = (playlists, type, isLive) => {
if (!playlists) {
return;
}
const renditions = [];
playlists.forEach(playlist => {
// we need attributes to populate rendition data.
if (!playlist.attributes) {
return;
}
const {
BANDWIDTH,
RESOLUTION,
CODECS
} = playlist.attributes;
renditions.push({
id: playlist.id,
bandwidth: BANDWIDTH,
resolution: RESOLUTION,
codecs: CODECS
});
});
return {
type,
isLive,
renditions
};
};
/**
* Load a playlist from a remote location
*
* @class PlaylistLoader
* @extends Stream
* @param {string|Object} src url or object of manifest
* @param {boolean} withCredentials the withCredentials xhr option
* @class
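* @example <caption>A minimal sketch; `vhs` stands in for a VHS handler that provides `xhr` and `options_`</caption>
* const loader = new PlaylistLoader('https://example.com/main.m3u8', vhs, { withCredentials: false });
* loader.on('loadedmetadata', () => {
*   loader.media(); // the first media playlist that was loaded
* });
* loader.load();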
*/
class PlaylistLoader extends EventTarget$1 {
constructor(src, vhs, options = {}) {
super();
if (!src) {
throw new Error('A non-empty playlist URL or object is required');
}
this.logger_ = logger('PlaylistLoader');
const {
withCredentials = false
} = options;
this.src = src;
this.vhs_ = vhs;
this.withCredentials = withCredentials;
this.addDateRangesToTextTrack_ = options.addDateRangesToTextTrack;
const vhsOptions = vhs.options_;
this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
this.llhls = vhsOptions && vhsOptions.llhls;
this.dateRangesStorage_ = new DateRangesStorage(); // initialize the loader state
this.state = 'HAVE_NOTHING'; // live playlist staleness timeout
this.handleMediaupdatetimeout_ = this.handleMediaupdatetimeout_.bind(this);
this.on('mediaupdatetimeout', this.handleMediaupdatetimeout_);
this.on('loadedplaylist', this.handleLoadedPlaylist_.bind(this));
}
handleLoadedPlaylist_() {
const mediaPlaylist = this.media();
if (!mediaPlaylist) {
return;
}
this.dateRangesStorage_.setOffset(mediaPlaylist.segments);
this.dateRangesStorage_.setPendingDateRanges(mediaPlaylist.dateRanges);
const availableDateRanges = this.dateRangesStorage_.getDateRangesToProcess();
if (!availableDateRanges.length || !this.addDateRangesToTextTrack_) {
return;
}
this.addDateRangesToTextTrack_(availableDateRanges);
}
handleMediaupdatetimeout_() {
if (this.state !== 'HAVE_METADATA') {
// only refresh the media playlist if no other activity is going on
return;
}
const media = this.media();
let uri = resolveUrl(this.main.uri, media.uri);
if (this.llhls) {
uri = addLLHLSQueryDirectives(uri, media);
}
this.state = 'HAVE_CURRENT_METADATA';
this.request = this.vhs_.xhr({
uri,
withCredentials: this.withCredentials,
requestType: 'hls-playlist'
}, (error, req) => {
// disposed
if (!this.request) {
return;
}
if (error) {
return this.playlistRequestError(this.request, this.media(), 'HAVE_METADATA');
}
this.haveMetadata({
playlistString: this.request.responseText,
url: this.media().uri,
id: this.media().id
});
});
}
playlistRequestError(xhr, playlist, startingState) {
const {
uri,
id
} = playlist; // any in-flight request is now finished
this.request = null;
if (startingState) {
this.state = startingState;
}
this.error = {
playlist: this.main.playlists[id],
status: xhr.status,
message: `HLS playlist request error at URL: ${uri}.`,
responseText: xhr.responseText,
code: xhr.status >= 500 ? 4 : 2,
metadata: getStreamingNetworkErrorMetadata({
requestType: xhr.requestType,
request: xhr,
error: xhr.error
})
};
this.trigger('error');
}
parseManifest_({
url,
manifestString
}) {
try {
return parseManifest({
onwarn: ({
message
}) => this.logger_(`m3u8-parser warn for ${url}: ${message}`),
oninfo: ({
message
}) => this.logger_(`m3u8-parser info for ${url}: ${message}`),
manifestString,
customTagParsers: this.customTagParsers,
customTagMappers: this.customTagMappers,
llhls: this.llhls
});
} catch (error) {
this.error = error;
this.error.metadata = {
errorType: videojs.Error.StreamingHlsPlaylistParserError,
error
};
}
}
/**
* Update the playlist loader's state in response to a new or updated playlist.
*
* @param {string} [playlistString]
* Playlist string (if playlistObject is not provided)
* @param {Object} [playlistObject]
* Playlist object (if playlistString is not provided)
* @param {string} url
* URL of playlist
* @param {string} id
* ID to use for playlist
*/
haveMetadata({
playlistString,
playlistObject,
url,
id
}) {
// any in-flight request is now finished
this.request = null;
this.state = 'HAVE_METADATA';
const metadata = {
playlistInfo: {
type: 'media',
uri: url
}
};
this.trigger({
type: 'playlistparsestart',
metadata
});
const playlist = playlistObject || this.parseManifest_({
url,
manifestString: playlistString
});
playlist.lastRequest = Date.now();
setupMediaPlaylist({
playlist,
uri: url,
id
}); // merge this playlist into the main manifest
const update = updateMain$1(this.main, playlist);
this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
this.pendingMedia_ = null;
if (update) {
this.main = update;
this.media_ = this.main.playlists[id];
} else {
this.trigger('playlistunchanged');
}
this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
metadata.parsedPlaylist = playlistMetadataPayload(this.main.playlists, metadata.playlistInfo.type, !this.media_.endList);
this.trigger({
type: 'playlistparsecomplete',
metadata
});
this.trigger('loadedplaylist');
}
/**
* Abort any outstanding work and clean up.
*/
dispose() {
this.trigger('dispose');
this.stopRequest();
window$1.clearTimeout(this.mediaUpdateTimeout);
window$1.clearTimeout(this.finalRenditionTimeout);
this.dateRangesStorage_ = new DateRangesStorage();
this.off();
}
stopRequest() {
if (this.request) {
const oldRequest = this.request;
this.request = null;
oldRequest.onreadystatechange = null;
oldRequest.abort();
}
}
/**
* When called without any arguments, returns the currently
* active media playlist. When called with a single argument,
* triggers the playlist loader to asynchronously switch to the
* specified media playlist. Calling this method while the
* loader is in the HAVE_NOTHING state causes an error to be emitted
* but otherwise has no effect.
*
* @param {Object=} playlist the parsed media playlist
* object to switch to
* @param {boolean=} shouldDelay whether we should delay the request by half target duration
*
* @return {Playlist} the current loaded media
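* @example <caption>Sketch; `loader` is an assumed PlaylistLoader that has already loaded its main manifest</caption>
* loader.media();                         // getter: the currently active media playlist
* loader.media(loader.main.playlists[0]); // setter: asynchronously switch to another rendition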
*/
media(playlist, shouldDelay) {
// getter
if (!playlist) {
return this.media_;
} // setter
if (this.state === 'HAVE_NOTHING') {
throw new Error('Cannot switch media playlist from ' + this.state);
} // find the playlist object if the target playlist has been
// specified by URI
if (typeof playlist === 'string') {
if (!this.main.playlists[playlist]) {
throw new Error('Unknown playlist URI: ' + playlist);
}
playlist = this.main.playlists[playlist];
}
window$1.clearTimeout(this.finalRenditionTimeout);
if (shouldDelay) {
const delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
this.finalRenditionTimeout = window$1.setTimeout(this.media.bind(this, playlist, false), delay);
return;
}
const startingState = this.state;
const mediaChange = !this.media_ || playlist.id !== this.media_.id;
const mainPlaylistRef = this.main.playlists[playlist.id]; // switch to fully loaded playlists immediately
if (mainPlaylistRef && mainPlaylistRef.endList ||
// handle the case of a playlist object (e.g., if using vhs-json with a resolved
// media playlist or, for the case of demuxed audio, a resolved audio media group)
playlist.endList && playlist.segments.length) {
// abort outstanding playlist requests
if (this.request) {
this.request.onreadystatechange = null;
this.request.abort();
this.request = null;
}
this.state = 'HAVE_METADATA';
this.media_ = playlist; // trigger media change if the active media has been updated
if (mediaChange) {
this.trigger('mediachanging');
if (startingState === 'HAVE_MAIN_MANIFEST') {
// The initial playlist was a main manifest, and the first media selected was
// also provided (in the form of a resolved playlist object) as part of the
// source object (rather than just a URL). Therefore, since the media playlist
// doesn't need to be requested, loadedmetadata won't trigger as part of the
// normal flow, and needs an explicit trigger here.
this.trigger('loadedmetadata');
} else {
this.trigger('mediachange');
}
}
return;
} // We update/set the timeout here so that live playlists
// that are not a media change will "start" the loader as expected.
// We expect that this function will start the media update timeout
// cycle again. This also prevents a playlist switch failure from
// causing us to stall during live.
this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op
if (!mediaChange) {
return;
}
this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request
if (this.request) {
if (playlist.resolvedUri === this.request.url) {
// requesting to switch to the same playlist multiple times
// has no effect after the first
return;
}
this.request.onreadystatechange = null;
this.request.abort();
this.request = null;
} // request the new playlist
if (this.media_) {
this.trigger('mediachanging');
}
this.pendingMedia_ = playlist;
const metadata = {
playlistInfo: {
type: 'media',
uri: playlist.uri
}
};
this.trigger({
type: 'playlistrequeststart',
metadata
});
this.request = this.vhs_.xhr({
uri: playlist.resolvedUri,
withCredentials: this.withCredentials,
requestType: 'hls-playlist'
}, (error, req) => {
// disposed
if (!this.request) {
return;
}
playlist.lastRequest = Date.now();
playlist.resolvedUri = resolveManifestRedirect(playlist.resolvedUri, req);
if (error) {
return this.playlistRequestError(this.request, playlist, startingState);
}
this.trigger({
type: 'playlistrequestcomplete',
metadata
});
this.haveMetadata({
playlistString: req.responseText,
url: playlist.uri,
id: playlist.id
}); // fire loadedmetadata the first time a media playlist is loaded
if (startingState === 'HAVE_MAIN_MANIFEST') {
this.trigger('loadedmetadata');
} else {
this.trigger('mediachange');
}
});
}
/**
* pause loading of the playlist
*/
pause() {
if (this.mediaUpdateTimeout) {
window$1.clearTimeout(this.mediaUpdateTimeout);
this.mediaUpdateTimeout = null;
}
this.stopRequest();
if (this.state === 'HAVE_NOTHING') {
// If we pause the loader before any data has been retrieved, it's as if we never
// started, so reset to an unstarted state.
this.started = false;
} // Need to restore state now that no activity is happening
if (this.state === 'SWITCHING_MEDIA') {
// if the loader was in the process of switching media, it should either return to
// HAVE_MAIN_MANIFEST or HAVE_METADATA depending on if the loader has loaded a media
// playlist yet. This is determined by the existence of loader.media_
if (this.media_) {
this.state = 'HAVE_METADATA';
} else {
this.state = 'HAVE_MAIN_MANIFEST';
}
} else if (this.state === 'HAVE_CURRENT_METADATA') {
this.state = 'HAVE_METADATA';
}
}
/**
* start loading of the playlist
*/
load(shouldDelay) {
if (this.mediaUpdateTimeout) {
window$1.clearTimeout(this.mediaUpdateTimeout);
this.mediaUpdateTimeout = null;
}
const media = this.media();
if (shouldDelay) {
const delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
this.mediaUpdateTimeout = window$1.setTimeout(() => {
this.mediaUpdateTimeout = null;
this.load();
}, delay);
return;
}
if (!this.started) {
this.start();
return;
}
if (media && !media.endList) {
this.trigger('mediaupdatetimeout');
} else {
this.trigger('loadedplaylist');
}
}
updateMediaUpdateTimeout_(delay) {
if (this.mediaUpdateTimeout) {
window$1.clearTimeout(this.mediaUpdateTimeout);
this.mediaUpdateTimeout = null;
} // we only use mediaUpdateTimeout for live playlists.
if (!this.media() || this.media().endList) {
return;
}
this.mediaUpdateTimeout = window$1.setTimeout(() => {
this.mediaUpdateTimeout = null;
this.trigger('mediaupdatetimeout');
this.updateMediaUpdateTimeout_(delay);
}, delay);
}
/**
* start loading of the playlist
*/
start() {
this.started = true;
if (typeof this.src === 'object') {
// in the case of an entirely constructed manifest object (meaning there's no actual
// manifest on a server), default the uri to the page's href
if (!this.src.uri) {
this.src.uri = window$1.location.href;
} // resolvedUri is added on internally after the initial request. Since there's no
// request for pre-resolved manifests, add on resolvedUri here.
this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
// request can be skipped (since the top level of the manifest, at a minimum, is
// already available as a parsed manifest object). However, if the manifest object
// represents a main playlist, some media playlists may need to be resolved before
// the starting segment list is available. Therefore, go directly to setup of the
// initial playlist, and let the normal flow continue from there.
//
// Note that the call to setup is asynchronous, as other sections of VHS may assume
// that the first request is asynchronous.
setTimeout(() => {
this.setupInitialPlaylist(this.src);
}, 0);
return;
}
const metadata = {
playlistInfo: {
type: 'multivariant',
uri: this.src
}
};
this.trigger({
type: 'playlistrequeststart',
metadata
}); // request the specified URL
this.request = this.vhs_.xhr({
uri: this.src,
withCredentials: this.withCredentials,
requestType: 'hls-playlist'
}, (error, req) => {
// disposed
if (!this.request) {
return;
} // clear the loader's request reference
this.request = null;
if (error) {
this.error = {
status: req.status,
message: `HLS playlist request error at URL: ${this.src}.`,
responseText: req.responseText,
// MEDIA_ERR_NETWORK
code: 2,
metadata: getStreamingNetworkErrorMetadata({
requestType: req.requestType,
request: req,
error
})
};
if (this.state === 'HAVE_NOTHING') {
this.started = false;
}
return this.trigger('error');
}
this.trigger({
type: 'playlistrequestcomplete',
metadata
});
this.src = resolveManifestRedirect(this.src, req);
this.trigger({
type: 'playlistparsestart',
metadata
});
const manifest = this.parseManifest_({
manifestString: req.responseText,
url: this.src
}); // we haven't loaded any variant playlists here so we default to false for isLive.
metadata.parsedPlaylist = playlistMetadataPayload(manifest.playlists, metadata.playlistInfo.type, false);
this.trigger({
type: 'playlistparsecomplete',
metadata
});
this.setupInitialPlaylist(manifest);
});
}
srcUri() {
return typeof this.src === 'string' ? this.src : this.src.uri;
}
/**
* Given a manifest object that's either a main or media playlist, trigger the proper
* events and set the state of the playlist loader.
*
* If the manifest object represents a main playlist, `loadedplaylist` will be
* triggered to allow listeners to select a playlist. If none is selected, the loader
* will default to the first one in the playlists array.
*
* If the manifest object represents a media playlist, `loadedplaylist` will be
* triggered followed by `loadedmetadata`, as the only available playlist is loaded.
*
* In the case of a media playlist, a main playlist object wrapper with one playlist
* will be created so that all logic can handle playlists in the same fashion (as an
* assumed manifest object schema).
*
* @param {Object} manifest
* The parsed manifest object
*/
setupInitialPlaylist(manifest) {
this.state = 'HAVE_MAIN_MANIFEST';
if (manifest.playlists) {
this.main = manifest;
addPropertiesToMain(this.main, this.srcUri()); // If the initial main playlist has playlists with segments already resolved,
// then resolve segment URIs in advance, as URI resolution is usually done after a playlist request,
// which won't happen for playlists that are already resolved.
manifest.playlists.forEach(playlist => {
playlist.segments = getAllSegments(playlist);
playlist.segments.forEach(segment => {
resolveSegmentUris(segment, playlist.resolvedUri);
});
});
this.trigger('loadedplaylist');
if (!this.request) {
// no media playlist was specifically selected so start
// from the first listed one
this.media(this.main.playlists[0]);
}
return;
} // In order to support media playlists passed in as vhs-json, the case where the uri
// is not provided as part of the manifest should be considered, and an appropriate
// default used.
const uri = this.srcUri() || window$1.location.href;
this.main = mainForMedia(manifest, uri);
this.haveMetadata({
playlistObject: manifest,
url: uri,
id: this.main.playlists[0].id
});
this.trigger('loadedmetadata');
}
/**
* Updates or deletes a preexisting pathway clone.
* Ensures that all playlists related to the old pathway clone are
* either updated or deleted.
*
* @param {Object} clone On update, the pathway clone object for the newly updated pathway clone.
* On delete, the old pathway clone object to be deleted.
* @param {boolean} isUpdate True if the pathway is to be updated,
* false if it is meant to be deleted.
*/
updateOrDeleteClone(clone, isUpdate) {
const main = this.main;
const pathway = clone.ID;
let i = main.playlists.length; // Iterate backwards through the playlist so we can remove playlists if necessary.
while (i--) {
const p = main.playlists[i];
if (p.attributes['PATHWAY-ID'] === pathway) {
const oldPlaylistUri = p.resolvedUri;
const oldPlaylistId = p.id; // update the indexed playlist and add new playlists by ID and URI
if (isUpdate) {
const newPlaylistUri = this.createCloneURI_(p.resolvedUri, clone);
const newPlaylistId = createPlaylistID(pathway, newPlaylistUri);
const attributes = this.createCloneAttributes_(pathway, p.attributes);
const updatedPlaylist = this.createClonePlaylist_(p, newPlaylistId, clone, attributes);
main.playlists[i] = updatedPlaylist;
main.playlists[newPlaylistId] = updatedPlaylist;
main.playlists[newPlaylistUri] = updatedPlaylist;
} else {
// Remove the indexed playlist.
main.playlists.splice(i, 1);
} // Remove playlists by the old ID and URI.
delete main.playlists[oldPlaylistId];
delete main.playlists[oldPlaylistUri];
}
}
this.updateOrDeleteCloneMedia(clone, isUpdate);
}
/**
* Updates or deletes media data based on the pathway clone object.
* Due to the complexity of the media groups and playlists, in all cases
* we remove all of the old media groups and playlists.
* On updates, we then create new media groups and playlists based on the
* new pathway clone object.
*
* @param {Object} clone The pathway clone object for the newly updated pathway clone.
* @param {boolean} isUpdate True if the pathway is to be updated,
* false if it is meant to be deleted.
*/
updateOrDeleteCloneMedia(clone, isUpdate) {
const main = this.main;
const id = clone.ID;
['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(mediaType => {
if (!main.mediaGroups[mediaType] || !main.mediaGroups[mediaType][id]) {
return;
}
for (const groupKey in main.mediaGroups[mediaType]) {
// Remove all media playlists for the media group for this pathway clone.
if (groupKey === id) {
for (const labelKey in main.mediaGroups[mediaType][groupKey]) {
const oldMedia = main.mediaGroups[mediaType][groupKey][labelKey];
oldMedia.playlists.forEach((p, i) => {
const oldMediaPlaylist = main.playlists[p.id];
const oldPlaylistId = oldMediaPlaylist.id;
const oldPlaylistUri = oldMediaPlaylist.resolvedUri;
delete main.playlists[oldPlaylistId];
delete main.playlists[oldPlaylistUri];
});
} // Delete the old media group.
delete main.mediaGroups[mediaType][groupKey];
}
}
}); // Create the new media groups and playlists if there is an update.
if (isUpdate) {
this.createClonedMediaGroups_(clone);
}
}
/**
* Given a pathway clone object, clones all necessary playlists.
*
* @param {Object} clone The pathway clone object.
* @param {Object} basePlaylist The original playlist to clone from.
*/
addClonePathway(clone, basePlaylist = {}) {
const main = this.main;
const index = main.playlists.length;
const uri = this.createCloneURI_(basePlaylist.resolvedUri, clone);
const playlistId = createPlaylistID(clone.ID, uri);
const attributes = this.createCloneAttributes_(clone.ID, basePlaylist.attributes);
const playlist = this.createClonePlaylist_(basePlaylist, playlistId, clone, attributes);
main.playlists[index] = playlist; // add playlist by ID and URI
main.playlists[playlistId] = playlist;
main.playlists[uri] = playlist;
this.createClonedMediaGroups_(clone);
}
/**
* Given a pathway clone object we create clones of all media.
* In this function, all necessary information and updated playlists
* are added to the `mediaGroup` object.
* Playlists are also added to the `playlists` array so the media groups
* will be properly linked.
*
* @param {Object} clone The pathway clone object.
*/
createClonedMediaGroups_(clone) {
const id = clone.ID;
const baseID = clone['BASE-ID'];
const main = this.main;
['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(mediaType => {
// If the media type doesn't exist, or there is already a clone, skip
// to the next media type.
if (!main.mediaGroups[mediaType] || main.mediaGroups[mediaType][id]) {
return;
}
for (const groupKey in main.mediaGroups[mediaType]) {
if (groupKey === baseID) {
// Create the group.
main.mediaGroups[mediaType][id] = {};
} else {
// There is no need to iterate over label keys in this case.
continue;
}
for (const labelKey in main.mediaGroups[mediaType][groupKey]) {
const oldMedia = main.mediaGroups[mediaType][groupKey][labelKey];
main.mediaGroups[mediaType][id][labelKey] = _extends({}, oldMedia);
const newMedia = main.mediaGroups[mediaType][id][labelKey]; // update URIs on the media
const newUri = this.createCloneURI_(oldMedia.resolvedUri, clone);
newMedia.resolvedUri = newUri;
newMedia.uri = newUri; // Reset playlists in the new media group.
newMedia.playlists = []; // Create new playlists in the newly cloned media group.
oldMedia.playlists.forEach((p, i) => {
const oldMediaPlaylist = main.playlists[p.id];
const group = groupID(mediaType, id, labelKey);
const newPlaylistID = createPlaylistID(id, group); // Check to see if it already exists
if (oldMediaPlaylist && !main.playlists[newPlaylistID]) {
const newMediaPlaylist = this.createClonePlaylist_(oldMediaPlaylist, newPlaylistID, clone);
const newPlaylistUri = newMediaPlaylist.resolvedUri;
main.playlists[newPlaylistID] = newMediaPlaylist;
main.playlists[newPlaylistUri] = newMediaPlaylist;
}
newMedia.playlists[i] = this.createClonePlaylist_(p, newPlaylistID, clone);
});
}
}
});
}
/**
* Using the original playlist to be cloned, and the pathway clone object
* information, we create a new playlist.
*
* @param {Object} basePlaylist The original playlist to be cloned from.
* @param {string} id The desired id of the newly cloned playlist.
* @param {Object} clone The pathway clone object.
* @param {Object} attributes An optional object to populate the `attributes` property in the playlist.
*
* @return {Object} The combined cloned playlist.
*/
createClonePlaylist_(basePlaylist, id, clone, attributes) {
const uri = this.createCloneURI_(basePlaylist.resolvedUri, clone);
const newProps = {
resolvedUri: uri,
uri,
id
}; // Remove all segments from previous playlist in the clone.
if (basePlaylist.segments) {
newProps.segments = [];
}
if (attributes) {
newProps.attributes = attributes;
}
return merge(basePlaylist, newProps);
}
/**
* Generates an updated URI for a cloned pathway based on the original
* pathway's URI and the parameters from the pathway clone object in the
* content steering server response.
*
* @param {string} baseURI URI to be updated in the cloned pathway.
* @param {Object} clone The pathway clone object.
*
* @return {string} The updated URI for the cloned pathway.
*/
createCloneURI_(baseURI, clone) {
const uri = new URL(baseURI);
uri.hostname = clone['URI-REPLACEMENT'].HOST;
const params = clone['URI-REPLACEMENT'].PARAMS; // Add params to the cloned URL.
for (const key of Object.keys(params)) {
uri.searchParams.set(key, params[key]);
}
return uri.href;
}
/**
* Helper function to create the attributes needed for the new clone.
* This mainly adds the necessary media attributes.
*
* @param {string} id The pathway clone object ID.
* @param {Object} oldAttributes The old attributes to compare to.
* @return {Object} The new attributes to add to the playlist.
*/
createCloneAttributes_(id, oldAttributes) {
const attributes = {
['PATHWAY-ID']: id
};
['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(mediaType => {
if (oldAttributes[mediaType]) {
attributes[mediaType] = id;
}
});
return attributes;
}
/**
* Returns the key ID set from a playlist
*
* @param {Object} playlist The playlist to fetch the key ID set from.
* @return {Set} a Set of 32 digit hex strings that represent the unique keyIds for that playlist.
*/
getKeyIdSet(playlist) {
if (playlist.contentProtection) {
const keyIds = new Set();
for (const keysystem in playlist.contentProtection) {
const keyId = playlist.contentProtection[keysystem].attributes.keyId;
if (keyId) {
keyIds.add(keyId.toLowerCase());
}
}
return keyIds;
}
}
}
/**
* @file xhr.js
*/
const callbackWrapper = function (request, error, response, callback) {
const reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
if (!error && reqResponse) {
request.responseTime = Date.now();
request.roundTripTime = request.responseTime - request.requestTime;
request.bytesReceived = reqResponse.byteLength || reqResponse.length;
if (!request.bandwidth) {
request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
}
}
if (response.headers) {
request.responseHeaders = response.headers;
} // videojs.xhr now uses a specific code on the error
// object to signal that a request has timed out instead
// of setting a boolean on the request object
if (error && error.code === 'ETIMEDOUT') {
request.timedout = true;
} // videojs.xhr no longer considers status codes outside of 200 and 0
// (for file uris) to be errors, but the old XHR did, so emulate that
// behavior. Status 206 may be used in response to byterange requests.
if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
}
callback(error, request);
};
/**
* Iterates over the request hooks Set and calls them in order
*
* @param {Set} requestSet the request hook Set to iterate over
* @param {Object} options the request options to pass to the xhr wrapper
* @return {Object} the modified or new options Object returned by the hooks, or undefined if no hooks are registered.
*/
const callAllRequestHooks = (requestSet, options) => {
if (!requestSet || !requestSet.size) {
return;
}
let newOptions = options;
requestSet.forEach(requestCallback => {
newOptions = requestCallback(newOptions);
});
return newOptions;
};
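// Illustrative usage sketch (hypothetical hook, not part of the library): each
// registered hook receives the options produced by the previous hook and
// returns the options object to use for the request.
const exampleRequestHookSet = new Set([options => Object.assign({}, options, {
headers: Object.assign({}, options.headers, {
'X-Example-Header': '1'
})
})]);
const exampleHookedOptions = callAllRequestHooks(exampleRequestHookSet, {
uri: 'https://example.com/media.m3u8'
});
// exampleHookedOptions.uri === 'https://example.com/media.m3u8'
// exampleHookedOptions.headers['X-Example-Header'] === '1'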
/**
* Iterates over the response hooks Set and calls them in order.
*
* @param {Set} responseSet the response hook Set to iterate over
* @param {Object} request the xhr request object
* @param {Object} error the xhr error object
* @param {Object} response the xhr response object
*/
const callAllResponseHooks = (responseSet, request, error, response) => {
if (!responseSet || !responseSet.size) {
return;
}
responseSet.forEach(responseCallback => {
responseCallback(request, error, response);
});
};
const xhrFactory = function () {
const xhr = function XhrFunction(options, callback) {
// Add a default timeout
options = merge({
timeout: 45e3
}, options); // Allow an optional user-specified function to modify the option
// object before we construct the xhr request
// TODO: Remove beforeRequest in the next major release.
const beforeRequest = XhrFunction.beforeRequest || videojs.Vhs.xhr.beforeRequest; // onRequest and onResponse hooks as a Set, at either the player or global level.
// TODO: new Set added here for beforeRequest alias. Remove this when beforeRequest is removed.
const _requestCallbackSet = XhrFunction._requestCallbackSet || videojs.Vhs.xhr._requestCallbackSet || new Set();
const _responseCallbackSet = XhrFunction._responseCallbackSet || videojs.Vhs.xhr._responseCallbackSet;
if (beforeRequest && typeof beforeRequest === 'function') {
videojs.log.warn('beforeRequest is deprecated, use onRequest instead.');
_requestCallbackSet.add(beforeRequest);
} // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
// TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
const xhrMethod = videojs.Vhs.xhr.original === true ? videojs.xhr : videojs.Vhs.xhr; // call all registered onRequest hooks, assign new options.
const beforeRequestOptions = callAllRequestHooks(_requestCallbackSet, options); // Remove the beforeRequest function from the hooks set so stale beforeRequest functions are not called.
_requestCallbackSet.delete(beforeRequest); // xhrMethod will call XMLHttpRequest.open and XMLHttpRequest.send
const request = xhrMethod(beforeRequestOptions || options, function (error, response) {
// call all registered onResponse hooks
callAllResponseHooks(_responseCallbackSet, request, error, response);
return callbackWrapper(request, error, response, callback);
});
const originalAbort = request.abort;
request.abort = function () {
request.aborted = true;
return originalAbort.apply(request, arguments);
};
request.uri = options.uri;
request.requestType = options.requestType;
request.requestTime = Date.now();
return request;
};
xhr.original = true;
return xhr;
};
/**
* Turns segment byterange into a string suitable for use in
* HTTP Range requests
*
* @param {Object} byterange - an object with two values defining the start and end
* of a byte-range
*/
const byterangeStr = function (byterange) {
// `byterangeEnd` is one less than `offset + length` because the HTTP range
// header uses inclusive ranges
let byterangeEnd;
const byterangeStart = byterange.offset;
if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
byterangeEnd = window$1.BigInt(byterange.offset) + window$1.BigInt(byterange.length) - window$1.BigInt(1);
} else {
byterangeEnd = byterange.offset + byterange.length - 1;
}
return 'bytes=' + byterangeStart + '-' + byterangeEnd;
};
/**
* Defines headers for use in the xhr request for a particular segment.
*
* @param {Object} segment - a simplified copy of the segmentInfo object
* from SegmentLoader
*/
const segmentXhrHeaders = function (segment) {
const headers = {};
if (segment.byterange) {
headers.Range = byterangeStr(segment.byterange);
}
return headers;
};
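// Illustrative usage sketch (not part of the library): a segment with a
// byterange yields an inclusive HTTP Range header, so an offset of 500 and a
// length of 500 covers bytes 500 through 999.
const exampleSegmentHeaders = segmentXhrHeaders({
byterange: {
offset: 500,
length: 500
}
});
// exampleSegmentHeaders.Range === 'bytes=500-999'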
/**
* @file bin-utils.js
*/
/**
* convert a TimeRange to text
*
* @param {TimeRange} range the timerange to use for conversion
* @param {number} i the iterator on the range to convert
* @return {string} the range in string format
*/
const textRange = function (range, i) {
return range.start(i) + '-' + range.end(i);
};
/**
* format a number as hex string
*
* @param {number} e The number
* @param {number} i the iterator
* @return {string} the hex formatted number as a string
*/
const formatHexString = function (e, i) {
const value = e.toString(16);
return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
};
const formatAsciiString = function (e) {
if (e >= 0x20 && e < 0x7e) {
return String.fromCharCode(e);
}
return '.';
};
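// Illustrative usage sketch (not part of the library): hexDump's helpers pad
// each byte to two hex digits, append a space after every second byte, and
// render non-printable bytes as '.' in the ASCII column.
const exampleHexPair = formatHexString(10, 0) + formatHexString(255, 1); // '0aff '
const exampleAsciiPair = formatAsciiString(0x41) + formatAsciiString(0x07); // 'A.'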
/**
* Creates an object for sending to a web worker modifying properties that are TypedArrays
* into a new object with separated properties for the buffer, byteOffset, and byteLength.
*
* @param {Object} message
* Object of properties and values to send to the web worker
* @return {Object}
* Modified message with TypedArray values expanded
* @function createTransferableMessage
*/
const createTransferableMessage = function (message) {
const transferable = {};
Object.keys(message).forEach(key => {
const value = message[key];
if (isArrayBufferView(value)) {
transferable[key] = {
bytes: value.buffer,
byteOffset: value.byteOffset,
byteLength: value.byteLength
};
} else {
transferable[key] = value;
}
});
return transferable;
};
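// Illustrative usage sketch (hypothetical message, not part of the library):
// typed array values are expanded so their underlying buffers can be listed as
// transferables when posting the message to a web worker.
const exampleTransferable = createTransferableMessage({
action: 'push',
data: new Uint8Array([1, 2, 3])
});
// exampleTransferable.action === 'push'
// exampleTransferable.data.byteOffset === 0 and exampleTransferable.data.byteLength === 3,
// with exampleTransferable.data.bytes holding the underlying ArrayBuffer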
/**
* Returns a unique string identifier for a media initialization
* segment.
*
* @param {Object} initSegment
* the init segment object.
*
* @return {string} the generated init segment id
*/
const initSegmentId = function (initSegment) {
const byterange = initSegment.byterange || {
length: Infinity,
offset: 0
};
return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
};
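// Illustrative usage sketch (hypothetical init segment, not part of the
// library): the id is "length,offset,resolvedUri", with Infinity and 0 used
// when the init segment has no byterange.
const exampleInitSegmentId = initSegmentId({
resolvedUri: 'https://example.com/init.mp4',
byterange: {
offset: 0,
length: 720
}
});
// exampleInitSegmentId === '720,0,https://example.com/init.mp4'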
/**
* Returns a unique string identifier for a media segment key.
*
* @param {Object} key the encryption key
* @return {string} the unique id for the media segment key.
*/
const segmentKeyId = function (key) {
return key.resolvedUri;
};
/**
* utils to help dump binary data to the console
*
* @param {Array|TypedArray} data
* data to dump to a string
*
* @return {string} the data as a hex string.
*/
const hexDump = data => {
const bytes = Array.prototype.slice.call(data);
const step = 16;
let result = '';
let hex;
let ascii;
for (let j = 0; j < bytes.length / step; j++) {
hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
result += hex + ' ' + ascii + '\n';
}
return result;
};
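// Illustrative usage sketch (not part of the library): each output line covers
// up to 16 bytes, showing the hex pairs followed by their printable ASCII
// rendering ('.' for non-printable bytes).
const exampleHexDump = hexDump(new Uint8Array([0x48, 0x69]));
// one line of output: the hex pairs ('4869'), then the ASCII rendering ('Hi')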
const tagDump = ({
bytes
}) => hexDump(bytes);
const textRanges = ranges => {
let result = '';
let i;
for (i = 0; i < ranges.length; i++) {
result += textRange(ranges, i) + ' ';
}
return result;
};
var utils = /*#__PURE__*/Object.freeze({
__proto__: null,
createTransferableMessage: createTransferableMessage,
initSegmentId: initSegmentId,
segmentKeyId: segmentKeyId,
hexDump: hexDump,
tagDump: tagDump,
textRanges: textRanges
});
// TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
// 25% was arbitrarily chosen, and may need to be refined over time.
const SEGMENT_END_FUDGE_PERCENT = 0.25;
/**
* Converts a player time (any time that can be gotten/set from player.currentTime(),
* e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
* program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
*
* The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
* point" (a point where we have a mapping from program time to player time, with player
* time being the post transmux start of the segment).
*
* For more details, see [this doc](../../docs/program-time-from-player-time.md).
*
* @param {number} playerTime the player time
* @param {Object} segment the segment which contains the player time
* @return {Date} program time
*/
const playerTimeToProgramTime = (playerTime, segment) => {
if (!segment.dateTimeObject) {
// Can't convert without an "anchor point" for the program time (i.e., a time that can
// be used to map the start of a segment with a real world time).
return null;
}
const transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
const transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prepended
const startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
const offsetFromSegmentStart = playerTime - startOfSegment;
return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
};
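// Illustrative worked example (hypothetical segment, not part of the library):
// with a segment whose post-transmux start is 10 seconds and whose
// EXT-X-PROGRAM-DATE-TIME is midnight, player time 12 maps to 2 seconds past
// midnight.
const exampleProgramDate = playerTimeToProgramTime(12, {
dateTimeObject: new Date('2024-01-01T00:00:00.000Z'),
videoTimingInfo: {
transmuxerPrependedSeconds: 0,
transmuxedPresentationStart: 10
}
});
// exampleProgramDate.toISOString() === '2024-01-01T00:00:02.000Z'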
const originalSegmentVideoDuration = videoTimingInfo => {
return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
};
/**
* Finds a segment that contains the time requested given as an ISO-8601 string. The
* returned segment might be an estimate or an accurate match.
*
* @param {string} programTime The ISO-8601 programTime to find a match for
* @param {Object} playlist A playlist object to search within
*/
const findSegmentForProgramTime = (programTime, playlist) => {
// Assumptions:
// - verifyProgramDateTimeTags has already been run
// - live streams have been started
let dateTimeObject;
try {
dateTimeObject = new Date(programTime);
} catch (e) {
return null;
}
if (!playlist || !playlist.segments || playlist.segments.length === 0) {
return null;
}
let segment = playlist.segments[0];
if (dateTimeObject < new Date(segment.dateTimeObject)) {
// Requested time is before stream start.
return null;
}
for (let i = 0; i < playlist.segments.length - 1; i++) {
segment = playlist.segments[i];
const nextSegmentStart = new Date(playlist.segments[i + 1].dateTimeObject);
if (dateTimeObject < nextSegmentStart) {
break;
}
}
const lastSegment = playlist.segments[playlist.segments.length - 1];
const lastSegmentStart = lastSegment.dateTimeObject;
const lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
const lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
if (dateTimeObject > lastSegmentEnd) {
// Beyond the end of the stream, or our best guess of the end of the stream.
return null;
}
if (dateTimeObject > new Date(lastSegmentStart)) {
segment = lastSegment;
}
return {
segment,
estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
// Even though the selected segment should be correct (all segments have
// accurate date time objects), the segment's "player time" (its start time in
// the player) can't be considered accurate unless the video has been
// transmuxed at some point (determined by the presence of videoTimingInfo).
type: segment.videoTimingInfo ? 'accurate' : 'estimate'
};
};
/**
* Finds a segment that contains the given player time(in seconds).
*
* @param {number} time The player time to find a match for
* @param {Object} playlist A playlist object to search within
*/
const findSegmentForPlayerTime = (time, playlist) => {
// Assumptions:
// - there will always be a segment.duration
// - we can start from zero
// - segments are in time order
if (!playlist || !playlist.segments || playlist.segments.length === 0) {
return null;
}
let segmentEnd = 0;
let segment;
for (let i = 0; i < playlist.segments.length; i++) {
segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
// should contain the most accurate values we have for the segment's player times.
//
// Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
// back to an estimate based on the manifest derived (inaccurate) segment.duration, to
// calculate an end value.
segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
if (time <= segmentEnd) {
break;
}
}
const lastSegment = playlist.segments[playlist.segments.length - 1];
if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
// The time requested is beyond the stream end.
return null;
}
if (time > segmentEnd) {
// The time is within or beyond the last segment.
//
// Check to see if the time is beyond a reasonable guess of the end of the stream.
if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
// Technically, because the duration value is only an estimate, the time may still
// exist in the last segment, however, there isn't enough information to make even
// a reasonable estimate.
return null;
}
segment = lastSegment;
}
return {
segment,
estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
// Because videoTimingInfo is only set after transmux, it is the only way to get
// accurate timing values.
type: segment.videoTimingInfo ? 'accurate' : 'estimate'
};
};
/**
* Gives the offset of the programTime from the comparisonTimestamp, in seconds.
* If the offset returned is positive, the programTime occurs after the
* comparisonTimestamp.
* If the offset is negative, the programTime occurs before the comparisonTimestamp.
*
* @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
* @param {string} programTime The programTime as an ISO-8601 string
* @return {number} offset
*/
const getOffsetFromTimestamp = (comparisonTimeStamp, programTime) => {
let segmentDateTime;
let programDateTime;
try {
segmentDateTime = new Date(comparisonTimeStamp);
programDateTime = new Date(programTime);
} catch (e) {// TODO handle error
}
const segmentTimeEpoch = segmentDateTime.getTime();
const programTimeEpoch = programDateTime.getTime();
return (programTimeEpoch - segmentTimeEpoch) / 1000;
};
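// Illustrative usage sketch (not part of the library): a positive result means
// the program time falls after the comparison timestamp.
const exampleOffsetSeconds = getOffsetFromTimestamp('2024-01-01T00:00:00.000Z', '2024-01-01T00:00:30.000Z');
// exampleOffsetSeconds === 30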
/**
* Checks that all segments in this playlist have programDateTime tags.
*
* @param {Object} playlist A playlist object
*/
const verifyProgramDateTimeTags = playlist => {
if (!playlist.segments || playlist.segments.length === 0) {
return false;
}
for (let i = 0; i < playlist.segments.length; i++) {
const segment = playlist.segments[i];
if (!segment.dateTimeObject) {
return false;
}
}
return true;
};
/**
* Returns the programTime of the media given a playlist and a playerTime.
* The playlist must have programDateTime tags for a programDateTime tag to be returned.
* If the segments containing the time requested have not been buffered yet, an estimate
* may be returned to the callback.
*
* @param {Object} args
* @param {Object} args.playlist A playlist object to search within
* @param {number} time A playerTime in seconds
* @param {Function} callback(err, programTime)
* @return {string} err.message A detailed error message
* @return {Object} programTime
* @return {number} programTime.mediaSeconds The streamTime in seconds
* @return {string} programTime.programDateTime The programTime as an ISO-8601 String
*/
const getProgramTime = ({
playlist,
time = undefined,
callback
}) => {
if (!callback) {
throw new Error('getProgramTime: callback must be provided');
}
if (!playlist || time === undefined) {
return callback({
message: 'getProgramTime: playlist and time must be provided'
});
}
const matchedSegment = findSegmentForPlayerTime(time, playlist);
if (!matchedSegment) {
return callback({
message: 'valid programTime was not found'
});
}
if (matchedSegment.type === 'estimate') {
return callback({
message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
seekTime: matchedSegment.estimatedStart
});
}
const programTimeObject = {
mediaSeconds: time
};
const programTime = playerTimeToProgramTime(time, matchedSegment.segment);
if (programTime) {
programTimeObject.programDateTime = programTime.toISOString();
}
return callback(null, programTimeObject);
};
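// Illustrative usage sketch (hypothetical playlist data, not part of the
// library): once the containing segment has been transmuxed (videoTimingInfo
// is present), the callback receives an accurate program date time.
getProgramTime({
playlist: {
segments: [{
duration: 10,
dateTimeObject: new Date('2024-01-01T00:00:00.000Z'),
videoTimingInfo: {
transmuxerPrependedSeconds: 0,
transmuxedPresentationStart: 0,
transmuxedPresentationEnd: 10
}
}]
},
time: 5,
callback: (err, programTime) => {
// programTime.mediaSeconds === 5
// programTime.programDateTime === '2024-01-01T00:00:05.000Z'
}
});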
/**
* Seeks in the player to a time that matches the given programTime ISO-8601 string.
*
* @param {Object} args
* @param {string} args.programTime A programTime to seek to as an ISO-8601 String
* @param {Object} args.playlist A playlist to look within
* @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
* @param {Function} args.seekTo A method to perform a seek
* @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
* @param {Object} args.tech The tech to seek on
* @param {Function} args.callback(err, newTime) A callback to return the new time to
* @return {string} err.message A detailed error message
* @return {number} newTime The exact time that was seeked to in seconds
*/
const seekToProgramTime = ({
programTime,
playlist,
retryCount = 2,
seekTo,
pauseAfterSeek = true,
tech,
callback
}) => {
if (!callback) {
throw new Error('seekToProgramTime: callback must be provided');
}
if (typeof programTime === 'undefined' || !playlist || !seekTo) {
return callback({
message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
});
}
if (!playlist.endList && !tech.hasStarted_) {
return callback({
message: 'player must be playing a live stream to start buffering'
});
}
if (!verifyProgramDateTimeTags(playlist)) {
return callback({
message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
});
}
const matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match
if (!matchedSegment) {
return callback({
message: `${programTime} was not found in the stream`
});
}
const segment = matchedSegment.segment;
const mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
if (matchedSegment.type === 'estimate') {
// we've run out of retries
if (retryCount === 0) {
return callback({
message: `${programTime} is not buffered yet. Try again`
});
}
seekTo(matchedSegment.estimatedStart + mediaOffset);
tech.one('seeked', () => {
seekToProgramTime({
programTime,
playlist,
retryCount: retryCount - 1,
seekTo,
pauseAfterSeek,
tech,
callback
});
});
return;
} // Since the segment.start value is determined from the buffered end or ending time
// of the prior segment, the seekToTime doesn't need to account for any transmuxer
// modifications.
const seekToTime = segment.start + mediaOffset;
const seekedCallback = () => {
return callback(null, tech.currentTime());
}; // listen for seeked event
tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state
if (pauseAfterSeek) {
tech.pause();
}
seekTo(seekToTime);
};
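// Illustrative usage sketch (hypothetical wiring, not part of the library):
// callers typically pass the tech's currentTime setter as seekTo and read the
// exact landed time from the callback. The setCurrentTime wiring below is an
// assumption for illustration only.
const exampleSeekToProgramTime = (programTime, playlist, tech) => {
seekToProgramTime({
programTime,
playlist,
seekTo: time => tech.setCurrentTime(time),
tech,
callback: (err, newTime) => {
// on success, newTime is the exact player time (in seconds) that was seeked
// to; on failure, err.message explains why (e.g. missing
// EXT-X-PROGRAM-DATE-TIME tags or an unbuffered target).
}
});
};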
// Invokes the callback only once the request's readyState is DONE (4),
// which will only happen if the request is complete.
const callbackOnCompleted = (request, cb) => {
if (request.readyState === 4) {
return cb();
}
return;
};
const containerRequest = (uri, xhr, cb, requestType) => {
let bytes = [];
let id3Offset;
let finished = false;
const endRequestAndCallback = function (err, req, type, _bytes) {
req.abort();
finished = true;
return cb(err, req, type, _bytes);
};
const progressListener = function (error, request) {
if (finished) {
return;
}
if (error) {
error.metadata = getStreamingNetworkErrorMetadata({
requestType,
request,
error
});
return endRequestAndCallback(error, request, '', bytes);
} // grab the new part of content that was just downloaded
const newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes
bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
id3Offset = id3Offset || getId3Offset(bytes); // we need at least 10 bytes to determine a type
// or we need at least two bytes after an id3Offset
if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));
}
const type = detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
// to see the second sync byte, wait until we have enough data
// before declaring it ts
if (type === 'ts' && bytes.length < 188) {
return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));
} // this may be an unsynced ts segment
// wait for 376 bytes before detecting no container
if (!type && bytes.length < 376) {
return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));
}
return endRequestAndCallback(null, request, type, bytes);
};
const options = {
uri,
beforeSend(request) {
// this forces the browser to pass the bytes to us unprocessed
request.overrideMimeType('text/plain; charset=x-user-defined');
request.addEventListener('progress', function ({
total,
loaded
}) {
return callbackWrapper(request, null, {
statusCode: request.status
}, progressListener);
});
}
};
const request = xhr(options, function (error, response) {
return callbackWrapper(request, error, response, progressListener);
});
return request;
};
const {
EventTarget
} = videojs;
const dashPlaylistUnchanged = function (a, b) {
if (!isPlaylistUnchanged(a, b)) {
return false;
} // For DASH, the check above will often return true even when the playlist has
// actually changed, because mediaSequence isn't a DASH concept (we often set it
// to 1), so playlists with the same number of segments look unchanged.
// For DASH we therefore need to check whether the underlying segments actually differ.
// if sidx changed then the playlists are different.
if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
return false;
} else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
return false;
} // one or the other does not have segments
// there was a change.
if (a.segments && !b.segments || !a.segments && b.segments) {
return false;
} // neither has segments nothing changed
if (!a.segments && !b.segments) {
return true;
} // check segments themselves
for (let i = 0; i < a.segments.length; i++) {
const aSegment = a.segments[i];
const bSegment = b.segments[i]; // if uris are different between segments there was a change
if (aSegment.uri !== bSegment.uri) {
return false;
} // neither segment has a byterange, there will be no byterange change.
if (!aSegment.byterange && !bSegment.byterange) {
continue;
}
const aByterange = aSegment.byterange;
const bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.
if (aByterange && !bByterange || !aByterange && bByterange) {
return false;
} // if both segments have byterange with different offsets, there was a change.
if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
return false;
}
} // if everything was the same with segments, this is the same playlist.
return true;
};
/**
* Use the representation IDs from the mpd object to create group IDs; the NAME is set to the (mandatory) representation
* ID in the parser. This allows for continuous playout across periods with the same representation IDs
* (continuous periods as defined in DASH-IF 3.2.12). This is assumed in the mpd-parser as well. If we want to support
* periods without continuous playback, this function, as well as the parser, may need modification.
*/
const dashGroupId = (type, group, label, playlist) => {
// If the manifest somehow does not have an ID (non-dash compliant), use the label.
const playlistId = playlist.attributes.NAME || label;
return `placeholder-uri-${type}-${group}-${playlistId}`;
};
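// Illustrative usage sketch (hypothetical attributes, not part of the
// library): the group id is derived from the representation's NAME so it stays
// stable across continuous periods.
const exampleDashGroupId = dashGroupId('AUDIO', 'audio-main', 'English', {
attributes: {
NAME: 'audio-rep-1'
}
});
// exampleDashGroupId === 'placeholder-uri-AUDIO-audio-main-audio-rep-1'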
/**
* Parses the main XML string and updates playlist URI references.
*
* @param {Object} config
* Object of arguments
* @param {string} config.mainXml
* The mpd XML
* @param {string} config.srcUrl
* The mpd URL
* @param {number} config.clientOffset
* The time difference, in milliseconds, between the server and client clocks
* @param {Object} config.sidxMapping
* SIDX mappings for moof/mdat URIs and byte ranges
* @return {Object}
* The parsed mpd manifest object
*/
const parseMainXml = ({
mainXml,
srcUrl,
clientOffset,
sidxMapping,
previousManifest
}) => {
const manifest = parse(mainXml, {
manifestUri: srcUrl,
clientOffset,
sidxMapping,
previousManifest
});
addPropertiesToMain(manifest, srcUrl, dashGroupId);
return manifest;
};
/**
* Removes any mediaGroup labels that no longer exist in the newMain
*
* @param {Object} update
* The previous mpd object being updated
* @param {Object} newMain
* The new mpd object
*/
const removeOldMediaGroupLabels = (update, newMain) => {
forEachMediaGroup(update, (properties, type, group, label) => {
if (!(label in newMain.mediaGroups[type][group])) {
delete update.mediaGroups[type][group][label];
}
});
};
/**
* Returns a new main manifest that is the result of merging an updated main manifest
* into the original version.
*
* @param {Object} oldMain
* The old parsed mpd object
* @param {Object} newMain
* The updated parsed mpd object
* @return {Object}
* A new object representing the original main manifest with the updated media
* playlists merged in
*/
const updateMain = (oldMain, newMain, sidxMapping) => {
let noChanges = true;
let update = merge(oldMain, {
// These are top level properties that can be updated
duration: newMain.duration,
minimumUpdatePeriod: newMain.minimumUpdatePeriod,
timelineStarts: newMain.timelineStarts
}); // First update the playlists in playlist list
for (let i = 0; i < newMain.playlists.length; i++) {
const playlist = newMain.playlists[i];
if (playlist.sidx) {
const sidxKey = generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info already
if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
addSidxSegmentsToPlaylist(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
}
}
const playlistUpdate = updateMain$1(update, playlist, dashPlaylistUnchanged);
if (playlistUpdate) {
update = playlistUpdate;
noChanges = false;
}
} // Then update media group playlists
forEachMediaGroup(newMain, (properties, type, group, label) => {
if (properties.playlists && properties.playlists.length) {
const id = properties.playlists[0].id;
const playlistUpdate = updateMain$1(update, properties.playlists[0], dashPlaylistUnchanged);
if (playlistUpdate) {
update = playlistUpdate; // add new mediaGroup label if it doesn't exist and assign the new mediaGroup.
if (!(label in update.mediaGroups[type][group])) {
update.mediaGroups[type][group][label] = properties;
} // update the playlist reference within media groups
update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
noChanges = false;
}
}
}); // remove mediaGroup labels and references that no longer exist in the newMain
removeOldMediaGroupLabels(update, newMain);
if (newMain.minimumUpdatePeriod !== oldMain.minimumUpdatePeriod) {
noChanges = false;
}
if (noChanges) {
return null;
}
return update;
}; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
// If the SIDXs have maps, the two maps should match,
// both `a` and `b` missing SIDXs is considered matching.
// If `a` or `b` but not both have a map, they aren't matching.
const equivalentSidx = (a, b) => {
const neitherMap = Boolean(!a.map && !b.map);
const equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
}; // exported for testing
const compareSidxEntry = (playlists, oldSidxMapping) => {
const newSidxMapping = {};
for (const id in playlists) {
const playlist = playlists[id];
const currentSidxInfo = playlist.sidx;
if (currentSidxInfo) {
const key = generateSidxKey(currentSidxInfo);
if (!oldSidxMapping[key]) {
break;
}
const savedSidxInfo = oldSidxMapping[key].sidxInfo;
if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
newSidxMapping[key] = oldSidxMapping[key];
}
}
}
return newSidxMapping;
};
/**
* A function that filters out changed items as they need to be requested separately.
*
* The method is exported for testing
*
* @param {Object} main the parsed mpd XML returned via mpd-parser
* @param {Object} oldSidxMapping the SIDX to compare against
*/
const filterChangedSidxMappings = (main, oldSidxMapping) => {
const videoSidx = compareSidxEntry(main.playlists, oldSidxMapping);
let mediaGroupSidx = videoSidx;
forEachMediaGroup(main, (properties, mediaType, groupKey, labelKey) => {
if (properties.playlists && properties.playlists.length) {
const playlists = properties.playlists;
mediaGroupSidx = merge(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
}
});
return mediaGroupSidx;
};
class DashPlaylistLoader extends EventTarget {
// DashPlaylistLoader must accept either a src url or a playlist because subsequent
// playlist loader setups from media groups will expect to be able to pass a playlist
// (since there aren't external URLs to media playlists with DASH)
constructor(srcUrlOrPlaylist, vhs, options = {}, mainPlaylistLoader) {
super();
this.mainPlaylistLoader_ = mainPlaylistLoader || this;
if (!mainPlaylistLoader) {
this.isMain_ = true;
}
const {
withCredentials = false
} = options;
this.vhs_ = vhs;
this.withCredentials = withCredentials;
this.addMetadataToTextTrack = options.addMetadataToTextTrack;
if (!srcUrlOrPlaylist) {
throw new Error('A non-empty playlist URL or object is required');
} // event naming?
this.on('minimumUpdatePeriod', () => {
this.refreshXml_();
}); // live playlist staleness timeout
this.on('mediaupdatetimeout', () => {
this.refreshMedia_(this.media().id);
});
this.state = 'HAVE_NOTHING';
this.loadedPlaylists_ = {};
this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
// The mainPlaylistLoader will be created with a string
if (this.isMain_) {
this.mainPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
// once multi-period is refactored
this.mainPlaylistLoader_.sidxMapping_ = {};
} else {
this.childPlaylist_ = srcUrlOrPlaylist;
}
}
requestErrored_(err, request, startingState) {
// disposed
if (!this.request) {
return true;
} // pending request is cleared
this.request = null;
if (err) {
// use the provided error object or create one
// based on the request/response
this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
status: request.status,
message: 'DASH request error at URL: ' + request.uri,
response: request.response,
// MEDIA_ERR_NETWORK
code: 2,
metadata: err.metadata
};
if (startingState) {
this.state = startingState;
}
this.trigger('error');
return true;
}
}
/**
* Verify that the container of the sidx segment can be parsed
* and if it can, get and parse that segment.
*/
addSidxSegments_(playlist, startingState, cb) {
const sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.
if (!playlist.sidx || !sidxKey || this.mainPlaylistLoader_.sidxMapping_[sidxKey]) {
// keep this function async
this.mediaRequest_ = window$1.setTimeout(() => cb(false), 0);
return;
} // resolve the segment URL relative to the playlist
const uri = resolveManifestRedirect(playlist.sidx.resolvedUri);
const fin = (err, request) => {
if (this.requestErrored_(err, request, startingState)) {
return;
}
const sidxMapping = this.mainPlaylistLoader_.sidxMapping_;
const {
requestType
} = request;
let sidx;
try {
sidx = parseSidx(toUint8(request.response).subarray(8));
} catch (e) {
e.metadata = getStreamingNetworkErrorMetadata({
requestType,
request,
parseFailure: true
}); // sidx parsing failed.
this.requestErrored_(e, request, startingState);
return;
}
sidxMapping[sidxKey] = {
sidxInfo: playlist.sidx,
sidx
};
addSidxSegmentsToPlaylist(playlist, sidx, playlist.sidx.resolvedUri);
return cb(true);
};
const REQUEST_TYPE = 'dash-sidx';
this.request = containerRequest(uri, this.vhs_.xhr, (err, request, container, bytes) => {
if (err) {
return fin(err, request);
}
if (!container || container !== 'mp4') {
const sidxContainer = container || 'unknown';
return fin({
status: request.status,
message: `Unsupported ${sidxContainer} container type for sidx segment at URL: ${uri}`,
// response is just bytes in this case
// but we really don't want to return that.
response: '',
playlist,
internal: true,
playlistExclusionDuration: Infinity,
// MEDIA_ERR_NETWORK
code: 2
}, request);
} // if we already downloaded the sidx bytes in the container request, use them
const {
offset,
length
} = playlist.sidx.byterange;
if (bytes.length >= length + offset) {
return fin(err, {
response: bytes.subarray(offset, offset + length),
status: request.status,
uri: request.uri
});
} // otherwise request sidx bytes
this.request = this.vhs_.xhr({
uri,
responseType: 'arraybuffer',
requestType: 'dash-sidx',
headers: segmentXhrHeaders({
byterange: playlist.sidx.byterange
})
}, fin);
}, REQUEST_TYPE);
}
dispose() {
this.trigger('dispose');
this.stopRequest();
this.loadedPlaylists_ = {};
window$1.clearTimeout(this.minimumUpdatePeriodTimeout_);
window$1.clearTimeout(this.mediaRequest_);
window$1.clearTimeout(this.mediaUpdateTimeout);
this.mediaUpdateTimeout = null;
this.mediaRequest_ = null;
this.minimumUpdatePeriodTimeout_ = null;
if (this.mainPlaylistLoader_.createMupOnMedia_) {
this.off('loadedmetadata', this.mainPlaylistLoader_.createMupOnMedia_);
this.mainPlaylistLoader_.createMupOnMedia_ = null;
}
this.off();
}
hasPendingRequest() {
return this.request || this.mediaRequest_;
}
stopRequest() {
if (this.request) {
const oldRequest = this.request;
this.request = null;
oldRequest.onreadystatechange = null;
oldRequest.abort();
}
}
media(playlist) {
// getter
if (!playlist) {
return this.media_;
} // setter
if (this.state === 'HAVE_NOTHING') {
throw new Error('Cannot switch media playlist from ' + this.state);
}
const startingState = this.state; // find the playlist object if the target playlist has been specified by URI
if (typeof playlist === 'string') {
if (!this.mainPlaylistLoader_.main.playlists[playlist]) {
throw new Error('Unknown playlist URI: ' + playlist);
}
playlist = this.mainPlaylistLoader_.main.playlists[playlist];
}
const mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately
if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
this.state = 'HAVE_METADATA';
this.media_ = playlist; // trigger media change if the active media has been updated
if (mediaChange) {
this.trigger('mediachanging');
this.trigger('mediachange');
}
return;
} // switching to the active playlist is a no-op
if (!mediaChange) {
return;
} // switching from an already loaded playlist
if (this.media_) {
this.trigger('mediachanging');
}
this.addSidxSegments_(playlist, startingState, sidxChanged => {
// everything is ready just continue to haveMetadata
this.haveMetadata({
startingState,
playlist
});
});
}
haveMetadata({
startingState,
playlist
}) {
this.state = 'HAVE_METADATA';
this.loadedPlaylists_[playlist.id] = playlist;
this.mediaRequest_ = null; // This will trigger loadedplaylist
this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
// to resolve setup of media groups
if (startingState === 'HAVE_MAIN_MANIFEST') {
this.trigger('loadedmetadata');
} else {
// trigger media change if the active media has been updated
this.trigger('mediachange');
}
}
pause() {
if (this.mainPlaylistLoader_.createMupOnMedia_) {
this.off('loadedmetadata', this.mainPlaylistLoader_.createMupOnMedia_);
this.mainPlaylistLoader_.createMupOnMedia_ = null;
}
this.stopRequest();
window$1.clearTimeout(this.mediaUpdateTimeout);
this.mediaUpdateTimeout = null;
if (this.isMain_) {
window$1.clearTimeout(this.mainPlaylistLoader_.minimumUpdatePeriodTimeout_);
this.mainPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
}
if (this.state === 'HAVE_NOTHING') {
// If we pause the loader before any data has been retrieved, it's as if we never
// started, so reset to an unstarted state.
this.started = false;
}
}
load(isFinalRendition) {
window$1.clearTimeout(this.mediaUpdateTimeout);
this.mediaUpdateTimeout = null;
const media = this.media();
if (isFinalRendition) {
const delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
this.mediaUpdateTimeout = window$1.setTimeout(() => this.load(), delay);
return;
} // because the playlists are internal to the manifest, load should either load the
// main manifest, or do nothing but trigger an event
if (!this.started) {
this.start();
return;
}
if (media && !media.endList) {
// Check to see if this is the main loader and the MUP was cleared (this happens
// when the loader was paused). `media` should be set at this point since one is always
// set during `start()`.
if (this.isMain_ && !this.minimumUpdatePeriodTimeout_) {
// Trigger minimumUpdatePeriod to refresh the main manifest
this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreated
this.updateMinimumUpdatePeriodTimeout_();
}
this.trigger('mediaupdatetimeout');
} else {
this.trigger('loadedplaylist');
}
}
start() {
this.started = true; // We don't need to request the main manifest again
// Call this asynchronously to match the xhr request behavior below
if (!this.isMain_) {
this.mediaRequest_ = window$1.setTimeout(() => this.haveMain_(), 0);
return;
}
this.requestMain_((req, mainChanged) => {
this.haveMain_();
if (!this.hasPendingRequest() && !this.media_) {
this.media(this.mainPlaylistLoader_.main.playlists[0]);
}
});
}
requestMain_(cb) {
const metadata = {
manifestInfo: {
uri: this.mainPlaylistLoader_.srcUrl
}
};
this.trigger({
type: 'manifestrequeststart',
metadata
});
this.request = this.vhs_.xhr({
uri: this.mainPlaylistLoader_.srcUrl,
withCredentials: this.withCredentials,
requestType: 'dash-manifest'
}, (error, req) => {
if (error) {
const {
requestType
} = req;
error.metadata = getStreamingNetworkErrorMetadata({
requestType,
request: req,
error
});
}
if (this.requestErrored_(error, req)) {
if (this.state === 'HAVE_NOTHING') {
this.started = false;
}
return;
}
this.trigger({
type: 'manifestrequestcomplete',
metadata
});
const mainChanged = req.responseText !== this.mainPlaylistLoader_.mainXml_;
this.mainPlaylistLoader_.mainXml_ = req.responseText;
if (req.responseHeaders && req.responseHeaders.date) {
this.mainLoaded_ = Date.parse(req.responseHeaders.date);
} else {
this.mainLoaded_ = Date.now();
}
this.mainPlaylistLoader_.srcUrl = resolveManifestRedirect(this.mainPlaylistLoader_.srcUrl, req);
if (mainChanged) {
this.handleMain_();
this.syncClientServerClock_(() => {
return cb(req, mainChanged);
});
return;
}
return cb(req, mainChanged);
});
}
/**
* Parses the main xml for UTCTiming node to sync the client clock to the server
* clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
*
* @param {Function} done
* Function to call when clock sync has completed
*/
syncClientServerClock_(done) {
const utcTiming = parseUTCTiming(this.mainPlaylistLoader_.mainXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
// server clock
if (utcTiming === null) {
this.mainPlaylistLoader_.clientOffset_ = this.mainLoaded_ - Date.now();
return done();
}
if (utcTiming.method === 'DIRECT') {
this.mainPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
return done();
}
this.request = this.vhs_.xhr({
uri: resolveUrl(this.mainPlaylistLoader_.srcUrl, utcTiming.value),
method: utcTiming.method,
withCredentials: this.withCredentials,
requestType: 'dash-clock-sync'
}, (error, req) => {
// disposed
if (!this.request) {
return;
}
if (error) {
const {
requestType
} = req;
this.error.metadata = getStreamingNetworkErrorMetadata({
requestType,
request: req,
error
}); // sync request failed, fall back to using date header from mpd
// TODO: log warning
this.mainPlaylistLoader_.clientOffset_ = this.mainLoaded_ - Date.now();
return done();
}
let serverTime;
if (utcTiming.method === 'HEAD') {
if (!req.responseHeaders || !req.responseHeaders.date) {
// expected date header not present, fall back to using date header from mpd
// TODO: log warning
serverTime = this.mainLoaded_;
} else {
serverTime = Date.parse(req.responseHeaders.date);
}
} else {
serverTime = Date.parse(req.responseText);
}
this.mainPlaylistLoader_.clientOffset_ = serverTime - Date.now();
done();
});
}
haveMain_() {
this.state = 'HAVE_MAIN_MANIFEST';
if (this.isMain_) {
// We have the main playlist at this point, so
// trigger this to allow PlaylistController
// to make an initial playlist selection
this.trigger('loadedplaylist');
} else if (!this.media_) {
// no media playlist was specifically selected so select
// the one the child playlist loader was created with
this.media(this.childPlaylist_);
}
}
handleMain_() {
// clear media request
this.mediaRequest_ = null;
const oldMain = this.mainPlaylistLoader_.main;
const metadata = {
manifestInfo: {
uri: this.mainPlaylistLoader_.srcUrl
}
};
this.trigger({
type: 'manifestparsestart',
metadata
});
let newMain;
try {
newMain = parseMainXml({
mainXml: this.mainPlaylistLoader_.mainXml_,
srcUrl: this.mainPlaylistLoader_.srcUrl,
clientOffset: this.mainPlaylistLoader_.clientOffset_,
sidxMapping: this.mainPlaylistLoader_.sidxMapping_,
previousManifest: oldMain
});
} catch (error) {
this.error = error;
this.error.metadata = {
errorType: videojs.Error.StreamingDashManifestParserError,
error
};
this.trigger('error');
} // if we have an old main to compare the new main against
if (oldMain) {
newMain = updateMain(oldMain, newMain, this.mainPlaylistLoader_.sidxMapping_);
} // only update main if we have a new main
this.mainPlaylistLoader_.main = newMain ? newMain : oldMain;
const location = this.mainPlaylistLoader_.main.locations && this.mainPlaylistLoader_.main.locations[0];
if (location && location !== this.mainPlaylistLoader_.srcUrl) {
this.mainPlaylistLoader_.srcUrl = location;
}
if (!oldMain || newMain && newMain.minimumUpdatePeriod !== oldMain.minimumUpdatePeriod) {
this.updateMinimumUpdatePeriodTimeout_();
}
this.addEventStreamToMetadataTrack_(newMain);
if (newMain) {
const {
duration,
endList
} = newMain;
const renditions = [];
newMain.playlists.forEach(playlist => {
renditions.push({
id: playlist.id,
bandwidth: playlist.attributes.BANDWIDTH,
resolution: playlist.attributes.RESOLUTION,
codecs: playlist.attributes.CODECS
});
});
const parsedManifest = {
duration,
isLive: !endList,
renditions
};
metadata.parsedManifest = parsedManifest;
this.trigger({
type: 'manifestparsecomplete',
metadata
});
}
return Boolean(newMain);
}
updateMinimumUpdatePeriodTimeout_() {
const mpl = this.mainPlaylistLoader_; // cancel any pending creation of mup on media
// a new one will be added if needed.
if (mpl.createMupOnMedia_) {
mpl.off('loadedmetadata', mpl.createMupOnMedia_);
mpl.createMupOnMedia_ = null;
} // clear any pending timeouts
if (mpl.minimumUpdatePeriodTimeout_) {
window$1.clearTimeout(mpl.minimumUpdatePeriodTimeout_);
mpl.minimumUpdatePeriodTimeout_ = null;
}
let mup = mpl.main && mpl.main.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
// MPD has no future validity, so a new one will need to be acquired when new
// media segments are to be made available. Thus, we use the target duration
// in this case
if (mup === 0) {
if (mpl.media()) {
mup = mpl.media().targetDuration * 1000;
} else {
mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
mpl.one('loadedmetadata', mpl.createMupOnMedia_);
}
} // if minimumUpdatePeriod is invalid or <= zero, which can happen
// when a live video becomes VOD, skip timeout creation.
if (typeof mup !== 'number' || mup <= 0) {
if (mup < 0) {
this.logger_(`found invalid minimumUpdatePeriod of ${mup}, not setting a timeout`);
}
return;
}
this.createMUPTimeout_(mup);
}
createMUPTimeout_(mup) {
const mpl = this.mainPlaylistLoader_;
mpl.minimumUpdatePeriodTimeout_ = window$1.setTimeout(() => {
mpl.minimumUpdatePeriodTimeout_ = null;
mpl.trigger('minimumUpdatePeriod');
mpl.createMUPTimeout_(mup);
}, mup);
}
/**
* Sends request to refresh the main xml and updates the parsed main manifest
*/
refreshXml_() {
this.requestMain_((req, mainChanged) => {
if (!mainChanged) {
return;
}
if (this.media_) {
this.media_ = this.mainPlaylistLoader_.main.playlists[this.media_.id];
} // This will filter out updated sidx info from the mapping
this.mainPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(this.mainPlaylistLoader_.main, this.mainPlaylistLoader_.sidxMapping_);
this.addSidxSegments_(this.media(), this.state, sidxChanged => {
// TODO: do we need to reload the current playlist?
this.refreshMedia_(this.media().id);
});
});
}
/**
* Refreshes the media playlist by re-parsing the main xml and updating playlist
* references. If this is an alternate loader, the updated parsed manifest is retrieved
* from the main loader.
*/
refreshMedia_(mediaID) {
if (!mediaID) {
throw new Error('refreshMedia_ must take a media id');
} // for main we have to reparse the main xml to re-create segments
// based on current timing values, which may change media. We only skip
// updating the main manifest if this is the first time this.media_ is
// being set, as main was just parsed in that case.
if (this.media_ && this.isMain_) {
this.handleMain_();
}
const playlists = this.mainPlaylistLoader_.main.playlists;
const mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
if (mediaChanged) {
this.media_ = playlists[mediaID];
} else {
this.trigger('playlistunchanged');
}
if (!this.mediaUpdateTimeout) {
const createMediaUpdateTimeout = () => {
if (this.media().endList) {
return;
}
this.mediaUpdateTimeout = window$1.setTimeout(() => {
this.trigger('mediaupdatetimeout');
createMediaUpdateTimeout();
}, refreshDelay(this.media(), Boolean(mediaChanged)));
};
createMediaUpdateTimeout();
}
this.trigger('loadedplaylist');
}
/**
* Takes eventstream data from a parsed DASH manifest and adds it to the metadata text track.
*
* @param {manifest} newMain the newly parsed manifest
*/
addEventStreamToMetadataTrack_(newMain) {
// Only add new event stream metadata if we have a new manifest.
if (newMain && this.mainPlaylistLoader_.main.eventStream) {
// convert EventStream to ID3-like data.
const metadataArray = this.mainPlaylistLoader_.main.eventStream.map(eventStreamNode => {
return {
cueTime: eventStreamNode.start,
frames: [{
data: eventStreamNode.messageData
}]
};
});
this.addMetadataToTextTrack('EventStream', metadataArray, this.mainPlaylistLoader_.main.duration);
}
}
/**
* Returns the key ID set from a playlist
*
* @param {Object} playlist the playlist to fetch the key ID set from.
* @return {Set} a set of 32-digit hex strings that represent the unique key IDs for that playlist.
*/
getKeyIdSet(playlist) {
if (playlist.contentProtection) {
const keyIds = new Set();
for (const keysystem in playlist.contentProtection) {
const defaultKID = playlist.contentProtection[keysystem].attributes['cenc:default_KID'];
if (defaultKID) {
// DASH keyIds are separated by dashes.
keyIds.add(defaultKID.replace(/-/g, '').toLowerCase());
}
}
return keyIds;
}
}
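// For example, a playlist whose contentProtection entry carries a
// 'cenc:default_KID' of "1234abcd-12ab-34cd-56ef-1234567890ab" would yield a set
// containing "1234abcd12ab34cd56ef1234567890ab" (dashes stripped, lower-cased).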
}
var Config = {
GOAL_BUFFER_LENGTH: 30,
MAX_GOAL_BUFFER_LENGTH: 60,
BACK_BUFFER_LENGTH: 30,
GOAL_BUFFER_LENGTH_RATE: 1,
// 0.5 MB/s (4194304 bits per second)
INITIAL_BANDWIDTH: 4194304,
// A fudge factor to apply to advertised playlist bitrates to account for
// temporary fluctuations in client bandwidth
BANDWIDTH_VARIANCE: 1.2,
// How much of the buffer must be filled before we consider upswitching
BUFFER_LOW_WATER_LINE: 0,
MAX_BUFFER_LOW_WATER_LINE: 30,
// TODO: Remove this when experimentalBufferBasedABR is removed
EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
BUFFER_LOW_WATER_LINE_RATE: 1,
// If the buffer is greater than the high water line, we won't switch down
BUFFER_HIGH_WATER_LINE: 30
};
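// Rough sketch of how these defaults interact (not normative): a rendition is
// generally only considered for an up-switch when its advertised BANDWIDTH
// multiplied by BANDWIDTH_VARIANCE fits within the measured throughput, e.g. a
// 2,000,000 bps playlist needs roughly 2,400,000 bps of measured bandwidth at
// the default variance of 1.2.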
const stringToArrayBuffer = string => {
const view = new Uint8Array(new ArrayBuffer(string.length));
for (let i = 0; i < string.length; i++) {
view[i] = string.charCodeAt(i);
}
return view.buffer;
};
/* global Blob, BlobBuilder, Worker */
// unify worker interface
const browserWorkerPolyFill = function (workerObj) {
// node only supports on/off
workerObj.on = workerObj.addEventListener;
workerObj.off = workerObj.removeEventListener;
return workerObj;
};
const createObjectURL = function (str) {
try {
return URL.createObjectURL(new Blob([str], {
type: 'application/javascript'
}));
} catch (e) {
const blob = new BlobBuilder();
blob.append(str);
return URL.createObjectURL(blob.getBlob());
}
};
const factory = function (code) {
return function () {
const objectUrl = createObjectURL(code);
const worker = browserWorkerPolyFill(new Worker(objectUrl));
worker.objURL = objectUrl;
const terminate = worker.terminate;
worker.on = worker.addEventListener;
worker.off = worker.removeEventListener;
worker.terminate = function () {
URL.revokeObjectURL(objectUrl);
return terminate.call(this);
};
return worker;
};
};
const transform = function (code) {
return `var browserWorkerPolyFill = ${browserWorkerPolyFill.toString()};\n` + 'browserWorkerPolyFill(self);\n' + code;
};
const getWorkerString = function (fn) {
return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
};
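// A minimal usage sketch (names here are hypothetical): `const createWorker =
// factory(transform(getWorkerString(fn)))` yields a function that spins up an
// inline Worker from a Blob object URL; calling `worker.terminate()` on the
// result also revokes that object URL, as wired up above.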
/* rollup-plugin-worker-factory start for worker!/home/runner/work/http-streaming/http-streaming/src/transmuxer-worker.js */
const workerCode$1 = transform(getWorkerString(function () {
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* A lightweight readable stream implementation that handles event dispatching.
* Objects that inherit from streams should call init in their constructors.
*/
var Stream$8 = function () {
this.init = function () {
var listeners = {};
/**
* Add a listener for a specified event type.
* @param type {string} the event name
* @param listener {function} the callback to be invoked when an event of
* the specified type occurs
*/
this.on = function (type, listener) {
if (!listeners[type]) {
listeners[type] = [];
}
listeners[type] = listeners[type].concat(listener);
};
/**
* Remove a listener for a specified event type.
* @param type {string} the event name
* @param listener {function} a function previously registered for this
* type of event through `on`
*/
this.off = function (type, listener) {
var index;
if (!listeners[type]) {
return false;
}
index = listeners[type].indexOf(listener);
listeners[type] = listeners[type].slice();
listeners[type].splice(index, 1);
return index > -1;
};
/**
* Trigger an event of the specified type on this stream. Any additional
* arguments to this function are passed as parameters to event listeners.
* @param type {string} the event name
*/
this.trigger = function (type) {
var callbacks, i, length, args;
callbacks = listeners[type];
if (!callbacks) {
return;
} // Slicing the arguments on every invocation of this method
// can add a significant amount of overhead. Avoid the
// intermediate object creation for the common case of a
// single callback argument
if (arguments.length === 2) {
length = callbacks.length;
for (i = 0; i < length; ++i) {
callbacks[i].call(this, arguments[1]);
}
} else {
args = [];
i = arguments.length;
for (i = 1; i < arguments.length; ++i) {
args.push(arguments[i]);
}
length = callbacks.length;
for (i = 0; i < length; ++i) {
callbacks[i].apply(this, args);
}
}
};
/**
* Destroys the stream and cleans up.
*/
this.dispose = function () {
listeners = {};
};
};
};
/**
* Forwards all `data` events on this stream to the destination stream. The
* destination stream should provide a method `push` to receive the data
* events as they arrive.
* @param destination {stream} the stream that will receive all `data` events
* @param autoFlush {boolean} if false, we will not call `flush` on the destination
* when the current stream emits a 'done' event
* @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
*/
Stream$8.prototype.pipe = function (destination) {
this.on('data', function (data) {
destination.push(data);
});
this.on('done', function (flushSource) {
destination.flush(flushSource);
});
this.on('partialdone', function (flushSource) {
destination.partialFlush(flushSource);
});
this.on('endedtimeline', function (flushSource) {
destination.endTimeline(flushSource);
});
this.on('reset', function (flushSource) {
destination.reset(flushSource);
});
return destination;
}; // Default stream functions that are expected to be overridden to perform
// actual work. These are provided by the prototype as a sort of no-op
// implementation so that we don't have to check for their existence in the
// `pipe` function above.
Stream$8.prototype.push = function (data) {
this.trigger('data', data);
};
Stream$8.prototype.flush = function (flushSource) {
this.trigger('done', flushSource);
};
Stream$8.prototype.partialFlush = function (flushSource) {
this.trigger('partialdone', flushSource);
};
Stream$8.prototype.endTimeline = function (flushSource) {
this.trigger('endedtimeline', flushSource);
};
Stream$8.prototype.reset = function (flushSource) {
this.trigger('reset', flushSource);
};
var stream = Stream$8;
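// Because pipe() returns its destination, stages can be chained, e.g.
// `parser.pipe(transmuxer).pipe(output)` (illustrative names), with every
// 'data'/'done'/'reset' event forwarded down the chain via the handlers above.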
var MAX_UINT32$1 = Math.pow(2, 32);
var getUint64$3 = function (uint8) {
var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
var value;
if (dv.getBigUint64) {
value = dv.getBigUint64(0);
if (value < Number.MAX_SAFE_INTEGER) {
return Number(value);
}
return value;
}
return dv.getUint32(0) * MAX_UINT32$1 + dv.getUint32(4);
};
var numbers = {
getUint64: getUint64$3,
MAX_UINT32: MAX_UINT32$1
};
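// Example: for the big-endian bytes [0, 0, 0, 0, 0, 0, 1, 0] getUint64 returns the
// Number 256; values at or above Number.MAX_SAFE_INTEGER come back as a BigInt
// when DataView#getBigUint64 is available.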
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Functions that generate fragmented MP4s suitable for use with Media
* Source Extensions.
*/
var MAX_UINT32 = numbers.MAX_UINT32;
var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
(function () {
var i;
types = {
avc1: [],
// codingname
avcC: [],
btrt: [],
dinf: [],
dref: [],
esds: [],
ftyp: [],
hdlr: [],
mdat: [],
mdhd: [],
mdia: [],
mfhd: [],
minf: [],
moof: [],
moov: [],
mp4a: [],
// codingname
mvex: [],
mvhd: [],
pasp: [],
sdtp: [],
smhd: [],
stbl: [],
stco: [],
stsc: [],
stsd: [],
stsz: [],
stts: [],
styp: [],
tfdt: [],
tfhd: [],
traf: [],
trak: [],
trun: [],
trex: [],
tkhd: [],
vmhd: []
}; // In environments where Uint8Array is undefined (e.g., IE8), skip setup so that
// we don't throw an error
if (typeof Uint8Array === 'undefined') {
return;
}
for (i in types) {
if (types.hasOwnProperty(i)) {
types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
}
}
MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
VIDEO_HDLR = new Uint8Array([0x00,
// version 0
0x00, 0x00, 0x00,
// flags
0x00, 0x00, 0x00, 0x00,
// pre_defined
0x76, 0x69, 0x64, 0x65,
// handler_type: 'vide'
0x00, 0x00, 0x00, 0x00,
// reserved
0x00, 0x00, 0x00, 0x00,
// reserved
0x00, 0x00, 0x00, 0x00,
// reserved
0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
]);
AUDIO_HDLR = new Uint8Array([0x00,
// version 0
0x00, 0x00, 0x00,
// flags
0x00, 0x00, 0x00, 0x00,
// pre_defined
0x73, 0x6f, 0x75, 0x6e,
// handler_type: 'soun'
0x00, 0x00, 0x00, 0x00,
// reserved
0x00, 0x00, 0x00, 0x00,
// reserved
0x00, 0x00, 0x00, 0x00,
// reserved
0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
]);
HDLR_TYPES = {
video: VIDEO_HDLR,
audio: AUDIO_HDLR
};
DREF = new Uint8Array([0x00,
// version 0
0x00, 0x00, 0x00,
// flags
0x00, 0x00, 0x00, 0x01,
// entry_count
0x00, 0x00, 0x00, 0x0c,
// entry_size
0x75, 0x72, 0x6c, 0x20,
// 'url' type
0x00,
// version 0
0x00, 0x00, 0x01 // entry_flags
]);
SMHD = new Uint8Array([0x00,
// version
0x00, 0x00, 0x00,
// flags
0x00, 0x00,
// balance, 0 means centered
0x00, 0x00 // reserved
]);
STCO = new Uint8Array([0x00,
// version
0x00, 0x00, 0x00,
// flags
0x00, 0x00, 0x00, 0x00 // entry_count
]);
STSC = STCO;
STSZ = new Uint8Array([0x00,
// version
0x00, 0x00, 0x00,
// flags
0x00, 0x00, 0x00, 0x00,
// sample_size
0x00, 0x00, 0x00, 0x00 // sample_count
]);
STTS = STCO;
VMHD = new Uint8Array([0x00,
// version
0x00, 0x00, 0x01,
// flags
0x00, 0x00,
// graphicsmode
0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
]);
})();
box = function (type) {
var payload = [],
size = 0,
i,
result,
view;
for (i = 1; i < arguments.length; i++) {
payload.push(arguments[i]);
}
i = payload.length; // calculate the total size we need to allocate
while (i--) {
size += payload[i].byteLength;
}
result = new Uint8Array(size + 8);
view = new DataView(result.buffer, result.byteOffset, result.byteLength);
view.setUint32(0, result.byteLength);
result.set(type, 4); // copy the payload into the result
for (i = 0, size = 8; i < payload.length; i++) {
result.set(payload[i], size);
size += payload[i].byteLength;
}
return result;
};
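// Layout sketch: `box(types.ftyp, payload)` produces
// [ 4-byte size | 4-byte type 'ftyp' | payload bytes ], and nested calls such as
// `box(types.moov, box(types.mvhd, bytes))` compose boxes by concatenation.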
dinf = function () {
return box(types.dinf, box(types.dref, DREF));
};
esds = function (track) {
return box(types.esds, new Uint8Array([0x00,
// version
0x00, 0x00, 0x00,
// flags
// ES_Descriptor
0x03,
// tag, ES_DescrTag
0x19,
// length
0x00, 0x00,
// ES_ID
0x00,
// streamDependenceFlag, URL_flag, reserved, streamPriority
// DecoderConfigDescriptor
0x04,
// tag, DecoderConfigDescrTag
0x11,
// length
0x40,
// object type
0x15,
// streamType
0x00, 0x06, 0x00,
// bufferSizeDB
0x00, 0x00, 0xda, 0xc0,
// maxBitrate
0x00, 0x00, 0xda, 0xc0,
// avgBitrate
// DecoderSpecificInfo
0x05,
// tag, DecoderSpecificInfoTag
0x02,
// length
// ISO/IEC 14496-3, AudioSpecificConfig
// for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
]));
};
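// Worked example: for an AAC-LC track (audioobjecttype 2) at 44.1 kHz
// (samplingfrequencyindex 4) with channelcount 2, the two computed
// AudioSpecificConfig bytes above work out to 0x12, 0x10.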
ftyp = function () {
return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
};
hdlr = function (type) {
return box(types.hdlr, HDLR_TYPES[type]);
};
mdat = function (data) {
return box(types.mdat, data);
};
mdhd = function (track) {
var result = new Uint8Array([0x00,
// version 0
0x00, 0x00, 0x00,
// flags
0x00, 0x00, 0x00, 0x02,
// creation_time
0x00, 0x00, 0x00, 0x03,
// modification_time
0x00, 0x01, 0x5f, 0x90,
// timescale, 90,000 "ticks" per second
track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF,
// duration
0x55, 0xc4,
// 'und' language (undetermined)
0x00, 0x00]); // Use the sample rate from the track metadata, when it is
// defined. The sample rate can be parsed out of an ADTS header, for
// instance.
if (track.samplerate) {
result[12] = track.samplerate >>> 24 & 0xFF;
result[13] = track.samplerate >>> 16 & 0xFF;
result[14] = track.samplerate >>> 8 & 0xFF;
result[15] = track.samplerate & 0xFF;
}
return box(types.mdhd, result);
};
mdia = function (track) {
return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
};
mfhd = function (sequenceNumber) {
return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00,
// flags
(sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
]));
};
minf = function (track) {
return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
};
moof = function (sequenceNumber, tracks) {
var trackFragments = [],
i = tracks.length; // build traf boxes for each track fragment
while (i--) {
trackFragments[i] = traf(tracks[i]);
}
return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
};
/**
* Returns a movie box.
* @param tracks {array} the tracks associated with this movie
* @see ISO/IEC 14496-12:2012(E), section 8.2.1
*/
moov = function (tracks) {
var i = tracks.length,
boxes = [];
while (i--) {
boxes[i] = trak(tracks[i]);
}
return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
};
mvex = function (tracks) {
var i = tracks.length,
boxes = [];
while (i--) {
boxes[i] = trex(tracks[i]);
}
return box.apply(null, [types.mvex].concat(boxes));
};
mvhd = function (duration) {
var bytes = new Uint8Array([0x00,
// version 0
0x00, 0x00, 0x00,
// flags
0x00, 0x00, 0x00, 0x01,
// creation_time
0x00, 0x00, 0x00, 0x02,
// modification_time
0x00, 0x01, 0x5f, 0x90,
// timescale, 90,000 "ticks" per second
(duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF,
// duration
0x00, 0x01, 0x00, 0x00,
// 1.0 rate
0x01, 0x00,
// 1.0 volume
0x00, 0x00,
// reserved
0x00, 0x00, 0x00, 0x00,
// reserved
0x00, 0x00, 0x00, 0x00,
// reserved
0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00,
// transformation: unity matrix
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
// pre_defined
0xff, 0xff, 0xff, 0xff // next_track_ID
]);
return box(types.mvhd, bytes);
};
sdtp = function (track) {
var samples = track.samples || [],
bytes = new Uint8Array(4 + samples.length),
flags,
i; // leave the full box header (4 bytes) all zero
// write the sample table
for (i = 0; i < samples.length; i++) {
flags = samples[i].flags;
bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
}
return box(types.sdtp, bytes);
};
stbl = function (track) {
return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
};
(function () {
var videoSample, audioSample;
stsd = function (track) {
return box(types.stsd, new Uint8Array([0x00,
// version 0
0x00, 0x00, 0x00,
// flags
0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
};
videoSample = function (track) {
var sps = track.sps || [],
pps = track.pps || [],
sequenceParameterSets = [],
pictureParameterSets = [],
i,
avc1Box; // assemble the SPSs
for (i = 0; i < sps.length; i++) {
sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
} // assemble the PPSs
for (i = 0; i < pps.length; i++) {
pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
pictureParameterSets.push(pps[i].byteLength & 0xFF);
pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
}
avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
// reserved
0x00, 0x01,
// data_reference_index
0x00, 0x00,
// pre_defined
0x00, 0x00,
// reserved
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
// pre_defined
(track.width & 0xff00) >> 8, track.width & 0xff,
// width
(track.height & 0xff00) >> 8, track.height & 0xff,
// height
0x00, 0x48, 0x00, 0x00,
// horizresolution
0x00, 0x48, 0x00, 0x00,
// vertresolution
0x00, 0x00, 0x00, 0x00,
// reserved
0x00, 0x01,
// frame_count
0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
// compressorname
0x00, 0x18,
// depth = 24
0x11, 0x11 // pre_defined = -1
]), box(types.avcC, new Uint8Array([0x01,
// configurationVersion
track.profileIdc,
// AVCProfileIndication
track.profileCompatibility,
// profile_compatibility
track.levelIdc,
// AVCLevelIndication
0xff // lengthSizeMinusOne, hard-coded to 4 bytes
].concat([sps.length],
// numOfSequenceParameterSets
sequenceParameterSets,
// "SPS"
[pps.length],
// numOfPictureParameterSets
pictureParameterSets // "PPS"
))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80,
// bufferSizeDB
0x00, 0x2d, 0xc6, 0xc0,
// maxBitrate
0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
]))];
if (track.sarRatio) {
var hSpacing = track.sarRatio[0],
vSpacing = track.sarRatio[1];
avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
}
return box.apply(null, avc1Box);
};
audioSample = function (track) {
return box(types.mp4a, new Uint8Array([
// SampleEntry, ISO/IEC 14496-12
0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
// reserved
0x00, 0x01,
// data_reference_index
// AudioSampleEntry, ISO/IEC 14496-12
0x00, 0x00, 0x00, 0x00,
// reserved
0x00, 0x00, 0x00, 0x00,
// reserved
(track.channelcount & 0xff00) >> 8, track.channelcount & 0xff,
// channelcount
(track.samplesize & 0xff00) >> 8, track.samplesize & 0xff,
// samplesize
0x00, 0x00,
// pre_defined
0x00, 0x00,
// reserved
(track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
// MP4AudioSampleEntry, ISO/IEC 14496-14
]), esds(track));
};
})();
tkhd = function (track) {
var result = new Uint8Array([0x00,
// version 0
0x00, 0x00, 0x07,
// flags
0x00, 0x00, 0x00, 0x00,
// creation_time
0x00, 0x00, 0x00, 0x00,
// modification_time
(track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF,
// track_ID
0x00, 0x00, 0x00, 0x00,
// reserved
(track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF,
// duration
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
// reserved
0x00, 0x00,
// layer
0x00, 0x00,
// alternate_group
0x01, 0x00,
// non-audio track volume
0x00, 0x00,
// reserved
0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00,
// transformation: unity matrix
(track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00,
// width
(track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
]);
return box(types.tkhd, result);
};
/**
* Generate a track fragment (traf) box. A traf box collects metadata
* about tracks in a movie fragment (moof) box.
*/
traf = function (track) {
var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00,
// version 0
0x00, 0x00, 0x3a,
// flags
(track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF,
// track_ID
0x00, 0x00, 0x00, 0x01,
// sample_description_index
0x00, 0x00, 0x00, 0x00,
// default_sample_duration
0x00, 0x00, 0x00, 0x00,
// default_sample_size
0x00, 0x00, 0x00, 0x00 // default_sample_flags
]));
upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / MAX_UINT32);
lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % MAX_UINT32);
trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01,
// version 1
0x00, 0x00, 0x00,
// flags
// baseMediaDecodeTime
upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
// the containing moof to the first payload byte of the associated
// mdat
dataOffset = 32 +
// tfhd
20 +
// tfdt
8 +
// traf header
16 +
// mfhd
8 +
// moof header
8; // mdat header
// audio tracks require less metadata
if (track.type === 'audio') {
trackFragmentRun = trun$1(track, dataOffset);
return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
} // video tracks should contain an independent and disposable samples
// box (sdtp)
// generate one and adjust offsets to match
sampleDependencyTable = sdtp(track);
trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
};
/**
* Generate a track box.
* @param track {object} a track definition
* @return {Uint8Array} the track box
*/
trak = function (track) {
track.duration = track.duration || 0xffffffff;
return box(types.trak, tkhd(track), mdia(track));
};
trex = function (track) {
var result = new Uint8Array([0x00,
// version 0
0x00, 0x00, 0x00,
// flags
(track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF,
// track_ID
0x00, 0x00, 0x00, 0x01,
// default_sample_description_index
0x00, 0x00, 0x00, 0x00,
// default_sample_duration
0x00, 0x00, 0x00, 0x00,
// default_sample_size
0x00, 0x01, 0x00, 0x01 // default_sample_flags
]); // the last two bytes of default_sample_flags is the sample
// degradation priority, a hint about the importance of this sample
// relative to others. Lower the degradation priority for all sample
// types other than video.
if (track.type !== 'video') {
result[result.length - 1] = 0x00;
}
return box(types.trex, result);
};
(function () {
var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
// duration is present for the first sample, it will be present for
// all subsequent samples.
// see ISO/IEC 14496-12:2012, Section 8.8.8.1
trunHeader = function (samples, offset) {
var durationPresent = 0,
sizePresent = 0,
flagsPresent = 0,
compositionTimeOffset = 0; // trun flag constants
if (samples.length) {
if (samples[0].duration !== undefined) {
durationPresent = 0x1;
}
if (samples[0].size !== undefined) {
sizePresent = 0x2;
}
if (samples[0].flags !== undefined) {
flagsPresent = 0x4;
}
if (samples[0].compositionTimeOffset !== undefined) {
compositionTimeOffset = 0x8;
}
}
return [0x00,
// version 0
0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01,
// flags
(samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF,
// sample_count
(offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
];
};
videoTrun = function (track, offset) {
var bytesOffest, bytes, header, samples, sample, i;
samples = track.samples || [];
offset += 8 + 12 + 16 * samples.length;
header = trunHeader(samples, offset);
bytes = new Uint8Array(header.length + samples.length * 16);
bytes.set(header);
bytesOffest = header.length;
for (i = 0; i < samples.length; i++) {
sample = samples[i];
bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration
bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
bytes[bytesOffest++] = sample.size & 0xFF; // sample_size
bytes[bytesOffest++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
bytes[bytesOffest++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
bytes[bytesOffest++] = sample.flags.degradationPriority & 0xF0 << 8;
bytes[bytesOffest++] = sample.flags.degradationPriority & 0x0F; // sample_flags
bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
bytes[bytesOffest++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
}
return box(types.trun, bytes);
};
audioTrun = function (track, offset) {
var bytes, bytesOffest, header, samples, sample, i;
samples = track.samples || [];
offset += 8 + 12 + 8 * samples.length;
header = trunHeader(samples, offset);
bytes = new Uint8Array(header.length + samples.length * 8);
bytes.set(header);
bytesOffest = header.length;
for (i = 0; i < samples.length; i++) {
sample = samples[i];
bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration
bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
bytes[bytesOffest++] = sample.size & 0xFF; // sample_size
}
return box(types.trun, bytes);
};
trun$1 = function (track, offset) {
if (track.type === 'audio') {
return audioTrun(track, offset);
}
return videoTrun(track, offset);
};
})();
var mp4Generator = {
ftyp: ftyp,
mdat: mdat,
moof: moof,
moov: moov,
initSegment: function (tracks) {
var fileType = ftyp(),
movie = moov(tracks),
result;
result = new Uint8Array(fileType.byteLength + movie.byteLength);
result.set(fileType);
result.set(movie, fileType.byteLength);
return result;
}
};
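// Typical flow (sketch): `mp4Generator.initSegment(tracks)` concatenates the ftyp
// and moov boxes for the init segment, while each media segment is built from
// `mp4Generator.moof(sequenceNumber, tracks)` followed by `mp4Generator.mdat(data)`.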
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*/
// Convert an array of nal units into an array of frames with each frame being
// composed of the nal units that make up that frame
// Also keep track of cumulative data about the frame from the nal units such
// as the frame duration, starting pts, etc.
var groupNalsIntoFrames = function (nalUnits) {
var i,
currentNal,
currentFrame = [],
frames = []; // TODO added for LHLS, make sure this is OK
frames.byteLength = 0;
frames.nalCount = 0;
frames.duration = 0;
currentFrame.byteLength = 0;
for (i = 0; i < nalUnits.length; i++) {
currentNal = nalUnits[i]; // Split on 'aud'-type nal units
if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
// Since the very first nal unit is expected to be an AUD
// only push to the frames array when currentFrame is not empty
if (currentFrame.length) {
currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
frames.byteLength += currentFrame.byteLength;
frames.nalCount += currentFrame.length;
frames.duration += currentFrame.duration;
frames.push(currentFrame);
}
currentFrame = [currentNal];
currentFrame.byteLength = currentNal.data.byteLength;
currentFrame.pts = currentNal.pts;
currentFrame.dts = currentNal.dts;
} else {
// Specifically flag key frames for ease of use later
if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
currentFrame.keyFrame = true;
}
currentFrame.duration = currentNal.dts - currentFrame.dts;
currentFrame.byteLength += currentNal.data.byteLength;
currentFrame.push(currentNal);
}
} // For the last frame, use the duration of the previous frame if we
// have nothing better to go on
if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
currentFrame.duration = frames[frames.length - 1].duration;
} // Push the final frame
// TODO added for LHLS, make sure this is OK
frames.byteLength += currentFrame.byteLength;
frames.nalCount += currentFrame.length;
frames.duration += currentFrame.duration;
frames.push(currentFrame);
return frames;
}; // Convert an array of frames into an array of Gop with each Gop being composed
// of the frames that make up that Gop
// Also keep track of cumulative data about the Gop from the frames such as the
// Gop duration, starting pts, etc.
var groupFramesIntoGops = function (frames) {
var i,
currentFrame,
currentGop = [],
gops = []; // We must pre-set some of the values on the Gop since we
// keep running totals of these values
currentGop.byteLength = 0;
currentGop.nalCount = 0;
currentGop.duration = 0;
currentGop.pts = frames[0].pts;
currentGop.dts = frames[0].dts; // store some metadata about all the Gops
gops.byteLength = 0;
gops.nalCount = 0;
gops.duration = 0;
gops.pts = frames[0].pts;
gops.dts = frames[0].dts;
for (i = 0; i < frames.length; i++) {
currentFrame = frames[i];
if (currentFrame.keyFrame) {
// Since the very first frame is expected to be a keyframe
// only push to the gops array when currentGop is not empty
if (currentGop.length) {
gops.push(currentGop);
gops.byteLength += currentGop.byteLength;
gops.nalCount += currentGop.nalCount;
gops.duration += currentGop.duration;
}
currentGop = [currentFrame];
currentGop.nalCount = currentFrame.length;
currentGop.byteLength = currentFrame.byteLength;
currentGop.pts = currentFrame.pts;
currentGop.dts = currentFrame.dts;
currentGop.duration = currentFrame.duration;
} else {
currentGop.duration += currentFrame.duration;
currentGop.nalCount += currentFrame.length;
currentGop.byteLength += currentFrame.byteLength;
currentGop.push(currentFrame);
}
}
if (gops.length && currentGop.duration <= 0) {
currentGop.duration = gops[gops.length - 1].duration;
}
gops.byteLength += currentGop.byteLength;
gops.nalCount += currentGop.nalCount;
gops.duration += currentGop.duration; // push the final Gop
gops.push(currentGop);
return gops;
};
/*
* Search for the first keyframe in the GOPs and throw away all frames
* until that keyframe. Then extend the duration of the pulled keyframe
* and pull the PTS and DTS of the keyframe so that it covers the time
* range of the frames that were disposed.
*
* @param {Array} gops video GOPs
* @returns {Array} modified video GOPs
*/
var extendFirstKeyFrame = function (gops) {
var currentGop;
if (!gops[0][0].keyFrame && gops.length > 1) {
// Remove the first GOP
currentGop = gops.shift();
gops.byteLength -= currentGop.byteLength;
gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
// first gop to cover the time period of the
// frames we just removed
gops[0][0].dts = currentGop.dts;
gops[0][0].pts = currentGop.pts;
gops[0][0].duration += currentGop.duration;
}
return gops;
};
/**
* Default sample object
* see ISO/IEC 14496-12:2012, section 8.6.4.3
*/
var createDefaultSample = function () {
return {
size: 0,
flags: {
isLeading: 0,
dependsOn: 1,
isDependedOn: 0,
hasRedundancy: 0,
degradationPriority: 0,
isNonSyncSample: 1
}
};
};
/*
* Collates information from a video frame into an object for eventual
* entry into an MP4 sample table.
*
* @param {Object} frame the video frame
* @param {Number} dataOffset the byte offset to position the sample
* @return {Object} object containing sample table info for a frame
*/
var sampleForFrame = function (frame, dataOffset) {
var sample = createDefaultSample();
sample.dataOffset = dataOffset;
sample.compositionTimeOffset = frame.pts - frame.dts;
sample.duration = frame.duration;
sample.size = 4 * frame.length; // Space for nal unit size
sample.size += frame.byteLength;
if (frame.keyFrame) {
sample.flags.dependsOn = 2;
sample.flags.isNonSyncSample = 0;
}
return sample;
}; // generate the track's sample table from an array of gops
var generateSampleTable$1 = function (gops, baseDataOffset) {
var h,
i,
sample,
currentGop,
currentFrame,
dataOffset = baseDataOffset || 0,
samples = [];
for (h = 0; h < gops.length; h++) {
currentGop = gops[h];
for (i = 0; i < currentGop.length; i++) {
currentFrame = currentGop[i];
sample = sampleForFrame(currentFrame, dataOffset);
dataOffset += sample.size;
samples.push(sample);
}
}
return samples;
}; // generate the track's raw mdat data from an array of gops
var concatenateNalData = function (gops) {
var h,
i,
j,
currentGop,
currentFrame,
currentNal,
dataOffset = 0,
nalsByteLength = gops.byteLength,
numberOfNals = gops.nalCount,
totalByteLength = nalsByteLength + 4 * numberOfNals,
data = new Uint8Array(totalByteLength),
view = new DataView(data.buffer); // For each Gop..
for (h = 0; h < gops.length; h++) {
currentGop = gops[h]; // For each Frame..
for (i = 0; i < currentGop.length; i++) {
currentFrame = currentGop[i]; // For each NAL..
for (j = 0; j < currentFrame.length; j++) {
currentNal = currentFrame[j];
view.setUint32(dataOffset, currentNal.data.byteLength);
dataOffset += 4;
data.set(currentNal.data, dataOffset);
dataOffset += currentNal.data.byteLength;
}
}
}
return data;
}; // generate the track's sample table from a frame
var generateSampleTableForFrame = function (frame, baseDataOffset) {
var sample,
dataOffset = baseDataOffset || 0,
samples = [];
sample = sampleForFrame(frame, dataOffset);
samples.push(sample);
return samples;
}; // generate the track's raw mdat data from a frame
var concatenateNalDataForFrame = function (frame) {
var i,
currentNal,
dataOffset = 0,
nalsByteLength = frame.byteLength,
numberOfNals = frame.length,
totalByteLength = nalsByteLength + 4 * numberOfNals,
data = new Uint8Array(totalByteLength),
view = new DataView(data.buffer); // For each NAL..
for (i = 0; i < frame.length; i++) {
currentNal = frame[i];
view.setUint32(dataOffset, currentNal.data.byteLength);
dataOffset += 4;
data.set(currentNal.data, dataOffset);
dataOffset += currentNal.data.byteLength;
}
return data;
};
var frameUtils$1 = {
groupNalsIntoFrames: groupNalsIntoFrames,
groupFramesIntoGops: groupFramesIntoGops,
extendFirstKeyFrame: extendFirstKeyFrame,
generateSampleTable: generateSampleTable$1,
concatenateNalData: concatenateNalData,
generateSampleTableForFrame: generateSampleTableForFrame,
concatenateNalDataForFrame: concatenateNalDataForFrame
};
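// Taken together, the usual video path through these helpers is:
// nal units -> groupNalsIntoFrames() -> groupFramesIntoGops() ->
// extendFirstKeyFrame() -> generateSampleTable() + concatenateNalData(),
// producing the trun sample entries and the raw mdat payload respectively.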
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*/
var highPrefix = [33, 16, 5, 32, 164, 27];
var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
var zeroFill = function (count) {
var a = [];
while (count--) {
a.push(0);
}
return a;
};
var makeTable = function (metaTable) {
return Object.keys(metaTable).reduce(function (obj, key) {
obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
return arr.concat(part);
}, []));
return obj;
}, {});
};
var silence;
var silence_1 = function () {
if (!silence) {
// Frames-of-silence to use for filling in missing AAC frames
var coneOfSilence = {
96000: [highPrefix, [227, 64], zeroFill(154), [56]],
88200: [highPrefix, [231], zeroFill(170), [56]],
64000: [highPrefix, [248, 192], zeroFill(240), [56]],
48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
};
silence = makeTable(coneOfSilence);
}
return silence;
};
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*/
var ONE_SECOND_IN_TS$4 = 90000,
// 90kHz clock
secondsToVideoTs,
secondsToAudioTs,
videoTsToSeconds,
audioTsToSeconds,
audioTsToVideoTs,
videoTsToAudioTs,
metadataTsToSeconds;
secondsToVideoTs = function (seconds) {
return seconds * ONE_SECOND_IN_TS$4;
};
secondsToAudioTs = function (seconds, sampleRate) {
return seconds * sampleRate;
};
videoTsToSeconds = function (timestamp) {
return timestamp / ONE_SECOND_IN_TS$4;
};
audioTsToSeconds = function (timestamp, sampleRate) {
return timestamp / sampleRate;
};
audioTsToVideoTs = function (timestamp, sampleRate) {
return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
};
videoTsToAudioTs = function (timestamp, sampleRate) {
return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
};
/**
* Adjust ID3 tag or caption timing information by the timeline pts values
* (if keepOriginalTimestamps is false) and convert to seconds
*/
metadataTsToSeconds = function (timestamp, timelineStartPts, keepOriginalTimestamps) {
return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
};
var clock$2 = {
ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
secondsToVideoTs: secondsToVideoTs,
secondsToAudioTs: secondsToAudioTs,
videoTsToSeconds: videoTsToSeconds,
audioTsToSeconds: audioTsToSeconds,
audioTsToVideoTs: audioTsToVideoTs,
videoTsToAudioTs: videoTsToAudioTs,
metadataTsToSeconds: metadataTsToSeconds
};
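// Example conversions on the 90 kHz clock: secondsToVideoTs(2) === 180000,
// videoTsToSeconds(45000) === 0.5, and audioTsToVideoTs(44100, 44100) === 90000
// (one second of 44.1 kHz audio samples).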
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*/
var coneOfSilence = silence_1;
var clock$1 = clock$2;
/**
* Sum the `byteLength` properties of the data in each AAC frame
*/
var sumFrameByteLengths = function (array) {
var i,
currentObj,
sum = 0; // sum the byteLength of the data in each frame
for (i = 0; i < array.length; i++) {
currentObj = array[i];
sum += currentObj.data.byteLength;
}
return sum;
}; // Possibly pad (prefix) the audio track with silence if appending this track
// would lead to the introduction of a gap in the audio buffer
var prefixWithSilence = function (track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
var baseMediaDecodeTimeTs,
frameDuration = 0,
audioGapDuration = 0,
audioFillFrameCount = 0,
audioFillDuration = 0,
silentFrame,
i,
firstFrame;
if (!frames.length) {
return;
}
baseMediaDecodeTimeTs = clock$1.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills
frameDuration = Math.ceil(clock$1.ONE_SECOND_IN_TS / (track.samplerate / 1024));
if (audioAppendStartTs && videoBaseMediaDecodeTime) {
// insert the shortest possible amount (audio gap or audio to video gap)
audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap
audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
audioFillDuration = audioFillFrameCount * frameDuration;
} // don't attempt to fill gaps smaller than a single frame or larger
// than a half second
if (audioFillFrameCount < 1 || audioFillDuration > clock$1.ONE_SECOND_IN_TS / 2) {
return;
}
silentFrame = coneOfSilence()[track.samplerate];
if (!silentFrame) {
// we don't have a silent frame pregenerated for the sample rate, so use a frame
// from the content instead
silentFrame = frames[0].data;
}
for (i = 0; i < audioFillFrameCount; i++) {
firstFrame = frames[0];
frames.splice(0, 0, {
data: silentFrame,
dts: firstFrame.dts - frameDuration,
pts: firstFrame.pts - frameDuration
});
}
track.baseMediaDecodeTime -= Math.floor(clock$1.videoTsToAudioTs(audioFillDuration, track.samplerate));
return audioFillDuration;
}; // If the audio segment extends before the earliest allowed dts
// value, remove AAC frames until the segment starts at or after the earliest
// allowed DTS so that we don't end up with a negative baseMedia-
// DecodeTime for the audio track
var trimAdtsFramesByEarliestDts = function (adtsFrames, track, earliestAllowedDts) {
if (track.minSegmentDts >= earliestAllowedDts) {
return adtsFrames;
} // We will need to recalculate the earliest segment Dts
track.minSegmentDts = Infinity;
return adtsFrames.filter(function (currentFrame) {
// If this is an allowed frame, keep it and record its DTS
if (currentFrame.dts >= earliestAllowedDts) {
track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
track.minSegmentPts = track.minSegmentDts;
return true;
} // Otherwise, discard it
return false;
});
}; // generate the track's sample table from an array of frames
var generateSampleTable = function (frames) {
var i,
currentFrame,
samples = [];
for (i = 0; i < frames.length; i++) {
currentFrame = frames[i];
samples.push({
size: currentFrame.data.byteLength,
duration: 1024 // For AAC audio, all samples contain 1024 samples
});
}
return samples;
}; // generate the track's raw mdat data from an array of frames
var concatenateFrameData = function (frames) {
var i,
currentFrame,
dataOffset = 0,
data = new Uint8Array(sumFrameByteLengths(frames));
for (i = 0; i < frames.length; i++) {
currentFrame = frames[i];
data.set(currentFrame.data, dataOffset);
dataOffset += currentFrame.data.byteLength;
}
return data;
};
var audioFrameUtils$1 = {
prefixWithSilence: prefixWithSilence,
trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
generateSampleTable: generateSampleTable,
concatenateFrameData: concatenateFrameData
};
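// For reference, at a 44.1 kHz sample rate each AAC frame spans 1024 samples, so
// prefixWithSilence() above treats one frame as
// Math.ceil(90000 / (44100 / 1024)) === 2090 ticks of the 90 kHz clock when sizing
// the silent fill.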
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*/
var ONE_SECOND_IN_TS$3 = clock$2.ONE_SECOND_IN_TS;
/**
* Store information about the start and end of the track and the
* duration for each frame/sample we process in order to calculate
* the baseMediaDecodeTime
*/
var collectDtsInfo = function (track, data) {
if (typeof data.pts === 'number') {
if (track.timelineStartInfo.pts === undefined) {
track.timelineStartInfo.pts = data.pts;
}
if (track.minSegmentPts === undefined) {
track.minSegmentPts = data.pts;
} else {
track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
}
if (track.maxSegmentPts === undefined) {
track.maxSegmentPts = data.pts;
} else {
track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
}
}
if (typeof data.dts === 'number') {
if (track.timelineStartInfo.dts === undefined) {
track.timelineStartInfo.dts = data.dts;
}
if (track.minSegmentDts === undefined) {
track.minSegmentDts = data.dts;
} else {
track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
}
if (track.maxSegmentDts === undefined) {
track.maxSegmentDts = data.dts;
} else {
track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
}
}
};
/**
* Clear values used to calculate the baseMediaDecodeTime between
* tracks
*/
var clearDtsInfo = function (track) {
delete track.minSegmentDts;
delete track.maxSegmentDts;
delete track.minSegmentPts;
delete track.maxSegmentPts;
};
/**
* Calculate the track's baseMediaDecodeTime based on the earliest
* DTS the transmuxer has ever seen and the minimum DTS for the
* current track
* @param track {object} track metadata configuration
* @param keepOriginalTimestamps {boolean} If true, keep the timestamps
* in the source; false to adjust the first segment to start at 0.
*/
var calculateTrackBaseMediaDecodeTime = function (track, keepOriginalTimestamps) {
var baseMediaDecodeTime,
scale,
minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
if (!keepOriginalTimestamps) {
minSegmentDts -= track.timelineStartInfo.dts;
} // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
// we want the start of the first segment to be placed
baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
if (track.type === 'audio') {
// Audio has a different clock equal to the sampling_rate so we need to
// scale the PTS values into the clock rate of the track
scale = track.samplerate / ONE_SECOND_IN_TS$3;
baseMediaDecodeTime *= scale;
baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
}
return baseMediaDecodeTime;
};
var trackDecodeInfo$1 = {
clearDtsInfo: clearDtsInfo,
calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
collectDtsInfo: collectDtsInfo
};
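// Example: if the 90 kHz decode time computed above comes to 90000 (one second)
// for a 48 kHz audio track, the returned baseMediaDecodeTime is
// Math.floor(90000 * (48000 / 90000)) === 48000 in the track's own timescale.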
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Reads in-band caption information from a video elementary
* stream. Captions must follow the CEA-708 standard for injection
* into an MPEG-2 transport stream.
* @see https://en.wikipedia.org/wiki/CEA-708
* @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
*/
// Supplemental enhancement information (SEI) NAL units have a
// payload type field to indicate how they are to be
// interpreted. CEA-708 caption content is always transmitted with
// payload type 0x04.
var USER_DATA_REGISTERED_ITU_T_T35 = 4,
RBSP_TRAILING_BITS = 128;
/**
* Parse a supplemental enhancement information (SEI) NAL unit.
* Stops parsing once a message of type ITU T T35 has been found.
*
* @param bytes {Uint8Array} the bytes of a SEI NAL unit
* @return {object} the parsed SEI payload
* @see Rec. ITU-T H.264, 7.3.2.3.1
*/
var parseSei = function (bytes) {
var i = 0,
result = {
payloadType: -1,
payloadSize: 0
},
payloadType = 0,
payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
while (i < bytes.byteLength) {
// stop once we have hit the end of the sei_rbsp
if (bytes[i] === RBSP_TRAILING_BITS) {
break;
} // Parse payload type
while (bytes[i] === 0xFF) {
payloadType += 255;
i++;
}
payloadType += bytes[i++]; // Parse payload size
while (bytes[i] === 0xFF) {
payloadSize += 255;
i++;
}
payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
// there can only ever be one caption message in a frame's sei
if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
if (userIdentifier === 'GA94') {
result.payloadType = payloadType;
result.payloadSize = payloadSize;
result.payload = bytes.subarray(i, i + payloadSize);
break;
} else {
result.payload = void 0;
}
} // skip the payload and parse the next message
i += payloadSize;
payloadType = 0;
payloadSize = 0;
}
return result;
}; // see ANSI/SCTE 128-1 (2013), section 8.1
var parseUserData = function (sei) {
// itu_t_t35_country_code must be 181 (United States) for
// captions
if (sei.payload[0] !== 181) {
return null;
} // itu_t_t35_provider_code should be 49 (ATSC) for captions
if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
return null;
} // the user_identifier should be "GA94" to indicate ATSC1 data
if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
return null;
} // finally, user_data_type_code should be 0x03 for caption data
if (sei.payload[7] !== 0x03) {
return null;
} // return the user_data_type_structure and strip the trailing
// marker bits
return sei.payload.subarray(8, sei.payload.length - 1);
}; // see CEA-708-D, section 4.4
var parseCaptionPackets = function (pts, userData) {
var results = [],
i,
count,
offset,
data; // if this is just filler, return immediately
if (!(userData[0] & 0x40)) {
return results;
} // parse out the cc_data_1 and cc_data_2 fields
count = userData[0] & 0x1f;
for (i = 0; i < count; i++) {
offset = i * 3;
data = {
type: userData[offset + 2] & 0x03,
pts: pts
}; // capture cc data when cc_valid is 1
if (userData[offset + 2] & 0x04) {
data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
results.push(data);
}
}
return results;
};
var discardEmulationPreventionBytes$1 = function (data) {
var length = data.byteLength,
emulationPreventionBytesPositions = [],
i = 1,
newLength,
newData; // Find all `Emulation Prevention Bytes`
while (i < length - 2) {
if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
emulationPreventionBytesPositions.push(i + 2);
i += 2;
} else {
i++;
}
} // If no Emulation Prevention Bytes were found just return the original
// array
if (emulationPreventionBytesPositions.length === 0) {
return data;
} // Create a new array to hold the NAL unit data
newLength = length - emulationPreventionBytesPositions.length;
newData = new Uint8Array(newLength);
var sourceIndex = 0;
for (i = 0; i < newLength; sourceIndex++, i++) {
if (sourceIndex === emulationPreventionBytesPositions[0]) {
// Skip this byte
sourceIndex++; // Remove this position index
emulationPreventionBytesPositions.shift();
}
newData[i] = data[sourceIndex];
}
return newData;
}; // exports
var captionPacketParser = {
parseSei: parseSei,
parseUserData: parseUserData,
parseCaptionPackets: parseCaptionPackets,
discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
};
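// Example of the emulation-prevention step: the escaped RBSP bytes
// [0x01, 0x00, 0x00, 0x03, 0x01] come back from discardEmulationPreventionBytes
// as [0x01, 0x00, 0x00, 0x01], restoring the original NAL unit payload.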
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Reads in-band caption information from a video elementary
* stream. Captions must follow the CEA-708 standard for injection
* into an MPEG-2 transport stream.
* @see https://en.wikipedia.org/wiki/CEA-708
* @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
*/
// Link To Transport
// -----------------
var Stream$7 = stream;
var cea708Parser = captionPacketParser;
var CaptionStream$2 = function (options) {
options = options || {};
CaptionStream$2.prototype.init.call(this); // parse708captions flag, default to true
this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
this.captionPackets_ = [];
this.ccStreams_ = [new Cea608Stream(0, 0),
// eslint-disable-line no-use-before-define
new Cea608Stream(0, 1),
// eslint-disable-line no-use-before-define
new Cea608Stream(1, 0),
// eslint-disable-line no-use-before-define
new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
];
if (this.parse708captions_) {
this.cc708Stream_ = new Cea708Stream({
captionServices: options.captionServices
}); // eslint-disable-line no-use-before-define
}
this.reset(); // forward data and done events from CCs to this CaptionStream
this.ccStreams_.forEach(function (cc) {
cc.on('data', this.trigger.bind(this, 'data'));
cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
cc.on('done', this.trigger.bind(this, 'done'));
}, this);
if (this.parse708captions_) {
this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
}
};
CaptionStream$2.prototype = new Stream$7();
CaptionStream$2.prototype.push = function (event) {
var sei, userData, newCaptionPackets; // only examine SEI NALs
if (event.nalUnitType !== 'sei_rbsp') {
return;
} // parse the sei
sei = cea708Parser.parseSei(event.escapedRBSP); // no payload data, skip
if (!sei.payload) {
return;
} // ignore everything but user_data_registered_itu_t_t35
if (sei.payloadType !== cea708Parser.USER_DATA_REGISTERED_ITU_T_T35) {
return;
} // parse out the user data payload
userData = cea708Parser.parseUserData(sei); // ignore unrecognized userData
if (!userData) {
return;
} // Sometimes, the same segment # will be downloaded twice. To stop the
// caption data from being processed twice, we track the latest dts we've
// received and ignore everything with a dts before that. However, since
// data for a specific dts can be split across packets on either side of
// a segment boundary, we need to make sure we *don't* ignore the packets
// from the *next* segment that have dts === this.latestDts_. By constantly
// tracking the number of packets received with dts === this.latestDts_, we
// know how many should be ignored once we start receiving duplicates.
if (event.dts < this.latestDts_) {
// We've started getting older data, so set the flag.
this.ignoreNextEqualDts_ = true;
return;
} else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
this.numSameDts_--;
if (!this.numSameDts_) {
// We've received the last duplicate packet, time to start processing again
this.ignoreNextEqualDts_ = false;
}
return;
} // parse out CC data packets and save them for later
newCaptionPackets = cea708Parser.parseCaptionPackets(event.pts, userData);
this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
if (this.latestDts_ !== event.dts) {
this.numSameDts_ = 0;
}
this.numSameDts_++;
this.latestDts_ = event.dts;
};
CaptionStream$2.prototype.flushCCStreams = function (flushType) {
this.ccStreams_.forEach(function (cc) {
return flushType === 'flush' ? cc.flush() : cc.partialFlush();
}, this);
};
CaptionStream$2.prototype.flushStream = function (flushType) {
// make sure we actually parsed captions before proceeding
if (!this.captionPackets_.length) {
this.flushCCStreams(flushType);
return;
} // In Chrome, the Array#sort function is not stable so add a
// presortIndex that we can use to ensure we get a stable-sort
this.captionPackets_.forEach(function (elem, idx) {
elem.presortIndex = idx;
}); // sort caption byte-pairs based on their PTS values
this.captionPackets_.sort(function (a, b) {
if (a.pts === b.pts) {
return a.presortIndex - b.presortIndex;
}
return a.pts - b.pts;
});
this.captionPackets_.forEach(function (packet) {
if (packet.type < 2) {
// Dispatch packet to the right Cea608Stream
this.dispatchCea608Packet(packet);
} else {
// Dispatch packet to the Cea708Stream
this.dispatchCea708Packet(packet);
}
}, this);
this.captionPackets_.length = 0;
this.flushCCStreams(flushType);
};
CaptionStream$2.prototype.flush = function () {
return this.flushStream('flush');
}; // Only called if handling partial data
CaptionStream$2.prototype.partialFlush = function () {
return this.flushStream('partialFlush');
};
CaptionStream$2.prototype.reset = function () {
this.latestDts_ = null;
this.ignoreNextEqualDts_ = false;
this.numSameDts_ = 0;
this.activeCea608Channel_ = [null, null];
this.ccStreams_.forEach(function (ccStream) {
ccStream.reset();
});
}; // From the CEA-608 spec:
/*
* When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
* by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
* used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
* and subsequent data should then be processed according to the FCC rules. It may be necessary for the
* line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
* to switch to captioning or Text.
*/
// With that in mind, we ignore any data between an XDS control code and a
// subsequent closed-captioning control code.
CaptionStream$2.prototype.dispatchCea608Packet = function (packet) {
// NOTE: packet.type is the CEA608 field
if (this.setsTextOrXDSActive(packet)) {
this.activeCea608Channel_[packet.type] = null;
} else if (this.setsChannel1Active(packet)) {
this.activeCea608Channel_[packet.type] = 0;
} else if (this.setsChannel2Active(packet)) {
this.activeCea608Channel_[packet.type] = 1;
}
if (this.activeCea608Channel_[packet.type] === null) {
// If we haven't received anything to set the active channel, or the
// packets are Text/XDS data, discard the data; we don't want jumbled
// captions
return;
}
this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
};
CaptionStream$2.prototype.setsChannel1Active = function (packet) {
return (packet.ccData & 0x7800) === 0x1000;
};
CaptionStream$2.prototype.setsChannel2Active = function (packet) {
return (packet.ccData & 0x7800) === 0x1800;
};
CaptionStream$2.prototype.setsTextOrXDSActive = function (packet) {
return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
};
CaptionStream$2.prototype.dispatchCea708Packet = function (packet) {
if (this.parse708captions_) {
this.cc708Stream_.push(packet);
}
}; // ----------------------
// Session to Application
// ----------------------
// This hash maps special and extended character codes to their
// proper Unicode equivalent. The first one-byte key is just a
// non-standard character code. The two-byte keys that follow are
// the extended CEA708 character codes, along with the preceding
// 0x10 extended character byte to distinguish these codes from
// non-extended character codes. Every CEA708 character code that
// is not in this object maps directly to a standard unicode
// character code.
// The transparent space and non-breaking transparent space are
// technically not fully supported since there is no code to
// make them transparent, so they have normal non-transparent
// stand-ins.
// The special closed caption (CC) character isn't a standard
// unicode character, so a fairly similar unicode character was
// chosen in its place.
var CHARACTER_TRANSLATION_708 = {
0x7f: 0x266a,
// ♪
0x1020: 0x20,
// Transparent Space
0x1021: 0xa0,
// Non-breaking Transparent Space
0x1025: 0x2026,
// …
0x102a: 0x0160,
// Š
0x102c: 0x0152,
// Œ
0x1030: 0x2588,
// █
0x1031: 0x2018,
// ‘
0x1032: 0x2019,
// ’
0x1033: 0x201c,
// “
0x1034: 0x201d,
// ”
0x1035: 0x2022,
// •
0x1039: 0x2122,
// ™
0x103a: 0x0161,
// š
0x103c: 0x0153,
// œ
0x103d: 0x2120,
// ℠
0x103f: 0x0178,
// Ÿ
0x1076: 0x215b,
// ⅛
0x1077: 0x215c,
// ⅜
0x1078: 0x215d,
// ⅝
0x1079: 0x215e,
// ⅞
0x107a: 0x23d0,
// ⏐
0x107b: 0x23a4,
// ⎤
0x107c: 0x23a3,
// ⎣
0x107d: 0x23af,
// ⎯
0x107e: 0x23a6,
// ⎦
0x107f: 0x23a1,
// ⎡
0x10a0: 0x3138 // ㄸ (CC char)
};
var get708CharFromCode = function (code) {
var newCode = CHARACTER_TRANSLATION_708[code] || code;
if (code & 0x1000 && code === newCode) {
// Invalid extended code
return '';
}
return String.fromCharCode(newCode);
};
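// Illustrative sketch (not part of the original source): worked examples of the
// translation above, using sample codes chosen by us. Codes with the 0x1000
// "extended" bit that have no table entry are treated as invalid.
var exampleGet708Chars = function () {
var plain = get708CharFromCode(0x41); // 'A' - plain ASCII passes straight through
var extended = get708CharFromCode(0x1025); // '…' - found in CHARACTER_TRANSLATION_708
var invalid = get708CharFromCode(0x1022); // '' - extended bit set but no table entry
return [plain, extended, invalid];
};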
var within708TextBlock = function (b) {
return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
};
var Cea708Window = function (windowNum) {
this.windowNum = windowNum;
this.reset();
};
Cea708Window.prototype.reset = function () {
this.clearText();
this.pendingNewLine = false;
this.winAttr = {};
this.penAttr = {};
this.penLoc = {};
this.penColor = {}; // These default values are arbitrary,
// defineWindow will usually override them
this.visible = 0;
this.rowLock = 0;
this.columnLock = 0;
this.priority = 0;
this.relativePositioning = 0;
this.anchorVertical = 0;
this.anchorHorizontal = 0;
this.anchorPoint = 0;
this.rowCount = 1;
this.virtualRowCount = this.rowCount + 1;
this.columnCount = 41;
this.windowStyle = 0;
this.penStyle = 0;
};
Cea708Window.prototype.getText = function () {
return this.rows.join('\n');
};
Cea708Window.prototype.clearText = function () {
this.rows = [''];
this.rowIdx = 0;
};
Cea708Window.prototype.newLine = function (pts) {
if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
this.beforeRowOverflow(pts);
}
if (this.rows.length > 0) {
this.rows.push('');
this.rowIdx++;
} // Show all virtual rows since there's no visible scrolling
while (this.rows.length > this.virtualRowCount) {
this.rows.shift();
this.rowIdx--;
}
};
Cea708Window.prototype.isEmpty = function () {
if (this.rows.length === 0) {
return true;
} else if (this.rows.length === 1) {
return this.rows[0] === '';
}
return false;
};
Cea708Window.prototype.addText = function (text) {
this.rows[this.rowIdx] += text;
};
Cea708Window.prototype.backspace = function () {
if (!this.isEmpty()) {
var row = this.rows[this.rowIdx];
this.rows[this.rowIdx] = row.substr(0, row.length - 1);
}
};
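// Illustrative sketch (not part of the original source): a never-invoked helper
// showing the Cea708Window text buffer in isolation. With the default rowCount
// of 1 (virtualRowCount 2), two rows fit before older rows scroll off.
var exampleCea708Window = function () {
var win = new Cea708Window(0);
win.addText('HELLO');
win.newLine(0);
win.addText('WORLD');
return win.getText(); // 'HELLO\nWORLD'
};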
var Cea708Service = function (serviceNum, encoding, stream) {
this.serviceNum = serviceNum;
this.text = '';
this.currentWindow = new Cea708Window(-1);
this.windows = [];
this.stream = stream; // Try to set up a TextDecoder if an `encoding` value was provided
if (typeof encoding === 'string') {
this.createTextDecoder(encoding);
}
};
/**
* Initialize service windows
* Must be run before service use
*
* @param {Integer} pts PTS value
* @param {Function} beforeRowOverflow Function to execute before row overflow of a window
*/
Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
this.startPts = pts;
for (var win = 0; win < 8; win++) {
this.windows[win] = new Cea708Window(win);
if (typeof beforeRowOverflow === 'function') {
this.windows[win].beforeRowOverflow = beforeRowOverflow;
}
}
};
/**
* Set current window of service to be affected by commands
*
* @param {Integer} windowNum Window number
*/
Cea708Service.prototype.setCurrentWindow = function (windowNum) {
this.currentWindow = this.windows[windowNum];
};
/**
* Try to create a TextDecoder if it is natively supported
*/
Cea708Service.prototype.createTextDecoder = function (encoding) {
if (typeof TextDecoder === 'undefined') {
this.stream.trigger('log', {
level: 'warn',
message: 'The `encoding` option is unsupported without TextDecoder support'
});
} else {
try {
this.textDecoder_ = new TextDecoder(encoding);
} catch (error) {
this.stream.trigger('log', {
level: 'warn',
message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
});
}
}
};
var Cea708Stream = function (options) {
options = options || {};
Cea708Stream.prototype.init.call(this);
var self = this;
var captionServices = options.captionServices || {};
var captionServiceEncodings = {};
var serviceProps; // Get service encodings from captionServices option block
Object.keys(captionServices).forEach(serviceName => {
serviceProps = captionServices[serviceName];
if (/^SERVICE/.test(serviceName)) {
captionServiceEncodings[serviceName] = serviceProps.encoding;
}
});
this.serviceEncodings = captionServiceEncodings;
this.current708Packet = null;
this.services = {};
this.push = function (packet) {
if (packet.type === 3) {
// 708 packet start
self.new708Packet();
self.add708Bytes(packet);
} else {
if (self.current708Packet === null) {
// This should only happen at the start of a file if there's no packet start.
self.new708Packet();
}
self.add708Bytes(packet);
}
};
};
Cea708Stream.prototype = new Stream$7();
/**
* Push current 708 packet, create new 708 packet.
*/
Cea708Stream.prototype.new708Packet = function () {
if (this.current708Packet !== null) {
this.push708Packet();
}
this.current708Packet = {
data: [],
ptsVals: []
};
};
/**
* Add pts and both bytes from packet into current 708 packet.
*/
Cea708Stream.prototype.add708Bytes = function (packet) {
var data = packet.ccData;
var byte0 = data >>> 8;
var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
// that service blocks will always line up with byte pairs.
this.current708Packet.ptsVals.push(packet.pts);
this.current708Packet.data.push(byte0);
this.current708Packet.data.push(byte1);
};
/**
* Parse completed 708 packet into service blocks and push each service block.
*/
Cea708Stream.prototype.push708Packet = function () {
var packet708 = this.current708Packet;
var packetData = packet708.data;
var serviceNum = null;
var blockSize = null;
var i = 0;
var b = packetData[i++];
packet708.seq = b >> 6;
packet708.sizeCode = b & 0x3f; // 0b00111111;
for (; i < packetData.length; i++) {
b = packetData[i++];
serviceNum = b >> 5;
blockSize = b & 0x1f; // 0b00011111
if (serviceNum === 7 && blockSize > 0) {
// Extended service num
b = packetData[i++];
serviceNum = b;
}
this.pushServiceBlock(serviceNum, i, blockSize);
if (blockSize > 0) {
i += blockSize - 1;
}
}
};
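// Illustrative sketch (not part of the original source): the bit layout parsed
// above, worked on two sample bytes chosen by us. The packet header byte carries
// a 2-bit sequence number and a 6-bit size code; each service block header byte
// carries a 3-bit service number and a 5-bit block size.
var example708HeaderBits = function () {
var packetHeader = 0xc2;
var blockHeader = 0x22;
return {
seq: packetHeader >> 6, // 3
sizeCode: packetHeader & 0x3f, // 2
serviceNum: blockHeader >> 5, // 1
blockSize: blockHeader & 0x1f // 2
};
};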
/**
* Parse service block, execute commands, read text.
*
* Note: While many of these commands serve important purposes,
* many others just parse out the parameters or attributes, but
* nothing is done with them because this is not a full and complete
* implementation of the entire 708 spec.
*
* @param {Integer} serviceNum Service number
* @param {Integer} start Start index of the 708 packet data
* @param {Integer} size Block size
*/
Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
var b;
var i = start;
var packetData = this.current708Packet.data;
var service = this.services[serviceNum];
if (!service) {
service = this.initService(serviceNum, i);
}
for (; i < start + size && i < packetData.length; i++) {
b = packetData[i];
if (within708TextBlock(b)) {
i = this.handleText(i, service);
} else if (b === 0x18) {
i = this.multiByteCharacter(i, service);
} else if (b === 0x10) {
i = this.extendedCommands(i, service);
} else if (0x80 <= b && b <= 0x87) {
i = this.setCurrentWindow(i, service);
} else if (0x98 <= b && b <= 0x9f) {
i = this.defineWindow(i, service);
} else if (b === 0x88) {
i = this.clearWindows(i, service);
} else if (b === 0x8c) {
i = this.deleteWindows(i, service);
} else if (b === 0x89) {
i = this.displayWindows(i, service);
} else if (b === 0x8a) {
i = this.hideWindows(i, service);
} else if (b === 0x8b) {
i = this.toggleWindows(i, service);
} else if (b === 0x97) {
i = this.setWindowAttributes(i, service);
} else if (b === 0x90) {
i = this.setPenAttributes(i, service);
} else if (b === 0x91) {
i = this.setPenColor(i, service);
} else if (b === 0x92) {
i = this.setPenLocation(i, service);
} else if (b === 0x8f) {
service = this.reset(i, service);
} else if (b === 0x08) {
// BS: Backspace
service.currentWindow.backspace();
} else if (b === 0x0c) {
// FF: Form feed
service.currentWindow.clearText();
} else if (b === 0x0d) {
// CR: Carriage return
service.currentWindow.pendingNewLine = true;
} else if (b === 0x0e) {
// HCR: Horizontal carriage return
service.currentWindow.clearText();
} else if (b === 0x8d) {
// DLY: Delay, nothing to do
i++;
} else {
// Unrecognized or unsupported command byte: nothing to do
}
}
};
/**
* Execute an extended command
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.extendedCommands = function (i, service) {
var packetData = this.current708Packet.data;
var b = packetData[++i];
if (within708TextBlock(b)) {
i = this.handleText(i, service, {
isExtended: true
});
}
return i;
};
/**
* Get PTS value of a given byte index
*
* @param {Integer} byteIndex Index of the byte
* @return {Integer} PTS
*/
Cea708Stream.prototype.getPts = function (byteIndex) {
// There's 1 pts value per 2 bytes
return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
};
/**
* Initializes a service
*
* @param {Integer} serviceNum Service number
* @return {Service} Initialized service object
*/
Cea708Stream.prototype.initService = function (serviceNum, i) {
var serviceName = 'SERVICE' + serviceNum;
var self = this;
var encoding;
if (serviceName in this.serviceEncodings) {
encoding = this.serviceEncodings[serviceName];
}
this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);
this.services[serviceNum].init(this.getPts(i), function (pts) {
self.flushDisplayed(pts, self.services[serviceNum]);
});
return this.services[serviceNum];
};
/**
* Execute text writing to current window
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.handleText = function (i, service, options) {
var isExtended = options && options.isExtended;
var isMultiByte = options && options.isMultiByte;
var packetData = this.current708Packet.data;
var extended = isExtended ? 0x1000 : 0x0000;
var currentByte = packetData[i];
var nextByte = packetData[i + 1];
var win = service.currentWindow;
var char;
var charCodeArray; // Converts an array of bytes to a unicode hex string.
function toHexString(byteArray) {
return byteArray.map(byte => {
return ('0' + (byte & 0xFF).toString(16)).slice(-2);
}).join('');
}
if (isMultiByte) {
charCodeArray = [currentByte, nextByte];
i++;
} else {
charCodeArray = [currentByte];
} // Use the TextDecoder if one was created for this service
if (service.textDecoder_ && !isExtended) {
char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
} else {
// We assume any multi-byte char without a decoder is unicode.
if (isMultiByte) {
const unicode = toHexString(charCodeArray); // Takes a unicode hex string and creates a single character.
char = String.fromCharCode(parseInt(unicode, 16));
} else {
char = get708CharFromCode(extended | currentByte);
}
}
if (win.pendingNewLine && !win.isEmpty()) {
win.newLine(this.getPts(i));
}
win.pendingNewLine = false;
win.addText(char);
return i;
};
/**
* Handle decoding of multibyte character
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.multiByteCharacter = function (i, service) {
var packetData = this.current708Packet.data;
var firstByte = packetData[i + 1];
var secondByte = packetData[i + 2];
if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {
i = this.handleText(++i, service, {
isMultiByte: true
});
}
return i;
};
/**
* Parse and execute the CW# command.
*
* Set the current window.
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.setCurrentWindow = function (i, service) {
var packetData = this.current708Packet.data;
var b = packetData[i];
var windowNum = b & 0x07;
service.setCurrentWindow(windowNum);
return i;
};
/**
* Parse and execute the DF# command.
*
* Define a window and set it as the current window.
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.defineWindow = function (i, service) {
var packetData = this.current708Packet.data;
var b = packetData[i];
var windowNum = b & 0x07;
service.setCurrentWindow(windowNum);
var win = service.currentWindow;
b = packetData[++i];
win.visible = (b & 0x20) >> 5; // v
win.rowLock = (b & 0x10) >> 4; // rl
win.columnLock = (b & 0x08) >> 3; // cl
win.priority = b & 0x07; // p
b = packetData[++i];
win.relativePositioning = (b & 0x80) >> 7; // rp
win.anchorVertical = b & 0x7f; // av
b = packetData[++i];
win.anchorHorizontal = b; // ah
b = packetData[++i];
win.anchorPoint = (b & 0xf0) >> 4; // ap
win.rowCount = b & 0x0f; // rc
b = packetData[++i];
win.columnCount = b & 0x3f; // cc
b = packetData[++i];
win.windowStyle = (b & 0x38) >> 3; // ws
win.penStyle = b & 0x07; // ps
// The spec says there are (rowCount+1) "virtual rows"
win.virtualRowCount = win.rowCount + 1;
return i;
};
/**
* Parse and execute the SWA command.
*
* Set attributes of the current window.
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.setWindowAttributes = function (i, service) {
var packetData = this.current708Packet.data;
var b = packetData[i];
var winAttr = service.currentWindow.winAttr;
b = packetData[++i];
winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
winAttr.fillRed = (b & 0x30) >> 4; // fr
winAttr.fillGreen = (b & 0x0c) >> 2; // fg
winAttr.fillBlue = b & 0x03; // fb
b = packetData[++i];
winAttr.borderType = (b & 0xc0) >> 6; // bt
winAttr.borderRed = (b & 0x30) >> 4; // br
winAttr.borderGreen = (b & 0x0c) >> 2; // bg
winAttr.borderBlue = b & 0x03; // bb
b = packetData[++i];
winAttr.borderType += (b & 0x80) >> 5; // bt
winAttr.wordWrap = (b & 0x40) >> 6; // ww
winAttr.printDirection = (b & 0x30) >> 4; // pd
winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
winAttr.justify = b & 0x03; // j
b = packetData[++i];
winAttr.effectSpeed = (b & 0xf0) >> 4; // es
winAttr.effectDirection = (b & 0x0c) >> 2; // ed
winAttr.displayEffect = b & 0x03; // de
return i;
};
/**
* Gather text from all displayed windows and push a caption to output.
*
* @param {Integer} pts Current PTS value
* @param {Service} service The service object to be affected
*/
Cea708Stream.prototype.flushDisplayed = function (pts, service) {
var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily
// display text in the correct order, but sample files so far have not shown any issue.
for (var winId = 0; winId < 8; winId++) {
if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
displayedText.push(service.windows[winId].getText());
}
}
service.endPts = pts;
service.text = displayedText.join('\n\n');
this.pushCaption(service);
service.startPts = pts;
};
/**
* Push a caption to output if the caption contains text.
*
* @param {Service} service The service object to be affected
*/
Cea708Stream.prototype.pushCaption = function (service) {
if (service.text !== '') {
this.trigger('data', {
startPts: service.startPts,
endPts: service.endPts,
text: service.text,
stream: 'cc708_' + service.serviceNum
});
service.text = '';
service.startPts = service.endPts;
}
};
/**
* Parse and execute the DSW command.
*
* Set visible property of windows based on the parsed bitmask.
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.displayWindows = function (i, service) {
var packetData = this.current708Packet.data;
var b = packetData[++i];
var pts = this.getPts(i);
this.flushDisplayed(pts, service);
for (var winId = 0; winId < 8; winId++) {
if (b & 0x01 << winId) {
service.windows[winId].visible = 1;
}
}
return i;
};
/**
* Parse and execute the HDW command.
*
* Set visible property of windows based on the parsed bitmask.
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.hideWindows = function (i, service) {
var packetData = this.current708Packet.data;
var b = packetData[++i];
var pts = this.getPts(i);
this.flushDisplayed(pts, service);
for (var winId = 0; winId < 8; winId++) {
if (b & 0x01 << winId) {
service.windows[winId].visible = 0;
}
}
return i;
};
/**
* Parse and execute the TGW command.
*
* Set visible property of windows based on the parsed bitmask.
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.toggleWindows = function (i, service) {
var packetData = this.current708Packet.data;
var b = packetData[++i];
var pts = this.getPts(i);
this.flushDisplayed(pts, service);
for (var winId = 0; winId < 8; winId++) {
if (b & 0x01 << winId) {
service.windows[winId].visible ^= 1;
}
}
return i;
};
/**
* Parse and execute the CLW command.
*
* Clear text of windows based on the parsed bitmask.
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.clearWindows = function (i, service) {
var packetData = this.current708Packet.data;
var b = packetData[++i];
var pts = this.getPts(i);
this.flushDisplayed(pts, service);
for (var winId = 0; winId < 8; winId++) {
if (b & 0x01 << winId) {
service.windows[winId].clearText();
}
}
return i;
};
/**
* Parse and execute the DLW command.
*
* Re-initialize windows based on the parsed bitmask.
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.deleteWindows = function (i, service) {
var packetData = this.current708Packet.data;
var b = packetData[++i];
var pts = this.getPts(i);
this.flushDisplayed(pts, service);
for (var winId = 0; winId < 8; winId++) {
if (b & 0x01 << winId) {
service.windows[winId].reset();
}
}
return i;
};
/**
* Parse and execute the SPA command.
*
* Set pen attributes of the current window.
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.setPenAttributes = function (i, service) {
var packetData = this.current708Packet.data;
var b = packetData[i];
var penAttr = service.currentWindow.penAttr;
b = packetData[++i];
penAttr.textTag = (b & 0xf0) >> 4; // tt
penAttr.offset = (b & 0x0c) >> 2; // o
penAttr.penSize = b & 0x03; // s
b = packetData[++i];
penAttr.italics = (b & 0x80) >> 7; // i
penAttr.underline = (b & 0x40) >> 6; // u
penAttr.edgeType = (b & 0x38) >> 3; // et
penAttr.fontStyle = b & 0x07; // fs
return i;
};
/**
* Parse and execute the SPC command.
*
* Set pen color of the current window.
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.setPenColor = function (i, service) {
var packetData = this.current708Packet.data;
var b = packetData[i];
var penColor = service.currentWindow.penColor;
b = packetData[++i];
penColor.fgOpacity = (b & 0xc0) >> 6; // fo
penColor.fgRed = (b & 0x30) >> 4; // fr
penColor.fgGreen = (b & 0x0c) >> 2; // fg
penColor.fgBlue = b & 0x03; // fb
b = packetData[++i];
penColor.bgOpacity = (b & 0xc0) >> 6; // bo
penColor.bgRed = (b & 0x30) >> 4; // br
penColor.bgGreen = (b & 0x0c) >> 2; // bg
penColor.bgBlue = b & 0x03; // bb
b = packetData[++i];
penColor.edgeRed = (b & 0x30) >> 4; // er
penColor.edgeGreen = (b & 0x0c) >> 2; // eg
penColor.edgeBlue = b & 0x03; // eb
return i;
};
/**
* Parse and execute the SPL command.
*
* Set pen location of the current window.
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Integer} New index after parsing
*/
Cea708Stream.prototype.setPenLocation = function (i, service) {
var packetData = this.current708Packet.data;
var b = packetData[i];
var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak
service.currentWindow.pendingNewLine = true;
b = packetData[++i];
penLoc.row = b & 0x0f; // r
b = packetData[++i];
penLoc.column = b & 0x3f; // c
return i;
};
/**
* Execute the RST command.
*
* Reset service to a clean slate. Re-initialize.
*
* @param {Integer} i Current index in the 708 packet
* @param {Service} service The service object to be affected
* @return {Service} Re-initialized service
*/
Cea708Stream.prototype.reset = function (i, service) {
var pts = this.getPts(i);
this.flushDisplayed(pts, service);
return this.initService(service.serviceNum, i);
}; // This hash maps non-ASCII, special, and extended character codes to their
// proper Unicode equivalent. The first keys that are only a single byte
// are the non-standard ASCII characters, which simply map the CEA608 byte
// to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
// character codes, but have their MSB bitmasked with 0x03 so that a lookup
// can be performed regardless of the field and data channel on which the
// character code was received.
var CHARACTER_TRANSLATION = {
0x2a: 0xe1,
// á
0x5c: 0xe9,
// é
0x5e: 0xed,
// í
0x5f: 0xf3,
// ó
0x60: 0xfa,
// ú
0x7b: 0xe7,
// ç
0x7c: 0xf7,
// ÷
0x7d: 0xd1,
// Ñ
0x7e: 0xf1,
// ñ
0x7f: 0x2588,
// █
0x0130: 0xae,
// ®
0x0131: 0xb0,
// °
0x0132: 0xbd,
// ½
0x0133: 0xbf,
// ¿
0x0134: 0x2122,
// ™
0x0135: 0xa2,
// ¢
0x0136: 0xa3,
// £
0x0137: 0x266a,
// ♪
0x0138: 0xe0,
// à
0x0139: 0xa0,
// non-breaking space
0x013a: 0xe8,
// è
0x013b: 0xe2,
// â
0x013c: 0xea,
// ê
0x013d: 0xee,
// î
0x013e: 0xf4,
// ô
0x013f: 0xfb,
// û
0x0220: 0xc1,
// Á
0x0221: 0xc9,
// É
0x0222: 0xd3,
// Ó
0x0223: 0xda,
// Ú
0x0224: 0xdc,
// Ü
0x0225: 0xfc,
// ü
0x0226: 0x2018,
// ‘
0x0227: 0xa1,
// ¡
0x0228: 0x2a,
// *
0x0229: 0x27,
// '
0x022a: 0x2014,
// —
0x022b: 0xa9,
// ©
0x022c: 0x2120,
// ℠
0x022d: 0x2022,
// •
0x022e: 0x201c,
// “
0x022f: 0x201d,
// ”
0x0230: 0xc0,
// À
0x0231: 0xc2,
// Â
0x0232: 0xc7,
// Ç
0x0233: 0xc8,
// È
0x0234: 0xca,
// Ê
0x0235: 0xcb,
// Ë
0x0236: 0xeb,
// ë
0x0237: 0xce,
// Î
0x0238: 0xcf,
// Ï
0x0239: 0xef,
// ï
0x023a: 0xd4,
// Ô
0x023b: 0xd9,
// Ù
0x023c: 0xf9,
// ù
0x023d: 0xdb,
// Û
0x023e: 0xab,
// «
0x023f: 0xbb,
// »
0x0320: 0xc3,
// Ã
0x0321: 0xe3,
// ã
0x0322: 0xcd,
// Í
0x0323: 0xcc,
// Ì
0x0324: 0xec,
// ì
0x0325: 0xd2,
// Ò
0x0326: 0xf2,
// ò
0x0327: 0xd5,
// Õ
0x0328: 0xf5,
// õ
0x0329: 0x7b,
// {
0x032a: 0x7d,
// }
0x032b: 0x5c,
// \
0x032c: 0x5e,
// ^
0x032d: 0x5f,
// _
0x032e: 0x7c,
// |
0x032f: 0x7e,
// ~
0x0330: 0xc4,
// Ä
0x0331: 0xe4,
// ä
0x0332: 0xd6,
// Ö
0x0333: 0xf6,
// ö
0x0334: 0xdf,
// ß
0x0335: 0xa5,
// ¥
0x0336: 0xa4,
// ¤
0x0337: 0x2502,
// │
0x0338: 0xc5,
// Å
0x0339: 0xe5,
// å
0x033a: 0xd8,
// Ø
0x033b: 0xf8,
// ø
0x033c: 0x250c,
// ┌
0x033d: 0x2510,
// ┐
0x033e: 0x2514,
// └
0x033f: 0x2518 // ┘
};
var getCharFromCode = function (code) {
if (code === null) {
return '';
}
code = CHARACTER_TRANSLATION[code] || code;
return String.fromCharCode(code);
}; // the index of the last row in a CEA-608 display buffer
var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
// getting it through bit logic.
var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 34x15 matrix of character
// cells. The "bottom" row is the last element in the outer array.
// We keep track of positioning information as we go by storing the
// number of indentations and the tab offset in this buffer.
var createDisplayBuffer = function () {
var result = [],
i = BOTTOM_ROW + 1;
while (i--) {
result.push({
text: '',
indent: 0,
offset: 0
});
}
return result;
};
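// Illustrative sketch (not part of the original source): worked examples of the
// CEA-608 character translation used by Cea608Stream below. Two-byte special
// characters are looked up with char0 masked by 0x03 and shifted, exactly as
// done in Cea608Stream.prototype.push; the sample bytes are ours.
var exampleGet608Chars = function () {
var plain = getCharFromCode(0x2a); // 'á' - single-byte translation
var special = getCharFromCode((0x11 & 0x03) << 8 | 0x30); // '®' - two-byte code 0x0130
var passthrough = getCharFromCode(0x41); // 'A' - untranslated codes map to themselves
return [plain, special, passthrough];
};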
var Cea608Stream = function (field, dataChannel) {
Cea608Stream.prototype.init.call(this);
this.field_ = field || 0;
this.dataChannel_ = dataChannel || 0;
this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
this.setConstants();
this.reset();
this.push = function (packet) {
var data, swap, char0, char1, text; // remove the parity bits
data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice
if (data === this.lastControlCode_) {
this.lastControlCode_ = null;
return;
} // Store control codes
if ((data & 0xf000) === 0x1000) {
this.lastControlCode_ = data;
} else if (data !== this.PADDING_) {
this.lastControlCode_ = null;
}
char0 = data >>> 8;
char1 = data & 0xff;
if (data === this.PADDING_) {
return;
} else if (data === this.RESUME_CAPTION_LOADING_) {
this.mode_ = 'popOn';
} else if (data === this.END_OF_CAPTION_) {
// If an EOC is received while in paint-on mode, the displayed caption
// text should be swapped to non-displayed memory as if it was a pop-on
// caption. Because of that, we should explicitly switch back to pop-on
// mode
this.mode_ = 'popOn';
this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now
this.flushDisplayed(packet.pts); // flip memory
swap = this.displayed_;
this.displayed_ = this.nonDisplayed_;
this.nonDisplayed_ = swap; // start measuring the time to display the caption
this.startPts_ = packet.pts;
} else if (data === this.ROLL_UP_2_ROWS_) {
this.rollUpRows_ = 2;
this.setRollUp(packet.pts);
} else if (data === this.ROLL_UP_3_ROWS_) {
this.rollUpRows_ = 3;
this.setRollUp(packet.pts);
} else if (data === this.ROLL_UP_4_ROWS_) {
this.rollUpRows_ = 4;
this.setRollUp(packet.pts);
} else if (data === this.CARRIAGE_RETURN_) {
this.clearFormatting(packet.pts);
this.flushDisplayed(packet.pts);
this.shiftRowsUp_();
this.startPts_ = packet.pts;
} else if (data === this.BACKSPACE_) {
if (this.mode_ === 'popOn') {
this.nonDisplayed_[this.row_].text = this.nonDisplayed_[this.row_].text.slice(0, -1);
} else {
this.displayed_[this.row_].text = this.displayed_[this.row_].text.slice(0, -1);
}
} else if (data === this.ERASE_DISPLAYED_MEMORY_) {
this.flushDisplayed(packet.pts);
this.displayed_ = createDisplayBuffer();
} else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
this.nonDisplayed_ = createDisplayBuffer();
} else if (data === this.RESUME_DIRECT_CAPTIONING_) {
if (this.mode_ !== 'paintOn') {
// NOTE: This should be removed when proper caption positioning is
// implemented
this.flushDisplayed(packet.pts);
this.displayed_ = createDisplayBuffer();
}
this.mode_ = 'paintOn';
this.startPts_ = packet.pts; // Append special characters to caption text
} else if (this.isSpecialCharacter(char0, char1)) {
// Bitmask char0 so that we can apply character transformations
// regardless of field and data channel.
// Then byte-shift to the left and OR with char1 so we can pass the
// entire character code to `getCharFromCode`.
char0 = (char0 & 0x03) << 8;
text = getCharFromCode(char0 | char1);
this[this.mode_](packet.pts, text);
this.column_++; // Append extended characters to caption text
} else if (this.isExtCharacter(char0, char1)) {
// Extended characters always follow their "non-extended" equivalents.
// i.e. if a "è" is desired, you'll always receive "eè"; non-compliant
// decoders are supposed to drop the "è", while compliant decoders
// backspace the "e" and insert "è".
// Delete the previous character
if (this.mode_ === 'popOn') {
this.nonDisplayed_[this.row_].text = this.nonDisplayed_[this.row_].text.slice(0, -1);
} else {
this.displayed_[this.row_].text = this.displayed_[this.row_].text.slice(0, -1);
} // Bitmask char0 so that we can apply character transformations
// regardless of field and data channel.
// Then byte-shift to the left and OR with char1 so we can pass the
// entire character code to `getCharFromCode`.
char0 = (char0 & 0x03) << 8;
text = getCharFromCode(char0 | char1);
this[this.mode_](packet.pts, text);
this.column_++; // Process mid-row codes
} else if (this.isMidRowCode(char0, char1)) {
// Attributes are not additive, so clear all formatting
this.clearFormatting(packet.pts); // According to the standard, mid-row codes
// should be replaced with spaces, so add one now
this[this.mode_](packet.pts, ' ');
this.column_++;
if ((char1 & 0xe) === 0xe) {
this.addFormatting(packet.pts, ['i']);
}
if ((char1 & 0x1) === 0x1) {
this.addFormatting(packet.pts, ['u']);
} // Detect offset control codes and adjust cursor
} else if (this.isOffsetControlCode(char0, char1)) {
// Cursor position is set by indent PAC (see below) in 4-column
// increments, with an additional offset code of 1-3 to reach any
// of the 32 columns specified by CEA-608. So all we need to do
// here is increment the column cursor by the given offset.
const offset = char1 & 0x03; // For an offset value 1-3, set the offset for that caption
// in the non-displayed array.
this.nonDisplayed_[this.row_].offset = offset;
this.column_ += offset; // Detect PACs (Preamble Address Codes)
} else if (this.isPAC(char0, char1)) {
// There's no logic for PAC -> row mapping, so we have to just
// find the row code in an array and use its index :(
var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode
if (this.mode_ === 'rollUp') {
// This implies that the base row is incorrectly set.
// As per the recommendation in CEA-608 (Base Row Implementation), defer to the number
// of roll-up rows set.
if (row - this.rollUpRows_ + 1 < 0) {
row = this.rollUpRows_ - 1;
}
this.setRollUp(packet.pts, row);
} // Ensure the row is between 0 and 14, otherwise use the most
// recent or default row.
if (row !== this.row_ && row >= 0 && row <= 14) {
// formatting is only persistent for current row
this.clearFormatting(packet.pts);
this.row_ = row;
} // All PACs can apply underline, so detect and apply
// (All odd-numbered second bytes set underline)
if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
this.addFormatting(packet.pts, ['u']);
}
if ((data & 0x10) === 0x10) {
// We've got an indent level code. Each successive even number
// increments the column cursor by 4, so we can get the desired
// column position by bit-shifting to the right (to get n/2)
// and multiplying by 4.
const indentations = (data & 0xe) >> 1;
this.column_ = indentations * 4; // add to the number of indentations for positioning
this.nonDisplayed_[this.row_].indent += indentations;
}
if (this.isColorPAC(char1)) {
// it's a color code, though we only support white, which
// can be either normal or italicized. white italics can be
// either 0x4e or 0x6e depending on the row, so we just
// bitwise-and with 0xe to see if italics should be turned on
if ((char1 & 0xe) === 0xe) {
this.addFormatting(packet.pts, ['i']);
}
} // We have a normal character in char0, and possibly one in char1
} else if (this.isNormalChar(char0)) {
if (char1 === 0x00) {
char1 = null;
}
text = getCharFromCode(char0);
text += getCharFromCode(char1);
this[this.mode_](packet.pts, text);
this.column_ += text.length;
} // finish data processing
};
};
Cea608Stream.prototype = new Stream$7(); // Trigger a cue point that captures the current state of the
// display buffer
Cea608Stream.prototype.flushDisplayed = function (pts) {
const logWarning = index => {
this.trigger('log', {
level: 'warn',
message: 'Skipping a malformed 608 caption at index ' + index + '.'
});
};
const content = [];
this.displayed_.forEach((row, i) => {
if (row && row.text && row.text.length) {
try {
// remove spaces from the start and end of the string
row.text = row.text.trim();
} catch (e) {
// Ordinarily, this shouldn't happen. However, caption
// parsing errors should not throw exceptions and
// break playback.
logWarning(i);
} // See the below link for more details on the following fields:
// https://dvcs.w3.org/hg/text-tracks/raw-file/default/608toVTT/608toVTT.html#positioning-in-cea-608
if (row.text.length) {
content.push({
// The text to be displayed in the caption from this specific row, with whitespace removed.
text: row.text,
// Value between 1 and 15 representing the PAC row used to calculate line height.
line: i + 1,
// A number representing the indent position by percentage (CEA-608 PAC indent code).
// The value will be a number between 10 and 80. Offset is used to add an additional
// value to the position if necessary.
position: 10 + Math.min(70, row.indent * 10) + row.offset * 2.5
});
}
} else if (row === undefined || row === null) {
logWarning(i);
}
});
if (content.length) {
this.trigger('data', {
startPts: this.startPts_,
endPts: pts,
content,
stream: this.name_
});
}
};
/**
* Zero out the data, used for startup and on seek
*/
Cea608Stream.prototype.reset = function () {
this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will
// actually display captions. If a caption is shifted to a row
// with a lower index than this, it is cleared from the display
// buffer
this.topRow_ = 0;
this.startPts_ = 0;
this.displayed_ = createDisplayBuffer();
this.nonDisplayed_ = createDisplayBuffer();
this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacing
this.column_ = 0;
this.row_ = BOTTOM_ROW;
this.rollUpRows_ = 2; // This variable holds currently-applied formatting
this.formatting_ = [];
};
/**
* Sets up control code and related constants for this instance
*/
Cea608Stream.prototype.setConstants = function () {
// The following attributes have these uses:
// ext_ : char0 for mid-row codes, and the base for extended
// chars (ext_+0, ext_+1, and ext_+2 are char0s for
// extended codes)
// control_: char0 for control codes, except byte-shifted to the
// left so that we can do this.control_ | CONTROL_CODE
// offset_: char0 for tab offset codes
//
// It's also worth noting that control codes, and _only_ control codes,
// differ between field 1 and field 2. Field 2 control codes are always
// their field 1 value plus 1. That's why there's the "| field" on the
// control value.
if (this.dataChannel_ === 0) {
this.BASE_ = 0x10;
this.EXT_ = 0x11;
this.CONTROL_ = (0x14 | this.field_) << 8;
this.OFFSET_ = 0x17;
} else if (this.dataChannel_ === 1) {
this.BASE_ = 0x18;
this.EXT_ = 0x19;
this.CONTROL_ = (0x1c | this.field_) << 8;
this.OFFSET_ = 0x1f;
} // Constants for the LSByte command codes recognized by Cea608Stream. This
// list is not exhaustive. For a more comprehensive listing and semantics see
// http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
// Padding
this.PADDING_ = 0x0000; // Pop-on Mode
this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode
this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode
this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure
this.BACKSPACE_ = this.CONTROL_ | 0x21;
this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
};
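// Illustrative sketch (not part of the original source): the control codes the
// constants above resolve to for CC1 (field 0, data channel 0). The helper and
// its key names are ours; the values follow directly from the bit math above.
var exampleCc1ControlCodes = function () {
var control = (0x14 | 0) << 8; // 0x1400
return {
RESUME_CAPTION_LOADING: control | 0x20, // 0x1420
END_OF_CAPTION: control | 0x2f, // 0x142f
ROLL_UP_2_ROWS: control | 0x25, // 0x1425
CARRIAGE_RETURN: control | 0x2d, // 0x142d
ERASE_DISPLAYED_MEMORY: control | 0x2c // 0x142c
};
};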
/**
* Detects if the 2-byte packet data is a special character
*
* Special characters have a second byte in the range 0x30 to 0x3f,
* with the first byte being 0x11 (for data channel 1) or 0x19 (for
* data channel 2).
*
* @param {Integer} char0 The first byte
* @param {Integer} char1 The second byte
* @return {Boolean} Whether the 2 bytes are a special character
*/
Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
};
/**
* Detects if the 2-byte packet data is an extended character
*
* Extended characters have a second byte in the range 0x20 to 0x3f,
* with the first byte being 0x12 or 0x13 (for data channel 1) or
* 0x1a or 0x1b (for data channel 2).
*
* @param {Integer} char0 The first byte
* @param {Integer} char1 The second byte
* @return {Boolean} Whether the 2 bytes are an extended character
*/
Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
};
/**
* Detects if the 2-byte packet is a mid-row code
*
* Mid-row codes have a second byte in the range 0x20 to 0x2f, with
* the first byte being 0x11 (for data channel 1) or 0x19 (for data
* channel 2).
*
* @param {Integer} char0 The first byte
* @param {Integer} char1 The second byte
* @return {Boolean} Whether the 2 bytes are a mid-row code
*/
Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
};
/**
* Detects if the 2-byte packet is an offset control code
*
* Offset control codes have a second byte in the range 0x21 to 0x23,
* with the first byte being 0x17 (for data channel 1) or 0x1f (for
* data channel 2).
*
* @param {Integer} char0 The first byte
* @param {Integer} char1 The second byte
* @return {Boolean} Whether the 2 bytes are an offset control code
*/
Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
};
/**
* Detects if the 2-byte packet is a Preamble Address Code
*
* PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
* or 0x18 to 0x1f (for data channel 2), with the second byte in the
* range 0x40 to 0x7f.
*
* @param {Integer} char0 The first byte
* @param {Integer} char1 The second byte
* @return {Boolean} Whether the 2 bytes are a PAC
*/
Cea608Stream.prototype.isPAC = function (char0, char1) {
return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
};
/**
* Detects if a packet's second byte is in the range of a PAC color code
*
* PAC color codes have a second byte in the range 0x40 to 0x4f, or
* 0x60 to 0x6f.
*
* @param {Integer} char1 The second byte
* @return {Boolean} Whether the byte is a color PAC
*/
Cea608Stream.prototype.isColorPAC = function (char1) {
return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
};
/**
* Detects if a single byte is in the range of a normal character
*
* Normal text bytes are in the range 0x20 to 0x7f.
*
* @param {Integer} char The byte
* @return {Boolean} Whether the byte is a normal character
*/
Cea608Stream.prototype.isNormalChar = function (char) {
return char >= 0x20 && char <= 0x7f;
};
/**
* Configures roll-up
*
* @param {Integer} pts Current PTS
* @param {Integer} newBaseRow Used by PACs to slide the current window to
* a new position
*/
Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
// Reset the base row to the bottom row when switching modes
if (this.mode_ !== 'rollUp') {
this.row_ = BOTTOM_ROW;
this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up
this.flushDisplayed(pts);
this.nonDisplayed_ = createDisplayBuffer();
this.displayed_ = createDisplayBuffer();
}
if (newBaseRow !== undefined && newBaseRow !== this.row_) {
// move currently displayed captions (up or down) to the new base row
for (var i = 0; i < this.rollUpRows_; i++) {
this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
this.displayed_[this.row_ - i] = {
text: '',
indent: 0,
offset: 0
};
}
}
if (newBaseRow === undefined) {
newBaseRow = this.row_;
}
this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
}; // Adds the opening HTML tag for the passed character to the caption text,
// and keeps track of it for later closing
Cea608Stream.prototype.addFormatting = function (pts, format) {
this.formatting_ = this.formatting_.concat(format);
var text = format.reduce(function (text, format) {
return text + '<' + format + '>';
}, '');
this[this.mode_](pts, text);
}; // Adds HTML closing tags for current formatting to caption text and
// clears remembered formatting
Cea608Stream.prototype.clearFormatting = function (pts) {
if (!this.formatting_.length) {
return;
}
var text = this.formatting_.reverse().reduce(function (text, format) {
return text + '</' + format + '>';
}, '');
this.formatting_ = [];
this[this.mode_](pts, text);
}; // Mode Implementations
Cea608Stream.prototype.popOn = function (pts, text) {
var baseRow = this.nonDisplayed_[this.row_].text; // buffer characters
baseRow += text;
this.nonDisplayed_[this.row_].text = baseRow;
};
Cea608Stream.prototype.rollUp = function (pts, text) {
var baseRow = this.displayed_[this.row_].text;
baseRow += text;
this.displayed_[this.row_].text = baseRow;
};
Cea608Stream.prototype.shiftRowsUp_ = function () {
var i; // clear out inactive rows
for (i = 0; i < this.topRow_; i++) {
this.displayed_[i] = {
text: '',
indent: 0,
offset: 0
};
}
for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
this.displayed_[i] = {
text: '',
indent: 0,
offset: 0
};
} // shift displayed rows up
for (i = this.topRow_; i < this.row_; i++) {
this.displayed_[i] = this.displayed_[i + 1];
} // clear out the bottom row
this.displayed_[this.row_] = {
text: '',
indent: 0,
offset: 0
};
};
Cea608Stream.prototype.paintOn = function (pts, text) {
var baseRow = this.displayed_[this.row_].text;
baseRow += text;
this.displayed_[this.row_].text = baseRow;
}; // exports
var captionStream = {
CaptionStream: CaptionStream$2,
Cea608Stream: Cea608Stream,
Cea708Stream: Cea708Stream
};
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*/
var streamTypes = {
H264_STREAM_TYPE: 0x1B,
ADTS_STREAM_TYPE: 0x0F,
METADATA_STREAM_TYPE: 0x15
};
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Accepts program elementary stream (PES) data events and corrects
* decode and presentation time stamps to account for a rollover
* of the 33 bit value.
*/
var Stream$6 = stream;
var MAX_TS = 8589934592;
var RO_THRESH = 4294967296;
var TYPE_SHARED = 'shared';
var handleRollover$1 = function (value, reference) {
var direction = 1;
if (value > reference) {
// If the current timestamp value is greater than our reference timestamp and we detect a
// timestamp rollover, this means the roll over is happening in the opposite direction.
// Example scenario: Enter a long stream/video just after a rollover occurred. The reference
// point will be set to a small number, e.g. 1. The user then seeks backwards over the
// rollover point. In loading this segment, the timestamp values will be very large,
// e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
// the time stamp to be `value - 2^33`.
direction = -1;
} // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
// cause an incorrect adjustment.
while (Math.abs(reference - value) > RO_THRESH) {
value += direction * MAX_TS;
}
return value;
};
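// Illustrative sketch (not part of the original source): worked rollover
// examples using the logic above (MAX_TS = 2^33, RO_THRESH = 2^32). The sample
// timestamps are ours.
var exampleHandleRollover = function () {
// A tiny timestamp seen right after the 33-bit counter wrapped, relative to a
// reference taken just before the wrap, is shifted up by 2^33:
var forward = handleRollover$1(1000, 8589930000); // 8589935592
// Seeking back across the wrap point shifts the large value down by 2^33:
var backward = handleRollover$1(8589930000, 1000); // -4592
return [forward, backward];
};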
var TimestampRolloverStream$1 = function (type) {
var lastDTS, referenceDTS;
TimestampRolloverStream$1.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
// video and audio. We could use `undefined` here, but having a string
// makes debugging a little clearer.
this.type_ = type || TYPE_SHARED;
this.push = function (data) {
/**
* Rollover stream expects data from elementary stream.
* Elementary stream can push forward 2 types of data
* - Parsed Video/Audio/Timed-metadata PES (packetized elementary stream) packets
* - Tracks metadata from PMT (Program Map Table)
* Rollover stream expects pts/dts info to be available, since it stores lastDTS
* We should ignore non-PES packets since they may override lastDTS to undefined.
* lastDTS is important to signal the next segments
* about rollover from the previous segments.
*/
if (data.type === 'metadata') {
this.trigger('data', data);
return;
} // Any "shared" rollover streams will accept _all_ data. Otherwise,
// streams will only accept data that matches their type.
if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
return;
}
if (referenceDTS === undefined) {
referenceDTS = data.dts;
}
data.dts = handleRollover$1(data.dts, referenceDTS);
data.pts = handleRollover$1(data.pts, referenceDTS);
lastDTS = data.dts;
this.trigger('data', data);
};
this.flush = function () {
referenceDTS = lastDTS;
this.trigger('done');
};
this.endTimeline = function () {
this.flush();
this.trigger('endedtimeline');
};
this.discontinuity = function () {
referenceDTS = void 0;
lastDTS = void 0;
};
this.reset = function () {
this.discontinuity();
this.trigger('reset');
};
};
TimestampRolloverStream$1.prototype = new Stream$6();
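// Illustrative sketch (not part of the original source): a never-invoked helper
// showing how a typed rollover stream is wired. The packet shape matches what
// `push` expects above (`type`, `dts`, `pts`); the sample values are ours.
var exampleTimestampRolloverUsage = function () {
var rolloverStream = new TimestampRolloverStream$1('video');
var adjusted = [];
rolloverStream.on('data', function (data) {
adjusted.push(data.dts);
});
// The first packet establishes the reference DTS; later packets are adjusted
// relative to it when they cross the 33-bit boundary.
rolloverStream.push({
type: 'video',
dts: 8589930000,
pts: 8589930000
});
rolloverStream.push({
type: 'video',
dts: 1000,
pts: 1000
});
rolloverStream.flush();
return adjusted; // [8589930000, 8589935592]
};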
var timestampRolloverStream = {
TimestampRolloverStream: TimestampRolloverStream$1,
handleRollover: handleRollover$1
}; // Once IE11 support is dropped, this function should be removed.
var typedArrayIndexOf$1 = (typedArray, element, fromIndex) => {
if (!typedArray) {
return -1;
}
var currentIndex = fromIndex;
for (; currentIndex < typedArray.length; currentIndex++) {
if (typedArray[currentIndex] === element) {
return currentIndex;
}
}
return -1;
};
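// Illustrative sketch (not part of the original source): `typedArrayIndexOf` is
// used below to find null terminators in ID3 frame data. The sample bytes are ours.
var exampleTypedArrayIndexOf = function () {
var bytes = new Uint8Array([0x41, 0x42, 0x00, 0x43]);
return typedArrayIndexOf$1(bytes, 0, 0); // 2 - index of the first 0x00 byte
};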
var typedArray = {
typedArrayIndexOf: typedArrayIndexOf$1
};
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Tools for parsing ID3 frame data
* @see http://id3.org/id3v2.3.0
*/
var typedArrayIndexOf = typedArray.typedArrayIndexOf,
// Frames that allow different types of text encoding contain a text
// encoding description byte [ID3v2.4.0 section 4.]
textEncodingDescriptionByte = {
Iso88591: 0x00,
// ISO-8859-1, terminated with \0.
Utf16: 0x01,
// UTF-16 encoded Unicode BOM, terminated with \0\0
Utf16be: 0x02,
// UTF-16BE encoded Unicode, without BOM, terminated with \0\0
Utf8: 0x03 // UTF-8 encoded Unicode, terminated with \0
},
// return a percent-encoded representation of the specified byte range
// @see http://en.wikipedia.org/wiki/Percent-encoding
percentEncode$1 = function (bytes, start, end) {
var i,
result = '';
for (i = start; i < end; i++) {
result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
}
return result;
},
// return the string representation of the specified byte range,
// interpreted as UTF-8.
parseUtf8 = function (bytes, start, end) {
return decodeURIComponent(percentEncode$1(bytes, start, end));
},
// return the string representation of the specified byte range,
// interpreted as ISO-8859-1.
parseIso88591$1 = function (bytes, start, end) {
return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
},
parseSyncSafeInteger$1 = function (data) {
return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
},
frameParsers = {
'APIC': function (frame) {
var i = 1,
mimeTypeEndIndex,
descriptionEndIndex,
LINK_MIME_TYPE = '-->';
if (frame.data[0] !== textEncodingDescriptionByte.Utf8) {
// ignore frames with unrecognized character encodings
return;
} // parsing fields [ID3v2.4.0 section 4.14.]
mimeTypeEndIndex = typedArrayIndexOf(frame.data, 0, i);
if (mimeTypeEndIndex < 0) {
// malformed frame
return;
} // parsing Mime type field (terminated with \0)
frame.mimeType = parseIso88591$1(frame.data, i, mimeTypeEndIndex);
i = mimeTypeEndIndex + 1; // parsing 1-byte Picture Type field
frame.pictureType = frame.data[i];
i++;
descriptionEndIndex = typedArrayIndexOf(frame.data, 0, i);
if (descriptionEndIndex < 0) {
// malformed frame
return;
} // parsing Description field (terminated with \0)
frame.description = parseUtf8(frame.data, i, descriptionEndIndex);
i = descriptionEndIndex + 1;
if (frame.mimeType === LINK_MIME_TYPE) {
// parsing Picture Data field as URL (always represented as ISO-8859-1 [ID3v2.4.0 section 4.])
frame.url = parseIso88591$1(frame.data, i, frame.data.length);
} else {
// parsing Picture Data field as binary data
frame.pictureData = frame.data.subarray(i, frame.data.length);
}
},
'T*': function (frame) {
if (frame.data[0] !== textEncodingDescriptionByte.Utf8) {
// ignore frames with unrecognized character encodings
return;
} // parse text field, do not include null terminator in the frame value
// frames that allow different types of encoding contain terminated text [ID3v2.4.0 section 4.]
frame.value = parseUtf8(frame.data, 1, frame.data.length).replace(/\0*$/, ''); // text information frames support multiple strings, stored as a terminator-separated list [ID3v2.4.0 section 4.2.]
frame.values = frame.value.split('\0');
},
'TXXX': function (frame) {
var descriptionEndIndex;
if (frame.data[0] !== textEncodingDescriptionByte.Utf8) {
// ignore frames with unrecognized character encodings
return;
}
descriptionEndIndex = typedArrayIndexOf(frame.data, 0, 1);
if (descriptionEndIndex === -1) {
return;
} // parse the text fields
frame.description = parseUtf8(frame.data, 1, descriptionEndIndex); // do not include the null terminator in the tag value
// frames that allow different types of encoding contain terminated text
// [ID3v2.4.0 section 4.]
frame.value = parseUtf8(frame.data, descriptionEndIndex + 1, frame.data.length).replace(/\0*$/, '');
frame.data = frame.value;
},
'W*': function (frame) {
// parse URL field; URL fields are always represented as ISO-8859-1 [ID3v2.4.0 section 4.]
// if the value is followed by a string termination all the following information should be ignored [ID3v2.4.0 section 4.3]
frame.url = parseIso88591$1(frame.data, 0, frame.data.length).replace(/\0.*$/, '');
},
'WXXX': function (frame) {
var descriptionEndIndex;
if (frame.data[0] !== textEncodingDescriptionByte.Utf8) {
// ignore frames with unrecognized character encodings
return;
}
descriptionEndIndex = typedArrayIndexOf(frame.data, 0, 1);
if (descriptionEndIndex === -1) {
return;
} // parse the description and URL fields
frame.description = parseUtf8(frame.data, 1, descriptionEndIndex); // URL fields are always represented as ISO-8859-1 [ID3v2.4.0 section 4.]
// if the value is followed by a string termination all the following information
// should be ignored [ID3v2.4.0 section 4.3]
frame.url = parseIso88591$1(frame.data, descriptionEndIndex + 1, frame.data.length).replace(/\0.*$/, '');
},
'PRIV': function (frame) {
var i;
for (i = 0; i < frame.data.length; i++) {
if (frame.data[i] === 0) {
// parse the description and URL fields
frame.owner = parseIso88591$1(frame.data, 0, i);
break;
}
}
frame.privateData = frame.data.subarray(i + 1);
frame.data = frame.privateData;
}
};
var parseId3Frames$1 = function (data) {
var frameSize,
frameHeader,
frameStart = 10,
tagSize = 0,
frames = []; // If we don't have enough data for a header, 10 bytes,
// or 'ID3' in the first 3 bytes this is not a valid ID3 tag.
if (data.length < 10 || data[0] !== 'I'.charCodeAt(0) || data[1] !== 'D'.charCodeAt(0) || data[2] !== '3'.charCodeAt(0)) {
return;
} // the frame size is transmitted as a 28-bit integer in the
// last four bytes of the ID3 header.
// The most significant bit of each byte is dropped and the
// results concatenated to recover the actual value.
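// illustrative example (not from the source): the sync-safe bytes [0x00, 0x00, 0x02, 0x01]
// decode to (0 << 21) | (0 << 14) | (2 << 7) | 1 === 257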
tagSize = parseSyncSafeInteger$1(data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
// convenient for our comparisons to include it
tagSize += 10; // check bit 6 of byte 5 for the extended header flag.
var hasExtendedHeader = data[5] & 0x40;
if (hasExtendedHeader) {
// advance the frame start past the extended header
frameStart += 4; // header size field
frameStart += parseSyncSafeInteger$1(data.subarray(10, 14));
tagSize -= parseSyncSafeInteger$1(data.subarray(16, 20)); // clip any padding off the end
} // parse one or more ID3 frames
// http://id3.org/id3v2.3.0#ID3v2_frame_overview
do {
// determine the number of bytes in this frame
frameSize = parseSyncSafeInteger$1(data.subarray(frameStart + 4, frameStart + 8));
if (frameSize < 1) {
break;
}
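// each ID3v2.4 frame begins with a 10-byte header: a 4-character frame ID, a 4-byte
// sync-safe size and 2 flag bytes, which is why the ID is read from frameStart..frameStart + 3,
// the size from frameStart + 4..frameStart + 8 and the frame body starts at frameStart + 10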
frameHeader = String.fromCharCode(data[frameStart], data[frameStart + 1], data[frameStart + 2], data[frameStart + 3]);
var frame = {
id: frameHeader,
data: data.subarray(frameStart + 10, frameStart + frameSize + 10)
};
frame.key = frame.id; // parse frame values
if (frameParsers[frame.id]) {
// use frame specific parser
frameParsers[frame.id](frame);
} else if (frame.id[0] === 'T') {
// use text frame generic parser
frameParsers['T*'](frame);
} else if (frame.id[0] === 'W') {
// use URL link frame generic parser
frameParsers['W*'](frame);
}
frames.push(frame);
frameStart += 10; // advance past the frame header
frameStart += frameSize; // advance past the frame body
} while (frameStart < tagSize);
return frames;
};
var parseId3 = {
parseId3Frames: parseId3Frames$1,
parseSyncSafeInteger: parseSyncSafeInteger$1,
frameParsers: frameParsers
};
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Accepts program elementary stream (PES) data events and parses out
* ID3 metadata from them, if present.
* @see http://id3.org/id3v2.3.0
*/
var Stream$5 = stream,
StreamTypes$3 = streamTypes,
id3 = parseId3,
MetadataStream;
MetadataStream = function (options) {
var settings = {
// the bytes of the program-level descriptor field in MP2T
// see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
// program element descriptors"
descriptor: options && options.descriptor
},
// the total size in bytes of the ID3 tag being parsed
tagSize = 0,
// tag data that is not complete enough to be parsed
buffer = [],
// the total number of bytes currently in the buffer
bufferSize = 0,
i;
MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
// https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
this.dispatchType = StreamTypes$3.METADATA_STREAM_TYPE.toString(16);
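// illustrative example (not from the source): with METADATA_STREAM_TYPE 0x15 and a
// descriptor of [0x49, 0x44, 0x33], the loop below appends '494433' and the final
// dispatchType is the hex string '15494433'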
if (settings.descriptor) {
for (i = 0; i < settings.descriptor.length; i++) {
this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
}
}
this.push = function (chunk) {
var tag, frameStart, frameSize, frame, i, frameHeader;
if (chunk.type !== 'timed-metadata') {
return;
} // if data_alignment_indicator is set in the PES header,
// we must have the start of a new ID3 tag. Assume anything
// remaining in the buffer was malformed and throw it out
if (chunk.dataAlignmentIndicator) {
bufferSize = 0;
buffer.length = 0;
} // ignore events that don't look like ID3 data
if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
this.trigger('log', {
level: 'warn',
message: 'Skipping unrecognized metadata packet'
});
return;
} // add this chunk to the data we've collected so far
buffer.push(chunk);
bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header
if (buffer.length === 1) {
// the frame size is transmitted as a 28-bit integer in the
// last four bytes of the ID3 header.
// The most significant bit of each byte is dropped and the
// results concatenated to recover the actual value.
tagSize = id3.parseSyncSafeInteger(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
// convenient for our comparisons to include it
tagSize += 10;
} // if the entire frame has not arrived, wait for more data
if (bufferSize < tagSize) {
return;
} // collect the entire frame so it can be parsed
tag = {
data: new Uint8Array(tagSize),
frames: [],
pts: buffer[0].pts,
dts: buffer[0].dts
};
for (i = 0; i < tagSize;) {
tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
i += buffer[0].data.byteLength;
bufferSize -= buffer[0].data.byteLength;
buffer.shift();
} // find the start of the first frame and the end of the tag
frameStart = 10;
if (tag.data[5] & 0x40) {
// advance the frame start past the extended header
frameStart += 4; // header size field
frameStart += id3.parseSyncSafeInteger(tag.data.subarray(10, 14)); // clip any padding off the end
tagSize -= id3.parseSyncSafeInteger(tag.data.subarray(16, 20));
} // parse one or more ID3 frames
// http://id3.org/id3v2.3.0#ID3v2_frame_overview
do {
// determine the number of bytes in this frame
frameSize = id3.parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
if (frameSize < 1) {
this.trigger('log', {
level: 'warn',
message: 'Malformed ID3 frame encountered. Skipping remaining metadata parsing.'
}); // If the frame is malformed, don't parse any further frames but allow previous valid parsed frames
// to be sent along.
break;
}
frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
frame = {
id: frameHeader,
data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
};
frame.key = frame.id; // parse frame values
if (id3.frameParsers[frame.id]) {
// use frame specific parser
id3.frameParsers[frame.id](frame);
} else if (frame.id[0] === 'T') {
// use text frame generic parser
id3.frameParsers['T*'](frame);
} else if (frame.id[0] === 'W') {
// use URL link frame generic parser
id3.frameParsers['W*'](frame);
} // handle the special PRIV frame used to indicate the start
// time for raw AAC data
if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
var d = frame.data,
size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
size *= 4;
size += d[7] & 0x03;
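// the PRIV payload carries a 33-bit, 90kHz transport stream timestamp; javascript
// bitwise operators only work on 32 bits, so the top 31 bits are assembled above and
// multiplied by 4 before the low 2 bits are added back in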
frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
// on the value of this frame
// we couldn't have known the appropriate pts and dts before
// parsing this ID3 tag so set those values now
if (tag.pts === undefined && tag.dts === undefined) {
tag.pts = frame.timeStamp;
tag.dts = frame.timeStamp;
}
this.trigger('timestamp', frame);
}
tag.frames.push(frame);
frameStart += 10; // advance past the frame header
frameStart += frameSize; // advance past the frame body
} while (frameStart < tagSize);
this.trigger('data', tag);
};
};
MetadataStream.prototype = new Stream$5();
var metadataStream = MetadataStream;
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* A stream-based mp2t to mp4 converter. This utility can be used to
* deliver mp4s to a SourceBuffer on platforms that support native
* Media Source Extensions.
*/
var Stream$4 = stream,
CaptionStream$1 = captionStream,
StreamTypes$2 = streamTypes,
TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types
var TransportPacketStream, TransportParseStream, ElementaryStream; // constants
var MP2T_PACKET_LENGTH$1 = 188,
// bytes
SYNC_BYTE$1 = 0x47;
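// every MPEG-2 transport stream packet is exactly 188 bytes long and starts with the
// sync byte 0x47 (ASCII 'G'); the packet stream below re-synchronizes by looking for
// 0x47 at 188-byte intervals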
/**
* Splits an incoming stream of binary data into MPEG-2 Transport
* Stream packets.
*/
TransportPacketStream = function () {
var buffer = new Uint8Array(MP2T_PACKET_LENGTH$1),
bytesInBuffer = 0;
TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.
/**
* Split a stream of data into M2TS packets
**/
this.push = function (bytes) {
var startIndex = 0,
endIndex = MP2T_PACKET_LENGTH$1,
everything; // If there are bytes remaining from the last segment, prepend them to the
// bytes that were pushed in
if (bytesInBuffer) {
everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
everything.set(buffer.subarray(0, bytesInBuffer));
everything.set(bytes, bytesInBuffer);
bytesInBuffer = 0;
} else {
everything = bytes;
} // While we have enough data for a packet
while (endIndex < everything.byteLength) {
// Look for a pair of start and end sync bytes in the data.
if (everything[startIndex] === SYNC_BYTE$1 && everything[endIndex] === SYNC_BYTE$1) {
// We found a packet so emit it and jump one whole packet forward in
// the stream
this.trigger('data', everything.subarray(startIndex, endIndex));
startIndex += MP2T_PACKET_LENGTH$1;
endIndex += MP2T_PACKET_LENGTH$1;
continue;
} // If we get here, we have somehow become de-synchronized and we need to step
// forward one byte at a time until we find a pair of sync bytes that denote
// a packet
startIndex++;
endIndex++;
} // If there was some data left over at the end of the segment that couldn't
// possibly be a whole packet, keep it because it might be the start of a packet
// that continues in the next segment
if (startIndex < everything.byteLength) {
buffer.set(everything.subarray(startIndex), 0);
bytesInBuffer = everything.byteLength - startIndex;
}
};
/**
* Passes identified M2TS packets to the TransportParseStream to be parsed
**/
this.flush = function () {
// If the buffer contains a whole packet when we are being flushed, emit it
// and empty the buffer. Otherwise hold onto the data because it may be
// important for decoding the next segment
if (bytesInBuffer === MP2T_PACKET_LENGTH$1 && buffer[0] === SYNC_BYTE$1) {
this.trigger('data', buffer);
bytesInBuffer = 0;
}
this.trigger('done');
};
this.endTimeline = function () {
this.flush();
this.trigger('endedtimeline');
};
this.reset = function () {
bytesInBuffer = 0;
this.trigger('reset');
};
};
TransportPacketStream.prototype = new Stream$4();
/**
* Accepts an MP2T TransportPacketStream and emits data events with parsed
* forms of the individual transport stream packets.
*/
TransportParseStream = function () {
var parsePsi, parsePat, parsePmt, self;
TransportParseStream.prototype.init.call(this);
self = this;
this.packetsWaitingForPmt = [];
this.programMapTable = undefined;
parsePsi = function (payload, psi) {
var offset = 0; // PSI packets may be split into multiple sections and those
// sections may be split into multiple packets. If a PSI
// section starts in this packet, the payload_unit_start_indicator
// will be true and the first byte of the payload will indicate
// the offset from the current position to the start of the
// section.
if (psi.payloadUnitStartIndicator) {
offset += payload[offset] + 1;
}
if (psi.type === 'pat') {
parsePat(payload.subarray(offset), psi);
} else {
parsePmt(payload.subarray(offset), psi);
}
};
parsePat = function (payload, pat) {
pat.section_number = payload[7]; // eslint-disable-line camelcase
pat.last_section_number = payload[8]; // eslint-disable-line camelcase
// skip the PSI header and parse the first PMT entry
self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
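// the PMT PID is the 13-bit value in bytes 10-11 of the PAT section; for example
// (illustrative, not from the source) payload[10] === 0x10 and payload[11] === 0x00
// would yield a pmtPid of 0x1000 (4096)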
pat.pmtPid = self.pmtPid;
};
/**
* Parse out the relevant fields of a Program Map Table (PMT).
* @param payload {Uint8Array} the PMT-specific portion of an MP2T
* packet. The first byte in this array should be the table_id
* field.
* @param pmt {object} the object that should be decorated with
* fields parsed from the PMT.
*/
parsePmt = function (payload, pmt) {
var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
// take effect. We don't believe this should ever be the case
// for HLS but we'll ignore "forward" PMT declarations if we see
// them. Future PMT declarations have the current_next_indicator
// set to zero.
if (!(payload[5] & 0x01)) {
return;
} // overwrite any existing program map table
self.programMapTable = {
video: null,
audio: null,
'timed-metadata': {}
}; // the mapping table ends at the end of the current section
sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
// long the program info descriptors are
programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table
offset = 12 + programInfoLength;
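// each mapping table entry is 5 bytes (stream_type, 13-bit elementary PID and a 12-bit
// ES_info_length) followed by ES_info_length descriptor bytes, which is why the loop
// below advances by ES_info_length + 5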
while (offset < tableEnd) {
var streamType = payload[offset];
var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
// TODO: should this be done for metadata too? for now maintain behavior of
// multiple metadata streams
if (streamType === StreamTypes$2.H264_STREAM_TYPE && self.programMapTable.video === null) {
self.programMapTable.video = pid;
} else if (streamType === StreamTypes$2.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
self.programMapTable.audio = pid;
} else if (streamType === StreamTypes$2.METADATA_STREAM_TYPE) {
// map pid to stream type for metadata streams
self.programMapTable['timed-metadata'][pid] = streamType;
} // move to the next table entry
// skip past the elementary stream descriptors, if present
offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
} // record the map on the packet as well
pmt.programMapTable = self.programMapTable;
};
/**
* Deliver a new MP2T packet to the next stream in the pipeline.
*/
this.push = function (packet) {
var result = {},
offset = 4;
result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]
result.pid = packet[1] & 0x1f;
result.pid <<= 8;
result.pid |= packet[2]; // if an adaptation field is present, its length is specified by the
// fifth byte of the TS packet header. The adaptation field is
// used to add stuffing to PES packets that don't fill a complete
// TS packet, and to specify some forms of timing and control data
// that we do not currently use.
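// adaptation_field_control is the 2-bit field in bits 5-4 of byte 3; the values 0b10 and
// 0b11 indicate an adaptation field is present, so anything greater than 0b01 means we
// have to skip past it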
if ((packet[3] & 0x30) >>> 4 > 0x01) {
offset += packet[offset] + 1;
} // parse the rest of the packet based on the type
if (result.pid === 0) {
result.type = 'pat';
parsePsi(packet.subarray(offset), result);
this.trigger('data', result);
} else if (result.pid === this.pmtPid) {
result.type = 'pmt';
parsePsi(packet.subarray(offset), result);
this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now
while (this.packetsWaitingForPmt.length) {
this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
}
} else if (this.programMapTable === undefined) {
// When we have not seen a PMT yet, defer further processing of
// PES packets until one has been parsed
this.packetsWaitingForPmt.push([packet, offset, result]);
} else {
this.processPes_(packet, offset, result);
}
};
this.processPes_ = function (packet, offset, result) {
// set the appropriate stream type
if (result.pid === this.programMapTable.video) {
result.streamType = StreamTypes$2.H264_STREAM_TYPE;
} else if (result.pid === this.programMapTable.audio) {
result.streamType = StreamTypes$2.ADTS_STREAM_TYPE;
} else {
// if not video or audio, it is timed-metadata or unknown
// if unknown, streamType will be undefined
result.streamType = this.programMapTable['timed-metadata'][result.pid];
}
result.type = 'pes';
result.data = packet.subarray(offset);
this.trigger('data', result);
};
};
TransportParseStream.prototype = new Stream$4();
TransportParseStream.STREAM_TYPES = {
h264: 0x1b,
adts: 0x0f
};
/**
* Reconstitutes program elementary stream (PES) packets from parsed
* transport stream packets. That is, if you pipe an
* mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
* events will be events which capture the bytes for individual PES
* packets plus relevant metadata that has been extracted from the
* container.
*/
ElementaryStream = function () {
var self = this,
segmentHadPmt = false,
// PES packet fragments
video = {
data: [],
size: 0
},
audio = {
data: [],
size: 0
},
timedMetadata = {
data: [],
size: 0
},
programMapTable,
parsePes = function (payload, pes) {
var ptsDtsFlags;
const startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array
pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment contains TS packets
// carrying frame data continued from the previous fragment. This check
// verifies that the PES data is the start of a new PES payload
if (startPrefix !== 1) {
return;
} // get the packet length, this will be 0 for video
pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packet starts a new keyframe
pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
// and a DTS value. Determine what combination of values is
// available to work with.
ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
// performs all bitwise operations on 32-bit integers but javascript
// supports a much greater range (53 bits) of integers using standard
// mathematical operations.
// We construct a 31-bit value using bitwise operators over the 31
// most significant bits and then multiply by 4 (equal to a left-shift
// of 2) before we add the final 2 least significant bits of the
// timestamp (equal to an OR.)
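// for example (illustrative): a timestamp of 2^32 + 5 exceeds the 32-bit bitwise range,
// but splitting it as (top 31 bits) * 4 + (low 2 bits) reconstructs it exactly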
if (ptsDtsFlags & 0xC0) {
// the PTS and DTS are not written out directly. For information
// on how they are encoded, see
// http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
pes.pts *= 4; // Left shift by 2
pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
pes.dts = pes.pts;
if (ptsDtsFlags & 0x40) {
pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
pes.dts *= 4; // Left shift by 2
pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
}
} // the data section starts immediately after the PES header.
// pes_header_data_length specifies the number of header bytes
// that follow the last byte of the field.
pes.data = payload.subarray(9 + payload[8]);
},
/**
* Pass completely parsed PES packets to the next stream in the pipeline
**/
flushStream = function (stream, type, forceFlush) {
var packetData = new Uint8Array(stream.size),
event = {
type: type
},
i = 0,
offset = 0,
packetFlushable = false,
fragment; // do nothing if there is not enough buffered data for a complete
// PES header
if (!stream.data.length || stream.size < 9) {
return;
}
event.trackId = stream.data[0].pid; // reassemble the packet
for (i = 0; i < stream.data.length; i++) {
fragment = stream.data[i];
packetData.set(fragment.data, offset);
offset += fragment.data.byteLength;
} // parse assembled packet's PES header
parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
// check that there is enough stream data to fill the packet
packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right
if (forceFlush || packetFlushable) {
stream.size = 0;
stream.data.length = 0;
} // only emit packets that are complete. this is to avoid assembling
// incomplete PES packets due to poor segmentation
if (packetFlushable) {
self.trigger('data', event);
}
};
ElementaryStream.prototype.init.call(this);
/**
* Identifies M2TS packet types and parses PES packets using metadata
* parsed from the PMT
**/
this.push = function (data) {
({
pat: function () {// we have to wait for the PMT to arrive as well before we
// have any meaningful metadata
},
pes: function () {
var stream, streamType;
switch (data.streamType) {
case StreamTypes$2.H264_STREAM_TYPE:
stream = video;
streamType = 'video';
break;
case StreamTypes$2.ADTS_STREAM_TYPE:
stream = audio;
streamType = 'audio';
break;
case StreamTypes$2.METADATA_STREAM_TYPE:
stream = timedMetadata;
streamType = 'timed-metadata';
break;
default:
// ignore unknown stream types
return;
} // if a new packet is starting, we can flush the completed
// packet
if (data.payloadUnitStartIndicator) {
flushStream(stream, streamType, true);
} // buffer this fragment until we are sure we've received the
// complete payload
stream.data.push(data);
stream.size += data.data.byteLength;
},
pmt: function () {
var event = {
type: 'metadata',
tracks: []
};
programMapTable = data.programMapTable; // translate audio and video streams to tracks
if (programMapTable.video !== null) {
event.tracks.push({
timelineStartInfo: {
baseMediaDecodeTime: 0
},
id: +programMapTable.video,
codec: 'avc',
type: 'video'
});
}
if (programMapTable.audio !== null) {
event.tracks.push({
timelineStartInfo: {
baseMediaDecodeTime: 0
},
id: +programMapTable.audio,
codec: 'adts',
type: 'audio'
});
}
segmentHadPmt = true;
self.trigger('data', event);
}
})[data.type]();
};
this.reset = function () {
video.size = 0;
video.data.length = 0;
audio.size = 0;
audio.data.length = 0;
this.trigger('reset');
};
/**
* Flush any remaining input. Video PES packets may be of variable
* length. Normally, the start of a new video packet can trigger the
* finalization of the previous packet. That is not possible if no
* more video is forthcoming, however. In that case, some other
* mechanism (like the end of the file) has to be employed. When it is
* clear that no additional data is forthcoming, calling this method
* will flush the buffered packets.
*/
this.flushStreams_ = function () {
// !!THIS ORDER IS IMPORTANT!!
// video first then audio
flushStream(video, 'video');
flushStream(audio, 'audio');
flushStream(timedMetadata, 'timed-metadata');
};
this.flush = function () {
// if on flush we haven't had a pmt emitted
// and we have a pmt to emit. emit the pmt
// so that we trigger a trackinfo downstream.
if (!segmentHadPmt && programMapTable) {
var pmt = {
type: 'metadata',
tracks: []
}; // translate audio and video streams to tracks
if (programMapTable.video !== null) {
pmt.tracks.push({
timelineStartInfo: {
baseMediaDecodeTime: 0
},
id: +programMapTable.video,
codec: 'avc',
type: 'video'
});
}
if (programMapTable.audio !== null) {
pmt.tracks.push({
timelineStartInfo: {
baseMediaDecodeTime: 0
},
id: +programMapTable.audio,
codec: 'adts',
type: 'audio'
});
}
self.trigger('data', pmt);
}
segmentHadPmt = false;
this.flushStreams_();
this.trigger('done');
};
};
ElementaryStream.prototype = new Stream$4();
var m2ts$1 = {
PAT_PID: 0x0000,
MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
TransportPacketStream: TransportPacketStream,
TransportParseStream: TransportParseStream,
ElementaryStream: ElementaryStream,
TimestampRolloverStream: TimestampRolloverStream,
CaptionStream: CaptionStream$1.CaptionStream,
Cea608Stream: CaptionStream$1.Cea608Stream,
Cea708Stream: CaptionStream$1.Cea708Stream,
MetadataStream: metadataStream
};
for (var type in StreamTypes$2) {
if (StreamTypes$2.hasOwnProperty(type)) {
m2ts$1[type] = StreamTypes$2[type];
}
}
var m2ts_1 = m2ts$1;
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*/
var Stream$3 = stream;
var ONE_SECOND_IN_TS$2 = clock$2.ONE_SECOND_IN_TS;
var AdtsStream$1;
var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
/*
* Accepts an ElementaryStream and emits data events with parsed
* AAC audio frames from the individual packets. Input audio in ADTS
* format is unpacked and re-emitted as AAC frames.
*
* @see http://wiki.multimedia.cx/index.php?title=ADTS
* @see http://wiki.multimedia.cx/?title=Understanding_AAC
*/
AdtsStream$1 = function (handlePartialSegments) {
var buffer,
frameNum = 0;
AdtsStream$1.prototype.init.call(this);
this.skipWarn_ = function (start, end) {
this.trigger('log', {
level: 'warn',
message: `adts skipping bytes ${start} to ${end} in frame ${frameNum} outside syncword`
});
};
this.push = function (packet) {
var i = 0,
frameLength,
protectionSkipBytes,
oldBuffer,
sampleCount,
adtsFrameDuration;
if (!handlePartialSegments) {
frameNum = 0;
}
if (packet.type !== 'audio') {
// ignore non-audio data
return;
} // Prepend any data in the buffer to the input data so that we can parse
// AAC frames that cross a PES packet boundary
if (buffer && buffer.length) {
oldBuffer = buffer;
buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
buffer.set(oldBuffer);
buffer.set(packet.data, oldBuffer.byteLength);
} else {
buffer = packet.data;
} // unpack any ADTS frames which have been fully received
// for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
var skip; // We use i + 7 here because we want to be able to parse the entire header.
// If we don't have enough bytes to do that, then we definitely won't have a full frame.
while (i + 7 < buffer.length) {
// Look for the start of an ADTS header.
if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
if (typeof skip !== 'number') {
skip = i;
} // If a valid header was not found, jump one forward and attempt to
// find a valid ADTS header starting at the next byte
i++;
continue;
}
if (typeof skip === 'number') {
this.skipWarn_(skip, i);
skip = null;
} // The protection skip bit tells us if we have 2 bytes of CRC data at the
// end of the ADTS header
protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
// end of the sync sequence
// NOTE: frame length includes the size of the header
frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
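// each AAC raw data block decodes to 1024 PCM samples; the duration below converts
// those samples into 90kHz clock ticks, e.g. 1024 samples at 48kHz is exactly
// 1920 ticks (90000 * 1024 / 48000, illustrative)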
adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,
// then we have to wait for more data
if (buffer.byteLength - i < frameLength) {
break;
} // Otherwise, deliver the complete AAC frame
this.trigger('data', {
pts: packet.pts + frameNum * adtsFrameDuration,
dts: packet.dts + frameNum * adtsFrameDuration,
sampleCount: sampleCount,
audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
// assume ISO/IEC 14496-12 AudioSampleEntry default of 16
samplesize: 16,
// data is the frame without its header
data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
});
frameNum++;
i += frameLength;
}
if (typeof skip === 'number') {
this.skipWarn_(skip, i);
skip = null;
} // remove processed bytes from the buffer.
buffer = buffer.subarray(i);
};
this.flush = function () {
frameNum = 0;
this.trigger('done');
};
this.reset = function () {
buffer = void 0;
this.trigger('reset');
};
this.endTimeline = function () {
buffer = void 0;
this.trigger('endedtimeline');
};
};
AdtsStream$1.prototype = new Stream$3();
var adts = AdtsStream$1;
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*/
var ExpGolomb$1;
/**
* Parser for exponential Golomb codes, a variable-bitwidth number encoding
* scheme used by h264.
*/
ExpGolomb$1 = function (workingData) {
var
// the number of bytes left to examine in workingData
workingBytesAvailable = workingData.byteLength,
// the current word being examined
workingWord = 0,
// :uint
// the number of bits left to examine in the current word
workingBitsAvailable = 0; // :uint;
// ():uint
this.length = function () {
return 8 * workingBytesAvailable;
}; // ():uint
this.bitsAvailable = function () {
return 8 * workingBytesAvailable + workingBitsAvailable;
}; // ():void
this.loadWord = function () {
var position = workingData.byteLength - workingBytesAvailable,
workingBytes = new Uint8Array(4),
availableBytes = Math.min(4, workingBytesAvailable);
if (availableBytes === 0) {
throw new Error('no bytes available');
}
workingBytes.set(workingData.subarray(position, position + availableBytes));
workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
workingBitsAvailable = availableBytes * 8;
workingBytesAvailable -= availableBytes;
}; // (count:int):void
this.skipBits = function (count) {
var skipBytes; // :int
if (workingBitsAvailable > count) {
workingWord <<= count;
workingBitsAvailable -= count;
} else {
count -= workingBitsAvailable;
skipBytes = Math.floor(count / 8);
count -= skipBytes * 8;
workingBytesAvailable -= skipBytes;
this.loadWord();
workingWord <<= count;
workingBitsAvailable -= count;
}
}; // (size:int):uint
this.readBits = function (size) {
var bits = Math.min(workingBitsAvailable, size),
// :uint
valu = workingWord >>> 32 - bits; // :uint
// if size > 31, handle error
workingBitsAvailable -= bits;
if (workingBitsAvailable > 0) {
workingWord <<= bits;
} else if (workingBytesAvailable > 0) {
this.loadWord();
}
bits = size - bits;
if (bits > 0) {
return valu << bits | this.readBits(bits);
}
return valu;
}; // ():uint
this.skipLeadingZeros = function () {
var leadingZeroCount; // :uint
for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
// the first bit of working word is 1
workingWord <<= leadingZeroCount;
workingBitsAvailable -= leadingZeroCount;
return leadingZeroCount;
}
} // we exhausted workingWord and still have not found a 1
this.loadWord();
return leadingZeroCount + this.skipLeadingZeros();
}; // ():void
this.skipUnsignedExpGolomb = function () {
this.skipBits(1 + this.skipLeadingZeros());
}; // ():void
this.skipExpGolomb = function () {
this.skipBits(1 + this.skipLeadingZeros());
}; // ():uint
this.readUnsignedExpGolomb = function () {
var clz = this.skipLeadingZeros(); // :uint
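// an unsigned Exp-Golomb code decodes as readBits(leadingZeros + 1) - 1; for example
// (illustrative) the bit string '00101' has 2 leading zeros, the next 3 bits read as 5,
// and the decoded value is 4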
return this.readBits(clz + 1) - 1;
}; // ():int
this.readExpGolomb = function () {
var valu = this.readUnsignedExpGolomb(); // :int
if (0x01 & valu) {
// the number is odd if the low order bit is set
return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
}
return -1 * (valu >>> 1); // divide by two then make it negative
}; // Some convenience functions
// :Boolean
this.readBoolean = function () {
return this.readBits(1) === 1;
}; // ():int
this.readUnsignedByte = function () {
return this.readBits(8);
};
this.loadWord();
};
var expGolomb = ExpGolomb$1;
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*/
var Stream$2 = stream;
var ExpGolomb = expGolomb;
var H264Stream$1, NalByteStream;
var PROFILES_WITH_OPTIONAL_SPS_DATA;
/**
* Accepts a NAL unit byte stream and unpacks the embedded NAL units.
*/
NalByteStream = function () {
var syncPoint = 0,
i,
buffer;
NalByteStream.prototype.init.call(this);
/*
* Scans a byte stream and triggers a data event with the NAL units found.
* @param {Object} data Event received from H264Stream
* @param {Uint8Array} data.data The h264 byte stream to be scanned
*
* @see H264Stream.push
*/
this.push = function (data) {
var swapBuffer;
if (!buffer) {
buffer = data.data;
} else {
swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
swapBuffer.set(buffer);
swapBuffer.set(data.data, buffer.byteLength);
buffer = swapBuffer;
}
var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
// scan for NAL unit boundaries
// a match looks like this:
// 0 0 1 .. NAL .. 0 0 1
// ^ sync point ^ i
// or this:
// 0 0 1 .. NAL .. 0 0 0
// ^ sync point ^ i
// advance the sync point to a NAL start, if necessary
for (; syncPoint < len - 3; syncPoint++) {
if (buffer[syncPoint + 2] === 1) {
// the sync point is properly aligned
i = syncPoint + 5;
break;
}
}
while (i < len) {
// look at the current byte to determine if we've hit the end of
// a NAL unit boundary
switch (buffer[i]) {
case 0:
// skip past non-sync sequences
if (buffer[i - 1] !== 0) {
i += 2;
break;
} else if (buffer[i - 2] !== 0) {
i++;
break;
} // deliver the NAL unit if it isn't empty
if (syncPoint + 3 !== i - 2) {
this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
} // drop trailing zeroes
do {
i++;
} while (buffer[i] !== 1 && i < len);
syncPoint = i - 2;
i += 3;
break;
case 1:
// skip past non-sync sequences
if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
i += 3;
break;
} // deliver the NAL unit
this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
syncPoint = i - 2;
i += 3;
break;
default:
// the current byte isn't a one or zero, so it cannot be part
// of a sync sequence
i += 3;
break;
}
} // filter out the NAL units that were delivered
buffer = buffer.subarray(syncPoint);
i -= syncPoint;
syncPoint = 0;
};
this.reset = function () {
buffer = null;
syncPoint = 0;
this.trigger('reset');
};
this.flush = function () {
// deliver the last buffered NAL unit
if (buffer && buffer.byteLength > 3) {
this.trigger('data', buffer.subarray(syncPoint + 3));
} // reset the stream state
buffer = null;
syncPoint = 0;
this.trigger('done');
};
this.endTimeline = function () {
this.flush();
this.trigger('endedtimeline');
};
};
NalByteStream.prototype = new Stream$2(); // values of profile_idc that indicate additional fields are included in the SPS
// see Recommendation ITU-T H.264 (4/2013),
// 7.3.2.1.1 Sequence parameter set data syntax
PROFILES_WITH_OPTIONAL_SPS_DATA = {
100: true,
110: true,
122: true,
244: true,
44: true,
83: true,
86: true,
118: true,
128: true,
// TODO: the three profiles below don't
// appear to have sps data in the specification anymore?
138: true,
139: true,
134: true
};
/**
* Accepts input from an ElementaryStream and produces H.264 NAL unit data
* events.
*/
H264Stream$1 = function () {
var nalByteStream = new NalByteStream(),
self,
trackId,
currentPts,
currentDts,
discardEmulationPreventionBytes,
readSequenceParameterSet,
skipScalingList;
H264Stream$1.prototype.init.call(this);
self = this;
/*
* Pushes a packet from a stream onto the NalByteStream
*
* @param {Object} packet - A packet received from a stream
* @param {Uint8Array} packet.data - The raw bytes of the packet
* @param {Number} packet.dts - Decode timestamp of the packet
* @param {Number} packet.pts - Presentation timestamp of the packet
* @param {Number} packet.trackId - The id of the h264 track this packet came from
* @param {('video'|'audio')} packet.type - The type of packet
*
*/
this.push = function (packet) {
if (packet.type !== 'video') {
return;
}
trackId = packet.trackId;
currentPts = packet.pts;
currentDts = packet.dts;
nalByteStream.push(packet);
};
/*
* Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
* for the NALUs to the next stream component.
* Also, preprocess caption and sequence parameter NALUs.
*
* @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
* @see NalByteStream.push
*/
nalByteStream.on('data', function (data) {
var event = {
trackId: trackId,
pts: currentPts,
dts: currentDts,
data: data,
nalUnitTypeCode: data[0] & 0x1f
};
switch (event.nalUnitTypeCode) {
case 0x05:
event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
break;
case 0x06:
event.nalUnitType = 'sei_rbsp';
event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
break;
case 0x07:
event.nalUnitType = 'seq_parameter_set_rbsp';
event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
event.config = readSequenceParameterSet(event.escapedRBSP);
break;
case 0x08:
event.nalUnitType = 'pic_parameter_set_rbsp';
break;
case 0x09:
event.nalUnitType = 'access_unit_delimiter_rbsp';
break;
} // This triggers data on the H264Stream
self.trigger('data', event);
});
nalByteStream.on('done', function () {
self.trigger('done');
});
nalByteStream.on('partialdone', function () {
self.trigger('partialdone');
});
nalByteStream.on('reset', function () {
self.trigger('reset');
});
nalByteStream.on('endedtimeline', function () {
self.trigger('endedtimeline');
});
this.flush = function () {
nalByteStream.flush();
};
this.partialFlush = function () {
nalByteStream.partialFlush();
};
this.reset = function () {
nalByteStream.reset();
};
this.endTimeline = function () {
nalByteStream.endTimeline();
};
/**
* Advance the ExpGolomb decoder past a scaling list. The scaling
* list is optionally transmitted as part of a sequence parameter
* set and is not relevant to transmuxing.
* @param count {number} the number of entries in this scaling list
* @param expGolombDecoder {object} an ExpGolomb pointed to the
* start of a scaling list
* @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
*/
skipScalingList = function (count, expGolombDecoder) {
var lastScale = 8,
nextScale = 8,
j,
deltaScale;
for (j = 0; j < count; j++) {
if (nextScale !== 0) {
deltaScale = expGolombDecoder.readExpGolomb();
nextScale = (lastScale + deltaScale + 256) % 256;
}
lastScale = nextScale === 0 ? lastScale : nextScale;
}
};
/**
* Expunge any "Emulation Prevention" bytes from a "Raw Byte
* Sequence Payload"
* @param data {Uint8Array} the bytes of a RBSP from a NAL
* unit
* @return {Uint8Array} the RBSP without any Emulation
* Prevention Bytes
*/
discardEmulationPreventionBytes = function (data) {
var length = data.byteLength,
emulationPreventionBytesPositions = [],
i = 1,
newLength,
newData; // Find all `Emulation Prevention Bytes`
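// an emulation prevention byte is the 0x03 inserted after any 0x00 0x00 pair; for example
// (illustrative) the RBSP bytes [0x00, 0x00, 0x03, 0x01] become [0x00, 0x00, 0x01] once
// the 0x03 is removed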
while (i < length - 2) {
if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
emulationPreventionBytesPositions.push(i + 2);
i += 2;
} else {
i++;
}
} // If no Emulation Prevention Bytes were found just return the original
// array
if (emulationPreventionBytesPositions.length === 0) {
return data;
} // Create a new array to hold the NAL unit data
newLength = length - emulationPreventionBytesPositions.length;
newData = new Uint8Array(newLength);
var sourceIndex = 0;
for (i = 0; i < newLength; sourceIndex++, i++) {
if (sourceIndex === emulationPreventionBytesPositions[0]) {
// Skip this byte
sourceIndex++; // Remove this position index
emulationPreventionBytesPositions.shift();
}
newData[i] = data[sourceIndex];
}
return newData;
};
/**
* Read a sequence parameter set and return some interesting video
* properties. A sequence parameter set is the H264 metadata that
* describes the properties of upcoming video frames.
* @param data {Uint8Array} the bytes of a sequence parameter set
* @return {object} an object with configuration parsed from the
* sequence parameter set, including the dimensions of the
* associated video frames.
*/
readSequenceParameterSet = function (data) {
var frameCropLeftOffset = 0,
frameCropRightOffset = 0,
frameCropTopOffset = 0,
frameCropBottomOffset = 0,
expGolombDecoder,
profileIdc,
levelIdc,
profileCompatibility,
chromaFormatIdc,
picOrderCntType,
numRefFramesInPicOrderCntCycle,
picWidthInMbsMinus1,
picHeightInMapUnitsMinus1,
frameMbsOnlyFlag,
scalingListCount,
sarRatio = [1, 1],
aspectRatioIdc,
i;
expGolombDecoder = new ExpGolomb(data);
profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
// some profiles have more optional data we don't need
if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
if (chromaFormatIdc === 3) {
expGolombDecoder.skipBits(1); // separate_colour_plane_flag
}
expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
if (expGolombDecoder.readBoolean()) {
// seq_scaling_matrix_present_flag
scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
for (i = 0; i < scalingListCount; i++) {
if (expGolombDecoder.readBoolean()) {
// seq_scaling_list_present_flag[ i ]
if (i < 6) {
skipScalingList(16, expGolombDecoder);
} else {
skipScalingList(64, expGolombDecoder);
}
}
}
}
}
expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
if (picOrderCntType === 0) {
expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
} else if (picOrderCntType === 1) {
expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
}
}
expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
frameMbsOnlyFlag = expGolombDecoder.readBits(1);
if (frameMbsOnlyFlag === 0) {
expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
}
expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
if (expGolombDecoder.readBoolean()) {
// frame_cropping_flag
frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
}
if (expGolombDecoder.readBoolean()) {
// vui_parameters_present_flag
if (expGolombDecoder.readBoolean()) {
// aspect_ratio_info_present_flag
aspectRatioIdc = expGolombDecoder.readUnsignedByte();
switch (aspectRatioIdc) {
case 1:
sarRatio = [1, 1];
break;
case 2:
sarRatio = [12, 11];
break;
case 3:
sarRatio = [10, 11];
break;
case 4:
sarRatio = [16, 11];
break;
case 5:
sarRatio = [40, 33];
break;
case 6:
sarRatio = [24, 11];
break;
case 7:
sarRatio = [20, 11];
break;
case 8:
sarRatio = [32, 11];
break;
case 9:
sarRatio = [80, 33];
break;
case 10:
sarRatio = [18, 11];
break;
case 11:
sarRatio = [15, 11];
break;
case 12:
sarRatio = [64, 33];
break;
case 13:
sarRatio = [160, 99];
break;
case 14:
sarRatio = [4, 3];
break;
case 15:
sarRatio = [3, 2];
break;
case 16:
sarRatio = [2, 1];
break;
case 255:
{
sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
break;
}
}
if (sarRatio) {
sarRatio[0] / sarRatio[1];
}
}
}
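// the dimensions below come straight from the SPS fields: each macroblock is 16x16 pixels,
// so (illustrative) pic_width_in_mbs_minus1 of 119 with no cropping yields a width of 1920;
// the (2 - frame_mbs_only_flag) factor doubles the height for field-coded (interlaced) content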
return {
profileIdc: profileIdc,
levelIdc: levelIdc,
profileCompatibility: profileCompatibility,
width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
// sar is sample aspect ratio
sarRatio: sarRatio
};
};
};
H264Stream$1.prototype = new Stream$2();
var h264 = {
H264Stream: H264Stream$1,
NalByteStream: NalByteStream
};
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Utilities to detect basic properties and metadata about AAC data.
*/
var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
var parseId3TagSize = function (header, byteIndex) {
var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
flags = header[byteIndex + 5],
footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
returnSize = returnSize >= 0 ? returnSize : 0;
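// illustrative example (not from the source): size bytes decoding to 257 yield 267 total
// bytes (10-byte header + 257), or 277 when the footer flag adds another 10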
if (footerPresent) {
return returnSize + 20;
}
return returnSize + 10;
};
var getId3Offset = function (data, offset) {
if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
return offset;
}
offset += parseId3TagSize(data, offset);
return getId3Offset(data, offset);
}; // TODO: use vhs-utils
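// both MP3 and ADTS AAC frames start with an 0xFFF sync word; the layer bits (the 0x06
// portion of the 0x16 mask below) must be zero for ADTS AAC, while MP3 uses a non-zero
// layer field, so this check rejects MP3 data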
var isLikelyAacData$1 = function (data) {
var offset = getId3Offset(data, 0);
return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 &&
// verify that the 2 layer bits are 0, aka this
// is not mp3 data but aac data.
(data[offset + 1] & 0x16) === 0x10;
};
var parseSyncSafeInteger = function (data) {
return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
}; // return a percent-encoded representation of the specified byte range
// @see http://en.wikipedia.org/wiki/Percent-encoding
var percentEncode = function (bytes, start, end) {
var i,
result = '';
for (i = start; i < end; i++) {
result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
}
return result;
}; // return the string representation of the specified byte range,
// interpreted as ISO-8859-1.
var parseIso88591 = function (bytes, start, end) {
return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
};
var parseAdtsSize = function (header, byteIndex) {
var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
middle = header[byteIndex + 4] << 3,
highTwo = (header[byteIndex + 3] & 0x3) << 11; // the low 2 bits of byte 3 are the top bits of the 13-bit frame length
return highTwo | middle | lowThree;
};
var parseType$4 = function (header, byteIndex) {
if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
return 'timed-metadata';
} else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
return 'audio';
}
return null;
};
var parseSampleRate = function (packet) {
var i = 0;
while (i + 5 < packet.length) {
if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
// If a valid header was not found, jump one forward and attempt to
// find a valid ADTS header starting at the next byte
i++;
continue;
}
return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
}
return null;
};
var parseAacTimestamp = function (packet) {
var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
frameStart = 10;
if (packet[5] & 0x40) {
// advance the frame start past the extended header
frameStart += 4; // header size field
frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
} // parse one or more ID3 frames
// http://id3.org/id3v2.3.0#ID3v2_frame_overview
do {
// determine the number of bytes in this frame
frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
if (frameSize < 1) {
return null;
}
frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
if (frameHeader === 'PRIV') {
frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
for (var i = 0; i < frame.byteLength; i++) {
if (frame[i] === 0) {
var owner = parseIso88591(frame, 0, i);
if (owner === 'com.apple.streaming.transportStreamTimestamp') {
var d = frame.subarray(i + 1);
var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
size *= 4;
size += d[7] & 0x03;
return size;
}
break;
}
}
}
frameStart += 10; // advance past the frame header
frameStart += frameSize; // advance past the frame body
} while (frameStart < packet.byteLength);
return null;
};
var utils = {
isLikelyAacData: isLikelyAacData$1,
parseId3TagSize: parseId3TagSize,
parseAdtsSize: parseAdtsSize,
parseType: parseType$4,
parseSampleRate: parseSampleRate,
parseAacTimestamp: parseAacTimestamp
};
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* A stream-based aac to mp4 converter. This utility can be used to
* deliver mp4s to a SourceBuffer on platforms that support native
* Media Source Extensions.
*/
var Stream$1 = stream;
var aacUtils = utils; // Constants
var AacStream$1;
/**
* Splits an incoming stream of binary data into ADTS and ID3 Frames.
*/
AacStream$1 = function () {
var everything = new Uint8Array(),
timeStamp = 0;
AacStream$1.prototype.init.call(this);
this.setTimestamp = function (timestamp) {
timeStamp = timestamp;
};
this.push = function (bytes) {
var frameSize = 0,
byteIndex = 0,
bytesLeft,
chunk,
packet,
tempLength; // If there are bytes remaining from the last segment, prepend them to the
// bytes that were pushed in
if (everything.length) {
var oldEverything = everything;
tempLength = everything.length;
everything = new Uint8Array(bytes.byteLength + tempLength);
everything.set(oldEverything.subarray(0, tempLength));
everything.set(bytes, tempLength);
} else {
everything = bytes;
}
while (everything.length - byteIndex >= 3) {
if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
// Exit early because we don't have enough to parse
// the ID3 tag header
if (everything.length - byteIndex < 10) {
break;
} // check framesize
frameSize = aacUtils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
// to emit a full packet
// Add to byteIndex to support multiple ID3 tags in sequence
if (byteIndex + frameSize > everything.length) {
break;
}
chunk = {
type: 'timed-metadata',
data: everything.subarray(byteIndex, byteIndex + frameSize)
};
this.trigger('data', chunk);
byteIndex += frameSize;
continue;
} else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
// Exit early because we don't have enough to parse
// the ADTS frame header
if (everything.length - byteIndex < 7) {
break;
}
frameSize = aacUtils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
// to emit a full packet
if (byteIndex + frameSize > everything.length) {
break;
}
packet = {
type: 'audio',
data: everything.subarray(byteIndex, byteIndex + frameSize),
pts: timeStamp,
dts: timeStamp
};
this.trigger('data', packet);
byteIndex += frameSize;
continue;
}
byteIndex++;
}
bytesLeft = everything.length - byteIndex;
if (bytesLeft > 0) {
everything = everything.subarray(byteIndex);
} else {
everything = new Uint8Array();
}
};
this.reset = function () {
everything = new Uint8Array();
this.trigger('reset');
};
this.endTimeline = function () {
everything = new Uint8Array();
this.trigger('endedtimeline');
};
};
AacStream$1.prototype = new Stream$1();
var aac = AacStream$1;
var AUDIO_PROPERTIES$1 = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
var audioProperties = AUDIO_PROPERTIES$1;
var VIDEO_PROPERTIES$1 = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
var videoProperties = VIDEO_PROPERTIES$1;
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* A stream-based mp2t to mp4 converter. This utility can be used to
* deliver mp4s to a SourceBuffer on platforms that support native
* Media Source Extensions.
*/
var Stream = stream;
var mp4 = mp4Generator;
var frameUtils = frameUtils$1;
var audioFrameUtils = audioFrameUtils$1;
var trackDecodeInfo = trackDecodeInfo$1;
var m2ts = m2ts_1;
var clock = clock$2;
var AdtsStream = adts;
var H264Stream = h264.H264Stream;
var AacStream = aac;
var isLikelyAacData = utils.isLikelyAacData;
var ONE_SECOND_IN_TS$1 = clock$2.ONE_SECOND_IN_TS;
var AUDIO_PROPERTIES = audioProperties;
var VIDEO_PROPERTIES = videoProperties; // object types
var VideoSegmentStream, AudioSegmentStream, Transmuxer, CoalesceStream;
var retriggerForStream = function (key, event) {
event.stream = key;
this.trigger('log', event);
};
var addPipelineLogRetriggers = function (transmuxer, pipeline) {
var keys = Object.keys(pipeline);
for (var i = 0; i < keys.length; i++) {
var key = keys[i]; // skip non-stream keys and headOfPipeline
// which is just a duplicate
if (key === 'headOfPipeline' || !pipeline[key].on) {
continue;
}
pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
}
};
/**
* Compare two arrays (even typed) for same-ness
*/
var arrayEquals = function (a, b) {
var i;
if (a.length !== b.length) {
return false;
} // compare the value of each element in the array
for (i = 0; i < a.length; i++) {
if (a[i] !== b[i]) {
return false;
}
}
return true;
};
var generateSegmentTimingInfo = function (baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
var ptsOffsetFromDts = startPts - startDts,
decodeDuration = endDts - startDts,
presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
// however, the player time values will reflect a start from the baseMediaDecodeTime.
// In order to provide relevant values for the player times, base timing info on the
// baseMediaDecodeTime and the DTS and PTS durations of the segment.
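// illustrative example (not from the source): baseMediaDecodeTime 0, startDts 90000,
// startPts 90600, endDts 180000 and endPts 180600 produce start {dts: 0, pts: 600}
// and end {dts: 90000, pts: 90600}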
return {
start: {
dts: baseMediaDecodeTime,
pts: baseMediaDecodeTime + ptsOffsetFromDts
},
end: {
dts: baseMediaDecodeTime + decodeDuration,
pts: baseMediaDecodeTime + presentationDuration
},
prependedContentDuration: prependedContentDuration,
baseMediaDecodeTime: baseMediaDecodeTime
};
};
/**
* Constructs a single-track, ISO BMFF media segment from AAC data
* events. The output of this stream can be fed to a SourceBuffer
* configured with a suitable initialization segment.
* @param track {object} track metadata configuration
* @param options {object} transmuxer options object
* @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
* in the source; false to adjust the first segment to start at 0.
*/
AudioSegmentStream = function (track, options) {
var adtsFrames = [],
sequenceNumber,
earliestAllowedDts = 0,
audioAppendStartTs = 0,
videoBaseMediaDecodeTime = Infinity;
options = options || {};
sequenceNumber = options.firstSequenceNumber || 0;
AudioSegmentStream.prototype.init.call(this);
this.push = function (data) {
trackDecodeInfo.collectDtsInfo(track, data);
if (track) {
AUDIO_PROPERTIES.forEach(function (prop) {
track[prop] = data[prop];
});
} // buffer audio data until end() is called
adtsFrames.push(data);
};
this.setEarliestDts = function (earliestDts) {
earliestAllowedDts = earliestDts;
};
this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
videoBaseMediaDecodeTime = baseMediaDecodeTime;
};
this.setAudioAppendStart = function (timestamp) {
audioAppendStartTs = timestamp;
};
this.flush = function () {
var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed
if (adtsFrames.length === 0) {
this.trigger('done', 'AudioSegmentStream');
return;
}
frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // amount of silence prefixed to the audio, measured in video clock cycles rather than audio clock cycles
videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
// samples (that is, adts frames) in the audio data
track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
mdat = mp4.mdat(audioFrameUtils.concatenateFrameData(frames));
adtsFrames = [];
moof = mp4.moof(sequenceNumber, [track]);
boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time
sequenceNumber++;
boxes.set(moof);
boxes.set(mdat, moof.byteLength);
trackDecodeInfo.clearDtsInfo(track);
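// each AAC frame decodes to 1024 samples, so the frame duration below is that sample
// count expressed in 90kHz clock ticks; at 44.1kHz this rounds up to 2090 ticks per
// frame (illustrative)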
frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
// tests) on adding the timingInfo event. However, it seems unlikely that there's a
// valid use-case where an init segment/data should be triggered without associated
// frames. Leaving for now, but should be looked into.
if (frames.length) {
segmentDuration = frames.length * frameDuration;
this.trigger('segmentTimingInfo', generateSegmentTimingInfo(
// The audio track's baseMediaDecodeTime is in audio clock cycles, but the
// frame info is in video clock cycles. Convert to match expectation of
// listeners (that all timestamps will be based on video clock cycles).
clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate),
// frame times are already in video clock, as is segment duration
frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
this.trigger('timingInfo', {
start: frames[0].pts,
end: frames[0].pts + segmentDuration
});
}
this.trigger('data', {
track: track,
boxes: boxes
});
this.trigger('done', 'AudioSegmentStream');
};
this.reset = function () {
trackDecodeInfo.clearDtsInfo(track);
adtsFrames = [];
this.trigger('reset');
};
};
AudioSegmentStream.prototype = new Stream();
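// Illustrative sketch (not part of the library): how the flush() above derives
// per-frame timing. Each AAC frame carries 1024 PCM samples, so on the 90kHz
// MPEG-TS clock a frame spans ONE_SECOND_IN_TS * 1024 / samplerate ticks.
// The helper name below is hypothetical and is never invoked here.
function exampleAacFrameDurationTicks(samplerate) {
  var NINETY_KHZ = 90000; // the MPEG-TS clock used throughout this file
  // e.g. 48000 Hz -> 1920 ticks (~21.3ms); 44100 Hz -> 2090 ticks (~23.2ms)
  return Math.ceil(NINETY_KHZ * 1024 / samplerate);
}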
/**
* Constructs a single-track, ISO BMFF media segment from H264 data
* events. The output of this stream can be fed to a SourceBuffer
* configured with a suitable initialization segment.
* @param track {object} track metadata configuration
* @param options {object} transmuxer options object
* @param options.alignGopsAtEnd {boolean} If true, start from the end of the
* gopsToAlignWith list when attempting to align gop pts
* @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
* in the source; false to adjust the first segment to start at 0.
*/
VideoSegmentStream = function (track, options) {
var sequenceNumber,
nalUnits = [],
gopsToAlignWith = [],
config,
pps;
options = options || {};
sequenceNumber = options.firstSequenceNumber || 0;
VideoSegmentStream.prototype.init.call(this);
delete track.minPTS;
this.gopCache_ = [];
/**
* Constructs an ISO BMFF segment given H264 nalUnits
* @param {Object} nalUnit A data event representing a nalUnit
* @param {String} nalUnit.nalUnitType
* @param {Object} nalUnit.config Properties for a mp4 track
* @param {Uint8Array} nalUnit.data The nalUnit bytes
* @see lib/codecs/h264.js
**/
this.push = function (nalUnit) {
trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
config = nalUnit.config;
track.sps = [nalUnit.data];
VIDEO_PROPERTIES.forEach(function (prop) {
track[prop] = config[prop];
}, this);
}
if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
pps = nalUnit.data;
track.pps = [nalUnit.data];
} // buffer video until flush() is called
nalUnits.push(nalUnit);
};
/**
* Pass constructed ISO BMFF track and boxes on to the
* next stream in the pipeline
**/
this.flush = function () {
var frames,
gopForFusion,
gops,
moof,
mdat,
boxes,
prependedContentDuration = 0,
firstGop,
lastGop; // Throw away nalUnits at the start of the byte stream until
// we find the first AUD
while (nalUnits.length) {
if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
break;
}
nalUnits.shift();
} // Return early if no video data has been observed
if (nalUnits.length === 0) {
this.resetStream_();
this.trigger('done', 'VideoSegmentStream');
return;
} // Organize the raw nal-units into arrays that represent
// higher-level constructs such as frames and gops
// (group-of-pictures)
frames = frameUtils.groupNalsIntoFrames(nalUnits);
gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
// a problem since MSE (on Chrome) requires a leading keyframe.
//
// We have two approaches to repairing this situation:
// 1) GOP-FUSION:
// This is where we keep track of the GOPS (group-of-pictures)
// from previous fragments and attempt to find one that we can
// prepend to the current fragment in order to create a valid
// fragment.
// 2) KEYFRAME-PULLING:
// Here we search for the first keyframe in the fragment and
// throw away all the frames between the start of the fragment
// and that keyframe. We then extend the duration and pull the
// PTS of the keyframe forward so that it covers the time range
// of the frames that were disposed of.
//
// #1 is far preferable to #2 (which can cause "stuttering"), but
// requires more things to be just right.
if (!gops[0][0].keyFrame) {
// Search for a gop for fusion from our gopCache
gopForFusion = this.getGopForFusion_(nalUnits[0], track);
if (gopForFusion) {
// in order to provide more accurate timing information about the segment, save
// the number of seconds prepended to the original segment due to GOP fusion
prependedContentDuration = gopForFusion.duration;
gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
// new gop at the beginning
gops.byteLength += gopForFusion.byteLength;
gops.nalCount += gopForFusion.nalCount;
gops.pts = gopForFusion.pts;
gops.dts = gopForFusion.dts;
gops.duration += gopForFusion.duration;
} else {
// If we didn't find a candidate gop fall back to keyframe-pulling
gops = frameUtils.extendFirstKeyFrame(gops);
}
} // Trim gops to align with gopsToAlignWith
if (gopsToAlignWith.length) {
var alignedGops;
if (options.alignGopsAtEnd) {
alignedGops = this.alignGopsAtEnd_(gops);
} else {
alignedGops = this.alignGopsAtStart_(gops);
}
if (!alignedGops) {
// save all the nals in the last GOP into the gop cache
this.gopCache_.unshift({
gop: gops.pop(),
pps: track.pps,
sps: track.sps
}); // Keep a maximum of 6 GOPs in the cache
this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
nalUnits = []; // return early since no gops can be aligned with the desired gopsToAlignWith
this.resetStream_();
this.trigger('done', 'VideoSegmentStream');
return;
} // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
// when recalculated before sending off to CoalesceStream
trackDecodeInfo.clearDtsInfo(track);
gops = alignedGops;
}
trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
// samples (that is, frames) in the video data
track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
mdat = mp4.mdat(frameUtils.concatenateNalData(gops));
track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
this.trigger('processedGopsInfo', gops.map(function (gop) {
return {
pts: gop.pts,
dts: gop.dts,
byteLength: gop.byteLength
};
}));
firstGop = gops[0];
lastGop = gops[gops.length - 1];
this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
this.trigger('timingInfo', {
start: gops[0].pts,
end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
}); // save all the nals in the last GOP into the gop cache
this.gopCache_.unshift({
gop: gops.pop(),
pps: track.pps,
sps: track.sps
}); // Keep a maximum of 6 GOPs in the cache
this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
nalUnits = [];
this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
this.trigger('timelineStartInfo', track.timelineStartInfo);
moof = mp4.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
// throwing away hundreds of media segment fragments
boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
sequenceNumber++;
boxes.set(moof);
boxes.set(mdat, moof.byteLength);
this.trigger('data', {
track: track,
boxes: boxes
});
this.resetStream_(); // Continue with the flush process now
this.trigger('done', 'VideoSegmentStream');
};
this.reset = function () {
this.resetStream_();
nalUnits = [];
this.gopCache_.length = 0;
gopsToAlignWith.length = 0;
this.trigger('reset');
};
this.resetStream_ = function () {
trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
// for instance, when we are rendition switching
config = undefined;
pps = undefined;
}; // Search for a candidate Gop for gop-fusion from the gop cache and
// return it or return null if no good candidate was found
this.getGopForFusion_ = function (nalUnit) {
var halfSecond = 45000,
// Half-a-second in a 90khz clock
allowableOverlap = 10000,
// About 3 frames @ 30fps
nearestDistance = Infinity,
dtsDistance,
nearestGopObj,
currentGop,
currentGopObj,
i; // Search for the GOP nearest to the beginning of this nal unit
for (i = 0; i < this.gopCache_.length; i++) {
currentGopObj = this.gopCache_[i];
currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
continue;
} // Reject Gops that would require a negative baseMediaDecodeTime
if (currentGop.dts < track.timelineStartInfo.dts) {
continue;
} // The distance between the end of the gop and the start of the nalUnit
dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within
// a half-second of the nal unit
if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
// Always use the closest GOP we found if there is more than
// one candidate
if (!nearestGopObj || nearestDistance > dtsDistance) {
nearestGopObj = currentGopObj;
nearestDistance = dtsDistance;
}
}
}
if (nearestGopObj) {
return nearestGopObj.gop;
}
return null;
}; // trim gop list to the first gop found that has a matching pts with a gop in the list
// of gopsToAlignWith starting from the START of the list
this.alignGopsAtStart_ = function (gops) {
var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
byteLength = gops.byteLength;
nalCount = gops.nalCount;
duration = gops.duration;
alignIndex = gopIndex = 0;
while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
align = gopsToAlignWith[alignIndex];
gop = gops[gopIndex];
if (align.pts === gop.pts) {
break;
}
if (gop.pts > align.pts) {
// this current gop starts after the current gop we want to align on, so increment
// align index
alignIndex++;
continue;
} // current gop starts before the current gop we want to align on. so increment gop
// index
gopIndex++;
byteLength -= gop.byteLength;
nalCount -= gop.nalCount;
duration -= gop.duration;
}
if (gopIndex === 0) {
// no gops to trim
return gops;
}
if (gopIndex === gops.length) {
// all gops trimmed, skip appending all gops
return null;
}
alignedGops = gops.slice(gopIndex);
alignedGops.byteLength = byteLength;
alignedGops.duration = duration;
alignedGops.nalCount = nalCount;
alignedGops.pts = alignedGops[0].pts;
alignedGops.dts = alignedGops[0].dts;
return alignedGops;
}; // trim gop list to the first gop found that has a matching pts with a gop in the list
// of gopsToAlignWith starting from the END of the list
this.alignGopsAtEnd_ = function (gops) {
var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
alignIndex = gopsToAlignWith.length - 1;
gopIndex = gops.length - 1;
alignEndIndex = null;
matchFound = false;
while (alignIndex >= 0 && gopIndex >= 0) {
align = gopsToAlignWith[alignIndex];
gop = gops[gopIndex];
if (align.pts === gop.pts) {
matchFound = true;
break;
}
if (align.pts > gop.pts) {
alignIndex--;
continue;
}
if (alignIndex === gopsToAlignWith.length - 1) {
// gop.pts is greater than the last alignment candidate. If no match is found
// by the end of this loop, we still want to append gops that come after this
// point
alignEndIndex = gopIndex;
}
gopIndex--;
}
if (!matchFound && alignEndIndex === null) {
return null;
}
var trimIndex;
if (matchFound) {
trimIndex = gopIndex;
} else {
trimIndex = alignEndIndex;
}
if (trimIndex === 0) {
return gops;
}
var alignedGops = gops.slice(trimIndex);
var metadata = alignedGops.reduce(function (total, gop) {
total.byteLength += gop.byteLength;
total.duration += gop.duration;
total.nalCount += gop.nalCount;
return total;
}, {
byteLength: 0,
duration: 0,
nalCount: 0
});
alignedGops.byteLength = metadata.byteLength;
alignedGops.duration = metadata.duration;
alignedGops.nalCount = metadata.nalCount;
alignedGops.pts = alignedGops[0].pts;
alignedGops.dts = alignedGops[0].dts;
return alignedGops;
};
this.alignGopsWith = function (newGopsToAlignWith) {
gopsToAlignWith = newGopsToAlignWith;
};
};
VideoSegmentStream.prototype = new Stream();
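// Illustrative sketch (not part of the library): the candidate test used by
// getGopForFusion_ above. A cached GOP is usable when the gap between its end
// and the first nal unit's DTS falls within [-allowableOverlap, halfSecond] on
// the 90kHz clock. The function name is hypothetical and is never invoked here.
function exampleIsGopFusionCandidate(nalUnitDts, cachedGop) {
  var halfSecond = 45000;       // half a second on the 90kHz clock
  var allowableOverlap = 10000; // roughly 3 frames at 30fps
  var dtsDistance = nalUnitDts - cachedGop.dts - cachedGop.duration;
  return dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond;
}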
/**
* A Stream that can combine multiple streams (i.e. audio & video)
* into a single output segment for MSE. Also supports audio-only
* and video-only streams.
* @param options {object} transmuxer options object
* @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
* in the source; false to adjust the first segment to start at media timeline start.
*/
CoalesceStream = function (options, metadataStream) {
// Number of Tracks per output segment
// If greater than 1, we combine multiple
// tracks into a single segment
this.numberOfTracks = 0;
this.metadataStream = metadataStream;
options = options || {};
if (typeof options.remux !== 'undefined') {
this.remuxTracks = !!options.remux;
} else {
this.remuxTracks = true;
}
if (typeof options.keepOriginalTimestamps === 'boolean') {
this.keepOriginalTimestamps = options.keepOriginalTimestamps;
} else {
this.keepOriginalTimestamps = false;
}
this.pendingTracks = [];
this.videoTrack = null;
this.pendingBoxes = [];
this.pendingCaptions = [];
this.pendingMetadata = [];
this.pendingBytes = 0;
this.emittedTracks = 0;
CoalesceStream.prototype.init.call(this); // Take output from multiple
this.push = function (output) {
// buffer incoming captions until the associated video segment
// finishes
if (output.content || output.text) {
return this.pendingCaptions.push(output);
} // buffer incoming id3 tags until the final flush
if (output.frames) {
return this.pendingMetadata.push(output);
} // Add this track to the list of pending tracks and store
// important information required for the construction of
// the final segment
this.pendingTracks.push(output.track);
this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against chrome?
// We unshift audio and push video because
// as of Chrome 75 when switching from
// one init segment to another if the video
// mdat does not appear after the audio mdat
// only audio will play for the duration of our transmux.
if (output.track.type === 'video') {
this.videoTrack = output.track;
this.pendingBoxes.push(output.boxes);
}
if (output.track.type === 'audio') {
this.audioTrack = output.track;
this.pendingBoxes.unshift(output.boxes);
}
};
};
CoalesceStream.prototype = new Stream();
CoalesceStream.prototype.flush = function (flushSource) {
var offset = 0,
event = {
captions: [],
captionStreams: {},
metadata: [],
info: {}
},
caption,
id3,
initSegment,
timelineStartPts = 0,
i;
if (this.pendingTracks.length < this.numberOfTracks) {
if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
// Return because we haven't received a flush from a data-generating
// portion of the segment (meaning that we have only received meta-data
// or captions.)
return;
} else if (this.remuxTracks) {
// Return until we have enough tracks from the pipeline to remux (if we
// are remuxing audio and video into a single MP4)
return;
} else if (this.pendingTracks.length === 0) {
// In the case where we receive a flush without any data having been
// received we consider it an emitted track for the purposes of coalescing
// `done` events.
// We do this for the case where there is an audio and video track in the
// segment but no audio data. (seen in several playlists with alternate
// audio tracks and no audio present in the main TS segments.)
this.emittedTracks++;
if (this.emittedTracks >= this.numberOfTracks) {
this.trigger('done');
this.emittedTracks = 0;
}
return;
}
}
if (this.videoTrack) {
timelineStartPts = this.videoTrack.timelineStartInfo.pts;
VIDEO_PROPERTIES.forEach(function (prop) {
event.info[prop] = this.videoTrack[prop];
}, this);
} else if (this.audioTrack) {
timelineStartPts = this.audioTrack.timelineStartInfo.pts;
AUDIO_PROPERTIES.forEach(function (prop) {
event.info[prop] = this.audioTrack[prop];
}, this);
}
if (this.videoTrack || this.audioTrack) {
if (this.pendingTracks.length === 1) {
event.type = this.pendingTracks[0].type;
} else {
event.type = 'combined';
}
this.emittedTracks += this.pendingTracks.length;
initSegment = mp4.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment
event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
// and track definitions
event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats
event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together
for (i = 0; i < this.pendingBoxes.length; i++) {
event.data.set(this.pendingBoxes[i], offset);
offset += this.pendingBoxes[i].byteLength;
} // Translate caption PTS times into second offsets to match the
// video timeline for the segment, and add track info
for (i = 0; i < this.pendingCaptions.length; i++) {
caption = this.pendingCaptions[i];
caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
event.captionStreams[caption.stream] = true;
event.captions.push(caption);
} // Translate ID3 frame PTS times into second offsets to match the
// video timeline for the segment
for (i = 0; i < this.pendingMetadata.length; i++) {
id3 = this.pendingMetadata[i];
id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
event.metadata.push(id3);
} // We add this to every single emitted segment even though we only need
// it for the first
event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
this.pendingTracks.length = 0;
this.videoTrack = null;
this.pendingBoxes.length = 0;
this.pendingCaptions.length = 0;
this.pendingBytes = 0;
this.pendingMetadata.length = 0; // Emit the built segment
// We include captions and ID3 tags for backwards compatibility,
// ideally we should send only video and audio in the data event
this.trigger('data', event); // Emit each caption to the outside world
// Ideally, this would happen immediately on parsing captions,
// but we need to ensure that video data is sent back first
// so that caption timing can be adjusted to match video timing
for (i = 0; i < event.captions.length; i++) {
caption = event.captions[i];
this.trigger('caption', caption);
} // Emit each id3 tag to the outside world
// Ideally, this would happen immediately on parsing the tag,
// but we need to ensure that video data is sent back first
// so that ID3 frame timing can be adjusted to match video timing
for (i = 0; i < event.metadata.length; i++) {
id3 = event.metadata[i];
this.trigger('id3Frame', id3);
}
} // Only emit `done` if all tracks have been flushed and emitted
if (this.emittedTracks >= this.numberOfTracks) {
this.trigger('done');
this.emittedTracks = 0;
}
};
CoalesceStream.prototype.setRemux = function (val) {
this.remuxTracks = val;
};
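// Illustrative sketch (not part of the library): what a typical consumer does
// with the segment emitted by CoalesceStream.prototype.flush above. The 'data'
// event carries a fresh init segment plus the concatenated moof/mdat bytes
// (audio boxes first, then video, per the ordering comment in push()). The
// SourceBuffer handling below is a hypothetical, minimal example.
function exampleAppendCoalescedSegment(sourceBuffer, event) {
  var bytes = new Uint8Array(event.initSegment.byteLength + event.data.byteLength);
  bytes.set(event.initSegment, 0);
  bytes.set(event.data, event.initSegment.byteLength);
  sourceBuffer.appendBuffer(bytes);
}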
/**
* A Stream that expects MP2T binary data as input and produces
* corresponding media segments, suitable for use with Media Source
* Extension (MSE) implementations that support the ISO BMFF byte
* stream format, like Chrome.
*/
Transmuxer = function (options) {
var self = this,
hasFlushed = true,
videoTrack,
audioTrack;
Transmuxer.prototype.init.call(this);
options = options || {};
this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
this.transmuxPipeline_ = {};
this.setupAacPipeline = function () {
var pipeline = {};
this.transmuxPipeline_ = pipeline;
pipeline.type = 'aac';
pipeline.metadataStream = new m2ts.MetadataStream(); // set up the parsing pipeline
pipeline.aacStream = new AacStream();
pipeline.audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
pipeline.timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
pipeline.adtsStream = new AdtsStream();
pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
pipeline.headOfPipeline = pipeline.aacStream;
pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
pipeline.metadataStream.on('timestamp', function (frame) {
pipeline.aacStream.setTimestamp(frame.timeStamp);
});
pipeline.aacStream.on('data', function (data) {
if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
return;
}
audioTrack = audioTrack || {
timelineStartInfo: {
baseMediaDecodeTime: self.baseMediaDecodeTime
},
codec: 'adts',
type: 'audio'
}; // hook up the audio segment stream to the first track with aac data
pipeline.coalesceStream.numberOfTracks++;
pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack, options);
pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
self.trigger('trackinfo', {
hasAudio: !!audioTrack,
hasVideo: !!videoTrack
});
}); // Re-emit any data coming from the coalesce stream to the outside world
pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
addPipelineLogRetriggers(this, pipeline);
};
this.setupTsPipeline = function () {
var pipeline = {};
this.transmuxPipeline_ = pipeline;
pipeline.type = 'ts';
pipeline.metadataStream = new m2ts.MetadataStream(); // set up the parsing pipeline
pipeline.packetStream = new m2ts.TransportPacketStream();
pipeline.parseStream = new m2ts.TransportParseStream();
pipeline.elementaryStream = new m2ts.ElementaryStream();
pipeline.timestampRolloverStream = new m2ts.TimestampRolloverStream();
pipeline.adtsStream = new AdtsStream();
pipeline.h264Stream = new H264Stream();
pipeline.captionStream = new m2ts.CaptionStream(options);
pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
// demux the streams
pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
pipeline.elementaryStream.on('data', function (data) {
var i;
if (data.type === 'metadata') {
i = data.tracks.length; // scan the tracks listed in the metadata
while (i--) {
if (!videoTrack && data.tracks[i].type === 'video') {
videoTrack = data.tracks[i];
videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
} else if (!audioTrack && data.tracks[i].type === 'audio') {
audioTrack = data.tracks[i];
audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
}
} // hook up the video segment stream to the first track with h264 data
if (videoTrack && !pipeline.videoSegmentStream) {
pipeline.coalesceStream.numberOfTracks++;
pipeline.videoSegmentStream = new VideoSegmentStream(videoTrack, options);
pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
// When video emits timelineStartInfo data after a flush, we forward that
// info to the AudioSegmentStream, if it exists, because video timeline
// data takes precedence. Do not do this if keepOriginalTimestamps is set,
// because this is a particularly subtle form of timestamp alteration.
if (audioTrack && !options.keepOriginalTimestamps) {
audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
// very earliest DTS we have seen in video because Chrome will
// interpret any video track with a baseMediaDecodeTime that is
// non-zero as a gap.
pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
}
});
pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
if (audioTrack) {
pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
}
});
pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
}
if (audioTrack && !pipeline.audioSegmentStream) {
// hook up the audio segment stream to the first track with aac data
pipeline.coalesceStream.numberOfTracks++;
pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack, options);
pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
} // emit pmt info
self.trigger('trackinfo', {
hasAudio: !!audioTrack,
hasVideo: !!videoTrack
});
}
}); // Re-emit any data coming from the coalesce stream to the outside world
pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
self.trigger('id3Frame', id3Frame);
});
pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
addPipelineLogRetriggers(this, pipeline);
}; // hook up the segment streams once track metadata is delivered
this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
var pipeline = this.transmuxPipeline_;
if (!options.keepOriginalTimestamps) {
this.baseMediaDecodeTime = baseMediaDecodeTime;
}
if (audioTrack) {
audioTrack.timelineStartInfo.dts = undefined;
audioTrack.timelineStartInfo.pts = undefined;
trackDecodeInfo.clearDtsInfo(audioTrack);
if (pipeline.audioTimestampRolloverStream) {
pipeline.audioTimestampRolloverStream.discontinuity();
}
}
if (videoTrack) {
if (pipeline.videoSegmentStream) {
pipeline.videoSegmentStream.gopCache_ = [];
}
videoTrack.timelineStartInfo.dts = undefined;
videoTrack.timelineStartInfo.pts = undefined;
trackDecodeInfo.clearDtsInfo(videoTrack);
pipeline.captionStream.reset();
}
if (pipeline.timestampRolloverStream) {
pipeline.timestampRolloverStream.discontinuity();
}
};
this.setAudioAppendStart = function (timestamp) {
if (audioTrack) {
this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
}
};
this.setRemux = function (val) {
var pipeline = this.transmuxPipeline_;
options.remux = val;
if (pipeline && pipeline.coalesceStream) {
pipeline.coalesceStream.setRemux(val);
}
};
this.alignGopsWith = function (gopsToAlignWith) {
if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
}
};
this.getLogTrigger_ = function (key) {
var self = this;
return function (event) {
event.stream = key;
self.trigger('log', event);
};
}; // feed incoming data to the front of the parsing pipeline
this.push = function (data) {
if (hasFlushed) {
var isAac = isLikelyAacData(data);
if (isAac && this.transmuxPipeline_.type !== 'aac') {
this.setupAacPipeline();
} else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
this.setupTsPipeline();
}
hasFlushed = false;
}
this.transmuxPipeline_.headOfPipeline.push(data);
}; // flush any buffered data
this.flush = function () {
hasFlushed = true; // Start at the top of the pipeline and flush all pending work
this.transmuxPipeline_.headOfPipeline.flush();
};
this.endTimeline = function () {
this.transmuxPipeline_.headOfPipeline.endTimeline();
};
this.reset = function () {
if (this.transmuxPipeline_.headOfPipeline) {
this.transmuxPipeline_.headOfPipeline.reset();
}
}; // Caption data has to be reset when seeking outside buffered range
this.resetCaptions = function () {
if (this.transmuxPipeline_.captionStream) {
this.transmuxPipeline_.captionStream.reset();
}
};
};
Transmuxer.prototype = new Stream();
var transmuxer = {
Transmuxer: Transmuxer,
VideoSegmentStream: VideoSegmentStream,
AudioSegmentStream: AudioSegmentStream,
AUDIO_PROPERTIES: AUDIO_PROPERTIES,
VIDEO_PROPERTIES: VIDEO_PROPERTIES,
// exported for testing
generateSegmentTimingInfo: generateSegmentTimingInfo
};
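// Illustrative usage sketch (not part of the library): driving the Transmuxer
// defined above. `tsBytes` is a hypothetical Uint8Array of MPEG2-TS (or ADTS/AAC)
// data; the appropriate pipeline is selected automatically on the first push
// after a flush. The function below is never invoked here.
function exampleTransmux(tsBytes, onSegment) {
  var t = new Transmuxer({ keepOriginalTimestamps: false });
  t.on('data', function (segment) {
    // segment.initSegment and segment.data are Uint8Arrays suitable for MSE
    onSegment(segment);
  });
  t.push(tsBytes);
  t.flush();
}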
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*/
var toUnsigned$3 = function (value) {
return value >>> 0;
};
var toHexString$1 = function (value) {
return ('00' + value.toString(16)).slice(-2);
};
var bin = {
toUnsigned: toUnsigned$3,
toHexString: toHexString$1
};
var parseType$3 = function (buffer) {
var result = '';
result += String.fromCharCode(buffer[0]);
result += String.fromCharCode(buffer[1]);
result += String.fromCharCode(buffer[2]);
result += String.fromCharCode(buffer[3]);
return result;
};
var parseType_1 = parseType$3;
var toUnsigned$2 = bin.toUnsigned;
var parseType$2 = parseType_1;
var findBox$2 = function (data, path) {
var results = [],
i,
size,
type,
end,
subresults;
if (!path.length) {
// short-circuit the search for empty paths
return null;
}
for (i = 0; i < data.byteLength;) {
size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
type = parseType$2(data.subarray(i + 4, i + 8));
end = size > 1 ? i + size : data.byteLength;
if (type === path[0]) {
if (path.length === 1) {
// this is the end of the path and we've found the box we were
// looking for
results.push(data.subarray(i + 8, end));
} else {
// recursively search for the next box along the path
subresults = findBox$2(data.subarray(i + 8, end), path.slice(1));
if (subresults.length) {
results = results.concat(subresults);
}
}
}
i = end;
} // we've finished searching all of data
return results;
};
var findBox_1 = findBox$2;
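// Illustrative sketch (not part of the library): findBox$2 above walks a box
// path and returns the payloads (the bytes after each 8-byte size/type header)
// of every match. `segmentBytes` is a hypothetical Uint8Array of an fmp4 segment.
function exampleFindTfhdPayloads(segmentBytes) {
  // every moof > traf > tfhd payload in the segment, in document order
  return findBox$2(segmentBytes, ['moof', 'traf', 'tfhd']);
}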
var toUnsigned$1 = bin.toUnsigned;
var getUint64$2 = numbers.getUint64;
var tfdt = function (data) {
var result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4))
};
if (result.version === 1) {
result.baseMediaDecodeTime = getUint64$2(data.subarray(4));
} else {
result.baseMediaDecodeTime = toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]);
}
return result;
};
var parseTfdt$2 = tfdt;
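// Illustrative sketch (not part of the library): the tfdt payload parsed above
// is version (1 byte), flags (3 bytes), then a 32-bit (version 0) or 64-bit
// (version 1) baseMediaDecodeTime. A hand-built version 0 payload, never used here:
function exampleParseVersion0Tfdt() {
  // version 0, flags 0, baseMediaDecodeTime = 0x00010000 (65536 ticks)
  var payload = new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00]);
  return parseTfdt$2(payload).baseMediaDecodeTime; // 65536
}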
var parseSampleFlags$1 = function (flags) {
return {
isLeading: (flags[0] & 0x0c) >>> 2,
dependsOn: flags[0] & 0x03,
isDependedOn: (flags[1] & 0xc0) >>> 6,
hasRedundancy: (flags[1] & 0x30) >>> 4,
paddingValue: (flags[1] & 0x0e) >>> 1,
isNonSyncSample: flags[1] & 0x01,
degradationPriority: flags[2] << 8 | flags[3]
};
};
var parseSampleFlags_1 = parseSampleFlags$1;
var parseSampleFlags = parseSampleFlags_1;
var trun = function (data) {
var result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
samples: []
},
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
// Flag interpretation
dataOffsetPresent = result.flags[2] & 0x01,
// compare with 2nd byte of 0x1
firstSampleFlagsPresent = result.flags[2] & 0x04,
// compare with 2nd byte of 0x4
sampleDurationPresent = result.flags[1] & 0x01,
// compare with 2nd byte of 0x100
sampleSizePresent = result.flags[1] & 0x02,
// compare with 2nd byte of 0x200
sampleFlagsPresent = result.flags[1] & 0x04,
// compare with 2nd byte of 0x400
sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
// compare with 2nd byte of 0x800
sampleCount = view.getUint32(4),
offset = 8,
sample;
if (dataOffsetPresent) {
// 32 bit signed integer
result.dataOffset = view.getInt32(offset);
offset += 4;
} // Overrides the flags for the first sample only. The order of
// optional values will be: duration, size, compositionTimeOffset
if (firstSampleFlagsPresent && sampleCount) {
sample = {
flags: parseSampleFlags(data.subarray(offset, offset + 4))
};
offset += 4;
if (sampleDurationPresent) {
sample.duration = view.getUint32(offset);
offset += 4;
}
if (sampleSizePresent) {
sample.size = view.getUint32(offset);
offset += 4;
}
if (sampleCompositionTimeOffsetPresent) {
if (result.version === 1) {
sample.compositionTimeOffset = view.getInt32(offset);
} else {
sample.compositionTimeOffset = view.getUint32(offset);
}
offset += 4;
}
result.samples.push(sample);
sampleCount--;
}
while (sampleCount--) {
sample = {};
if (sampleDurationPresent) {
sample.duration = view.getUint32(offset);
offset += 4;
}
if (sampleSizePresent) {
sample.size = view.getUint32(offset);
offset += 4;
}
if (sampleFlagsPresent) {
sample.flags = parseSampleFlags(data.subarray(offset, offset + 4));
offset += 4;
}
if (sampleCompositionTimeOffsetPresent) {
if (result.version === 1) {
sample.compositionTimeOffset = view.getInt32(offset);
} else {
sample.compositionTimeOffset = view.getUint32(offset);
}
offset += 4;
}
result.samples.push(sample);
}
return result;
};
var parseTrun$2 = trun;
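// Illustrative note (not part of the library): the trun tr_flags field is a
// 24-bit big-endian value stored across result.flags[0..2] above, so a mask
// like 0x000100 (sample-duration-present) tests flags[1] & 0x01 and 0x000001
// (data-offset-present) tests flags[2] & 0x01. The hypothetical helper below
// just restates that mapping and is never invoked here.
function exampleTrunFlagBits(flagBytes) {
  return {
    dataOffsetPresent: !!(flagBytes[2] & 0x01),                  // 0x000001
    firstSampleFlagsPresent: !!(flagBytes[2] & 0x04),            // 0x000004
    sampleDurationPresent: !!(flagBytes[1] & 0x01),              // 0x000100
    sampleSizePresent: !!(flagBytes[1] & 0x02),                  // 0x000200
    sampleFlagsPresent: !!(flagBytes[1] & 0x04),                 // 0x000400
    sampleCompositionTimeOffsetPresent: !!(flagBytes[1] & 0x08)  // 0x000800
  };
}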
var tfhd = function (data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
trackId: view.getUint32(4)
},
baseDataOffsetPresent = result.flags[2] & 0x01,
sampleDescriptionIndexPresent = result.flags[2] & 0x02,
defaultSampleDurationPresent = result.flags[2] & 0x08,
defaultSampleSizePresent = result.flags[2] & 0x10,
defaultSampleFlagsPresent = result.flags[2] & 0x20,
durationIsEmpty = result.flags[0] & 0x010000,
defaultBaseIsMoof = result.flags[0] & 0x020000,
i;
i = 8;
if (baseDataOffsetPresent) {
i += 4; // truncate top 4 bytes
// FIXME: should we read the full 64 bits?
result.baseDataOffset = view.getUint32(12);
i += 4;
}
if (sampleDescriptionIndexPresent) {
result.sampleDescriptionIndex = view.getUint32(i);
i += 4;
}
if (defaultSampleDurationPresent) {
result.defaultSampleDuration = view.getUint32(i);
i += 4;
}
if (defaultSampleSizePresent) {
result.defaultSampleSize = view.getUint32(i);
i += 4;
}
if (defaultSampleFlagsPresent) {
result.defaultSampleFlags = view.getUint32(i);
}
if (durationIsEmpty) {
result.durationIsEmpty = true;
}
if (!baseDataOffsetPresent && defaultBaseIsMoof) {
result.baseDataOffsetIsMoof = true;
}
return result;
};
var parseTfhd$2 = tfhd;
var win;
if (typeof window !== "undefined") {
win = window;
} else if (typeof commonjsGlobal !== "undefined") {
win = commonjsGlobal;
} else if (typeof self !== "undefined") {
win = self;
} else {
win = {};
}
var window_1 = win;
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Reads in-band CEA-708 captions out of FMP4 segments.
* @see https://en.wikipedia.org/wiki/CEA-708
*/
var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
var CaptionStream = captionStream.CaptionStream;
var findBox$1 = findBox_1;
var parseTfdt$1 = parseTfdt$2;
var parseTrun$1 = parseTrun$2;
var parseTfhd$1 = parseTfhd$2;
var window$2 = window_1;
/**
* Maps an offset in the mdat to a sample based on the size of the samples.
* Assumes that `parseSamples` has been called first.
*
* @param {Number} offset - The offset into the mdat
* @param {Object[]} samples - An array of samples, parsed using `parseSamples`
* @return {?Object} The matching sample, or null if no match was found.
*
* @see ISO-BMFF-12/2015, Section 8.8.8
**/
var mapToSample = function (offset, samples) {
var approximateOffset = offset;
for (var i = 0; i < samples.length; i++) {
var sample = samples[i];
if (approximateOffset < sample.size) {
return sample;
}
approximateOffset -= sample.size;
}
return null;
};
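// Illustrative sketch (not part of the library): mapToSample above walks the
// sample sizes in order, so an mdat byte offset falls into the first sample
// whose cumulative size exceeds it. With sizes [100, 200, 50], offset 120 lands
// in the second sample (120 - 100 = 20 < 200). Never invoked here.
function exampleMapToSample() {
  var samples = [{ size: 100 }, { size: 200 }, { size: 50 }];
  return mapToSample(120, samples); // returns the { size: 200 } sample
}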
/**
* Finds SEI nal units contained in a Media Data Box.
* Assumes that `parseSamples` has been called first.
*
* @param {Uint8Array} avcStream - The bytes of the mdat
* @param {Object[]} samples - The samples parsed out by `parseSamples`
* @param {Number} trackId - The trackId of this video track
* @return {Object[]} seiNals - the parsed SEI NALUs found.
* The contents of the seiNal should match what is expected by
* CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
*
* @see ISO-BMFF-12/2015, Section 8.1.1
* @see Rec. ITU-T H.264, 7.3.2.3.1
**/
var findSeiNals = function (avcStream, samples, trackId) {
var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
result = {
logs: [],
seiNals: []
},
seiNal,
i,
length,
lastMatchedSample;
for (i = 0; i + 4 < avcStream.length; i += length) {
length = avcView.getUint32(i);
i += 4; // Bail if this doesn't appear to be an H264 stream
if (length <= 0) {
continue;
}
switch (avcStream[i] & 0x1F) {
case 0x06:
var data = avcStream.subarray(i + 1, i + 1 + length);
var matchingSample = mapToSample(i, samples);
seiNal = {
nalUnitType: 'sei_rbsp',
size: length,
data: data,
escapedRBSP: discardEmulationPreventionBytes(data),
trackId: trackId
};
if (matchingSample) {
seiNal.pts = matchingSample.pts;
seiNal.dts = matchingSample.dts;
lastMatchedSample = matchingSample;
} else if (lastMatchedSample) {
// If a matching sample cannot be found, use the last
// sample's values as they should be as close as possible
seiNal.pts = lastMatchedSample.pts;
seiNal.dts = lastMatchedSample.dts;
} else {
result.logs.push({
level: 'warn',
message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
});
break;
}
result.seiNals.push(seiNal);
break;
}
}
return result;
};
/**
* Parses sample information out of Track Run Boxes and calculates
* the absolute presentation and decode timestamps of each sample.
*
* @param {Array} truns - The Trun Run boxes to be parsed
* @param {Number|BigInt} baseMediaDecodeTime - base media decode time from tfdt
* @see ISO-BMFF-12/2015, Section 8.8.12
* @param {Object} tfhd - The parsed Track Fragment Header
* @see inspect.parseTfhd
* @return {Object[]} the parsed samples
*
* @see ISO-BMFF-12/2015, Section 8.8.8
**/
var parseSamples = function (truns, baseMediaDecodeTime, tfhd) {
var currentDts = baseMediaDecodeTime;
var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
var defaultSampleSize = tfhd.defaultSampleSize || 0;
var trackId = tfhd.trackId;
var allSamples = [];
truns.forEach(function (trun) {
// Note: We currently do not parse the sample table as well
// as the trun. It's possible some sources will require this.
// moov > trak > mdia > minf > stbl
var trackRun = parseTrun$1(trun);
var samples = trackRun.samples;
samples.forEach(function (sample) {
if (sample.duration === undefined) {
sample.duration = defaultSampleDuration;
}
if (sample.size === undefined) {
sample.size = defaultSampleSize;
}
sample.trackId = trackId;
sample.dts = currentDts;
if (sample.compositionTimeOffset === undefined) {
sample.compositionTimeOffset = 0;
}
if (typeof currentDts === 'bigint') {
sample.pts = currentDts + window$2.BigInt(sample.compositionTimeOffset);
currentDts += window$2.BigInt(sample.duration);
} else {
sample.pts = currentDts + sample.compositionTimeOffset;
currentDts += sample.duration;
}
});
allSamples = allSamples.concat(samples);
});
return allSamples;
};
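// Illustrative sketch (not part of the library): the timestamp bookkeeping done
// by parseSamples above. Decode times accumulate sample durations starting from
// the tfdt baseMediaDecodeTime, and each presentation time adds that sample's
// compositionTimeOffset (all in the track's timescale). The hypothetical helper
// below shows the accumulation for plain Number inputs and is never invoked here.
function exampleAccumulateSampleTimes(baseMediaDecodeTime, samples) {
  var dts = baseMediaDecodeTime;
  return samples.map(function (sample) {
    var times = { dts: dts, pts: dts + (sample.compositionTimeOffset || 0) };
    dts += sample.duration;
    return times;
  });
}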
/**
* Parses out caption nals from an FMP4 segment's video tracks.
*
* @param {Uint8Array} segment - The bytes of a single segment
* @param {Number} videoTrackId - The trackId of a video track in the segment
* @return {Object.<Number, Object[]>} A mapping of video trackId to
* a list of seiNals found in that track
**/
var parseCaptionNals = function (segment, videoTrackId) {
// To get the samples
var trafs = findBox$1(segment, ['moof', 'traf']); // To get SEI NAL units
var mdats = findBox$1(segment, ['mdat']);
var captionNals = {};
var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs
mdats.forEach(function (mdat, index) {
var matchingTraf = trafs[index];
mdatTrafPairs.push({
mdat: mdat,
traf: matchingTraf
});
});
mdatTrafPairs.forEach(function (pair) {
var mdat = pair.mdat;
var traf = pair.traf;
var tfhd = findBox$1(traf, ['tfhd']); // Exactly 1 tfhd per traf
var headerInfo = parseTfhd$1(tfhd[0]);
var trackId = headerInfo.trackId;
var tfdt = findBox$1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt$1(tfdt[0]).baseMediaDecodeTime : 0;
var truns = findBox$1(traf, ['trun']);
var samples;
var result; // Only parse video data for the chosen video track
if (videoTrackId === trackId && truns.length > 0) {
samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
result = findSeiNals(mdat, samples, trackId);
if (!captionNals[trackId]) {
captionNals[trackId] = {
seiNals: [],
logs: []
};
}
captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
}
});
return captionNals;
};
/**
* Parses out inband captions from an MP4 container and returns
* caption objects that can be used by WebVTT and the TextTrack API.
* @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
* @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
* Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
*
* @param {Uint8Array} segment - The fmp4 segment containing embedded captions
* @param {Number} trackId - The id of the video track to parse
* @param {Number} timescale - The timescale for the video track from the init segment
*
* @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
* @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
* @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
* @return {Object[]} parsedCaptions[].content - A list of individual caption segments
* @return {String} parsedCaptions[].content.text - The visible content of the caption segment
* @return {Number} parsedCaptions[].content.line - The line height from 1-15 for positioning of the caption segment
* @return {Number} parsedCaptions[].content.position - The column indent percentage for cue positioning from 10-80
**/
var parseEmbeddedCaptions = function (segment, trackId, timescale) {
var captionNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
if (trackId === null) {
return null;
}
captionNals = parseCaptionNals(segment, trackId);
var trackNals = captionNals[trackId] || {};
return {
seiNals: trackNals.seiNals,
logs: trackNals.logs,
timescale: timescale
};
};
/**
* Converts SEI NALUs into captions that can be used by video.js
**/
var CaptionParser = function () {
var isInitialized = false;
var captionStream; // Stores segments seen before trackId and timescale are set
var segmentCache; // Stores video track ID of the track being parsed
var trackId; // Stores the timescale of the track being parsed
var timescale; // Stores captions parsed so far
var parsedCaptions; // Stores whether we are receiving partial data or not
var parsingPartial;
/**
* A method to indicate whether a CaptionParser has been initialized
* @returns {Boolean}
**/
this.isInitialized = function () {
return isInitialized;
};
/**
* Initializes the underlying CaptionStream, SEI NAL parsing
* and management, and caption collection
**/
this.init = function (options) {
captionStream = new CaptionStream();
isInitialized = true;
parsingPartial = options ? options.isPartial : false; // Collect dispatched captions
captionStream.on('data', function (event) {
// Convert to seconds in the source's timescale
event.startTime = event.startPts / timescale;
event.endTime = event.endPts / timescale;
parsedCaptions.captions.push(event);
parsedCaptions.captionStreams[event.stream] = true;
});
captionStream.on('log', function (log) {
parsedCaptions.logs.push(log);
});
};
/**
* Determines if a new video track will be selected
* or if the timescale changed
* @return {Boolean}
**/
this.isNewInit = function (videoTrackIds, timescales) {
if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
return false;
}
return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
};
/**
* Parses out SEI captions and interacts with underlying
* CaptionStream to return dispatched captions
*
* @param {Uint8Array} segment - The fmp4 segment containing embedded captions
* @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
* @param {Object.<Number, Number>} timescales - The timescales found in the init segment
* @see parseEmbeddedCaptions
* @see m2ts/caption-stream.js
**/
this.parse = function (segment, videoTrackIds, timescales) {
var parsedData;
if (!this.isInitialized()) {
return null; // This is not likely to be a video segment
} else if (!videoTrackIds || !timescales) {
return null;
} else if (this.isNewInit(videoTrackIds, timescales)) {
// Use the first video track only as there is no
// mechanism to switch to other video tracks
trackId = videoTrackIds[0];
timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
// data until we have one.
// the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
} else if (trackId === null || !timescale) {
segmentCache.push(segment);
return null;
} // Now that a timescale and trackId is set, parse cached segments
while (segmentCache.length > 0) {
var cachedSegment = segmentCache.shift();
this.parse(cachedSegment, videoTrackIds, timescales);
}
parsedData = parseEmbeddedCaptions(segment, trackId, timescale);
if (parsedData && parsedData.logs) {
parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
}
if (parsedData === null || !parsedData.seiNals) {
if (parsedCaptions.logs.length) {
return {
logs: parsedCaptions.logs,
captions: [],
captionStreams: []
};
}
return null;
}
this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched
this.flushStream();
return parsedCaptions;
};
/**
* Pushes SEI NALUs onto CaptionStream
* @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
* Assumes that `parseCaptionNals` has been called first
* @see m2ts/caption-stream.js
**/
this.pushNals = function (nals) {
if (!this.isInitialized() || !nals || nals.length === 0) {
return null;
}
nals.forEach(function (nal) {
captionStream.push(nal);
});
};
/**
* Flushes underlying CaptionStream to dispatch processed, displayable captions
* @see m2ts/caption-stream.js
**/
this.flushStream = function () {
if (!this.isInitialized()) {
return null;
}
if (!parsingPartial) {
captionStream.flush();
} else {
captionStream.partialFlush();
}
};
/**
* Reset caption buckets for new data
**/
this.clearParsedCaptions = function () {
parsedCaptions.captions = [];
parsedCaptions.captionStreams = {};
parsedCaptions.logs = [];
};
/**
* Resets underlying CaptionStream
* @see m2ts/caption-stream.js
**/
this.resetCaptionStream = function () {
if (!this.isInitialized()) {
return null;
}
captionStream.reset();
};
/**
* Convenience method to clear all captions flushed from the
* CaptionStream and still being parsed
* @see m2ts/caption-stream.js
**/
this.clearAllCaptions = function () {
this.clearParsedCaptions();
this.resetCaptionStream();
};
/**
* Reset caption parser
**/
this.reset = function () {
segmentCache = [];
trackId = null;
timescale = null;
if (!parsedCaptions) {
parsedCaptions = {
captions: [],
// CC1, CC2, CC3, CC4
captionStreams: {},
logs: []
};
} else {
this.clearParsedCaptions();
}
this.resetCaptionStream();
};
this.reset();
};
var captionParser = CaptionParser;
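// Illustrative usage sketch (not part of the library): pulling CEA-608/708
// captions out of an fmp4 segment with the CaptionParser above. `segment` is a
// hypothetical Uint8Array; `videoTrackIds` and `timescales` would normally be
// produced by probing the init segment (see the getVideoTrackIds and timescale
// helpers defined later in this file). Never invoked here.
function exampleParseFmp4Captions(segment, videoTrackIds, timescales) {
  var parser = new CaptionParser();
  parser.init();
  // returns { captions, captionStreams, logs } or null if nothing was found
  return parser.parse(segment, videoTrackIds, timescales);
}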
/**
* Returns the first string in the data array ending with a null char '\0'
* @param {Uint8Array} data
* @returns the string with the null char
*/
var uint8ToCString$1 = function (data) {
var index = 0;
var curChar = String.fromCharCode(data[index]);
var retString = '';
while (curChar !== '\0') {
retString += curChar;
index++;
curChar = String.fromCharCode(data[index]);
} // Add nullChar
retString += curChar;
return retString;
};
var string = {
uint8ToCString: uint8ToCString$1
};
var uint8ToCString = string.uint8ToCString;
var getUint64$1 = numbers.getUint64;
/**
* Based on: ISO/IEC 23009 Section: 5.10.3.3
* References:
* https://dashif-documents.azurewebsites.net/Events/master/event.html#emsg-format
* https://aomediacodec.github.io/id3-emsg/
*
* Takes emsg box data as a uint8 array and returns an emsg box object
* @param {UInt8Array} boxData data from emsg box
* @returns A parsed emsg box object
*/
var parseEmsgBox = function (boxData) {
// version + flags
var offset = 4;
var version = boxData[0];
var scheme_id_uri, value, timescale, presentation_time, presentation_time_delta, event_duration, id, message_data;
if (version === 0) {
scheme_id_uri = uint8ToCString(boxData.subarray(offset));
offset += scheme_id_uri.length;
value = uint8ToCString(boxData.subarray(offset));
offset += value.length;
var dv = new DataView(boxData.buffer);
timescale = dv.getUint32(offset);
offset += 4;
presentation_time_delta = dv.getUint32(offset);
offset += 4;
event_duration = dv.getUint32(offset);
offset += 4;
id = dv.getUint32(offset);
offset += 4;
} else if (version === 1) {
var dv = new DataView(boxData.buffer);
timescale = dv.getUint32(offset);
offset += 4;
presentation_time = getUint64$1(boxData.subarray(offset));
offset += 8;
event_duration = dv.getUint32(offset);
offset += 4;
id = dv.getUint32(offset);
offset += 4;
scheme_id_uri = uint8ToCString(boxData.subarray(offset));
offset += scheme_id_uri.length;
value = uint8ToCString(boxData.subarray(offset));
offset += value.length;
}
message_data = new Uint8Array(boxData.subarray(offset, boxData.byteLength));
var emsgBox = {
scheme_id_uri,
value,
// if timescale is undefined or 0 set to 1
timescale: timescale ? timescale : 1,
presentation_time,
presentation_time_delta,
event_duration,
id,
message_data
};
return isValidEmsgBox(version, emsgBox) ? emsgBox : undefined;
};
/**
* Scales a presentation time or time delta with an offset with a provided timescale
* @param {number} presentationTime
* @param {number} timescale
* @param {number} timeDelta
* @param {number} offset
* @returns the scaled time as a number
*/
var scaleTime = function (presentationTime, timescale, timeDelta, offset) {
return presentationTime || presentationTime === 0 ? presentationTime / timescale : offset + timeDelta / timescale;
};
/**
* Checks the emsg box data for validity based on the version
* @param {number} version of the emsg box to validate
* @param {Object} emsg the emsg data to validate
* @returns whether the box is valid, as a boolean
*/
var isValidEmsgBox = function (version, emsg) {
var hasScheme = emsg.scheme_id_uri !== '\0';
var isValidV0Box = version === 0 && isDefined(emsg.presentation_time_delta) && hasScheme;
var isValidV1Box = version === 1 && isDefined(emsg.presentation_time) && hasScheme; // Only valid versions of emsg are 0 and 1
return !(version > 1) && isValidV0Box || isValidV1Box;
}; // Utility function to check if an object is defined
var isDefined = function (data) {
return data !== undefined && data !== null;
};
var emsg$1 = {
parseEmsgBox: parseEmsgBox,
scaleTime: scaleTime
};
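// Illustrative note (not part of the library): how scaleTime above resolves an
// emsg start time. A version 1 box carries an absolute presentation_time, so
// scaleTime simply divides by the timescale; a version 0 box only carries
// presentation_time_delta, so the caller supplies an offset (in seconds) that
// the scaled delta is added to. For example, with a timescale of 90000:
//   scaleTime(180000, 90000)               -> 2    (version 1, absolute time)
//   scaleTime(undefined, 90000, 45000, 10) -> 10.5 (version 0, offset + delta)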
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Utilities to detect basic properties and metadata about MP4s.
*/
var toUnsigned = bin.toUnsigned;
var toHexString = bin.toHexString;
var findBox = findBox_1;
var parseType$1 = parseType_1;
var emsg = emsg$1;
var parseTfhd = parseTfhd$2;
var parseTrun = parseTrun$2;
var parseTfdt = parseTfdt$2;
var getUint64 = numbers.getUint64;
var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader, getEmsgID3;
var window$1 = window_1;
var parseId3Frames = parseId3.parseId3Frames;
/**
* Parses an MP4 initialization segment and extracts the timescale
* values for any declared tracks. Timescale values indicate the
* number of clock ticks per second to assume for time-based values
* elsewhere in the MP4.
*
* To determine the start time of an MP4, you need two pieces of
* information: the timescale unit and the earliest base media decode
* time. Multiple timescales can be specified within an MP4 but the
* base media decode time is always expressed in the timescale from
* the media header box for the track:
* ```
* moov > trak > mdia > mdhd.timescale
* ```
* @param init {Uint8Array} the bytes of the init segment
* @return {object} a hash of track ids to timescale values or null if
* the init segment is malformed.
*/
timescale = function (init) {
var result = {},
traks = findBox(init, ['moov', 'trak']); // mdhd timescale
return traks.reduce(function (result, trak) {
var tkhd, version, index, id, mdhd;
tkhd = findBox(trak, ['tkhd'])[0];
if (!tkhd) {
return null;
}
version = tkhd[0];
index = version === 0 ? 12 : 20;
id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
if (!mdhd) {
return null;
}
version = mdhd[0];
index = version === 0 ? 12 : 20;
result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
return result;
}, result);
};
/**
* Determine the base media decode start time, in seconds, for an MP4
* fragment. If multiple fragments are specified, the earliest time is
* returned.
*
* The base media decode time can be parsed from track fragment
* metadata:
* ```
* moof > traf > tfdt.baseMediaDecodeTime
* ```
* It requires the timescale value from the mdhd to interpret.
*
* @param timescale {object} a hash of track ids to timescale values.
* @return {number} the earliest base media decode start time for the
* fragment, in seconds
*/
startTime = function (timescale, fragment) {
var trafs; // we need info from two children of each track fragment box
trafs = findBox(fragment, ['moof', 'traf']); // determine the start times for each track
var lowestTime = trafs.reduce(function (acc, traf) {
var tfhd = findBox(traf, ['tfhd'])[0]; // get the track id from the tfhd
var id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified
var scale = timescale[id] || 90e3; // get the base media decode time from the tfdt
var tfdt = findBox(traf, ['tfdt'])[0];
var dv = new DataView(tfdt.buffer, tfdt.byteOffset, tfdt.byteLength);
var baseTime; // version 1 is 64 bit
if (tfdt[0] === 1) {
baseTime = getUint64(tfdt.subarray(4, 12));
} else {
baseTime = dv.getUint32(4);
} // convert base time to seconds if it is a valid number.
let seconds;
if (typeof baseTime === 'bigint') {
seconds = baseTime / window$1.BigInt(scale);
} else if (typeof baseTime === 'number' && !isNaN(baseTime)) {
seconds = baseTime / scale;
}
if (seconds < Number.MAX_SAFE_INTEGER) {
seconds = Number(seconds);
}
if (seconds < acc) {
acc = seconds;
}
return acc;
}, Infinity);
return typeof lowestTime === 'bigint' || isFinite(lowestTime) ? lowestTime : 0;
};
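// Illustrative note (not part of the library): startTime above is just the
// smallest per-track value of (tfdt baseMediaDecodeTime / mdhd timescale)
// across the fragment's traf boxes. For example, a track with timescale 90000
// and a tfdt baseMediaDecodeTime of 900000 starts at 900000 / 90000 = 10
// seconds; when no timescale is known for a track id, a 90kHz clock is assumed.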
/**
* Determine the composition start, in seconds, for an MP4
* fragment.
*
* The composition start time of a fragment can be calculated using the base
* media decode time, composition time offset, and timescale, as follows:
*
* compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
*
* All of the aforementioned information is contained within a media fragment's
* `traf` box, except for timescale info, which comes from the initialization
* segment, so a track id (also contained within a `traf`) is also necessary to
* associate it with a timescale
*
*
* @param timescales {object} - a hash of track ids to timescale values.
* @param fragment {Uint8Array} - the bytes of a media segment
* @return {number} the composition start time for the fragment, in seconds
**/
compositionStartTime = function (timescales, fragment) {
var trafBoxes = findBox(fragment, ['moof', 'traf']);
var baseMediaDecodeTime = 0;
var compositionTimeOffset = 0;
var trackId;
if (trafBoxes && trafBoxes.length) {
// The spec states that track run samples contained within a `traf` box are contiguous, but
// it does not explicitly state whether the `traf` boxes themselves are contiguous.
// We will assume that they are, so we only need the first to calculate start time.
var tfhd = findBox(trafBoxes[0], ['tfhd'])[0];
var trun = findBox(trafBoxes[0], ['trun'])[0];
var tfdt = findBox(trafBoxes[0], ['tfdt'])[0];
if (tfhd) {
var parsedTfhd = parseTfhd(tfhd);
trackId = parsedTfhd.trackId;
}
if (tfdt) {
var parsedTfdt = parseTfdt(tfdt);
baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
}
if (trun) {
var parsedTrun = parseTrun(trun);
if (parsedTrun.samples && parsedTrun.samples.length) {
compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
}
}
} // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
// specified.
var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds
if (typeof baseMediaDecodeTime === 'bigint') {
compositionTimeOffset = window$1.BigInt(compositionTimeOffset);
timescale = window$1.BigInt(timescale);
}
var result = (baseMediaDecodeTime + compositionTimeOffset) / timescale;
if (typeof result === 'bigint' && result < Number.MAX_SAFE_INTEGER) {
result = Number(result);
}
return result;
};
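// Worked example (illustrative): with baseMediaDecodeTime = 900000,
// compositionTimeOffset = 3000 and a 90kHz timescale, the formula above yields
// (900000 + 3000) / 90000 ≈ 10.033 seconds. A call would look like
// compositionStartTime({ 1: 90000 }, fragmentBytes) with a hypothetical
// `fragmentBytes` Uint8Array.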
/**
* Find the trackIds of the video tracks in this source.
* Found by parsing the Handler Reference and Track Header Boxes:
* moov > trak > mdia > hdlr
* moov > trak > tkhd
*
* @param {Uint8Array} init - The bytes of the init segment for this source
* @return {Number[]} A list of trackIds
*
* @see ISO-BMFF-12/2015, Section 8.4.3
**/
getVideoTrackIds = function (init) {
var traks = findBox(init, ['moov', 'trak']);
var videoTrackIds = [];
traks.forEach(function (trak) {
var hdlrs = findBox(trak, ['mdia', 'hdlr']);
var tkhds = findBox(trak, ['tkhd']);
hdlrs.forEach(function (hdlr, index) {
var handlerType = parseType$1(hdlr.subarray(8, 12));
var tkhd = tkhds[index];
var view;
var version;
var trackId;
if (handlerType === 'vide') {
view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
version = view.getUint8(0);
trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);
videoTrackIds.push(trackId);
}
});
});
return videoTrackIds;
};
getTimescaleFromMediaHeader = function (mdhd) {
// mdhd is a FullBox, meaning it will have its own version as the first byte
var version = mdhd[0];
var index = version === 0 ? 12 : 20;
return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
};
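// Offset sketch (per the ISO BMFF mdhd layout): version(1) + flags(3) +
// creation_time + modification_time precede the timescale, so 32-bit times
// (version 0) put the timescale at byte 12 (1 + 3 + 4 + 4) and 64-bit times
// (version 1) put it at byte 20 (1 + 3 + 8 + 8), matching the index above.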
/**
* Get all the video, audio, and hint tracks from a non-fragmented
* mp4 segment
*/
getTracks = function (init) {
var traks = findBox(init, ['moov', 'trak']);
var tracks = [];
traks.forEach(function (trak) {
var track = {};
var tkhd = findBox(trak, ['tkhd'])[0];
var view, tkhdVersion; // id
if (tkhd) {
view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
tkhdVersion = view.getUint8(0);
track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
}
var hdlr = findBox(trak, ['mdia', 'hdlr'])[0]; // type
if (hdlr) {
var type = parseType$1(hdlr.subarray(8, 12));
if (type === 'vide') {
track.type = 'video';
} else if (type === 'soun') {
track.type = 'audio';
} else {
track.type = type;
}
} // codec
var stsd = findBox(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
if (stsd) {
var sampleDescriptions = stsd.subarray(8); // gives the codec type string
track.codec = parseType$1(sampleDescriptions.subarray(4, 8));
var codecBox = findBox(sampleDescriptions, [track.codec])[0];
var codecConfig, codecConfigType;
if (codecBox) {
// https://tools.ietf.org/html/rfc6381#section-3.3
if (/^[asm]vc[1-9]$/i.test(track.codec)) {
// we don't need anything but the "config" parameter of the
// avc1 codecBox
codecConfig = codecBox.subarray(78);
codecConfigType = parseType$1(codecConfig.subarray(4, 8));
if (codecConfigType === 'avcC' && codecConfig.length > 11) {
track.codec += '.'; // left padded with zeroes for single digit hex
// profile idc
track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags
track.codec += toHexString(codecConfig[10]); // level idc
track.codec += toHexString(codecConfig[11]);
} else {
// TODO: show a warning that we couldn't parse the codec
// and are using the default
track.codec = 'avc1.4d400d';
}
} else if (/^mp4[av]$/i.test(track.codec)) {
// we do not need anything but the streamDescriptor of the mp4a codecBox
codecConfig = codecBox.subarray(28);
codecConfigType = parseType$1(codecConfig.subarray(4, 8));
if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit
track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
} else {
// TODO: show a warning that we couldn't parse the codec
// and are using the default
track.codec = 'mp4a.40.2';
}
} else {
// flac, opus, etc
track.codec = track.codec.toLowerCase();
}
}
}
var mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
if (mdhd) {
track.timescale = getTimescaleFromMediaHeader(mdhd);
}
tracks.push(track);
});
return tracks;
};
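// Shape sketch (illustrative): for a typical muxed init segment, getTracks()
// above returns entries along the lines of
//   [{ id: 1, type: 'video', codec: 'avc1.4d400d', timescale: 90000 },
//    { id: 2, type: 'audio', codec: 'mp4a.40.2', timescale: 48000 }]
// where the exact codec strings and timescales come from the stsd and mdhd boxes.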
/**
* Returns an array of emsg ID3 data from the provided segmentData.
* An offset can also be provided as the Latest Arrival Time to calculate
* the Event Start Time of v0 EMSG boxes.
* See: https://dashif-documents.azurewebsites.net/Events/master/event.html#Inband-event-timing
*
* @param {Uint8Array} segmentData the segment byte array.
* @param {number} offset the segment start time or Latest Arrival Time.
* @return {Object[]} an array of ID3 parsed from EMSG boxes
*/
getEmsgID3 = function (segmentData, offset = 0) {
var emsgBoxes = findBox(segmentData, ['emsg']);
return emsgBoxes.map(data => {
var parsedBox = emsg.parseEmsgBox(new Uint8Array(data));
var parsedId3Frames = parseId3Frames(parsedBox.message_data);
return {
cueTime: emsg.scaleTime(parsedBox.presentation_time, parsedBox.timescale, parsedBox.presentation_time_delta, offset),
duration: emsg.scaleTime(parsedBox.event_duration, parsedBox.timescale),
frames: parsedId3Frames
};
});
};
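// Shape sketch (illustrative): each entry returned by getEmsgID3() above looks
// like { cueTime, duration, frames }, where cueTime and duration are scaled to
// seconds using the emsg timescale and frames are the ID3 frames parsed from
// the box's message_data.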
var probe$2 = {
// export mp4 inspector's findBox and parseType for backwards compatibility
findBox: findBox,
parseType: parseType$1,
timescale: timescale,
startTime: startTime,
compositionStartTime: compositionStartTime,
videoTrackIds: getVideoTrackIds,
tracks: getTracks,
getTimescaleFromMediaHeader: getTimescaleFromMediaHeader,
getEmsgID3: getEmsgID3
};
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Utilities to detect basic properties and metadata about TS Segments.
*/
var StreamTypes$1 = streamTypes;
var parsePid = function (packet) {
var pid = packet[1] & 0x1f;
pid <<= 8;
pid |= packet[2];
return pid;
};
var parsePayloadUnitStartIndicator = function (packet) {
return !!(packet[1] & 0x40);
};
var parseAdaptionField = function (packet) {
var offset = 0; // if an adaptation field is present, its length is specified by the
// fifth byte of the TS packet header. The adaptation field is
// used to add stuffing to PES packets that don't fill a complete
// TS packet, and to specify some forms of timing and control data
// that we do not currently use.
if ((packet[3] & 0x30) >>> 4 > 0x01) {
offset += packet[4] + 1;
}
return offset;
};
var parseType = function (packet, pmtPid) {
var pid = parsePid(packet);
if (pid === 0) {
return 'pat';
} else if (pid === pmtPid) {
return 'pmt';
} else if (pmtPid) {
return 'pes';
}
return null;
};
var parsePat = function (packet) {
var pusi = parsePayloadUnitStartIndicator(packet);
var offset = 4 + parseAdaptionField(packet);
if (pusi) {
offset += packet[offset] + 1;
}
return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
};
var parsePmt = function (packet) {
var programMapTable = {};
var pusi = parsePayloadUnitStartIndicator(packet);
var payloadOffset = 4 + parseAdaptionField(packet);
if (pusi) {
payloadOffset += packet[payloadOffset] + 1;
} // PMTs can be sent ahead of the time when they should actually
// take effect. We don't believe this should ever be the case
// for HLS but we'll ignore "forward" PMT declarations if we see
// them. Future PMT declarations have the current_next_indicator
// set to zero.
if (!(packet[payloadOffset + 5] & 0x01)) {
return;
}
var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current section
sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
// long the program info descriptors are
programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping table
var offset = 12 + programInfoLength;
while (offset < tableEnd) {
var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_type
programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry
// skip past the elementary stream descriptors, if present
offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
}
return programMapTable;
};
var parsePesType = function (packet, programMapTable) {
var pid = parsePid(packet);
var type = programMapTable[pid];
switch (type) {
case StreamTypes$1.H264_STREAM_TYPE:
return 'video';
case StreamTypes$1.ADTS_STREAM_TYPE:
return 'audio';
case StreamTypes$1.METADATA_STREAM_TYPE:
return 'timed-metadata';
default:
return null;
}
};
var parsePesTime = function (packet) {
var pusi = parsePayloadUnitStartIndicator(packet);
if (!pusi) {
return null;
}
var offset = 4 + parseAdaptionField(packet);
if (offset >= packet.byteLength) {
// From the H 222.0 MPEG-TS spec
// "For transport stream packets carrying PES packets, stuffing is needed when there
// is insufficient PES packet data to completely fill the transport stream packet
// payload bytes. Stuffing is accomplished by defining an adaptation field longer than
// the sum of the lengths of the data elements in it, so that the payload bytes
// remaining after the adaptation field exactly accommodates the available PES packet
// data."
//
// If the offset is >= the length of the packet, then the packet contains no data
// and instead is just adaptation field stuffing bytes
return null;
}
var pes = null;
var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value
// and a DTS value. Determine what combination of values is
// available to work with.
ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
// performs all bitwise operations on 32-bit integers, but JavaScript
// supports a much greater range (53 bits) of integers using standard
// mathematical operations.
// We construct a 31-bit value using bitwise operators over the 31
// most significant bits and then multiply by 4 (equal to a left-shift
// of 2) before we add the final 2 least significant bits of the
// timestamp (equal to an OR.)
if (ptsDtsFlags & 0xC0) {
pes = {}; // the PTS and DTS are not written out directly. For information
// on how they are encoded, see
// http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
pes.pts *= 4; // Left shift by 2
pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
pes.dts = pes.pts;
if (ptsDtsFlags & 0x40) {
pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
pes.dts *= 4; // Left shift by 2
pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
}
}
return pes;
};
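// Worked example (illustrative): for the maximum 33-bit timestamp 8589934591
// (2^33 - 1), the 31 most significant bits give 2147483647; multiplying by 4
// (the left shift of 2 described above) yields 8589934588, and adding the two
// least significant bits (3) recovers 8589934591 without 32-bit overflow.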
var parseNalUnitType = function (type) {
switch (type) {
case 0x05:
return 'slice_layer_without_partitioning_rbsp_idr';
case 0x06:
return 'sei_rbsp';
case 0x07:
return 'seq_parameter_set_rbsp';
case 0x08:
return 'pic_parameter_set_rbsp';
case 0x09:
return 'access_unit_delimiter_rbsp';
default:
return null;
}
};
var videoPacketContainsKeyFrame = function (packet) {
var offset = 4 + parseAdaptionField(packet);
var frameBuffer = packet.subarray(offset);
var frameI = 0;
var frameSyncPoint = 0;
var foundKeyFrame = false;
var nalType; // advance the sync point to a NAL start, if necessary
for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
if (frameBuffer[frameSyncPoint + 2] === 1) {
// the sync point is properly aligned
frameI = frameSyncPoint + 5;
break;
}
}
while (frameI < frameBuffer.byteLength) {
// look at the current byte to determine if we've hit the end of
// a NAL unit boundary
switch (frameBuffer[frameI]) {
case 0:
// skip past non-sync sequences
if (frameBuffer[frameI - 1] !== 0) {
frameI += 2;
break;
} else if (frameBuffer[frameI - 2] !== 0) {
frameI++;
break;
}
if (frameSyncPoint + 3 !== frameI - 2) {
nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
foundKeyFrame = true;
}
} // drop trailing zeroes
do {
frameI++;
} while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
frameSyncPoint = frameI - 2;
frameI += 3;
break;
case 1:
// skip past non-sync sequences
if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
frameI += 3;
break;
}
nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
foundKeyFrame = true;
}
frameSyncPoint = frameI - 2;
frameI += 3;
break;
default:
// the current byte isn't a one or zero, so it cannot be part
// of a sync sequence
frameI += 3;
break;
}
}
frameBuffer = frameBuffer.subarray(frameSyncPoint);
frameI -= frameSyncPoint;
frameSyncPoint = 0; // parse the final nal
if (frameBuffer && frameBuffer.byteLength > 3) {
nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
foundKeyFrame = true;
}
}
return foundKeyFrame;
};
var probe$1 = {
parseType: parseType,
parsePat: parsePat,
parsePmt: parsePmt,
parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
parsePesType: parsePesType,
parsePesTime: parsePesTime,
videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
};
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Parse mpeg2 transport stream packets to extract basic timing information
*/
var StreamTypes = streamTypes;
var handleRollover = timestampRolloverStream.handleRollover;
var probe = {};
probe.ts = probe$1;
probe.aac = utils;
var ONE_SECOND_IN_TS = clock$2.ONE_SECOND_IN_TS;
var MP2T_PACKET_LENGTH = 188,
// bytes
SYNC_BYTE = 0x47;
/**
* walks through segment data looking for pat and pmt packets to parse out
* program map table information
*/
var parsePsi_ = function (bytes, pmt) {
var startIndex = 0,
endIndex = MP2T_PACKET_LENGTH,
packet,
type;
while (endIndex < bytes.byteLength) {
// Look for a pair of start and end sync bytes in the data..
if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
// We found a packet
packet = bytes.subarray(startIndex, endIndex);
type = probe.ts.parseType(packet, pmt.pid);
switch (type) {
case 'pat':
pmt.pid = probe.ts.parsePat(packet);
break;
case 'pmt':
var table = probe.ts.parsePmt(packet);
pmt.table = pmt.table || {};
Object.keys(table).forEach(function (key) {
pmt.table[key] = table[key];
});
break;
}
startIndex += MP2T_PACKET_LENGTH;
endIndex += MP2T_PACKET_LENGTH;
continue;
} // If we get here, we have somehow become de-synchronized and we need to step
// forward one byte at a time until we find a pair of sync bytes that denote
// a packet
startIndex++;
endIndex++;
}
};
/**
* walks through the segment data from the start and end to get timing information
* for the first and last audio pes packets
*/
var parseAudioPes_ = function (bytes, pmt, result) {
var startIndex = 0,
endIndex = MP2T_PACKET_LENGTH,
packet,
type,
pesType,
pusi,
parsed;
var endLoop = false; // Start walking from start of segment to get first audio packet
while (endIndex <= bytes.byteLength) {
// Look for a pair of start and end sync bytes in the data..
if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
// We found a packet
packet = bytes.subarray(startIndex, endIndex);
type = probe.ts.parseType(packet, pmt.pid);
switch (type) {
case 'pes':
pesType = probe.ts.parsePesType(packet, pmt.table);
pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
if (pesType === 'audio' && pusi) {
parsed = probe.ts.parsePesTime(packet);
if (parsed) {
parsed.type = 'audio';
result.audio.push(parsed);
endLoop = true;
}
}
break;
}
if (endLoop) {
break;
}
startIndex += MP2T_PACKET_LENGTH;
endIndex += MP2T_PACKET_LENGTH;
continue;
} // If we get here, we have somehow become de-synchronized and we need to step
// forward one byte at a time until we find a pair of sync bytes that denote
// a packet
startIndex++;
endIndex++;
} // Start walking from end of segment to get last audio packet
endIndex = bytes.byteLength;
startIndex = endIndex - MP2T_PACKET_LENGTH;
endLoop = false;
while (startIndex >= 0) {
// Look for a pair of start and end sync bytes in the data..
if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
// We found a packet
packet = bytes.subarray(startIndex, endIndex);
type = probe.ts.parseType(packet, pmt.pid);
switch (type) {
case 'pes':
pesType = probe.ts.parsePesType(packet, pmt.table);
pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
if (pesType === 'audio' && pusi) {
parsed = probe.ts.parsePesTime(packet);
if (parsed) {
parsed.type = 'audio';
result.audio.push(parsed);
endLoop = true;
}
}
break;
}
if (endLoop) {
break;
}
startIndex -= MP2T_PACKET_LENGTH;
endIndex -= MP2T_PACKET_LENGTH;
continue;
} // If we get here, we have somehow become de-synchronized and we need to step
// forward one byte at a time until we find a pair of sync bytes that denote
// a packet
startIndex--;
endIndex--;
}
};
/**
* walks through the segment data from the start and end to get timing information
* for the first and last video pes packets as well as timing information for the first
* key frame.
*/
var parseVideoPes_ = function (bytes, pmt, result) {
var startIndex = 0,
endIndex = MP2T_PACKET_LENGTH,
packet,
type,
pesType,
pusi,
parsed,
frame,
i,
pes;
var endLoop = false;
var currentFrame = {
data: [],
size: 0
}; // Start walking from start of segment to get first video packet
while (endIndex < bytes.byteLength) {
// Look for a pair of start and end sync bytes in the data..
if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
// We found a packet
packet = bytes.subarray(startIndex, endIndex);
type = probe.ts.parseType(packet, pmt.pid);
switch (type) {
case 'pes':
pesType = probe.ts.parsePesType(packet, pmt.table);
pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
if (pesType === 'video') {
if (pusi && !endLoop) {
parsed = probe.ts.parsePesTime(packet);
if (parsed) {
parsed.type = 'video';
result.video.push(parsed);
endLoop = true;
}
}
if (!result.firstKeyFrame) {
if (pusi) {
if (currentFrame.size !== 0) {
frame = new Uint8Array(currentFrame.size);
i = 0;
while (currentFrame.data.length) {
pes = currentFrame.data.shift();
frame.set(pes, i);
i += pes.byteLength;
}
if (probe.ts.videoPacketContainsKeyFrame(frame)) {
var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
// the keyframe seems to work fine with HLS playback
// and definitely preferable to a crash with TypeError...
if (firstKeyFrame) {
result.firstKeyFrame = firstKeyFrame;
result.firstKeyFrame.type = 'video';
} else {
// eslint-disable-next-line
console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
}
}
currentFrame.size = 0;
}
}
currentFrame.data.push(packet);
currentFrame.size += packet.byteLength;
}
}
break;
}
if (endLoop && result.firstKeyFrame) {
break;
}
startIndex += MP2T_PACKET_LENGTH;
endIndex += MP2T_PACKET_LENGTH;
continue;
} // If we get here, we have somehow become de-synchronized and we need to step
// forward one byte at a time until we find a pair of sync bytes that denote
// a packet
startIndex++;
endIndex++;
} // Start walking from end of segment to get last video packet
endIndex = bytes.byteLength;
startIndex = endIndex - MP2T_PACKET_LENGTH;
endLoop = false;
while (startIndex >= 0) {
// Look for a pair of start and end sync bytes in the data..
if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
// We found a packet
packet = bytes.subarray(startIndex, endIndex);
type = probe.ts.parseType(packet, pmt.pid);
switch (type) {
case 'pes':
pesType = probe.ts.parsePesType(packet, pmt.table);
pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
if (pesType === 'video' && pusi) {
parsed = probe.ts.parsePesTime(packet);
if (parsed) {
parsed.type = 'video';
result.video.push(parsed);
endLoop = true;
}
}
break;
}
if (endLoop) {
break;
}
startIndex -= MP2T_PACKET_LENGTH;
endIndex -= MP2T_PACKET_LENGTH;
continue;
} // If we get here, we have somehow become de-synchronized and we need to step
// forward one byte at a time until we find a pair of sync bytes that denote
// a packet
startIndex--;
endIndex--;
}
};
/**
* Adjusts the timestamp information for the segment to account for
* rollover and converts it to seconds based on the PES packet timescale (90kHz clock)
*/
var adjustTimestamp_ = function (segmentInfo, baseTimestamp) {
if (segmentInfo.audio && segmentInfo.audio.length) {
var audioBaseTimestamp = baseTimestamp;
if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
audioBaseTimestamp = segmentInfo.audio[0].dts;
}
segmentInfo.audio.forEach(function (info) {
info.dts = handleRollover(info.dts, audioBaseTimestamp);
info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in seconds
info.dtsTime = info.dts / ONE_SECOND_IN_TS;
info.ptsTime = info.pts / ONE_SECOND_IN_TS;
});
}
if (segmentInfo.video && segmentInfo.video.length) {
var videoBaseTimestamp = baseTimestamp;
if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
videoBaseTimestamp = segmentInfo.video[0].dts;
}
segmentInfo.video.forEach(function (info) {
info.dts = handleRollover(info.dts, videoBaseTimestamp);
info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in seconds
info.dtsTime = info.dts / ONE_SECOND_IN_TS;
info.ptsTime = info.pts / ONE_SECOND_IN_TS;
});
if (segmentInfo.firstKeyFrame) {
var frame = segmentInfo.firstKeyFrame;
frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in seconds
frame.dtsTime = frame.dts / ONE_SECOND_IN_TS;
frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;
}
}
};
/**
* inspects the aac data stream for start and end time information
*/
var inspectAac_ = function (bytes) {
var endLoop = false,
audioCount = 0,
sampleRate = null,
timestamp = null,
frameSize = 0,
byteIndex = 0,
packet;
while (bytes.length - byteIndex >= 3) {
var type = probe.aac.parseType(bytes, byteIndex);
switch (type) {
case 'timed-metadata':
// Exit early because we don't have enough to parse
// the ID3 tag header
if (bytes.length - byteIndex < 10) {
endLoop = true;
break;
}
frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
// to emit a full packet
if (frameSize > bytes.length) {
endLoop = true;
break;
}
if (timestamp === null) {
packet = bytes.subarray(byteIndex, byteIndex + frameSize);
timestamp = probe.aac.parseAacTimestamp(packet);
}
byteIndex += frameSize;
break;
case 'audio':
// Exit early because we don't have enough to parse
// the ADTS frame header
if (bytes.length - byteIndex < 7) {
endLoop = true;
break;
}
frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
// to emit a full packet
if (frameSize > bytes.length) {
endLoop = true;
break;
}
if (sampleRate === null) {
packet = bytes.subarray(byteIndex, byteIndex + frameSize);
sampleRate = probe.aac.parseSampleRate(packet);
}
audioCount++;
byteIndex += frameSize;
break;
default:
byteIndex++;
break;
}
if (endLoop) {
return null;
}
}
if (sampleRate === null || timestamp === null) {
return null;
}
var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
var result = {
audio: [{
type: 'audio',
dts: timestamp,
pts: timestamp
}, {
type: 'audio',
dts: timestamp + audioCount * 1024 * audioTimescale,
pts: timestamp + audioCount * 1024 * audioTimescale
}]
};
return result;
};
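// Worked example (illustrative): each ADTS frame carries 1024 samples, so at a
// 48000 Hz sample rate audioTimescale is 90000 / 48000 = 1.875 ticks per sample
// and one frame spans 1024 * 1.875 = 1920 ticks (~21.3 ms); 100 frames therefore
// push the end timestamp 192000 ticks past the first ID3 timestamp.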
/**
* inspects the transport stream segment data for start and end time information
* of the audio and video tracks (when present) as well as the first key frame's
* start time.
*/
var inspectTs_ = function (bytes) {
var pmt = {
pid: null,
table: null
};
var result = {};
parsePsi_(bytes, pmt);
for (var pid in pmt.table) {
if (pmt.table.hasOwnProperty(pid)) {
var type = pmt.table[pid];
switch (type) {
case StreamTypes.H264_STREAM_TYPE:
result.video = [];
parseVideoPes_(bytes, pmt, result);
if (result.video.length === 0) {
delete result.video;
}
break;
case StreamTypes.ADTS_STREAM_TYPE:
result.audio = [];
parseAudioPes_(bytes, pmt, result);
if (result.audio.length === 0) {
delete result.audio;
}
break;
}
}
}
return result;
};
/**
* Inspects segment byte data and returns an object with start and end timing information
*
* @param {Uint8Array} bytes The segment byte data
* @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
* timestamps for rollover. This value must be in the 90kHz clock.
* @return {Object} Object containing start and end frame timing info of segment.
*/
var inspect = function (bytes, baseTimestamp) {
var isAacData = probe.aac.isLikelyAacData(bytes);
var result;
if (isAacData) {
result = inspectAac_(bytes);
} else {
result = inspectTs_(bytes);
}
if (!result || !result.audio && !result.video) {
return null;
}
adjustTimestamp_(result, baseTimestamp);
return result;
};
var tsInspector = {
inspect: inspect,
parseAudioPes_: parseAudioPes_
};
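// Usage sketch (illustrative, not part of the library): `segmentBytes` is a
// hypothetical Uint8Array of TS or ADTS data, and the base timestamp is on the
// 90kHz clock:
//
//   var timing = tsInspector.inspect(segmentBytes, 10 * 90000);
//   if (timing && timing.video) {
//     var segmentStartSeconds = timing.video[0].ptsTime;
//   }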
/* global self */
/**
* Re-emits transmuxer events by converting them into messages to the
* world outside the worker.
*
* @param {Object} transmuxer the transmuxer to wire events on
* @private
*/
const wireTransmuxerEvents = function (self, transmuxer) {
transmuxer.on('data', function (segment) {
// transfer ownership of the underlying ArrayBuffer
// instead of doing a copy to save memory
// ArrayBuffers are transferable but generic TypedArrays are not
// @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
const initArray = segment.initSegment;
segment.initSegment = {
data: initArray.buffer,
byteOffset: initArray.byteOffset,
byteLength: initArray.byteLength
};
const typedArray = segment.data;
segment.data = typedArray.buffer;
self.postMessage({
action: 'data',
segment,
byteOffset: typedArray.byteOffset,
byteLength: typedArray.byteLength
}, [segment.data]);
});
transmuxer.on('done', function (data) {
self.postMessage({
action: 'done'
});
});
transmuxer.on('gopInfo', function (gopInfo) {
self.postMessage({
action: 'gopInfo',
gopInfo
});
});
transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
const videoSegmentTimingInfo = {
start: {
decode: clock$2.videoTsToSeconds(timingInfo.start.dts),
presentation: clock$2.videoTsToSeconds(timingInfo.start.pts)
},
end: {
decode: clock$2.videoTsToSeconds(timingInfo.end.dts),
presentation: clock$2.videoTsToSeconds(timingInfo.end.pts)
},
baseMediaDecodeTime: clock$2.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
};
if (timingInfo.prependedContentDuration) {
videoSegmentTimingInfo.prependedContentDuration = clock$2.videoTsToSeconds(timingInfo.prependedContentDuration);
}
self.postMessage({
action: 'videoSegmentTimingInfo',
videoSegmentTimingInfo
});
});
transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
// Note that all times for [audio/video]SegmentTimingInfo events are in video clock
const audioSegmentTimingInfo = {
start: {
decode: clock$2.videoTsToSeconds(timingInfo.start.dts),
presentation: clock$2.videoTsToSeconds(timingInfo.start.pts)
},
end: {
decode: clock$2.videoTsToSeconds(timingInfo.end.dts),
presentation: clock$2.videoTsToSeconds(timingInfo.end.pts)
},
baseMediaDecodeTime: clock$2.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
};
if (timingInfo.prependedContentDuration) {
audioSegmentTimingInfo.prependedContentDuration = clock$2.videoTsToSeconds(timingInfo.prependedContentDuration);
}
self.postMessage({
action: 'audioSegmentTimingInfo',
audioSegmentTimingInfo
});
});
transmuxer.on('id3Frame', function (id3Frame) {
self.postMessage({
action: 'id3Frame',
id3Frame
});
});
transmuxer.on('caption', function (caption) {
self.postMessage({
action: 'caption',
caption
});
});
transmuxer.on('trackinfo', function (trackInfo) {
self.postMessage({
action: 'trackinfo',
trackInfo
});
});
transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
// convert to video TS since we prioritize video time over audio
self.postMessage({
action: 'audioTimingInfo',
audioTimingInfo: {
start: clock$2.videoTsToSeconds(audioTimingInfo.start),
end: clock$2.videoTsToSeconds(audioTimingInfo.end)
}
});
});
transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
self.postMessage({
action: 'videoTimingInfo',
videoTimingInfo: {
start: clock$2.videoTsToSeconds(videoTimingInfo.start),
end: clock$2.videoTsToSeconds(videoTimingInfo.end)
}
});
});
transmuxer.on('log', function (log) {
self.postMessage({
action: 'log',
log
});
});
};
/**
* All incoming messages route through this hash. If no function exists
* to handle an incoming message, then we ignore the message.
*
* @class MessageHandlers
* @param {Object} options the options to initialize with
*/
class MessageHandlers {
constructor(self, options) {
this.options = options || {};
this.self = self;
this.init();
}
/**
* initialize our web worker and wire all the events.
*/
init() {
if (this.transmuxer) {
this.transmuxer.dispose();
}
this.transmuxer = new transmuxer.Transmuxer(this.options);
wireTransmuxerEvents(this.self, this.transmuxer);
}
pushMp4Captions(data) {
if (!this.captionParser) {
this.captionParser = new captionParser();
this.captionParser.init();
}
const segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
const parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
this.self.postMessage({
action: 'mp4Captions',
captions: parsed && parsed.captions || [],
logs: parsed && parsed.logs || [],
data: segment.buffer
}, [segment.buffer]);
}
probeMp4StartTime({
timescales,
data
}) {
const startTime = probe$2.startTime(timescales, data);
this.self.postMessage({
action: 'probeMp4StartTime',
startTime,
data
}, [data.buffer]);
}
probeMp4Tracks({
data
}) {
const tracks = probe$2.tracks(data);
this.self.postMessage({
action: 'probeMp4Tracks',
tracks,
data
}, [data.buffer]);
}
/**
* Probes an mp4 segment for EMSG boxes containing ID3 data.
* https://aomediacodec.github.io/id3-emsg/
*
* @param {Uint8Array} data segment data
* @param {number} offset segment start time
* @return {Object[]} an array of ID3 frames
*/
probeEmsgID3({
data,
offset
}) {
const id3Frames = probe$2.getEmsgID3(data, offset);
this.self.postMessage({
action: 'probeEmsgID3',
id3Frames,
emsgData: data
}, [data.buffer]);
}
/**
* Probe an mpeg2-ts segment to determine the start time of the segment in its
* internal "media time," as well as whether it contains video and/or audio.
*
* @private
* @param {Uint8Array} bytes - segment bytes
* @param {number} baseStartTime
* Relative reference timestamp used when adjusting frame timestamps for rollover.
* This value should be in seconds, as it's converted to a 90khz clock within the
* function body.
* @return {Object} The start time of the current segment in "media time" as well as
* whether it contains video and/or audio
*/
probeTs({
data,
baseStartTime
}) {
const tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock$2.ONE_SECOND_IN_TS : void 0;
const timeInfo = tsInspector.inspect(data, tsStartTime);
let result = null;
if (timeInfo) {
result = {
// each type's time info comes back as an array of 2 times, start and end
hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
};
if (result.hasVideo) {
result.videoStart = timeInfo.video[0].ptsTime;
}
if (result.hasAudio) {
result.audioStart = timeInfo.audio[0].ptsTime;
}
}
this.self.postMessage({
action: 'probeTs',
result,
data
}, [data.buffer]);
}
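// Worked example (illustrative): a baseStartTime of 10 (seconds) becomes
// 10 * 90000 = 900000 ticks on the 90kHz clock before being handed to
// tsInspector.inspect above.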
clearAllMp4Captions() {
if (this.captionParser) {
this.captionParser.clearAllCaptions();
}
}
clearParsedMp4Captions() {
if (this.captionParser) {
this.captionParser.clearParsedCaptions();
}
}
/**
* Adds data (a ts segment) to the start of the transmuxer pipeline for
* processing.
*
* @param {ArrayBuffer} data data to push into the muxer
*/
push(data) {
// Cast array buffer to correct type for transmuxer
const segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
this.transmuxer.push(segment);
}
/**
* Recreate the transmuxer so that the next segment added via `push`
* starts with a fresh transmuxer.
*/
reset() {
this.transmuxer.reset();
}
/**
* Set the value that will be used as the `baseMediaDecodeTime` time for the
* next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
* set relative to the first based on the PTS values.
*
* @param {Object} data used to set the timestamp offset in the muxer
*/
setTimestampOffset(data) {
const timestampOffset = data.timestampOffset || 0;
this.transmuxer.setBaseMediaDecodeTime(Math.round(clock$2.secondsToVideoTs(timestampOffset)));
}
setAudioAppendStart(data) {
this.transmuxer.setAudioAppendStart(Math.ceil(clock$2.secondsToVideoTs(data.appendStart)));
}
setRemux(data) {
this.transmuxer.setRemux(data.remux);
}
/**
* Forces the pipeline to finish processing the last segment and emit its
* results.
*
* @param {Object} data event data, not really used
*/
flush(data) {
this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
self.postMessage({
action: 'done',
type: 'transmuxed'
});
}
endTimeline() {
this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
// timelines
self.postMessage({
action: 'endedtimeline',
type: 'transmuxed'
});
}
alignGopsWith(data) {
this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
}
}
/**
* Our web worker interface so that things can talk to mux.js
* that will be running in a web worker. The scope is passed to this by
* webworkify.
*
* @param {Object} self the scope for the web worker
*/
self.onmessage = function (event) {
if (event.data.action === 'init' && event.data.options) {
this.messageHandlers = new MessageHandlers(self, event.data.options);
return;
}
if (!this.messageHandlers) {
this.messageHandlers = new MessageHandlers(self);
}
if (event.data && event.data.action && event.data.action !== 'init') {
if (this.messageHandlers[event.data.action]) {
this.messageHandlers[event.data.action](event.data);
}
}
};
}));
var TransmuxWorker = factory(workerCode$1);
/* rollup-plugin-worker-factory end for worker!/home/runner/work/http-streaming/http-streaming/src/transmuxer-worker.js */
const handleData_ = (event, transmuxedData, callback) => {
const {
type,
initSegment,
captions,
captionStreams,
metadata,
videoFrameDtsTime,
videoFramePtsTime
} = event.data.segment;
transmuxedData.buffer.push({
captions,
captionStreams,
metadata
});
const boxes = event.data.segment.boxes || {
data: event.data.segment.data
};
const result = {
type,
// cast ArrayBuffer to TypedArray
data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
};
if (typeof videoFrameDtsTime !== 'undefined') {
result.videoFrameDtsTime = videoFrameDtsTime;
}
if (typeof videoFramePtsTime !== 'undefined') {
result.videoFramePtsTime = videoFramePtsTime;
}
callback(result);
};
const handleDone_ = ({
transmuxedData,
callback
}) => {
// Previously we only returned data on data events,
// not on done events. Clear out the buffer to keep that consistent.
transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
// have received
callback(transmuxedData);
};
const handleGopInfo_ = (event, transmuxedData) => {
transmuxedData.gopInfo = event.data.gopInfo;
};
const processTransmux = options => {
const {
transmuxer,
bytes,
audioAppendStart,
gopsToAlignWith,
remux,
onData,
onTrackInfo,
onAudioTimingInfo,
onVideoTimingInfo,
onVideoSegmentTimingInfo,
onAudioSegmentTimingInfo,
onId3,
onCaptions,
onDone,
onEndedTimeline,
onTransmuxerLog,
isEndOfTimeline,
segment,
triggerSegmentEventFn
} = options;
const transmuxedData = {
buffer: []
};
let waitForEndedTimelineEvent = isEndOfTimeline;
const handleMessage = event => {
if (transmuxer.currentTransmux !== options) {
// disposed
return;
}
if (event.data.action === 'data') {
handleData_(event, transmuxedData, onData);
}
if (event.data.action === 'trackinfo') {
onTrackInfo(event.data.trackInfo);
}
if (event.data.action === 'gopInfo') {
handleGopInfo_(event, transmuxedData);
}
if (event.data.action === 'audioTimingInfo') {
onAudioTimingInfo(event.data.audioTimingInfo);
}
if (event.data.action === 'videoTimingInfo') {
onVideoTimingInfo(event.data.videoTimingInfo);
}
if (event.data.action === 'videoSegmentTimingInfo') {
onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
}
if (event.data.action === 'audioSegmentTimingInfo') {
onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
}
if (event.data.action === 'id3Frame') {
onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
}
if (event.data.action === 'caption') {
onCaptions(event.data.caption);
}
if (event.data.action === 'endedtimeline') {
waitForEndedTimelineEvent = false;
onEndedTimeline();
}
if (event.data.action === 'log') {
onTransmuxerLog(event.data.log);
} // wait for the transmuxed event since we may have audio and video
if (event.data.type !== 'transmuxed') {
return;
} // If the "endedtimeline" event has not yet fired, and this segment represents the end
// of a timeline, that means there may still be data events before the segment
// processing can be considered complete. In that case, the final event should be
// an "endedtimeline" event with the type "transmuxed."
if (waitForEndedTimelineEvent) {
return;
}
transmuxer.onmessage = null;
handleDone_({
transmuxedData,
callback: onDone
});
/* eslint-disable no-use-before-define */
dequeue(transmuxer);
/* eslint-enable */
};
const handleError = () => {
const error = {
message: 'Received an error message from the transmuxer worker',
metadata: {
errorType: videojs.Error.StreamingFailedToTransmuxSegment,
segmentInfo: segmentInfoPayload({
segment
})
}
};
onDone(null, error);
};
transmuxer.onmessage = handleMessage;
transmuxer.onerror = handleError;
if (audioAppendStart) {
transmuxer.postMessage({
action: 'setAudioAppendStart',
appendStart: audioAppendStart
});
} // allow empty arrays to be passed to clear out GOPs
if (Array.isArray(gopsToAlignWith)) {
transmuxer.postMessage({
action: 'alignGopsWith',
gopsToAlignWith
});
}
if (typeof remux !== 'undefined') {
transmuxer.postMessage({
action: 'setRemux',
remux
});
}
if (bytes.byteLength) {
const buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
const byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
triggerSegmentEventFn({
type: 'segmenttransmuxingstart',
segment
});
transmuxer.postMessage({
action: 'push',
// Send the typed-array of data as an ArrayBuffer so that
// it can be sent as a "Transferable" and avoid the costly
// memory copy
data: buffer,
// To recreate the original typed-array, we need information
// about what portion of the ArrayBuffer it was a view into
byteOffset,
byteLength: bytes.byteLength
}, [buffer]);
}
if (isEndOfTimeline) {
transmuxer.postMessage({
action: 'endTimeline'
});
} // even if we didn't push any bytes, we have to make sure we flush in case we reached
// the end of the segment
transmuxer.postMessage({
action: 'flush'
});
};
const dequeue = transmuxer => {
transmuxer.currentTransmux = null;
if (transmuxer.transmuxQueue.length) {
transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
if (typeof transmuxer.currentTransmux === 'function') {
transmuxer.currentTransmux();
} else {
processTransmux(transmuxer.currentTransmux);
}
}
};
const processAction = (transmuxer, action) => {
transmuxer.postMessage({
action
});
dequeue(transmuxer);
};
const enqueueAction = (action, transmuxer) => {
if (!transmuxer.currentTransmux) {
transmuxer.currentTransmux = action;
processAction(transmuxer, action);
return;
}
transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
};
const reset = transmuxer => {
enqueueAction('reset', transmuxer);
};
const endTimeline = transmuxer => {
enqueueAction('endTimeline', transmuxer);
};
const transmux = options => {
if (!options.transmuxer.currentTransmux) {
options.transmuxer.currentTransmux = options;
processTransmux(options);
return;
}
options.transmuxer.transmuxQueue.push(options);
};
const createTransmuxer = options => {
const transmuxer = new TransmuxWorker();
transmuxer.currentTransmux = null;
transmuxer.transmuxQueue = [];
const term = transmuxer.terminate;
transmuxer.terminate = () => {
transmuxer.currentTransmux = null;
transmuxer.transmuxQueue.length = 0;
return term.call(transmuxer);
};
transmuxer.postMessage({
action: 'init',
options
});
return transmuxer;
};
var segmentTransmuxer = {
reset,
endTimeline,
transmux,
createTransmuxer
};
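// Usage sketch (illustrative, not part of the library): a minimal caller would
// create a worker and queue one transmux job; `tsSegmentBytes` is a hypothetical
// Uint8Array, and the remaining callbacks destructured in processTransmux above
// (onTrackInfo, onId3, triggerSegmentEventFn, etc.) would also be supplied in
// practice:
//
//   const worker = segmentTransmuxer.createTransmuxer({ remux: false });
//   segmentTransmuxer.transmux({
//     transmuxer: worker,
//     bytes: tsSegmentBytes,
//     isEndOfTimeline: false,
//     onData: (result) => { /* fmp4 data + initSegment */ },
//     onDone: (result, error) => worker.terminate()
//   });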
const workerCallback = function (options) {
const transmuxer = options.transmuxer;
const endAction = options.endAction || options.action;
const callback = options.callback;
const message = _extends({}, options, {
endAction: null,
transmuxer: null,
callback: null
});
const listenForEndEvent = event => {
if (event.data.action !== endAction) {
return;
}
transmuxer.removeEventListener('message', listenForEndEvent); // transfer ownership of bytes back to us.
if (event.data.data) {
event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);
if (options.data) {
options.data = event.data.data;
}
}
callback(event.data);
};
transmuxer.addEventListener('message', listenForEndEvent);
if (options.data) {
const isArrayBuffer = options.data instanceof ArrayBuffer;
message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
message.byteLength = options.data.byteLength;
const transfers = [isArrayBuffer ? options.data : options.data.buffer];
transmuxer.postMessage(message, transfers);
} else {
transmuxer.postMessage(message);
}
};
const REQUEST_ERRORS = {
FAILURE: 2,
TIMEOUT: -101,
ABORTED: -102
};
/**
* Abort all requests
*
* @param {Object} activeXhrs - an object that tracks all XHR requests
*/
const abortAll = activeXhrs => {
activeXhrs.forEach(xhr => {
xhr.abort();
});
};
/**
* Gather important bandwidth stats once a request has completed
*
* @param {Object} request - the XHR request from which to gather stats
*/
const getRequestStats = request => {
return {
bandwidth: request.bandwidth,
bytesReceived: request.bytesReceived || 0,
roundTripTime: request.roundTripTime || 0
};
};
/**
* If possible gather bandwidth stats as a request is in
* progress
*
* @param {Event} progressEvent - an event object from an XHR's progress event
*/
const getProgressStats = progressEvent => {
const request = progressEvent.target;
const roundTripTime = Date.now() - request.requestTime;
const stats = {
bandwidth: Infinity,
bytesReceived: 0,
roundTripTime: roundTripTime || 0
};
stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok
// because we should only use bandwidth stats on progress to determine when to
// abort a request early due to insufficient bandwidth
stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
return stats;
};
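// Worked example (illustrative): 250000 bytes received over a 500 ms round trip
// gives 250000 / 500 * 8 * 1000 = 4,000,000 bits per second (4 Mbps) from the
// formula above.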
/**
* Handle all error conditions in one place and return an object
* with all the information
*
* @param {Error|null} error - if non-null signals an error occurred with the XHR
* @param {Object} request - the XHR request that possibly generated the error
*/
const handleErrors = (error, request) => {
const {
requestType
} = request;
const metadata = getStreamingNetworkErrorMetadata({
requestType,
request,
error
});
if (request.timedout) {
return {
status: request.status,
message: 'HLS request timed-out at URL: ' + request.uri,
code: REQUEST_ERRORS.TIMEOUT,
xhr: request,
metadata
};
}
if (request.aborted) {
return {
status: request.status,
message: 'HLS request aborted at URL: ' + request.uri,
code: REQUEST_ERRORS.ABORTED,
xhr: request,
metadata
};
}
if (error) {
return {
status: request.status,
message: 'HLS request errored at URL: ' + request.uri,
code: REQUEST_ERRORS.FAILURE,
xhr: request,
metadata
};
}
if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
return {
status: request.status,
message: 'Empty HLS response at URL: ' + request.uri,
code: REQUEST_ERRORS.FAILURE,
xhr: request,
metadata
};
}
return null;
};
/**
* Handle responses for key data and convert the key data to the correct format
* for the decryption step later
*
* @param {Object} segment - a simplified copy of the segmentInfo object
* from SegmentLoader
* @param {Array} objects - objects to add the key bytes to.
* @param {Function} finishProcessingFn - a callback to execute to continue processing
* this request
*/
const handleKeyResponse = (segment, objects, finishProcessingFn, triggerSegmentEventFn) => (error, request) => {
const response = request.response;
const errorObj = handleErrors(error, request);
if (errorObj) {
return finishProcessingFn(errorObj, segment);
}
if (response.byteLength !== 16) {
return finishProcessingFn({
status: request.status,
message: 'Invalid HLS key at URL: ' + request.uri,
code: REQUEST_ERRORS.FAILURE,
xhr: request
}, segment);
}
const view = new DataView(response);
const bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
for (let i = 0; i < objects.length; i++) {
objects[i].bytes = bytes;
}
const keyInfo = {
uri: request.uri
};
triggerSegmentEventFn({
type: 'segmentkeyloadcomplete',
segment,
keyInfo
});
return finishProcessingFn(null, segment);
};
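// Note (illustrative): the 16-byte key response is repacked as four 32-bit words
// read big-endian (DataView#getUint32 defaults to big-endian), so key bytes
// 00 01 02 03 ... become 0x00010203 as the first word before being handed to the
// decryption worker.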
const parseInitSegment = (segment, callback) => {
const type = detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
// only know how to parse mp4 init segments at the moment
if (type !== 'mp4') {
const uri = segment.map.resolvedUri || segment.map.uri;
const mediaType = type || 'unknown';
return callback({
internal: true,
message: `Found unsupported ${mediaType} container for initialization segment at URL: ${uri}`,
code: REQUEST_ERRORS.FAILURE,
metadata: {
mediaType
}
});
}
workerCallback({
action: 'probeMp4Tracks',
data: segment.map.bytes,
transmuxer: segment.transmuxer,
callback: ({
tracks,
data
}) => {
// transfer bytes back to us
segment.map.bytes = data;
tracks.forEach(function (track) {
segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
if (segment.map.tracks[track.type]) {
return;
}
segment.map.tracks[track.type] = track;
if (typeof track.id === 'number' && track.timescale) {
segment.map.timescales = segment.map.timescales || {};
segment.map.timescales[track.id] = track.timescale;
}
});
return callback(null);
}
});
};
/**
* Handle init-segment responses
*
* @param {Object} segment - a simplified copy of the segmentInfo object
* from SegmentLoader
* @param {Function} finishProcessingFn - a callback to execute to continue processing
* this request
*/
const handleInitSegmentResponse = ({
segment,
finishProcessingFn,
triggerSegmentEventFn
}) => (error, request) => {
const errorObj = handleErrors(error, request);
if (errorObj) {
return finishProcessingFn(errorObj, segment);
}
const bytes = new Uint8Array(request.response);
triggerSegmentEventFn({
type: 'segmentloaded',
segment
}); // init segment is encrypted, we will have to wait
// until the key request is done to decrypt.
if (segment.map.key) {
segment.map.encryptedBytes = bytes;
return finishProcessingFn(null, segment);
}
segment.map.bytes = bytes;
parseInitSegment(segment, function (parseError) {
if (parseError) {
parseError.xhr = request;
parseError.status = request.status;
return finishProcessingFn(parseError, segment);
}
finishProcessingFn(null, segment);
});
};
/**
* Response handler for segment requests, being sure to set the correct
* property depending on whether the segment is encrypted or not.
* Also records and keeps track of stats that are used for ABR purposes.
*
* @param {Object} segment - a simplified copy of the segmentInfo object
* from SegmentLoader
* @param {Function} finishProcessingFn - a callback to execute to continue processing
* this request
*/
const handleSegmentResponse = ({
segment,
finishProcessingFn,
responseType,
triggerSegmentEventFn
}) => (error, request) => {
const errorObj = handleErrors(error, request);
if (errorObj) {
return finishProcessingFn(errorObj, segment);
}
triggerSegmentEventFn({
type: 'segmentloaded',
segment
});
const newBytes =
// although responseText "should" exist, this guard serves to prevent an error being
// thrown for two primary cases:
// 1. the mime type override stops working, or is not implemented for a specific
// browser
// 2. when using mock XHR libraries like sinon that do not allow the override behavior
responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
segment.stats = getRequestStats(request);
if (segment.key) {
segment.encryptedBytes = new Uint8Array(newBytes);
} else {
segment.bytes = new Uint8Array(newBytes);
}
return finishProcessingFn(null, segment);
};
const transmuxAndNotify = ({
segment,
bytes,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
audioSegmentTimingInfoFn,
id3Fn,
captionsFn,
isEndOfTimeline,
endedTimelineFn,
dataFn,
doneFn,
onTransmuxerLog,
triggerSegmentEventFn
}) => {
const fmp4Tracks = segment.map && segment.map.tracks || {};
const isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
// One reason for this is that in the case of full segments, we want to trust start
// times from the probe, rather than the transmuxer.
let audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
const audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
let videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
const videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');
const finish = () => transmux({
bytes,
transmuxer: segment.transmuxer,
audioAppendStart: segment.audioAppendStart,
gopsToAlignWith: segment.gopsToAlignWith,
remux: isMuxed,
onData: result => {
result.type = result.type === 'combined' ? 'video' : result.type;
dataFn(segment, result);
},
onTrackInfo: trackInfo => {
if (trackInfoFn) {
if (isMuxed) {
trackInfo.isMuxed = true;
}
trackInfoFn(segment, trackInfo);
}
},
onAudioTimingInfo: audioTimingInfo => {
// we only want the first start value we encounter
if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
audioStartFn(audioTimingInfo.start);
audioStartFn = null;
} // we want to continually update the end time
if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
audioEndFn(audioTimingInfo.end);
}
},
onVideoTimingInfo: videoTimingInfo => {
// we only want the first start value we encounter
if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
videoStartFn(videoTimingInfo.start);
videoStartFn = null;
} // we want to continually update the end time
if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
videoEndFn(videoTimingInfo.end);
}
},
onVideoSegmentTimingInfo: videoSegmentTimingInfo => {
const timingInfo = {
pts: {
start: videoSegmentTimingInfo.start.presentation,
end: videoSegmentTimingInfo.end.presentation
},
dts: {
start: videoSegmentTimingInfo.start.decode,
end: videoSegmentTimingInfo.end.decode
}
};
triggerSegmentEventFn({
type: 'segmenttransmuxingtiminginfoavailable',
segment,
timingInfo
});
videoSegmentTimingInfoFn(videoSegmentTimingInfo);
},
onAudioSegmentTimingInfo: audioSegmentTimingInfo => {
const timingInfo = {
pts: {
start: audioSegmentTimingInfo.start.pts,
end: audioSegmentTimingInfo.end.pts
},
dts: {
start: audioSegmentTimingInfo.start.dts,
end: audioSegmentTimingInfo.end.dts
}
};
triggerSegmentEventFn({
type: 'segmenttransmuxingtiminginfoavailable',
segment,
timingInfo
});
audioSegmentTimingInfoFn(audioSegmentTimingInfo);
},
onId3: (id3Frames, dispatchType) => {
id3Fn(segment, id3Frames, dispatchType);
},
onCaptions: captions => {
captionsFn(segment, [captions]);
},
isEndOfTimeline,
onEndedTimeline: () => {
endedTimelineFn();
},
onTransmuxerLog,
onDone: (result, error) => {
if (!doneFn) {
return;
}
result.type = result.type === 'combined' ? 'video' : result.type;
triggerSegmentEventFn({
type: 'segmenttransmuxingcomplete',
segment
});
doneFn(error, segment, result);
},
segment,
triggerSegmentEventFn
}); // In the transmuxer, we don't yet have the ability to extract a "proper" start time.
// Meaning cached frame data may corrupt our notion of where this segment
// really starts. To get around this, probe for the info needed.
workerCallback({
action: 'probeTs',
transmuxer: segment.transmuxer,
data: bytes,
baseStartTime: segment.baseStartTime,
callback: data => {
segment.bytes = bytes = data.data;
const probeResult = data.result;
if (probeResult) {
trackInfoFn(segment, {
hasAudio: probeResult.hasAudio,
hasVideo: probeResult.hasVideo,
isMuxed
});
trackInfoFn = null;
}
finish();
}
});
};
const handleSegmentBytes = ({
segment,
bytes,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
audioSegmentTimingInfoFn,
id3Fn,
captionsFn,
isEndOfTimeline,
endedTimelineFn,
dataFn,
doneFn,
onTransmuxerLog,
triggerSegmentEventFn
}) => {
let bytesAsUint8Array = new Uint8Array(bytes); // TODO:
// We should have a handler that fetches the number of bytes required
// to check if something is fmp4. This will allow us to save bandwidth
// because we can only exclude a playlist and abort requests
// by codec after trackinfo triggers.
if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
segment.isFmp4 = true;
const {
tracks
} = segment.map;
const trackInfo = {
isFmp4: true,
hasVideo: !!tracks.video,
hasAudio: !!tracks.audio
}; // if we have an audio track, with a codec that is not set to
// encrypted audio
if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
trackInfo.audioCodec = tracks.audio.codec;
} // if we have a video track, with a codec that is not set to
// encrypted video
if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
trackInfo.videoCodec = tracks.video.codec;
}
if (tracks.video && tracks.audio) {
trackInfo.isMuxed = true;
} // since we don't support appending fmp4 data on progress, we know we have the full
// segment here
trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
// time. The end time can be roughly calculated by the receiver using the duration.
//
// Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
// that is the true start of the segment (where the playback engine should begin
// decoding).
const finishLoading = (captions, id3Frames) => {
// if the track still has audio at this point it is only possible
// for it to be audio only. See `tracks.video && tracks.audio` if statement
// above.
// we make sure to use segment.bytes here as that
dataFn(segment, {
data: bytesAsUint8Array,
type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
});
if (id3Frames && id3Frames.length) {
id3Fn(segment, id3Frames);
}
if (captions && captions.length) {
captionsFn(segment, captions);
}
doneFn(null, segment, {});
};
workerCallback({
action: 'probeMp4StartTime',
timescales: segment.map.timescales,
data: bytesAsUint8Array,
transmuxer: segment.transmuxer,
callback: ({
data,
startTime
}) => {
// transfer bytes back to us
bytes = data.buffer;
segment.bytes = bytesAsUint8Array = data;
if (trackInfo.hasAudio && !trackInfo.isMuxed) {
timingInfoFn(segment, 'audio', 'start', startTime);
}
if (trackInfo.hasVideo) {
timingInfoFn(segment, 'video', 'start', startTime);
}
workerCallback({
action: 'probeEmsgID3',
data: bytesAsUint8Array,
transmuxer: segment.transmuxer,
offset: startTime,
callback: ({
emsgData,
id3Frames
}) => {
// transfer bytes back to us
bytes = emsgData.buffer;
segment.bytes = bytesAsUint8Array = emsgData; // Run through the CaptionParser in case there are captions.
// Initialize the CaptionParser if it hasn't been initialized yet
if (!tracks.video || !emsgData.byteLength || !segment.transmuxer) {
finishLoading(undefined, id3Frames);
return;
}
workerCallback({
action: 'pushMp4Captions',
endAction: 'mp4Captions',
transmuxer: segment.transmuxer,
data: bytesAsUint8Array,
timescales: segment.map.timescales,
trackIds: [tracks.video.id],
callback: message => {
// transfer bytes back to us
bytes = message.data.buffer;
segment.bytes = bytesAsUint8Array = message.data;
message.logs.forEach(function (log) {
onTransmuxerLog(merge(log, {
stream: 'mp4CaptionParser'
}));
});
finishLoading(message.captions, id3Frames);
}
});
}
});
}
});
return;
} // VTT or other segments that don't need processing
if (!segment.transmuxer) {
doneFn(null, segment, {});
return;
}
if (typeof segment.container === 'undefined') {
segment.container = detectContainerForBytes(bytesAsUint8Array);
}
if (segment.container !== 'ts' && segment.container !== 'aac') {
trackInfoFn(segment, {
hasAudio: false,
hasVideo: false
});
doneFn(null, segment, {});
return;
} // ts or aac
transmuxAndNotify({
segment,
bytes,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
audioSegmentTimingInfoFn,
id3Fn,
captionsFn,
isEndOfTimeline,
endedTimelineFn,
dataFn,
doneFn,
onTransmuxerLog,
triggerSegmentEventFn
});
};
const decrypt = function ({
id,
key,
encryptedBytes,
decryptionWorker,
segment,
doneFn
}, callback) {
const decryptionHandler = event => {
if (event.data.source === id) {
decryptionWorker.removeEventListener('message', decryptionHandler);
const decrypted = event.data.decrypted;
callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
}
};
decryptionWorker.onerror = () => {
const message = 'An error occurred in the decryption worker';
const segmentInfo = segmentInfoPayload({
segment
});
const decryptError = {
message,
metadata: {
error: new Error(message),
errorType: videojs.Error.StreamingFailedToDecryptSegment,
segmentInfo,
keyInfo: {
uri: segment.key.resolvedUri || segment.map.key.resolvedUri
}
}
};
doneFn(decryptError, segment);
};
decryptionWorker.addEventListener('message', decryptionHandler);
let keyBytes;
if (key.bytes.slice) {
keyBytes = key.bytes.slice();
} else {
keyBytes = new Uint32Array(Array.prototype.slice.call(key.bytes));
} // incrementally decrypt the bytes
decryptionWorker.postMessage(createTransferableMessage({
source: id,
encrypted: encryptedBytes,
key: keyBytes,
iv: key.iv
}), [encryptedBytes.buffer, keyBytes.buffer]);
};
/**
* Decrypt the segment via the decryption web worker
*
* @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
* routines
* @param {Object} segment - a simplified copy of the segmentInfo object
* from SegmentLoader
* @param {Function} trackInfoFn - a callback that receives track info
* @param {Function} timingInfoFn - a callback that receives timing info
* @param {Function} videoSegmentTimingInfoFn
* a callback that receives video timing info based on media times and
* any adjustments made by the transmuxer
* @param {Function} audioSegmentTimingInfoFn
* a callback that receives audio timing info based on media times and
* any adjustments made by the transmuxer
* @param {boolean} isEndOfTimeline
* true if this segment represents the last segment in a timeline
* @param {Function} endedTimelineFn
* a callback made when a timeline is ended, will only be called if
* isEndOfTimeline is true
* @param {Function} dataFn - a callback that is executed when segment bytes are available
* and ready to use
* @param {Function} doneFn - a callback that is executed after decryption has completed
*/
const decryptSegment = ({
decryptionWorker,
segment,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
audioSegmentTimingInfoFn,
id3Fn,
captionsFn,
isEndOfTimeline,
endedTimelineFn,
dataFn,
doneFn,
onTransmuxerLog,
triggerSegmentEventFn
}) => {
triggerSegmentEventFn({
type: 'segmentdecryptionstart'
});
decrypt({
id: segment.requestId,
key: segment.key,
encryptedBytes: segment.encryptedBytes,
decryptionWorker,
segment,
doneFn
}, decryptedBytes => {
segment.bytes = decryptedBytes;
triggerSegmentEventFn({
type: 'segmentdecryptioncomplete',
segment
});
handleSegmentBytes({
segment,
bytes: segment.bytes,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
audioSegmentTimingInfoFn,
id3Fn,
captionsFn,
isEndOfTimeline,
endedTimelineFn,
dataFn,
doneFn,
onTransmuxerLog,
triggerSegmentEventFn
});
});
};
/**
* This function waits for all XHRs to finish (with either success or failure)
* before continuing processing via its callback. If any request errors, all
* outstanding requests are aborted and the callback is invoked immediately with
* that error.
*
* @param {Object} activeXhrs - an object that tracks all XHR requests
* @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
* routines
* @param {Function} trackInfoFn - a callback that receives track info
* @param {Function} timingInfoFn - a callback that receives timing info
* @param {Function} videoSegmentTimingInfoFn
* a callback that receives video timing info based on media times and
* any adjustments made by the transmuxer
* @param {Function} audioSegmentTimingInfoFn
* a callback that receives audio timing info based on media times and
* any adjustments made by the transmuxer
* @param {Function} id3Fn - a callback that receives ID3 metadata
* @param {Function} captionsFn - a callback that receives captions
* @param {boolean} isEndOfTimeline
* true if this segment represents the last segment in a timeline
* @param {Function} endedTimelineFn
* a callback made when a timeline is ended, will only be called if
* isEndOfTimeline is true
* @param {Function} dataFn - a callback that is executed when segment bytes are available
* and ready to use
* @param {Function} doneFn - a callback that is executed after all resources have been
* downloaded and any decryption completed
*/
const waitForCompletion = ({
activeXhrs,
decryptionWorker,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
audioSegmentTimingInfoFn,
id3Fn,
captionsFn,
isEndOfTimeline,
endedTimelineFn,
dataFn,
doneFn,
onTransmuxerLog,
triggerSegmentEventFn
}) => {
let count = 0;
let didError = false;
return (error, segment) => {
if (didError) {
return;
}
if (error) {
didError = true; // If there are errors, we have to abort any outstanding requests
abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
// handle the aborted events from those requests, there are some cases where we may
// never get an aborted event. For instance, if the network connection is lost and
// there were two requests, the first may have triggered an error immediately, while
// the second request remains unsent. In that case, the aborted algorithm will not
// trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
//
// We also can't rely on the ready state of the XHR, since the request that
// triggered the connection error may also show as a ready state of 0 (unsent).
// Therefore, we have to finish this group of requests immediately after the first
// seen error.
return doneFn(error, segment);
}
count += 1;
if (count === activeXhrs.length) {
const segmentFinish = function () {
if (segment.encryptedBytes) {
return decryptSegment({
decryptionWorker,
segment,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
audioSegmentTimingInfoFn,
id3Fn,
captionsFn,
isEndOfTimeline,
endedTimelineFn,
dataFn,
doneFn,
onTransmuxerLog,
triggerSegmentEventFn
});
} // Otherwise, everything is ready, so just continue
handleSegmentBytes({
segment,
bytes: segment.bytes,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
audioSegmentTimingInfoFn,
id3Fn,
captionsFn,
isEndOfTimeline,
endedTimelineFn,
dataFn,
doneFn,
onTransmuxerLog,
triggerSegmentEventFn
});
}; // Keep track of when *all* of the requests have completed
segment.endOfAllRequests = Date.now();
if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
triggerSegmentEventFn({
type: 'segmentdecryptionstart',
segment
});
return decrypt({
decryptionWorker,
// add -init to the "id" to differentiate between segment
// and init segment decryption, just in case they happen
// at the same time at some point in the future.
id: segment.requestId + '-init',
encryptedBytes: segment.map.encryptedBytes,
key: segment.map.key,
segment,
doneFn
}, decryptedBytes => {
segment.map.bytes = decryptedBytes;
triggerSegmentEventFn({
type: 'segmentdecryptioncomplete',
segment
});
parseInitSegment(segment, parseError => {
if (parseError) {
abortAll(activeXhrs);
return doneFn(parseError, segment);
}
segmentFinish();
});
});
}
segmentFinish();
}
};
};
/**
* Calls the abort callback if any request within the batch was aborted. Will only call
* the callback once per batch of requests, even if multiple were aborted.
*
* @param {Object} loadendState - state to check to see if the abort function was called
* @param {Function} abortFn - callback to call for abort
*/
const handleLoadEnd = ({
loadendState,
abortFn
}) => event => {
const request = event.target;
if (request.aborted && abortFn && !loadendState.calledAbortFn) {
abortFn();
loadendState.calledAbortFn = true;
}
};
/**
* Simple progress event callback handler that gathers some stats before
* executing a provided callback with the `segment` object
*
* @param {Object} segment - a simplified copy of the segmentInfo object
* from SegmentLoader
* @param {Function} progressFn - a callback that is executed each time a progress event
* is received
* @param {Function} trackInfoFn - a callback that receives track info
* @param {Function} timingInfoFn - a callback that receives timing info
* @param {Function} videoSegmentTimingInfoFn
* a callback that receives video timing info based on media times and
* any adjustments made by the transmuxer
* @param {Function} audioSegmentTimingInfoFn
* a callback that receives audio timing info based on media times and
* any adjustments made by the transmuxer
* @param {boolean} isEndOfTimeline
* true if this segment represents the last segment in a timeline
* @param {Function} endedTimelineFn
* a callback made when a timeline is ended, will only be called if
* isEndOfTimeline is true
* @param {Function} dataFn - a callback that is executed when segment bytes are available
* and ready to use
* @param {Event} event - the progress event object from XMLHttpRequest
*/
const handleProgress = ({
segment,
progressFn,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
audioSegmentTimingInfoFn,
id3Fn,
captionsFn,
isEndOfTimeline,
endedTimelineFn,
dataFn
}) => event => {
const request = event.target;
if (request.aborted) {
return;
}
segment.stats = merge(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
segment.stats.firstBytesReceivedAt = Date.now();
}
return progressFn(event, segment);
};
/**
* Loads all resources and does any processing necessary for a media segment
*
* Features:
* decrypts the media-segment if it has a key uri and an iv
* aborts *all* requests if *any* one request fails
*
* The segment object, at minimum, has the following format:
* {
* resolvedUri: String,
* [transmuxer]: Object,
* [byterange]: {
* offset: Number,
* length: Number
* },
* [key]: {
* resolvedUri: String
* [byterange]: {
* offset: Number,
* length: Number
* },
* iv: {
* bytes: Uint32Array
* }
* },
* [map]: {
* resolvedUri: String,
* [byterange]: {
* offset: Number,
* length: Number
* },
* [bytes]: Uint8Array
* }
* }
* ...where [name] denotes optional properties
*
* @param {Function} xhr - an instance of the xhr wrapper in xhr.js
* @param {Object} xhrOptions - the base options to provide to all xhr requests
* @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
* decryption routines
* @param {Object} segment - a simplified copy of the segmentInfo object
* from SegmentLoader
* @param {Function} abortFn - a callback called (only once) if any piece of a request was
* aborted
* @param {Function} progressFn - a callback that receives progress events from the main
* segment's xhr request
* @param {Function} trackInfoFn - a callback that receives track info
* @param {Function} timingInfoFn - a callback that receives timing info
* @param {Function} videoSegmentTimingInfoFn
* a callback that receives video timing info based on media times and
* any adjustments made by the transmuxer
* @param {Function} audioSegmentTimingInfoFn
* a callback that receives audio timing info based on media times and
* any adjustments made by the transmuxer
* @param {Function} id3Fn - a callback that receives ID3 metadata
* @param {Function} captionsFn - a callback that receives captions
* @param {boolean} isEndOfTimeline
* true if this segment represents the last segment in a timeline
* @param {Function} endedTimelineFn
* a callback made when a timeline is ended, will only be called if
* isEndOfTimeline is true
* @param {Function} dataFn - a callback that receives data from the main segment's xhr
* request, transmuxed if needed
* @param {Function} doneFn - a callback that is executed only once all requests have
* succeeded or failed
* @return {Function} a function that, when invoked, immediately aborts all
* outstanding requests
*/
const mediaSegmentRequest = ({
xhr,
xhrOptions,
decryptionWorker,
segment,
abortFn,
progressFn,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
audioSegmentTimingInfoFn,
id3Fn,
captionsFn,
isEndOfTimeline,
endedTimelineFn,
dataFn,
doneFn,
onTransmuxerLog,
triggerSegmentEventFn
}) => {
const activeXhrs = [];
const finishProcessingFn = waitForCompletion({
activeXhrs,
decryptionWorker,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
audioSegmentTimingInfoFn,
id3Fn,
captionsFn,
isEndOfTimeline,
endedTimelineFn,
dataFn,
doneFn,
onTransmuxerLog,
triggerSegmentEventFn
}); // optionally, request the decryption key
if (segment.key && !segment.key.bytes) {
const objects = [segment.key];
if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
objects.push(segment.map.key);
}
const keyRequestOptions = merge(xhrOptions, {
uri: segment.key.resolvedUri,
responseType: 'arraybuffer',
requestType: 'segment-key'
});
const keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn, triggerSegmentEventFn);
const keyInfo = {
uri: segment.key.resolvedUri
};
triggerSegmentEventFn({
type: 'segmentkeyloadstart',
segment,
keyInfo
});
const keyXhr = xhr(keyRequestOptions, keyRequestCallback);
activeXhrs.push(keyXhr);
} // optionally, request the associated media init segment
if (segment.map && !segment.map.bytes) {
const differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);
if (differentMapKey) {
const mapKeyRequestOptions = merge(xhrOptions, {
uri: segment.map.key.resolvedUri,
responseType: 'arraybuffer',
requestType: 'segment-key'
});
const mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn, triggerSegmentEventFn);
const keyInfo = {
uri: segment.map.key.resolvedUri
};
triggerSegmentEventFn({
type: 'segmentkeyloadstart',
segment,
keyInfo
});
const mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
activeXhrs.push(mapKeyXhr);
}
const initSegmentOptions = merge(xhrOptions, {
uri: segment.map.resolvedUri,
responseType: 'arraybuffer',
headers: segmentXhrHeaders(segment.map),
requestType: 'segment-media-initialization'
});
const initSegmentRequestCallback = handleInitSegmentResponse({
segment,
finishProcessingFn,
triggerSegmentEventFn
});
triggerSegmentEventFn({
type: 'segmentloadstart',
segment
});
const initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
activeXhrs.push(initSegmentXhr);
}
const segmentRequestOptions = merge(xhrOptions, {
uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
responseType: 'arraybuffer',
headers: segmentXhrHeaders(segment),
requestType: 'segment'
});
const segmentRequestCallback = handleSegmentResponse({
segment,
finishProcessingFn,
responseType: segmentRequestOptions.responseType,
triggerSegmentEventFn
});
triggerSegmentEventFn({
type: 'segmentloadstart',
segment
});
const segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
segmentXhr.addEventListener('progress', handleProgress({
segment,
progressFn,
trackInfoFn,
timingInfoFn,
videoSegmentTimingInfoFn,
audioSegmentTimingInfoFn,
id3Fn,
captionsFn,
isEndOfTimeline,
endedTimelineFn,
dataFn
}));
activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but callbacks should not be made
// multiple times, provide a shared state object
const loadendState = {};
activeXhrs.forEach(activeXhr => {
activeXhr.addEventListener('loadend', handleLoadEnd({
loadendState,
abortFn
}));
});
return () => abortAll(activeXhrs);
};
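// Illustrative sketch only (not referenced by the library): a minimal, hypothetical call
// to mediaSegmentRequest for an unencrypted segment that has an init segment. The URIs,
// options and no-op callbacks below are placeholders; the real caller is the SegmentLoader.
const exampleMediaSegmentRequest = (xhrWrapper, worker) => {
  const noop = () => {};
  const abort = mediaSegmentRequest({
    xhr: xhrWrapper,
    xhrOptions: { timeout: 45000 }, // hypothetical base options applied to every request
    decryptionWorker: worker,
    segment: {
      resolvedUri: 'https://example.com/path/segment-0.m4s',
      transmuxer: null,
      map: { resolvedUri: 'https://example.com/path/init.mp4' },
      requestId: 'example-request-0'
    },
    abortFn: noop,
    progressFn: noop,
    trackInfoFn: noop,
    timingInfoFn: noop,
    videoSegmentTimingInfoFn: noop,
    audioSegmentTimingInfoFn: noop,
    id3Fn: noop,
    captionsFn: noop,
    isEndOfTimeline: false,
    endedTimelineFn: noop,
    dataFn: noop,
    doneFn: noop,
    onTransmuxerLog: noop,
    triggerSegmentEventFn: noop
  });
  // The return value aborts every outstanding request for this segment when invoked.
  return abort;
};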
/**
* @file - codecs.js - Handles tasks regarding codec strings, such as parsing them
* or translating codec strings into objects that can be examined.
*/
const logFn$1 = logger('CodecUtils');
/**
* Returns a set of codec strings parsed from the playlist or the default
* codec strings if no codecs were specified in the playlist
*
* @param {Playlist} media the current media playlist
* @return {Object} an object with the video and audio codecs
*/
const getCodecs = function (media) {
// if the codecs were explicitly specified, use them instead of the
// defaults
const mediaAttributes = media.attributes || {};
if (mediaAttributes.CODECS) {
return parseCodecs(mediaAttributes.CODECS);
}
};
const isMaat = (main, media) => {
const mediaAttributes = media.attributes || {};
return main && main.mediaGroups && main.mediaGroups.AUDIO && mediaAttributes.AUDIO && main.mediaGroups.AUDIO[mediaAttributes.AUDIO];
};
const isMuxed = (main, media) => {
if (!isMaat(main, media)) {
return true;
}
const mediaAttributes = media.attributes || {};
const audioGroup = main.mediaGroups.AUDIO[mediaAttributes.AUDIO];
for (const groupId in audioGroup) {
// If an audio group has a URI (the case for HLS, as HLS will use external playlists),
// or there are listed playlists (the case for DASH, as the manifest will have already
// provided all of the details necessary to generate the audio playlist, as opposed to
// HLS' externally requested playlists), then the content is demuxed.
if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
return true;
}
}
return false;
};
const unwrapCodecList = function (codecList) {
const codecs = {};
codecList.forEach(({
mediaType,
type,
details
}) => {
codecs[mediaType] = codecs[mediaType] || [];
codecs[mediaType].push(translateLegacyCodec(`${type}${details}`));
});
Object.keys(codecs).forEach(function (mediaType) {
if (codecs[mediaType].length > 1) {
logFn$1(`multiple ${mediaType} codecs found as attributes: ${codecs[mediaType].join(', ')}. Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.`);
codecs[mediaType] = null;
return;
}
codecs[mediaType] = codecs[mediaType][0];
});
return codecs;
};
const codecCount = function (codecObj) {
let count = 0;
if (codecObj.audio) {
count++;
}
if (codecObj.video) {
count++;
}
return count;
};
/**
* Calculates the codec strings for a working configuration of
* SourceBuffers to play variant streams in a main playlist. If
* there is no possible working configuration, an empty object will be
* returned.
*
* @param {Object} main the m3u8 object for the main playlist
* @param {Object} media the m3u8 object for the variant playlist
* @return {Object} the codec strings.
*
* @private
*/
const codecsForPlaylist = function (main, media) {
const mediaAttributes = media.attributes || {};
const codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
// Put another way, there is no way to have a video-only multiple-audio HLS!
if (isMaat(main, media) && !codecInfo.audio) {
if (!isMuxed(main, media)) {
// It is possible for codecs to be specified on the audio media group playlist but
// not on the rendition playlist. This is mostly the case for DASH, where audio and
// video are always separate (and separately specified).
const defaultCodecs = unwrapCodecList(codecsFromDefault(main, mediaAttributes.AUDIO) || []);
if (defaultCodecs.audio) {
codecInfo.audio = defaultCodecs.audio;
}
}
}
return codecInfo;
};
const logFn = logger('PlaylistSelector');
const representationToString = function (representation) {
if (!representation || !representation.playlist) {
return;
}
const playlist = representation.playlist;
return JSON.stringify({
id: playlist.id,
bandwidth: representation.bandwidth,
width: representation.width,
height: representation.height,
codecs: playlist.attributes && playlist.attributes.CODECS || ''
});
}; // Utilities
/**
* Returns the CSS value for the specified property on an element
* using `getComputedStyle`. Firefox has a long-standing issue where
* getComputedStyle() may return null when running in an iframe with
* `display: none`.
*
* @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
* @param {HTMLElement} el the HTMLElement to work on
* @param {string} property the property to get the style for
*/
const safeGetComputedStyle = function (el, property) {
if (!el) {
return '';
}
const result = window$1.getComputedStyle(el);
if (!result) {
return '';
}
return result[property];
};
/**
* Reusable stable sort function. Sorts the array in place while preserving the
* original relative order of entries that compare as equal.
*
* @param {Array} array the array of playlists to sort in place
* @param {Function} sortFn the comparator function
* @function stableSort
*/
const stableSort = function (array, sortFn) {
const newArray = array.slice();
array.sort(function (left, right) {
const cmp = sortFn(left, right);
if (cmp === 0) {
return newArray.indexOf(left) - newArray.indexOf(right);
}
return cmp;
});
};
/**
* A comparator function to sort two playlist object by bandwidth.
*
* @param {Object} left a media playlist object
* @param {Object} right a media playlist object
* @return {number} Greater than zero if the bandwidth attribute of
* left is greater than the corresponding attribute of right. Less
* than zero if the bandwidth of right is greater than left and
* exactly zero if the two are equal.
*/
const comparePlaylistBandwidth = function (left, right) {
let leftBandwidth;
let rightBandwidth;
if (left.attributes.BANDWIDTH) {
leftBandwidth = left.attributes.BANDWIDTH;
}
leftBandwidth = leftBandwidth || window$1.Number.MAX_VALUE;
if (right.attributes.BANDWIDTH) {
rightBandwidth = right.attributes.BANDWIDTH;
}
rightBandwidth = rightBandwidth || window$1.Number.MAX_VALUE;
return leftBandwidth - rightBandwidth;
};
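// Illustrative sketch only (not referenced by the library): stableSort combined with the
// bandwidth comparator above. The playlists are hypothetical; note that ties keep their
// original relative order, which a plain Array#sort does not guarantee in older engines.
const exampleStableBandwidthSort = () => {
  const playlists = [
    { id: 'a-1000k', attributes: { BANDWIDTH: 1000000 } },
    { id: 'b-500k', attributes: { BANDWIDTH: 500000 } },
    { id: 'c-1000k', attributes: { BANDWIDTH: 1000000 } }
  ];
  // sorts in place, ascending by BANDWIDTH: b-500k, a-1000k, c-1000k
  stableSort(playlists, comparePlaylistBandwidth);
  return playlists;
};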
/**
* A comparator function to sort two playlist object by resolution (width).
*
* @param {Object} left a media playlist object
* @param {Object} right a media playlist object
* @return {number} Greater than zero if the resolution.width attribute of
* left is greater than the corresponding attribute of right. Less
* than zero if the resolution.width of right is greater than left and
* exactly zero if the two are equal.
*/
const comparePlaylistResolution = function (left, right) {
let leftWidth;
let rightWidth;
if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
leftWidth = left.attributes.RESOLUTION.width;
}
leftWidth = leftWidth || window$1.Number.MAX_VALUE;
if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
rightWidth = right.attributes.RESOLUTION.width;
}
rightWidth = rightWidth || window$1.Number.MAX_VALUE; // NOTE - Fall back to bandwidth sort as appropriate in cases where multiple renditions
// have the same media dimensions/resolution
if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
}
return leftWidth - rightWidth;
};
/**
* Chooses the appropriate media playlist based on bandwidth and player size
*
* @param {Object} main
* Object representation of the main manifest
* @param {number} playerBandwidth
* Current calculated bandwidth of the player
* @param {number} playerWidth
* Current width of the player element (should account for the device pixel ratio)
* @param {number} playerHeight
* Current height of the player element (should account for the device pixel ratio)
* @param {boolean} limitRenditionByPlayerDimensions
* True if the player width and height should be used during the selection, false otherwise
* @param {Object} playlistController
* the current playlistController object
* @return {Playlist} the highest bitrate playlist less than the
* currently detected bandwidth, accounting for some amount of
* bandwidth variance
*/
let simpleSelector = function (main, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, playlistController) {
// If we end up getting called before `main` is available, exit early
if (!main) {
return;
}
const options = {
bandwidth: playerBandwidth,
width: playerWidth,
height: playerHeight,
limitRenditionByPlayerDimensions
};
let playlists = main.playlists; // if playlist is audio only, select between currently active audio group playlists.
if (Playlist.isAudioOnly(main)) {
playlists = playlistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
// at the bottom of this function for debugging.
options.audioOnly = true;
} // convert the playlists to an intermediary representation to make comparisons easier
let sortedPlaylistReps = playlists.map(playlist => {
let bandwidth;
const width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
const height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
bandwidth = bandwidth || window$1.Number.MAX_VALUE;
return {
bandwidth,
width,
height,
playlist
};
});
stableSort(sortedPlaylistReps, (left, right) => left.bandwidth - right.bandwidth); // filter out any playlists that have been excluded due to
// incompatible configurations
sortedPlaylistReps = sortedPlaylistReps.filter(rep => !Playlist.isIncompatible(rep.playlist)); // filter out any playlists that have been disabled manually through the representations
// api or excluded temporarily due to playback errors.
let enabledPlaylistReps = sortedPlaylistReps.filter(rep => Playlist.isEnabled(rep.playlist));
if (!enabledPlaylistReps.length) {
// if there are no enabled playlists, then they have all been excluded or disabled
// by the user through the representations api. In this case, ignore exclusion and
// fall back to what the user wants by using playlists the user has not disabled.
enabledPlaylistReps = sortedPlaylistReps.filter(rep => !Playlist.isDisabled(rep.playlist));
} // filter out any variant that has greater effective bitrate
// than the current estimated bandwidth
const bandwidthPlaylistReps = enabledPlaylistReps.filter(rep => rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth);
let highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
// and then take the very first element
const bandwidthBestRep = bandwidthPlaylistReps.filter(rep => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0]; // if we're not going to limit renditions by player size, make an early decision.
if (limitRenditionByPlayerDimensions === false) {
const chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
if (chosenRep && chosenRep.playlist) {
let type = 'sortedPlaylistReps';
if (bandwidthBestRep) {
type = 'bandwidthBestRep';
}
if (enabledPlaylistReps[0]) {
type = 'enabledPlaylistReps';
}
logFn(`choosing ${representationToString(chosenRep)} using ${type} with options`, options);
return chosenRep.playlist;
}
logFn('could not choose a playlist with options', options);
return null;
} // filter out playlists without resolution information
const haveResolution = bandwidthPlaylistReps.filter(rep => rep.width && rep.height); // sort variants by resolution
stableSort(haveResolution, (left, right) => left.width - right.width); // if we have the exact resolution as the player use it
const resolutionBestRepList = haveResolution.filter(rep => rep.width === playerWidth && rep.height === playerHeight);
highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that has the exact resolution
const resolutionBestRep = resolutionBestRepList.filter(rep => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0];
let resolutionPlusOneList;
let resolutionPlusOneSmallest;
let resolutionPlusOneRep; // find the smallest variant that is larger than the player
// if there is no exact resolution match
if (!resolutionBestRep) {
resolutionPlusOneList = haveResolution.filter(rep => rep.width > playerWidth || rep.height > playerHeight); // find all the variants that have the same smallest resolution
resolutionPlusOneSmallest = resolutionPlusOneList.filter(rep => rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height); // ensure that we also pick the highest bandwidth variant that
// is just-larger-than the video player
highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
resolutionPlusOneRep = resolutionPlusOneSmallest.filter(rep => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0];
}
let leastPixelDiffRep; // If this selector proves to be better than others,
// resolutionPlusOneRep and resolutionBestRep and all
// the code involving them should be removed.
if (playlistController.leastPixelDiffSelector) {
// find the variant that is closest to the player's pixel size
const leastPixelDiffList = haveResolution.map(rep => {
rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
return rep;
}); // get the highest bandwidth, closest resolution playlist
stableSort(leastPixelDiffList, (left, right) => {
// sort by highest bandwidth if pixelDiff is the same
if (left.pixelDiff === right.pixelDiff) {
return right.bandwidth - left.bandwidth;
}
return left.pixelDiff - right.pixelDiff;
});
leastPixelDiffRep = leastPixelDiffList[0];
} // fallback chain of variants
const chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
if (chosenRep && chosenRep.playlist) {
let type = 'sortedPlaylistReps';
if (leastPixelDiffRep) {
type = 'leastPixelDiffRep';
} else if (resolutionPlusOneRep) {
type = 'resolutionPlusOneRep';
} else if (resolutionBestRep) {
type = 'resolutionBestRep';
} else if (bandwidthBestRep) {
type = 'bandwidthBestRep';
} else if (enabledPlaylistReps[0]) {
type = 'enabledPlaylistReps';
}
logFn(`choosing ${representationToString(chosenRep)} using ${type} with options`, options);
return chosenRep.playlist;
}
logFn('could not choose a playlist with options', options);
return null;
};
/**
* Chooses the appropriate media playlist based on the most recent
* bandwidth estimate and the player size.
*
* Expects to be called within the context of an instance of VhsHandler
*
* @return {Playlist} the highest bitrate playlist less than the
* currently detected bandwidth, accounting for some amount of
* bandwidth variance
*/
const lastBandwidthSelector = function () {
let pixelRatio = this.useDevicePixelRatio ? window$1.devicePixelRatio || 1 : 1;
if (!isNaN(this.customPixelRatio)) {
pixelRatio = this.customPixelRatio;
}
return simpleSelector(this.playlists.main, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.playlistController_);
};
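// Illustrative numbers only (not referenced by the library): how the pixel ratio scaling
// above affects the dimensions handed to simpleSelector. The CSS size is hypothetical.
const exampleDevicePixelRatioScaling = () => {
  const cssWidth = 640;  // parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10)
  const cssHeight = 360;
  const pixelRatio = 2;  // window.devicePixelRatio on a typical high-density display
  // simpleSelector would be asked for a rendition suited to 1280x720 physical pixels.
  return { width: cssWidth * pixelRatio, height: cssHeight * pixelRatio };
};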
/**
* Chooses the appropriate media playlist based on an
* exponential-weighted moving average of the bandwidth after
* filtering for player size.
*
* Expects to be called within the context of an instance of VhsHandler
*
* @param {number} decay - a number between 0 and 1. Higher values of
* this parameter will cause previous bandwidth estimates to lose
* significance more quickly.
* @return {Function} a function which can be invoked to create a new
* playlist selector function.
* @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
*/
const movingAverageBandwidthSelector = function (decay) {
let average = -1;
let lastSystemBandwidth = -1;
if (decay < 0 || decay > 1) {
throw new Error('Moving average bandwidth decay must be between 0 and 1.');
}
return function () {
let pixelRatio = this.useDevicePixelRatio ? window$1.devicePixelRatio || 1 : 1;
if (!isNaN(this.customPixelRatio)) {
pixelRatio = this.customPixelRatio;
}
if (average < 0) {
average = this.systemBandwidth;
lastSystemBandwidth = this.systemBandwidth;
} // stop the average value from decaying for every 250ms
// when the systemBandwidth is constant
// and
// stop average from setting to a very low value when the
// systemBandwidth becomes 0 in case of chunk cancellation
if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
average = decay * this.systemBandwidth + (1 - decay) * average;
lastSystemBandwidth = this.systemBandwidth;
}
return simpleSelector(this.playlists.main, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.playlistController_);
};
};
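// Illustrative numbers only (not referenced by the library): how the exponentially
// weighted moving average above evolves. All bandwidth samples are hypothetical.
const exampleMovingAverage = () => {
  const decay = 0.5;
  // the first sample seeds the average directly
  let average = 4000000;
  // next sample of 2000000 bps: 0.5 * 2000000 + 0.5 * 4000000
  average = decay * 2000000 + (1 - decay) * average; // 3000000
  // another 2000000 bps sample: older estimates lose weight geometrically
  average = decay * 2000000 + (1 - decay) * average; // 2500000
  return average;
};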
/**
* Chooses the appropriate media playlist based on the potential to rebuffer
*
* @param {Object} settings
* Object of information required to use this selector
* @param {Object} settings.main
* Object representation of the main manifest
* @param {number} settings.currentTime
* The current time of the player
* @param {number} settings.bandwidth
* Current measured bandwidth
* @param {number} settings.duration
* Duration of the media
* @param {number} settings.segmentDuration
* Segment duration to be used in round trip time calculations
* @param {number} settings.timeUntilRebuffer
* Time left in seconds until the player has to rebuffer
* @param {number} settings.currentTimeline
* The current timeline segments are being loaded from
* @param {SyncController} settings.syncController
* SyncController for determining if we have a sync point for a given playlist
* @return {Object|null}
* {Object} return.playlist
* The highest bandwidth playlist with the least amount of rebuffering
* {Number} return.rebufferingImpact
* The amount of time in seconds switching to this playlist will rebuffer. A
* negative value means that switching will cause zero rebuffering.
*/
const minRebufferMaxBandwidthSelector = function (settings) {
const {
main,
currentTime,
bandwidth,
duration,
segmentDuration,
timeUntilRebuffer,
currentTimeline,
syncController
} = settings; // filter out any playlists that have been excluded due to
// incompatible configurations
const compatiblePlaylists = main.playlists.filter(playlist => !Playlist.isIncompatible(playlist)); // filter out any playlists that have been disabled manually through the representations
// api or excluded temporarily due to playback errors.
let enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
if (!enabledPlaylists.length) {
// if there are no enabled playlists, then they have all been excluded or disabled
// by the user through the representations api. In this case, ignore exclusion and
// fall back to what the user wants by using playlists the user has not disabled.
enabledPlaylists = compatiblePlaylists.filter(playlist => !Playlist.isDisabled(playlist));
}
const bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
const rebufferingEstimates = bandwidthPlaylists.map(playlist => {
const syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
// sync request first. This will double the request time
const numRequests = syncPoint ? 1 : 2;
const requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
const rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
return {
playlist,
rebufferingImpact
};
});
const noRebufferingPlaylists = rebufferingEstimates.filter(estimate => estimate.rebufferingImpact <= 0); // Sort by bandwidth DESC
stableSort(noRebufferingPlaylists, (a, b) => comparePlaylistBandwidth(b.playlist, a.playlist));
if (noRebufferingPlaylists.length) {
return noRebufferingPlaylists[0];
}
stableSort(rebufferingEstimates, (a, b) => a.rebufferingImpact - b.rebufferingImpact);
return rebufferingEstimates[0] || null;
};
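// Illustrative numbers only (not referenced by the library): the rebuffering impact
// calculation above for a hypothetical playlist. requestTimeEstimate would normally come
// from Playlist.estimateSegmentRequestTime.
const exampleRebufferingImpact = () => {
  const requestTimeEstimate = 3; // hypothetical seconds to fetch one segment
  const timeUntilRebuffer = 5;   // seconds of buffer left
  // without a sync point the switch costs an extra request, so numRequests is 2
  const withoutSyncPoint = requestTimeEstimate * 2 - timeUntilRebuffer; // 1 (would rebuffer)
  // with a sync point a single request suffices
  const withSyncPoint = requestTimeEstimate * 1 - timeUntilRebuffer; // -2 (no rebuffering)
  return { withoutSyncPoint, withSyncPoint };
};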
/**
* Chooses the appropriate media playlist, which in this case is the lowest bitrate
* one with video. If no renditions with video exist, return the lowest audio rendition.
*
* Expects to be called within the context of an instance of VhsHandler
*
* @return {Object|null}
* {Object} return.playlist
* The lowest bitrate playlist that contains a video codec. If no such rendition
* exists pick the lowest audio rendition.
*/
const lowestBitrateCompatibleVariantSelector = function () {
// filter out any playlists that have been excluded due to
// incompatible configurations or playback errors
const playlists = this.playlists.main.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
stableSort(playlists, (a, b) => comparePlaylistBandwidth(a, b)); // Parse and assume that playlists with no video codec have no video
// (this is not necessarily true, although it is generally true).
//
// If an entire manifest has no valid videos everything will get filtered
// out.
const playlistsWithVideo = playlists.filter(playlist => !!codecsForPlaylist(this.playlists.main, playlist).video);
return playlistsWithVideo[0] || null;
};
/**
* Combine all segments into a single Uint8Array
*
* @param {Object} segmentObj
* @return {Uint8Array} concatenated bytes
* @private
*/
const concatSegments = segmentObj => {
let offset = 0;
let tempBuffer;
if (segmentObj.bytes) {
tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
segmentObj.segments.forEach(segment => {
tempBuffer.set(segment, offset);
offset += segment.byteLength;
});
}
return tempBuffer;
};
/**
* Example:
* https://host.com/path1/path2/path3/segment.ts?arg1=val1
* -->
* path3/segment.ts
*
* @param {string} resolvedUri the fully resolved segment URI
* @return {string}
*/
function compactSegmentUrlDescription(resolvedUri) {
try {
return new URL(resolvedUri).pathname.split('/').slice(-2).join('/');
} catch (e) {
return '';
}
}
/**
* @file text-tracks.js
*/
/**
* Create captions text tracks on video.js if they do not exist
*
* @param {Object} inbandTextTracks a reference to current inbandTextTracks
* @param {Object} tech the video.js tech
* @param {Object} captionStream the caption stream to create
* @private
*/
const createCaptionsTrackIfNotExists = function (inbandTextTracks, tech, captionStream) {
if (!inbandTextTracks[captionStream]) {
tech.trigger({
type: 'usage',
name: 'vhs-608'
});
let instreamId = captionStream; // we need to translate SERVICEn for 708 to how mux.js currently labels them
if (/^cc708_/.test(captionStream)) {
instreamId = 'SERVICE' + captionStream.split('_')[1];
}
const track = tech.textTracks().getTrackById(instreamId);
if (track) {
// Reuse an existing track with a CC# id because this was
// very likely created by videojs-contrib-hls from information
// in the m3u8 for us to use
inbandTextTracks[captionStream] = track;
} else {
// This section gets called when we have caption services that aren't specified in the manifest.
// Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
const captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
let label = captionStream;
let language = captionStream;
let def = false;
const captionService = captionServices[instreamId];
if (captionService) {
label = captionService.label;
language = captionService.language;
def = captionService.default;
} // Otherwise, create a track with the default `CC#` label and
// without a language
inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
kind: 'captions',
id: instreamId,
// TODO: investigate why this doesn't seem to turn the caption on by default
default: def,
label,
language
}, false).track;
}
}
};
/**
* Add caption text track data to a source handler given an array of captions
*
* @param {Object}
* @param {Object} inbandTextTracks the inband text tracks
* @param {number} timestampOffset the timestamp offset of the source buffer
* @param {Array} captionArray an array of caption data
* @private
*/
const addCaptionData = function ({
inbandTextTracks,
captionArray,
timestampOffset
}) {
if (!captionArray) {
return;
}
const Cue = window$1.WebKitDataCue || window$1.VTTCue;
captionArray.forEach(caption => {
const track = caption.stream; // in CEA 608 captions, video.js/mux.js sends a content array
// with positioning data
if (caption.content) {
caption.content.forEach(value => {
const cue = new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, value.text);
cue.line = value.line;
cue.align = 'left';
cue.position = value.position;
cue.positionAlign = 'line-left';
inbandTextTracks[track].addCue(cue);
});
} else {
// otherwise, a text value with combined captions is sent
inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
}
});
};
/**
* Define properties on a cue for backwards compatibility,
* but warn the user that the way they are using it
* is deprecated and will be removed at a later date.
*
* @param {Cue} cue the cue to add the properties on
* @private
*/
const deprecateOldCue = function (cue) {
Object.defineProperties(cue.frame, {
id: {
get() {
videojs.log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
return cue.value.key;
}
},
value: {
get() {
videojs.log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
return cue.value.data;
}
},
privateData: {
get() {
videojs.log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
return cue.value.data;
}
}
});
};
/**
* Add metadata text track data to a source handler given an array of metadata
*
* @param {Object}
* @param {Object} inbandTextTracks the inband text tracks
* @param {Array} metadataArray an array of meta data
* @param {number} timestampOffset the timestamp offset of the source buffer
* @param {number} videoDuration the duration of the video
* @private
*/
const addMetadata = ({
inbandTextTracks,
metadataArray,
timestampOffset,
videoDuration
}) => {
if (!metadataArray) {
return;
}
const Cue = window$1.WebKitDataCue || window$1.VTTCue;
const metadataTrack = inbandTextTracks.metadataTrack_;
if (!metadataTrack) {
return;
}
metadataArray.forEach(metadata => {
const time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
// ignore this bit of metadata.
// This likely occurs when you have a non-timed ID3 tag like TIT2,
// which is the "Title/Songname/Content description" frame
if (typeof time !== 'number' || window$1.isNaN(time) || time < 0 || !(time < Infinity)) {
return;
} // If we have no frames, we can't create a cue.
if (!metadata.frames || !metadata.frames.length) {
return;
}
metadata.frames.forEach(frame => {
const cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
cue.frame = frame;
cue.value = frame;
deprecateOldCue(cue);
metadataTrack.addCue(cue);
});
});
if (!metadataTrack.cues || !metadataTrack.cues.length) {
return;
} // Updating the metadata cues so that
// the endTime of each cue is the startTime of the next cue
// the endTime of last cue is the duration of the video
const cues = metadataTrack.cues;
const cuesArray = []; // Create a copy of the TextTrackCueList...
// ...disregarding cues with a falsy value
for (let i = 0; i < cues.length; i++) {
if (cues[i]) {
cuesArray.push(cues[i]);
}
} // Group cues by their startTime value
const cuesGroupedByStartTime = cuesArray.reduce((obj, cue) => {
const timeSlot = obj[cue.startTime] || [];
timeSlot.push(cue);
obj[cue.startTime] = timeSlot;
return obj;
}, {}); // Sort startTimes by ascending order
const sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort((a, b) => Number(a) - Number(b)); // Map each cue group's endTime to the next group's startTime
sortedStartTimes.forEach((startTime, idx) => {
const cueGroup = cuesGroupedByStartTime[startTime];
const finiteDuration = isFinite(videoDuration) ? videoDuration : startTime;
const nextTime = Number(sortedStartTimes[idx + 1]) || finiteDuration; // Map each cue's endTime to the next group's startTime
cueGroup.forEach(cue => {
cue.endTime = nextTime;
});
});
}; // object for mapping daterange attributes
const dateRangeAttr = {
id: 'ID',
class: 'CLASS',
startDate: 'START-DATE',
duration: 'DURATION',
endDate: 'END-DATE',
endOnNext: 'END-ON-NEXT',
plannedDuration: 'PLANNED-DURATION',
scte35Out: 'SCTE35-OUT',
scte35In: 'SCTE35-IN'
};
const dateRangeKeysToOmit = new Set(['id', 'class', 'startDate', 'duration', 'endDate', 'endOnNext', 'startTime', 'endTime', 'processDateRange']);
/**
* Add DateRange metadata text track to a source handler given an array of metadata
*
* @param {Object}
* @param {Object} inbandTextTracks the inband text tracks
* @param {Array} dateRanges parsed media playlist
* @private
*/
const addDateRangeMetadata = ({
inbandTextTracks,
dateRanges
}) => {
const metadataTrack = inbandTextTracks.metadataTrack_;
if (!metadataTrack) {
return;
}
const Cue = window$1.WebKitDataCue || window$1.VTTCue;
dateRanges.forEach(dateRange => {
// we generate multiple cues for each date range with different attributes
for (const key of Object.keys(dateRange)) {
if (dateRangeKeysToOmit.has(key)) {
continue;
}
const cue = new Cue(dateRange.startTime, dateRange.endTime, '');
cue.id = dateRange.id;
cue.type = 'com.apple.quicktime.HLS';
cue.value = {
key: dateRangeAttr[key],
data: dateRange[key]
};
if (key === 'scte35Out' || key === 'scte35In') {
cue.value.data = new Uint8Array(cue.value.data.match(/[\da-f]{2}/gi)).buffer;
}
metadataTrack.addCue(cue);
}
dateRange.processDateRange();
});
};
/**
* Create metadata text track on video.js if it does not exist
*
* @param {Object} inbandTextTracks a reference to current inbandTextTracks
* @param {string} dispatchType the inband metadata track dispatch type
* @param {Object} tech the video.js tech
* @private
*/
const createMetadataTrackIfNotExists = (inbandTextTracks, dispatchType, tech) => {
if (inbandTextTracks.metadataTrack_) {
return;
}
inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
kind: 'metadata',
label: 'Timed Metadata'
}, false).track;
if (!videojs.browser.IS_ANY_SAFARI) {
inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
}
};
/**
* Remove cues from a track on video.js.
*
* @param {Double} start start of where we should remove the cue
* @param {Double} end end of where we should remove the cue
* @param {Object} track the text track to remove the cues from
* @private
*/
const removeCuesFromTrack = function (start, end, track) {
let i;
let cue;
if (!track) {
return;
}
if (!track.cues) {
return;
}
i = track.cues.length;
while (i--) {
cue = track.cues[i]; // Remove any cue within the provided start and end time
if (cue.startTime >= start && cue.endTime <= end) {
track.removeCue(cue);
}
}
};
/**
* Remove duplicate cues from a track on video.js (a cue is considered a
* duplicate if it has the same time interval and text as another)
*
* @param {Object} track the text track to remove the duplicate cues from
* @private
*/
const removeDuplicateCuesFromTrack = function (track) {
const cues = track.cues;
if (!cues) {
return;
}
const uniqueCues = {};
for (let i = cues.length - 1; i >= 0; i--) {
const cue = cues[i];
const cueKey = `${cue.startTime}-${cue.endTime}-${cue.text}`;
if (uniqueCues[cueKey]) {
track.removeCue(cue);
} else {
uniqueCues[cueKey] = cue;
}
}
};
/**
* Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
* front of current time.
*
* @param {Array} buffer
* The current buffer of gop information
* @param {number} currentTime
* The current time
* @param {Double} mapping
* Offset to map display time to stream presentation time
* @return {Array}
* List of gops considered safe to append over
*/
const gopsSafeToAlignWith = (buffer, currentTime, mapping) => {
if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
return [];
} // pts value for current time + 3 seconds to give a bit more wiggle room
const currentTimePts = Math.ceil((currentTime - mapping + 3) * ONE_SECOND_IN_TS);
let i;
for (i = 0; i < buffer.length; i++) {
if (buffer[i].pts > currentTimePts) {
break;
}
}
return buffer.slice(i);
};
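// Illustrative numbers only (not referenced by the library): the pts cutoff computed
// above. ONE_SECOND_IN_TS is 90000 (the 90kHz MPEG-TS clock); the times are hypothetical.
const exampleGopCutoff = () => {
  const currentTime = 10; // seconds of display time
  const mapping = -5;     // display time 10 maps to stream presentation time 15
  // (10 - (-5) + 3) * 90000 = 1620000; only gops with a larger pts are returned
  return Math.ceil((currentTime - mapping + 3) * ONE_SECOND_IN_TS);
};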
/**
* Appends gop information (timing and byteLength) received by the transmuxer for the
* gops appended in the last call to appendBuffer
*
* @param {Array} buffer
* The current buffer of gop information
* @param {Array} gops
* List of new gop information
* @param {boolean} replace
* If true, replace the buffer with the new gop information. If false, append the
* new gop information to the buffer in the right location of time.
* @return {Array}
* Updated list of gop information
*/
const updateGopBuffer = (buffer, gops, replace) => {
if (!gops.length) {
return buffer;
}
if (replace) {
// If we are in safe append mode, then completely overwrite the gop buffer
// with the most recently appended data. This will make sure that when appending
// future segments, we only try to align with gops that are both ahead of current
// time and in the last segment appended.
return gops.slice();
}
const start = gops[0].pts;
let i = 0;
for (i; i < buffer.length; i++) {
if (buffer[i].pts >= start) {
break;
}
}
return buffer.slice(0, i).concat(gops);
};
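// Illustrative sketch only (not referenced by the library): appending new gop info into an
// existing buffer without replace. Entries at or past the first new gop's pts are dropped
// so the fresh information wins. The pts values below are hypothetical.
const exampleUpdateGopBuffer = () => {
  const existing = [{ pts: 0 }, { pts: 90000 }, { pts: 180000 }];
  const incoming = [{ pts: 90000 }, { pts: 270000 }];
  // keeps { pts: 0 }, then concatenates the incoming gops:
  // [{ pts: 0 }, { pts: 90000 }, { pts: 270000 }]
  return updateGopBuffer(existing, incoming, false);
};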
/**
* Removes gop information in buffer that overlaps with provided start and end
*
* @param {Array} buffer
* The current buffer of gop information
* @param {Double} start
* position to start the remove at
* @param {Double} end
* position to end the remove at
* @param {Double} mapping
* Offset to map display time to stream presentation time
*/
const removeGopBuffer = (buffer, start, end, mapping) => {
const startPts = Math.ceil((start - mapping) * ONE_SECOND_IN_TS);
const endPts = Math.ceil((end - mapping) * ONE_SECOND_IN_TS);
const updatedBuffer = buffer.slice();
let i = buffer.length;
while (i--) {
if (buffer[i].pts <= endPts) {
break;
}
}
if (i === -1) {
// no removal because end of remove range is before start of buffer
return updatedBuffer;
}
let j = i + 1;
while (j--) {
if (buffer[j].pts <= startPts) {
break;
}
} // clamp remove range start to 0 index
j = Math.max(j, 0);
updatedBuffer.splice(j, i - j + 1);
return updatedBuffer;
};
const shallowEqual = function (a, b) {
// if both are undefined
// or one or the other is undefined
// they are not equal
if (!a && !b || !a && b || a && !b) {
return false;
} // they are the same object and thus, equal
if (a === b) {
return true;
} // sort keys so we can make sure they have
// all the same keys later.
const akeys = Object.keys(a).sort();
const bkeys = Object.keys(b).sort(); // different number of keys, not equal
if (akeys.length !== bkeys.length) {
return false;
}
for (let i = 0; i < akeys.length; i++) {
const key = akeys[i]; // different sorted keys, not equal
if (key !== bkeys[i]) {
return false;
} // different values, not equal
if (a[key] !== b[key]) {
return false;
}
}
return true;
};
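// Illustrative sketch only (not referenced by the library): shallowEqual compares own keys
// one level deep, so it is suited to flat track-info style objects.
const exampleShallowEqual = () => {
  const sameKeys = shallowEqual({ hasAudio: true, hasVideo: true }, { hasVideo: true, hasAudio: true }); // true
  const extraKey = shallowEqual({ hasAudio: true }, { hasAudio: true, hasVideo: false }); // false, key counts differ
  const bothMissing = shallowEqual(undefined, undefined); // false by design
  return { sameKeys, extraKey, bothMissing };
};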
/**
* The segment loader has no recourse except to fetch a segment in the
* current playlist and use the internal timestamps in that segment to
* generate a syncPoint. This function returns a good candidate index
* for that process.
*
* @param {number} currentTimeline - the timeline the loader is currently following
* @param {Array} segments - the segments array from a playlist.
* @param {number} targetTime - the amount of segment duration on the current timeline to
* accumulate before settling on a candidate index
* @return {number} An index of a segment from the playlist to load
*/
const getSyncSegmentCandidate = function (currentTimeline, segments, targetTime) {
segments = segments || [];
const timelineSegments = [];
let time = 0;
for (let i = 0; i < segments.length; i++) {
const segment = segments[i];
if (currentTimeline === segment.timeline) {
timelineSegments.push(i);
time += segment.duration;
if (time > targetTime) {
return i;
}
}
}
if (timelineSegments.length === 0) {
return 0;
} // default to the last timeline segment
return timelineSegments[timelineSegments.length - 1];
}; // In the event of a quota exceeded error, keep at least one second of back buffer. This
// number was arbitrarily chosen and may be updated in the future, but seemed reasonable
// as a start to prevent any potential issues with removing content too close to the
// playhead.
const MIN_BACK_BUFFER = 1; // in seconds
const CHECK_BUFFER_DELAY = 500; // in ms
const finite = num => typeof num === 'number' && isFinite(num); // With most content hovering around 30fps, if a segment has a duration less than a half
// frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
// not accurately reflect the rest of the content.
const MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
const illegalMediaSwitch = (loaderType, startingMedia, trackInfo) => {
// Although these checks should most likely cover non 'main' types, for now it narrows
// the scope of our checks.
if (loaderType !== 'main' || !startingMedia || !trackInfo) {
return null;
}
if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
return 'Neither audio nor video found in segment.';
}
if (startingMedia.hasVideo && !trackInfo.hasVideo) {
return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
}
if (!startingMedia.hasVideo && trackInfo.hasVideo) {
return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
}
return null;
};
/**
* Calculates a time value that is safe to remove from the back buffer without interrupting
* playback.
*
* @param {TimeRange} seekable
* The current seekable range
* @param {number} currentTime
* The current time of the player
* @param {number} targetDuration
* The target duration of the current playlist
* @return {number}
* Time that is safe to remove from the back buffer without interrupting playback
*/
const safeBackBufferTrimTime = (seekable, currentTime, targetDuration) => {
// 30 seconds before the playhead provides a safe default for trimming.
//
// Choosing a reasonable default is particularly important for high bitrate content and
// VOD videos/live streams with large windows, as the buffer may end up overfilled and
// throw an APPEND_BUFFER_ERR.
let trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
if (seekable.length) {
// Some live playlists may have a shorter window of content than the full allowed back
// buffer. For these playlists, don't save content that's no longer within the window.
trimTime = Math.max(trimTime, seekable.start(0));
} // Don't remove within target duration of the current time to avoid the possibility of
// removing the GOP currently being played, as removing it can cause playback stalls.
const maxTrimTime = currentTime - targetDuration;
return Math.min(maxTrimTime, trimTime);
};
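// Illustrative numbers only (not referenced by the library): the trim calculation above
// with the default 30 second back buffer (Config.BACK_BUFFER_LENGTH). Times are hypothetical.
const exampleSafeBackBufferTrim = () => {
  const currentTime = 60;
  const seekableStart = 40; // a live window that starts later than currentTime - 30
  const targetDuration = 10;
  let trimTime = currentTime - 30;                  // 30
  trimTime = Math.max(trimTime, seekableStart);     // 40, stay inside the live window
  const maxTrimTime = currentTime - targetDuration; // 50, never trim within a target duration
  return Math.min(maxTrimTime, trimTime);           // 40, safe to remove everything before 40s
};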
const segmentInfoString = segmentInfo => {
const {
startOfSegment,
duration,
segment,
part,
playlist: {
mediaSequence: seq,
id,
segments = []
},
mediaIndex: index,
partIndex,
timeline
} = segmentInfo;
const segmentLen = segments.length - 1;
let selection = 'mediaIndex/partIndex increment';
if (segmentInfo.getMediaInfoForTime) {
selection = `getMediaInfoForTime (${segmentInfo.getMediaInfoForTime})`;
} else if (segmentInfo.isSyncRequest) {
selection = 'getSyncSegmentCandidate (isSyncRequest)';
}
if (segmentInfo.independent) {
selection += ` with independent ${segmentInfo.independent}`;
}
const hasPartIndex = typeof partIndex === 'number';
const name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
const zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
preloadSegment: segment
}) - 1 : 0;
return `${name} [${seq + index}/${seq + segmentLen}]` + (hasPartIndex ? ` part [${partIndex}/${zeroBasedPartCount}]` : '') + ` segment start/end [${segment.start} => ${segment.end}]` + (hasPartIndex ? ` part start/end [${part.start} => ${part.end}]` : '') + ` startOfSegment [${startOfSegment}]` + ` duration [${duration}]` + ` timeline [${timeline}]` + ` selected by [${selection}]` + ` playlist [${id}]`;
};
const timingInfoPropertyForMedia = mediaType => `${mediaType}TimingInfo`;
/**
* Returns the timestamp offset to use for the segment.
*
* @param {number} segmentTimeline
* The timeline of the segment
* @param {number} currentTimeline
* The timeline currently being followed by the loader
* @param {number} startOfSegment
* The estimated segment start
* @param {TimeRange[]} buffered
* The loader's buffer
* @param {boolean} overrideCheck
* If true, skip the checks that decide whether a new timestamp offset is needed and
* compute one unconditionally.
*
* @return {number|null}
* Either a number representing a new timestamp offset, or null if the segment is
* part of the same timeline
*/
const timestampOffsetForSegment = ({
segmentTimeline,
currentTimeline,
startOfSegment,
buffered,
overrideCheck
}) => {
// Check to see if we are crossing a discontinuity to see if we need to set the
// timestamp offset on the transmuxer and source buffer.
//
// Previously, we changed the timestampOffset if the start of this segment was less than
// the currently set timestampOffset, but this isn't desirable as it can produce bad
// behavior, especially around long running live streams.
if (!overrideCheck && segmentTimeline === currentTimeline) {
return null;
} // When changing renditions, it's possible to request a segment on an older timeline. For
// instance, given two renditions with the following:
//
// #EXTINF:10
// segment1
// #EXT-X-DISCONTINUITY
// #EXTINF:10
// segment2
// #EXTINF:10
// segment3
//
// And the current player state:
//
// current time: 8
// buffer: 0 => 20
//
// The next segment on the current rendition would be segment3, filling the buffer from
// 20s onwards. However, if a rendition switch happens after segment2 was requested,
// then the next segment to be requested will be segment1 from the new rendition in
// order to fill time 8 and onwards. Using the buffered end would result in repeated
// content (since it would position segment1 of the new rendition starting at 20s). This
// case can be identified when the new segment's timeline is a prior value. Instead of
// using the buffered end, the startOfSegment can be used, which, hopefully, will be
// more accurate to the actual start time of the segment.
if (segmentTimeline < currentTimeline) {
return startOfSegment;
} // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
// value uses the end of the last segment if it is available. While this value
// should often be correct, it's better to rely on the buffered end, as the new
// content post discontinuity should line up with the buffered end as if it were
// time 0 for the new content.
return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
};
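// Illustrative calls (hypothetical values): when the segment sits on an older timeline the
// estimated startOfSegment is used, otherwise the buffered end is preferred.
//
//   timestampOffsetForSegment({ segmentTimeline: 1, currentTimeline: 1, overrideCheck: false });
//   // => null (same timeline, nothing to change)
//   timestampOffsetForSegment({ segmentTimeline: 0, currentTimeline: 1, startOfSegment: 8, buffered: createTimeRanges([[0, 20]]) });
//   // => 8 (older timeline after a rendition switch: use the estimated segment start)
//   timestampOffsetForSegment({ segmentTimeline: 2, currentTimeline: 1, startOfSegment: 40, buffered: createTimeRanges([[0, 20]]) });
//   // => 20 (new timeline: line the new content up with the buffered end)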
/**
* Returns whether or not the loader should wait for a timeline change from the timeline
* change controller before processing the segment.
*
* Primary timing in VHS goes by video. This is different from most media players, as
* audio is more often used as the primary timing source. For the foreseeable future, VHS
* will continue to use video as the primary timing source, due to the current logic and
* expectations built around it.
* Since the timing follows video, in order to maintain sync, the video loader is
* responsible for setting both audio and video source buffer timestamp offsets.
*
* Setting different values for audio and video source buffers could lead to
* desyncing. The following examples demonstrate some of the situations where this
* distinction is important. Note that all of these cases involve demuxed content. When
* content is muxed, the audio and video are packaged together, therefore syncing
* separate media playlists is not an issue.
*
* CASE 1: Audio prepares to load a new timeline before video:
*
* Timeline: 0 1
* Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
* Audio Loader: ^
* Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
* Video Loader ^
*
* In the above example, the audio loader is preparing to load the 6th segment, the first
* after a discontinuity, while the video loader is still loading the 5th segment, before
* the discontinuity.
*
* If the audio loader goes ahead and loads and appends the 6th segment before the video
* loader crosses the discontinuity, then when appended, the 6th audio segment will use
* the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
* the audio loader must provide the audioAppendStart value to trim the content in the
* transmuxer, and that value relies on the audio timestamp offset. Since the audio
* timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
* segment until that value is provided.
*
* CASE 2: Video prepares to load a new timeline before audio:
*
* Timeline: 0 1
* Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
* Audio Loader: ^
* Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
* Video Loader ^
*
* In the above example, the video loader is preparing to load the 6th segment, the first
* after a discontinuity, while the audio loader is still loading the 5th segment, before
* the discontinuity.
*
* If the video loader goes ahead and loads and appends the 6th segment, then once the
* segment is loaded and processed, both the video and audio timestamp offsets will be
* set, since video is used as the primary timing source. This is to ensure content lines
* up appropriately, as any modifications to the video timing are reflected by audio when
* the video loader sets the audio and video timestamp offsets to the same value. However,
* setting the timestamp offset for audio before audio has had a chance to change
* timelines will likely lead to desyncing, as the audio loader will append segment 5 with
* a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
*
* CASE 3: When seeking, audio prepares to load a new timeline before video
*
* Timeline: 0 1
* Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
* Audio Loader: ^
* Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
* Video Loader ^
*
* In the above example, both audio and video loaders are loading segments from timeline
* 0, but imagine that the seek originated from timeline 1.
*
* When seeking to a new timeline, the timestamp offset will be set based on the expected
* segment start of the loaded video segment. In order to maintain sync, the audio loader
* must wait for the video loader to load its segment and update both the audio and video
* timestamp offsets before it may load and append its own segment. This is the case
* whether the seek results in a mismatched segment request (e.g., the audio loader
* chooses to load segment 3 and the video loader chooses to load segment 4) or the
* loaders choose to load the same segment index from each playlist, as the segments may
* not be aligned perfectly, even for matching segment indexes.
*
* @param {Object} timelineChangeController
* @param {number} currentTimeline
* The timeline currently being followed by the loader
* @param {number} segmentTimeline
* The timeline of the segment being loaded
* @param {('main'|'audio')} loaderType
* The loader type
* @param {boolean} audioDisabled
* Whether the audio is disabled for the loader. This should only be true when the
* loader may have muxed audio in its segment, but should not append it, e.g., for
* the main loader when an alternate audio playlist is active.
*
* @return {boolean}
* Whether the loader should wait for a timeline change from the timeline change
* controller before processing the segment
*/
const shouldWaitForTimelineChange = ({
timelineChangeController,
currentTimeline,
segmentTimeline,
loaderType,
audioDisabled
}) => {
if (currentTimeline === segmentTimeline) {
return false;
}
if (loaderType === 'audio') {
const lastMainTimelineChange = timelineChangeController.lastTimelineChange({
type: 'main'
}); // Audio loader should wait if:
//
// * main hasn't had a timeline change yet (thus has not loaded its first segment)
// * main hasn't yet changed to the timeline audio is looking to load
return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
} // The main loader only needs to wait for timeline changes if there's demuxed audio.
// Otherwise, there's nothing to wait for, since audio would be muxed into the main
// loader's segments (or the content is audio/video only and handled by the main
// loader).
if (loaderType === 'main' && audioDisabled) {
const pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
type: 'audio'
}); // Main loader should wait for the audio loader if audio is not pending a timeline
// change to the current timeline.
//
// Since the main loader is responsible for setting the timestamp offset for both
// audio and video, the main loader must wait for audio to be about to change to its
// timeline before setting the offset, otherwise, if audio is behind in loading,
// segments from the previous timeline would be adjusted by the new timestamp offset.
//
// This requirement means that video will not cross a timeline until the audio is
// about to cross to it, so that way audio and video will always cross the timeline
// together.
//
// In addition to normal timeline changes, these rules also apply to the start of a
// stream (going from a non-existent timeline, -1, to timeline 0). It's important
// that these rules apply to the first timeline change because if they did not, it's
// possible that the main loader will cross two timelines before the audio loader has
// crossed one. Logic may be implemented to handle the startup as a special case, but
// it's easier to simply treat all timeline changes the same.
if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
return false;
}
return true;
}
return false;
};
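// Sketch of the audio-loader case described above (hypothetical controller state):
//
//   shouldWaitForTimelineChange({
//     timelineChangeController, // assume lastTimelineChange({ type: 'main' }) returns { from: -1, to: 0 }
//     currentTimeline: 0,
//     segmentTimeline: 1,
//     loaderType: 'audio'
//   });
//   // => true: main has not yet changed to timeline 1, so the audio loader must wait (CASE 1)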
const shouldFixBadTimelineChanges = timelineChangeController => {
if (!timelineChangeController) {
return false;
}
const pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
type: 'audio'
});
const pendingMainTimelineChange = timelineChangeController.pendingTimelineChange({
type: 'main'
});
const hasPendingTimelineChanges = pendingAudioTimelineChange && pendingMainTimelineChange;
const differentPendingChanges = hasPendingTimelineChanges && pendingAudioTimelineChange.to !== pendingMainTimelineChange.to;
const isNotInitialPendingTimelineChange = hasPendingTimelineChanges && pendingAudioTimelineChange.from !== -1 && pendingMainTimelineChange.from !== -1;
if (isNotInitialPendingTimelineChange && differentPendingChanges) {
return true;
}
return false;
};
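// Example controller states (hypothetical): a fix is only warranted when both loaders have
// pending changes, the changes target different timelines, and neither is the initial
// -1 -> 0 change.
//
//   // pending audio { from: 0, to: 1 }, pending main { from: 0, to: 2 }   => true
//   // pending audio { from: -1, to: 0 }, pending main { from: -1, to: 1 } => false (initial change)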
/**
* Fixes certain bad timeline scenarios by resetting the loader.
*
* @param {SegmentLoader} segmentLoader
*/
const fixBadTimelineChange = segmentLoader => {
if (!segmentLoader) {
return;
}
segmentLoader.pause();
segmentLoader.resetEverything();
segmentLoader.load();
};
/**
* Check if the pending audio timeline change is behind the
* pending main timeline change.
*
* @param {SegmentLoader} segmentLoader
* @return {boolean}
*/
const isAudioTimelineBehind = segmentLoader => {
const pendingAudioTimelineChange = segmentLoader.timelineChangeController_.pendingTimelineChange({
type: 'audio'
});
const pendingMainTimelineChange = segmentLoader.timelineChangeController_.pendingTimelineChange({
type: 'main'
});
const hasPendingTimelineChanges = pendingAudioTimelineChange && pendingMainTimelineChange;
return hasPendingTimelineChanges && pendingAudioTimelineChange.to < pendingMainTimelineChange.to;
};
/**
* A method to check if the player is waiting for a timeline change, and fixes
* certain scenarios where the timelines need to be updated.
*
* @param {SegmentLoader} segmentLoader
*/
const checkAndFixTimelines = segmentLoader => {
const segmentInfo = segmentLoader.pendingSegment_;
if (!segmentInfo) {
return;
}
const waitingForTimelineChange = shouldWaitForTimelineChange({
timelineChangeController: segmentLoader.timelineChangeController_,
currentTimeline: segmentLoader.currentTimeline_,
segmentTimeline: segmentInfo.timeline,
loaderType: segmentLoader.loaderType_,
audioDisabled: segmentLoader.audioDisabled_
});
if (waitingForTimelineChange && shouldFixBadTimelineChanges(segmentLoader.timelineChangeController_)) {
// Audio being behind should only happen on DASH sources.
if (segmentLoader.sourceType_ === 'dash' && isAudioTimelineBehind(segmentLoader)) {
segmentLoader.timelineChangeController_.trigger('audioTimelineBehind');
return;
}
fixBadTimelineChange(segmentLoader);
}
};
const mediaDuration = timingInfos => {
let maxDuration = 0;
['video', 'audio'].forEach(function (type) {
const typeTimingInfo = timingInfos[`${type}TimingInfo`];
if (!typeTimingInfo) {
return;
}
const {
start,
end
} = typeTimingInfo;
let duration;
if (typeof start === 'bigint' || typeof end === 'bigint') {
duration = window$1.BigInt(end) - window$1.BigInt(start);
} else if (typeof start === 'number' && typeof end === 'number') {
duration = end - start;
}
if (typeof duration !== 'undefined' && duration > maxDuration) {
maxDuration = duration;
}
}); // convert back to a number if it is lower than MAX_SAFE_INTEGER
// as we only need BigInt when we are above that.
if (typeof maxDuration === 'bigint' && maxDuration < Number.MAX_SAFE_INTEGER) {
maxDuration = Number(maxDuration);
}
return maxDuration;
};
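// Example (hypothetical timing info): the duration is computed per media type and the
// largest one wins; BigInt is only involved when start/end exceed Number.MAX_SAFE_INTEGER.
//
//   mediaDuration({
//     videoTimingInfo: { start: 10, end: 16 },
//     audioTimingInfo: { start: 10, end: 15.9 }
//   });
//   // => 6 (the video duration is the larger of the two)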
const segmentTooLong = ({
segmentDuration,
maxDuration
}) => {
// 0 duration segments are most likely due to metadata only segments or a lack of
// information.
if (!segmentDuration) {
return false;
} // For HLS:
//
// https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
// The EXTINF duration of each Media Segment in the Playlist
// file, when rounded to the nearest integer, MUST be less than or equal
// to the target duration; longer segments can trigger playback stalls
// or other errors.
//
// For DASH, the mpd-parser uses the largest reported segment duration as the target
// duration. Although that reported duration is occasionally approximate (i.e., not
// exact), a strict check may report that a segment is too long more often in DASH.
return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
};
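// Example (hypothetical durations, assuming TIME_FUDGE_FACTOR is a small fraction of a
// second): the duration is rounded before comparison, per the HLS rule quoted above.
//
//   segmentTooLong({ segmentDuration: 10.4, maxDuration: 10 }); // round(10.4) = 10 => false
//   segmentTooLong({ segmentDuration: 11.6, maxDuration: 10 }); // round(11.6) = 12 => true
//   segmentTooLong({ segmentDuration: 0, maxDuration: 10 });    // => false (no information)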
const getTroublesomeSegmentDurationMessage = (segmentInfo, sourceType) => {
// Right now we aren't following DASH's timing model exactly, so only perform
// this check for HLS content.
if (sourceType !== 'hls') {
return null;
}
const segmentDuration = mediaDuration({
audioTimingInfo: segmentInfo.audioTimingInfo,
videoTimingInfo: segmentInfo.videoTimingInfo
}); // Don't report if we lack information.
//
// If the segment has a duration of 0 it is either a lack of information or a
// metadata only segment and shouldn't be reported here.
if (!segmentDuration) {
return null;
}
const targetDuration = segmentInfo.playlist.targetDuration;
const isSegmentWayTooLong = segmentTooLong({
segmentDuration,
maxDuration: targetDuration * 2
});
const isSegmentSlightlyTooLong = segmentTooLong({
segmentDuration,
maxDuration: targetDuration
});
const segmentTooLongMessage = `Segment with index ${segmentInfo.mediaIndex} ` + `from playlist ${segmentInfo.playlist.id} ` + `has a duration of ${segmentDuration} ` + `when the reported duration is ${segmentInfo.duration} ` + `and the target duration is ${targetDuration}. ` + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
return {
severity: isSegmentWayTooLong ? 'warn' : 'info',
message: segmentTooLongMessage
};
}
return null;
};
/**
*
* @param {Object} options the loader type and the segment (either a segmentInfo or a simple segment object)
* @return {Object|undefined} a segmentInfo payload for events or errors
*/
const segmentInfoPayload = ({
type,
segment
}) => {
if (!segment) {
return;
}
const isEncrypted = Boolean(segment.key || segment.map && segment.map.key);
const isMediaInitialization = Boolean(segment.map && !segment.map.bytes);
const start = segment.startOfSegment === undefined ? segment.start : segment.startOfSegment;
return {
type: type || segment.type,
uri: segment.resolvedUri || segment.uri,
start,
duration: segment.duration,
isEncrypted,
isMediaInitialization
};
};
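// Example payload (hypothetical segment object) as attached to 'segmentselected' and
// similar event metadata:
//
//   segmentInfoPayload({ type: 'main', segment: { uri: 's1.ts', start: 0, duration: 6, key: { uri: 'k.key' } } });
//   // => { type: 'main', uri: 's1.ts', start: 0, duration: 6, isEncrypted: true, isMediaInitialization: false }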
/**
* An object that manages segment loading and appending.
*
* @class SegmentLoader
* @param {Object} options required and optional options
* @extends videojs.EventTarget
*/
class SegmentLoader extends videojs.EventTarget {
constructor(settings, options = {}) {
super(); // check pre-conditions
if (!settings) {
throw new TypeError('Initialization settings are required');
}
if (typeof settings.currentTime !== 'function') {
throw new TypeError('No currentTime getter specified');
}
if (!settings.mediaSource) {
throw new TypeError('No MediaSource specified');
} // public properties
this.bandwidth = settings.bandwidth;
this.throughput = {
rate: 0,
count: 0
};
this.roundTrip = NaN;
this.resetStats_();
this.mediaIndex = null;
this.partIndex = null; // private settings
this.hasPlayed_ = settings.hasPlayed;
this.currentTime_ = settings.currentTime;
this.seekable_ = settings.seekable;
this.seeking_ = settings.seeking;
this.duration_ = settings.duration;
this.mediaSource_ = settings.mediaSource;
this.vhs_ = settings.vhs;
this.loaderType_ = settings.loaderType;
this.currentMediaInfo_ = void 0;
this.startingMediaInfo_ = void 0;
this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
this.goalBufferLength_ = settings.goalBufferLength;
this.sourceType_ = settings.sourceType;
this.sourceUpdater_ = settings.sourceUpdater;
this.inbandTextTracks_ = settings.inbandTextTracks;
this.state_ = 'INIT';
this.timelineChangeController_ = settings.timelineChangeController;
this.shouldSaveSegmentTimingInfo_ = true;
this.parse708captions_ = settings.parse708captions;
this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;
this.captionServices_ = settings.captionServices;
this.exactManifestTimings = settings.exactManifestTimings;
this.addMetadataToTextTrack = settings.addMetadataToTextTrack; // private instance variables
this.checkBufferTimeout_ = null;
this.error_ = void 0;
this.currentTimeline_ = -1;
this.shouldForceTimestampOffsetAfterResync_ = false;
this.pendingSegment_ = null;
this.xhrOptions_ = null;
this.pendingSegments_ = [];
this.audioDisabled_ = false;
this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller
this.gopBuffer_ = [];
this.timeMapping_ = 0;
this.safeAppend_ = false;
this.appendInitSegment_ = {
audio: true,
video: true
};
this.playlistOfLastInitSegment_ = {
audio: null,
video: null
};
this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
// information yet to start the loading process (e.g., if the audio loader wants to
// load a segment from the next timeline but the main loader hasn't yet crossed that
// timeline), then the load call will be added to the queue until it is ready to be
// processed.
this.loadQueue_ = [];
this.metadataQueue_ = {
id3: [],
caption: []
};
this.waitingOnRemove_ = false;
this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback
this.activeInitSegmentId_ = null;
this.initSegments_ = {}; // HLSe playback
this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
this.keyCache_ = {};
this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
// between a time in the display time and a segment index within
// a playlist
this.syncController_ = settings.syncController;
this.syncPoint_ = {
segmentIndex: 0,
time: 0
};
this.transmuxer_ = this.createTransmuxer_();
this.triggerSyncInfoUpdate_ = () => this.trigger('syncinfoupdate');
this.syncController_.on('syncinfoupdate', this.triggerSyncInfoUpdate_);
this.mediaSource_.addEventListener('sourceopen', () => {
if (!this.isEndOfStream_()) {
this.ended_ = false;
}
}); // ...for determining the fetch location
this.fetchAtBuffer_ = false;
this.logger_ = logger(`SegmentLoader[${this.loaderType_}]`);
Object.defineProperty(this, 'state', {
get() {
return this.state_;
},
set(newState) {
if (newState !== this.state_) {
this.logger_(`${this.state_} -> ${newState}`);
this.state_ = newState;
this.trigger('statechange');
}
}
});
this.sourceUpdater_.on('ready', () => {
if (this.hasEnoughInfoToAppend_()) {
this.processCallQueue_();
} else {
checkAndFixTimelines(this);
}
});
this.sourceUpdater_.on('codecschange', metadata => {
this.trigger(_extends({
type: 'codecschange'
}, metadata));
}); // Only the main loader needs to listen for pending timeline changes, as the main
// loader should wait for audio to be ready to change its timeline so that both main
// and audio timelines change together. For more details, see the
// shouldWaitForTimelineChange function.
if (this.loaderType_ === 'main') {
this.timelineChangeController_.on('pendingtimelinechange', () => {
if (this.hasEnoughInfoToAppend_()) {
this.processCallQueue_();
} else {
checkAndFixTimelines(this);
}
});
} // The main loader only listens on pending timeline changes, but the audio loader,
// since its loads follow main, needs to listen on timeline changes. For more details,
// see the shouldWaitForTimelineChange function.
if (this.loaderType_ === 'audio') {
this.timelineChangeController_.on('timelinechange', metadata => {
this.trigger(_extends({
type: 'timelinechange'
}, metadata));
if (this.hasEnoughInfoToLoad_()) {
this.processLoadQueue_();
} else {
checkAndFixTimelines(this);
}
if (this.hasEnoughInfoToAppend_()) {
this.processCallQueue_();
} else {
checkAndFixTimelines(this);
}
});
}
}
/**
* TODO: The current sync controller consists of many HLS-specific strategies, and
* media sequence sync is also HLS-specific, but we would like to be protocol-agnostic at this level.
* This should be part of the sync controller, and the sync controller should expect a different strategy list based on the protocol.
*
* @return {MediaSequenceSync|null}
* @private
*/
get mediaSequenceSync_() {
return this.syncController_.getMediaSequenceSync(this.loaderType_);
}
createTransmuxer_() {
return segmentTransmuxer.createTransmuxer({
remux: false,
alignGopsAtEnd: this.safeAppend_,
keepOriginalTimestamps: true,
parse708captions: this.parse708captions_,
captionServices: this.captionServices_
});
}
/**
* reset all of our media stats
*
* @private
*/
resetStats_() {
this.mediaBytesTransferred = 0;
this.mediaRequests = 0;
this.mediaRequestsAborted = 0;
this.mediaRequestsTimedout = 0;
this.mediaRequestsErrored = 0;
this.mediaTransferDuration = 0;
this.mediaSecondsLoaded = 0;
this.mediaAppends = 0;
}
/**
* dispose of the SegmentLoader and reset to the default state
*/
dispose() {
this.trigger('dispose');
this.state = 'DISPOSED';
this.pause();
this.abort_();
if (this.transmuxer_) {
this.transmuxer_.terminate();
}
this.resetStats_();
if (this.checkBufferTimeout_) {
window$1.clearTimeout(this.checkBufferTimeout_);
}
if (this.syncController_ && this.triggerSyncInfoUpdate_) {
this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
}
this.off();
}
setAudio(enable) {
this.audioDisabled_ = !enable;
if (enable) {
this.appendInitSegment_.audio = true;
} else {
// remove current track audio if it gets disabled
this.sourceUpdater_.removeAudio(0, this.duration_());
}
}
/**
* abort anything that the SegmentLoader is currently doing
* and reset to a default state
*/
abort() {
if (this.state !== 'WAITING') {
if (this.pendingSegment_) {
this.pendingSegment_ = null;
}
return;
}
this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
// since we are no longer "waiting" on any requests. XHR callback is not always run
// when the request is aborted. This will prevent the loader from being stuck in the
// WAITING state indefinitely.
this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
// next segment
if (!this.paused()) {
this.monitorBuffer_();
}
}
/**
* abort all pending xhr requests and null any pending segments
*
* @private
*/
abort_() {
if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
this.pendingSegment_.abortRequests();
} // clear out the segment being processed
this.pendingSegment_ = null;
this.callQueue_ = [];
this.loadQueue_ = [];
this.metadataQueue_.id3 = [];
this.metadataQueue_.caption = [];
this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
this.waitingOnRemove_ = false;
window$1.clearTimeout(this.quotaExceededErrorRetryTimeout_);
this.quotaExceededErrorRetryTimeout_ = null;
}
checkForAbort_(requestId) {
// If the state is APPENDING, then aborts will not modify the state, meaning the first
// callback that happens should reset the state to READY so that loading can continue.
if (this.state === 'APPENDING' && !this.pendingSegment_) {
this.state = 'READY';
return true;
}
if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
return true;
}
return false;
}
/**
* set an error on the segment loader and null out any pending segments
*
* @param {Error} error the error to set on the SegmentLoader
* @return {Error} the error that was set or that is currently set
*/
error(error) {
if (typeof error !== 'undefined') {
this.logger_('error occurred:', error);
this.error_ = error;
}
this.pendingSegment_ = null;
return this.error_;
}
endOfStream() {
this.ended_ = true;
if (this.transmuxer_) {
// need to clear out any cached data to prepare for the new segment
segmentTransmuxer.reset(this.transmuxer_);
}
this.gopBuffer_.length = 0;
this.pause();
this.trigger('ended');
}
/**
* Indicates which time ranges are buffered
*
* @return {TimeRange}
* TimeRange object representing the current buffered ranges
*/
buffered_() {
const trackInfo = this.getMediaInfo_();
if (!this.sourceUpdater_ || !trackInfo) {
return createTimeRanges();
}
if (this.loaderType_ === 'main') {
const {
hasAudio,
hasVideo,
isMuxed
} = trackInfo;
if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
return this.sourceUpdater_.buffered();
}
if (hasVideo) {
return this.sourceUpdater_.videoBuffered();
}
} // One case that can be ignored for now is audio only with alt audio,
// as we don't yet have proper support for that.
return this.sourceUpdater_.audioBuffered();
}
/**
* Gets and sets init segment for the provided map
*
* @param {Object} map
* The map object representing the init segment to get or set
* @param {boolean=} set
* If true, the init segment for the provided map should be saved
* @return {Object}
* map object for desired init segment
*/
initSegmentForMap(map, set = false) {
if (!map) {
return null;
}
const id = initSegmentId(map);
let storedMap = this.initSegments_[id];
if (set && !storedMap && map.bytes) {
this.initSegments_[id] = storedMap = {
resolvedUri: map.resolvedUri,
byterange: map.byterange,
bytes: map.bytes,
tracks: map.tracks,
timescales: map.timescales
};
}
return storedMap || map;
}
/**
* Gets and sets key for the provided key
*
* @param {Object} key
* The key object representing the key to get or set
* @param {boolean=} set
* If true, the key for the provided key should be saved
* @return {Object}
* Key object for desired key
*/
segmentKey(key, set = false) {
if (!key) {
return null;
}
const id = segmentKeyId(key);
let storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
// https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
this.keyCache_[id] = storedKey = {
resolvedUri: key.resolvedUri,
bytes: key.bytes
};
}
const result = {
resolvedUri: (storedKey || key).resolvedUri
};
if (storedKey) {
result.bytes = storedKey.bytes;
}
return result;
}
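// Usage sketch (hypothetical key object): with cacheEncryptionKeys_ enabled and set === true,
// the key bytes are cached by segmentKeyId and included in the result; later calls for the
// same key return the cached bytes even when only the URI is known.
//
//   loader.segmentKey({ resolvedUri: 'https://example.com/key.php', bytes: new Uint32Array(4) }, true);
//   // => { resolvedUri: 'https://example.com/key.php', bytes: <cached Uint32Array> }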
/**
* Returns true if all configuration required for loading is present, otherwise false.
*
* @return {boolean} True if all the configuration required for loading is present
* @private
*/
couldBeginLoading_() {
return this.playlist_ && !this.paused();
}
/**
* load a playlist and start to fill the buffer
*/
load() {
// un-pause
this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
// specified
if (!this.playlist_) {
return;
} // if all the configuration is ready, initialize and begin loading
if (this.state === 'INIT' && this.couldBeginLoading_()) {
return this.init_();
} // if we're in the middle of processing a segment already, don't
// kick off an additional segment request
if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
return;
}
this.state = 'READY';
}
/**
* Once all the starting parameters have been specified, begin
* operation. This method should only be invoked from the INIT
* state.
*
* @private
*/
init_() {
this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
// audio data from the muxed content should be removed
this.resetEverything();
return this.monitorBuffer_();
}
/**
* set a playlist on the segment loader
*
* @param {Object} newPlaylist the playlist to set on the segment loader
* @param {Object} [options={}] xhr options used for subsequent segment requests
*/
playlist(newPlaylist, options = {}) {
if (!newPlaylist) {
return;
}
const oldPlaylist = this.playlist_;
const segmentInfo = this.pendingSegment_;
this.playlist_ = newPlaylist;
this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
// is always our zero-time so force a sync update each time the playlist
// is refreshed from the server
//
// Use the INIT state to determine if playback has started, as the playlist sync info
// should be fixed once requests begin (as sync points are generated based on sync
// info), but not before then.
if (this.state === 'INIT') {
newPlaylist.syncInfo = {
mediaSequence: newPlaylist.mediaSequence,
time: 0
}; // Setting the date time mapping means mapping the program date time (if available)
// to time 0 on the player's timeline. The playlist's syncInfo serves a similar
// purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
// be updated as the playlist is refreshed before the loader starts loading, the
// program date time mapping needs to be updated as well.
//
// This mapping is only done for the main loader because a program date time should
// map equivalently between playlists.
if (this.loaderType_ === 'main') {
this.syncController_.setDateTimeMappingForStart(newPlaylist);
}
}
let oldId = null;
if (oldPlaylist) {
if (oldPlaylist.id) {
oldId = oldPlaylist.id;
} else if (oldPlaylist.uri) {
oldId = oldPlaylist.uri;
}
}
this.logger_(`playlist update [${oldId} => ${newPlaylist.id || newPlaylist.uri}]`);
if (this.mediaSequenceSync_) {
this.mediaSequenceSync_.update(newPlaylist, this.currentTime_());
this.logger_(`Playlist update:
currentTime: ${this.currentTime_()}
bufferedEnd: ${lastBufferedEnd(this.buffered_())}
`, this.mediaSequenceSync_.diagnostics);
} // in VOD, this is always a rendition switch (or we updated our syncInfo above)
// in LIVE, we always want to update with new playlists (including refreshes)
this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
// buffering now
if (this.state === 'INIT' && this.couldBeginLoading_()) {
return this.init_();
}
if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
if (this.mediaIndex !== null) {
// we must reset/resync the segment loader when we switch renditions and
// the segment loader is already synced to the previous rendition
// We only want to reset the loader here for LLHLS playback, as resetLoader sets fetchAtBuffer_
// to false, resulting in fetching segments at currentTime and causing repeated
// same-segment requests on playlist change. This erroneously drives up the playback watcher
// stalled segment count, as re-requesting segments at the currentTime or browser cached segments
// will not change the buffer.
// Reference for LLHLS fixes: https://github.com/videojs/http-streaming/pull/1201
const isLLHLS = !newPlaylist.endList && typeof newPlaylist.partTargetDuration === 'number';
if (isLLHLS) {
this.resetLoader();
} else {
this.resyncLoader();
}
}
this.currentMediaInfo_ = void 0;
this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined
return;
} // we reloaded the same playlist so we are in a live scenario
// and we will likely need to adjust the mediaIndex
const mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
this.logger_(`live window shift [${mediaSequenceDiff}]`); // update the mediaIndex on the SegmentLoader
// this is important because we can abort a request and this value must be
// equal to the last appended mediaIndex
if (this.mediaIndex !== null) {
this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
// update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
// new playlist was incremented by 1.
if (this.mediaIndex < 0) {
this.mediaIndex = null;
this.partIndex = null;
} else {
const segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
// unless parts fell off of the playlist for this segment.
// In that case we need to reset partIndex and resync
if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
const mediaIndex = this.mediaIndex;
this.logger_(`currently processing part (index ${this.partIndex}) no longer exists.`);
this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
// as the part was dropped from our current playlist's segment.
// The mediaIndex will still be valid so keep that around.
this.mediaIndex = mediaIndex;
}
}
} // update the mediaIndex on the SegmentInfo object
// this is important because we will update this.mediaIndex with this value
// in `handleAppendsDone_` after the segment has been successfully appended
if (segmentInfo) {
segmentInfo.mediaIndex -= mediaSequenceDiff;
if (segmentInfo.mediaIndex < 0) {
segmentInfo.mediaIndex = null;
segmentInfo.partIndex = null;
} else {
// we need to update the referenced segment so that timing information is
// saved for the new playlist's segment, however, if the segment fell off the
// playlist, we can leave the old reference and just lose the timing info
if (segmentInfo.mediaIndex >= 0) {
segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
}
if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
}
}
}
this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
}
/**
* Prevent the loader from fetching additional segments. If there
* is a segment request outstanding, it will finish processing
* before the loader halts. A segment loader can be unpaused by
* calling load().
*/
pause() {
if (this.checkBufferTimeout_) {
window$1.clearTimeout(this.checkBufferTimeout_);
this.checkBufferTimeout_ = null;
}
}
/**
* Returns whether the segment loader is fetching additional
* segments when given the opportunity. This property can be
* modified through calls to pause() and load().
*/
paused() {
return this.checkBufferTimeout_ === null;
}
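// The pause()/paused()/load() trio form a simple contract (usage sketch):
//
//   loader.pause();   // clears checkBufferTimeout_
//   loader.paused();  // => true, since the timeout is now null
//   loader.load();    // re-schedules monitorBuffer_, so paused() returns false again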
/**
* Delete all the buffered data and reset the SegmentLoader
*
* @param {Function} [done] an optional callback to be executed when the remove
* operation is complete
*/
resetEverything(done) {
this.ended_ = false;
this.activeInitSegmentId_ = null;
this.appendInitSegment_ = {
audio: true,
video: true
};
this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
// VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
// we then clamp the value to duration if necessary.
this.remove(0, Infinity, done); // clears fmp4 captions
if (this.transmuxer_) {
this.transmuxer_.postMessage({
action: 'clearAllMp4Captions'
}); // reset the cache in the transmuxer
this.transmuxer_.postMessage({
action: 'reset'
});
}
}
/**
* Force the SegmentLoader to resync and start loading around the currentTime instead
* of starting at the end of the buffer
*
* Useful for fast quality changes
*/
resetLoader() {
this.fetchAtBuffer_ = false;
if (this.mediaSequenceSync_) {
this.mediaSequenceSync_.resetAppendedStatus();
}
this.resyncLoader();
}
/**
* Force the SegmentLoader to restart synchronization and make a conservative guess
* before returning to the simple walk-forward method
*/
resyncLoader() {
if (this.transmuxer_) {
// need to clear out any cached data to prepare for the new segment
segmentTransmuxer.reset(this.transmuxer_);
}
this.mediaIndex = null;
this.partIndex = null;
this.syncPoint_ = null;
this.isPendingTimestampOffset_ = false; // this is mainly to sync timing-info when switching between renditions with and without timestamp-rollover,
// so we don't want it for DASH or fragmented mp4 segments.
const isFmp4 = this.currentMediaInfo_ && this.currentMediaInfo_.isFmp4;
const isHlsTs = this.sourceType_ === 'hls' && !isFmp4;
if (isHlsTs) {
this.shouldForceTimestampOffsetAfterResync_ = true;
}
this.callQueue_ = [];
this.loadQueue_ = [];
this.metadataQueue_.id3 = [];
this.metadataQueue_.caption = [];
this.abort();
if (this.transmuxer_) {
this.transmuxer_.postMessage({
action: 'clearParsedMp4Captions'
});
}
}
/**
* Remove any data in the source buffer between start and end times
*
* @param {number} start - the start time of the region to remove from the buffer
* @param {number} end - the end time of the region to remove from the buffer
* @param {Function} [done] - an optional callback to be executed when the remove
*                            operation is complete
* @param {boolean} [force=false] - force all remove operations to happen
*/
remove(start, end, done = () => {}, force = false) {
// clamp end to duration if we need to remove everything.
// This is due to a browser bug that causes issues if we remove to Infinity.
// videojs/videojs-contrib-hls#1225
if (end === Infinity) {
end = this.duration_();
} // skip removes that would throw an error
// commonly happens during a rendition switch at the start of a video
// from start 0 to end 0
if (end <= start) {
this.logger_(`skipping remove because end ${end} is <= start ${start}`);
return;
}
if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
return;
} // set it to one to complete this function's removes
let removesRemaining = 1;
const removeFinished = () => {
removesRemaining--;
if (removesRemaining === 0) {
done();
}
};
if (force || !this.audioDisabled_) {
removesRemaining++;
this.sourceUpdater_.removeAudio(start, end, removeFinished);
} // While it would be better to only remove video if the main loader has video, this
// should be safe with audio only as removeVideo will call back even if there's no
// video buffer.
//
// In theory we can check to see if there's video before calling the remove, but in
// the event that we're switching between renditions and from video to audio only
// (when we add support for that), we may need to clear the video contents despite
// what the new media will contain.
if (force || this.loaderType_ === 'main') {
this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
removesRemaining++;
this.sourceUpdater_.removeVideo(start, end, removeFinished);
} // remove any captions and ID3 tags
for (const track in this.inbandTextTracks_) {
removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
}
removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
removeFinished();
}
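// Usage sketch (hypothetical times): the removesRemaining bookkeeping above guarantees that
// `done` fires exactly once, after every scheduled source buffer remove has called back.
//
//   loader.remove(0, 30, () => loader.logger_('back buffer trimmed'));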
/**
* (re-)schedule monitorBufferTick_ to run as soon as possible
*
* @private
*/
monitorBuffer_() {
if (this.checkBufferTimeout_) {
window$1.clearTimeout(this.checkBufferTimeout_);
}
this.checkBufferTimeout_ = window$1.setTimeout(this.monitorBufferTick_.bind(this), 1);
}
/**
* As long as the SegmentLoader is in the READY state, periodically
* invoke fillBuffer_().
*
* @private
*/
monitorBufferTick_() {
if (this.state === 'READY') {
this.fillBuffer_();
}
if (this.checkBufferTimeout_) {
window$1.clearTimeout(this.checkBufferTimeout_);
}
this.checkBufferTimeout_ = window$1.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
}
/**
* fill the buffer with segments unless the sourceBuffers are
* currently updating
*
* Note: this function should only ever be called by monitorBuffer_
* and never directly
*
* @private
*/
fillBuffer_() {
// TODO since the source buffer maintains a queue, and we shouldn't call this function
// except when we're ready for the next segment, this check can most likely be removed
if (this.sourceUpdater_.updating()) {
return;
} // see if we need to begin loading immediately
const segmentInfo = this.chooseNextRequest_();
if (!segmentInfo) {
return;
}
const metadata = {
segmentInfo: segmentInfoPayload({
type: this.loaderType_,
segment: segmentInfo
})
};
this.trigger({
type: 'segmentselected',
metadata
});
if (typeof segmentInfo.timestampOffset === 'number') {
this.isPendingTimestampOffset_ = false;
this.timelineChangeController_.pendingTimelineChange({
type: this.loaderType_,
from: this.currentTimeline_,
to: segmentInfo.timeline
});
}
this.loadSegment_(segmentInfo);
}
/**
* Determines if we should call endOfStream on the media source based
* on the state of the buffer or if the appended segment was the final
* segment in the playlist.
*
* @param {number} [mediaIndex] the media index of segment we last appended
* @param {Object} [playlist] a media playlist object
* @return {boolean} do we need to call endOfStream on the MediaSource
*/
isEndOfStream_(mediaIndex = this.mediaIndex, playlist = this.playlist_, partIndex = this.partIndex) {
if (!playlist || !this.mediaSource_) {
return false;
}
const segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero based but length is 1 based
const appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.
const appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
// so that MediaSources can trigger the `ended` event when it runs out of
// buffered data instead of waiting for us
return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
}
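// Worked example (hypothetical VOD playlist with three segments, s0..s2, and no parts):
//
//   loader.isEndOfStream_(2, { endList: true, segments: [s0, s1, s2] });
//   // appendedLastSegment = (2 + 1 === 3) = true, appendedLastPart = true (no parts)
//   // => true, provided mediaSource_.readyState is 'open'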
/**
* Determines what request should be made given current segment loader state.
*
* @return {Object} a request object that describes the segment/part to load
*/
chooseNextRequest_() {
const buffered = this.buffered_();
const bufferedEnd = lastBufferedEnd(buffered) || 0;
const bufferedTime = timeAheadOf(buffered, this.currentTime_());
const preloaded = !this.hasPlayed_() && bufferedTime >= 1;
const haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
const segments = this.playlist_.segments; // return no segment if:
// 1. we don't have segments
// 2. The video has not yet played and we already downloaded a segment
// 3. we already have enough buffered time
if (!segments.length || preloaded || haveEnoughBuffer) {
return null;
}
this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_(), this.loaderType_);
const next = {
partIndex: null,
mediaIndex: null,
startOfSegment: null,
playlist: this.playlist_,
isSyncRequest: Boolean(!this.syncPoint_)
};
if (next.isSyncRequest) {
next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
this.logger_(`choose next request. Cannot find sync point. Falling back to media index: ${next.mediaIndex}`);
} else if (this.mediaIndex !== null) {
const segment = segments[this.mediaIndex];
const partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
next.startOfSegment = segment.end ? segment.end : bufferedEnd;
if (segment.parts && segment.parts[partIndex + 1]) {
next.mediaIndex = this.mediaIndex;
next.partIndex = partIndex + 1;
} else {
next.mediaIndex = this.mediaIndex + 1;
}
} else {
let segmentIndex;
let partIndex;
let startTime;
const targetTime = this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_();
if (this.mediaSequenceSync_) {
this.logger_(`chooseNextRequest_ request after Quality Switch:
For TargetTime: ${targetTime}.
CurrentTime: ${this.currentTime_()}
BufferedEnd: ${bufferedEnd}
Fetch At Buffer: ${this.fetchAtBuffer_}
`, this.mediaSequenceSync_.diagnostics);
}
if (this.mediaSequenceSync_ && this.mediaSequenceSync_.isReliable) {
const syncInfo = this.getSyncInfoFromMediaSequenceSync_(targetTime);
if (!syncInfo) {
const message = 'No sync info found while using media sequence sync';
this.error({
message,
metadata: {
errorType: videojs.Error.StreamingFailedToSelectNextSegment,
error: new Error(message)
}
});
this.logger_('chooseNextRequest_ - no sync info found using media sequence sync'); // no match
return null;
}
this.logger_(`chooseNextRequest_ mediaSequence syncInfo (${syncInfo.start} --> ${syncInfo.end})`);
segmentIndex = syncInfo.segmentIndex;
partIndex = syncInfo.partIndex;
startTime = syncInfo.start;
} else {
this.logger_('chooseNextRequest_ - fallback to a regular segment selection algorithm, based on a syncPoint.'); // fallback
const mediaInfoForTime = Playlist.getMediaInfoForTime({
exactManifestTimings: this.exactManifestTimings,
playlist: this.playlist_,
currentTime: targetTime,
startingPartIndex: this.syncPoint_.partIndex,
startingSegmentIndex: this.syncPoint_.segmentIndex,
startTime: this.syncPoint_.time
});
segmentIndex = mediaInfoForTime.segmentIndex;
partIndex = mediaInfoForTime.partIndex;
startTime = mediaInfoForTime.startTime;
}
next.getMediaInfoForTime = this.fetchAtBuffer_ ? `bufferedEnd ${targetTime}` : `currentTime ${targetTime}`;
next.mediaIndex = segmentIndex;
next.startOfSegment = startTime;
next.partIndex = partIndex;
this.logger_(`choose next request. Playlist switched and we have a sync point. Media Index: ${next.mediaIndex} `);
}
const nextSegment = segments[next.mediaIndex];
let nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
// the next partIndex is invalid do not choose a next segment.
if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
return null;
} // if the next segment has parts, and we don't have a partIndex.
// Set partIndex to 0
if (typeof next.partIndex !== 'number' && nextSegment.parts) {
next.partIndex = 0;
nextPart = nextSegment.parts[0];
} // independentSegments applies to every segment in a playlist. If independentSegments appears in a main playlist,
// it applies to each segment in each media playlist.
// https://datatracker.ietf.org/doc/html/draft-pantos-http-live-streaming-23#section-4.3.5.1
const hasIndependentSegments = this.vhs_.playlists && this.vhs_.playlists.main && this.vhs_.playlists.main.independentSegments || this.playlist_.independentSegments; // if we have no buffered data then we need to make sure
// that the next part we append is "independent" if possible.
// So we check if the previous part is independent, and request
// it if it is.
if (!bufferedTime && nextPart && !hasIndependentSegments && !nextPart.independent) {
if (next.partIndex === 0) {
const lastSegment = segments[next.mediaIndex - 1];
const lastSegmentLastPart = lastSegment.parts && lastSegment.parts.length && lastSegment.parts[lastSegment.parts.length - 1];
if (lastSegmentLastPart && lastSegmentLastPart.independent) {
next.mediaIndex -= 1;
next.partIndex = lastSegment.parts.length - 1;
next.independent = 'previous segment';
}
} else if (nextSegment.parts[next.partIndex - 1].independent) {
next.partIndex -= 1;
next.independent = 'previous part';
}
}
const ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
// 1. this is the last segment in the playlist
// 2. end of stream has been called on the media source already
// 3. the player is not seeking
if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
return null;
}
if (this.shouldForceTimestampOffsetAfterResync_) {
this.shouldForceTimestampOffsetAfterResync_ = false;
next.forceTimestampOffset = true;
this.logger_('choose next request. Force timestamp offset after loader resync');
}
return this.generateSegmentInfo_(next);
}
getSyncInfoFromMediaSequenceSync_(targetTime) {
if (!this.mediaSequenceSync_) {
return null;
} // we should pull the target time to the least available time if we drop out of sync for any reason
const finalTargetTime = Math.max(targetTime, this.mediaSequenceSync_.start);
if (targetTime !== finalTargetTime) {
this.logger_(`getSyncInfoFromMediaSequenceSync_. Pulled target time from ${targetTime} to ${finalTargetTime}`);
}
const mediaSequenceSyncInfo = this.mediaSequenceSync_.getSyncInfoForTime(finalTargetTime);
if (!mediaSequenceSyncInfo) {
// no match at all
return null;
}
if (!mediaSequenceSyncInfo.isAppended) {
// has a perfect match
return mediaSequenceSyncInfo;
} // has match, but segment was already appended.
// attempt to auto-advance to the nearest next segment:
const nextMediaSequenceSyncInfo = this.mediaSequenceSync_.getSyncInfoForTime(mediaSequenceSyncInfo.end);
if (!nextMediaSequenceSyncInfo) {
// no match at all
return null;
}
if (nextMediaSequenceSyncInfo.isAppended) {
this.logger_('getSyncInfoFromMediaSequenceSync_: We encountered an unexpected scenario where the next media sequence sync info is also appended!');
} // got match with the nearest next segment
return nextMediaSequenceSyncInfo;
}
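// Sketch of the auto-advance behavior (hypothetical sync info): when the exact match has
// already been appended, the loader advances to the entry that starts at the match's end.
//
//   // getSyncInfoForTime(14) => { segmentIndex: 2, start: 12, end: 18, isAppended: true }
//   // since that segment was already appended, getSyncInfoForTime(18) is consulted and
//   // { segmentIndex: 3, start: 18, end: 24, isAppended: false } is returned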
generateSegmentInfo_(options) {
const {
independent,
playlist,
mediaIndex,
startOfSegment,
isSyncRequest,
partIndex,
forceTimestampOffset,
getMediaInfoForTime
} = options;
const segment = playlist.segments[mediaIndex];
const part = typeof partIndex === 'number' && segment.parts[partIndex];
const segmentInfo = {
requestId: 'segment-loader-' + Math.random(),
// resolve the segment URL relative to the playlist
uri: part && part.resolvedUri || segment.resolvedUri,
// the segment's mediaIndex at the time it was requested
mediaIndex,
partIndex: part ? partIndex : null,
// whether or not to update the SegmentLoader's state with this
// segment's mediaIndex
isSyncRequest,
startOfSegment,
// the segment's playlist
playlist,
// unencrypted bytes of the segment
bytes: null,
// when a key is defined for this segment, the encrypted bytes
encryptedBytes: null,
// The target timestampOffset for this segment when we append it
// to the source buffer
timestampOffset: null,
// The timeline that the segment is in
timeline: segment.timeline,
// The expected duration of the segment in seconds
duration: part && part.duration || segment.duration,
// retain the segment in case the playlist updates while doing an async process
segment,
part,
byteLength: 0,
transmuxer: this.transmuxer_,
// type of getMediaInfoForTime that was used to get this segment
getMediaInfoForTime,
independent
};
const overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
segmentTimeline: segment.timeline,
currentTimeline: this.currentTimeline_,
startOfSegment,
buffered: this.buffered_(),
overrideCheck
});
const audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());
if (typeof audioBufferedEnd === 'number') {
// since the transmuxer is using the actual timing values, but the buffer is
// adjusted by the timestamp offset, we must adjust the value here
segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
}
if (this.sourceUpdater_.videoBuffered().length) {
segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_,
// since the transmuxer is using the actual timing values, but the time is
// adjusted by the timestmap offset, we must adjust the value here
this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
}
return segmentInfo;
} // get the timestampoffset for a segment,
// added so that vtt segment loader can override and prevent
// adding timestamp offsets.
timestampOffsetForSegment_(options) {
return timestampOffsetForSegment(options);
}
/**
* Determines if the network has enough bandwidth to complete the current segment
* request in a timely manner. If not, the request will be aborted early and bandwidth
* updated to trigger a playlist switch.
*
* @param {Object} stats
* Object containing stats about the request timing and size
* @private
*/
earlyAbortWhenNeeded_(stats) {
if (this.vhs_.tech_.paused() ||
// Don't abort if the current playlist is on the lowestEnabledRendition
// TODO: Replace using timeout with a boolean indicating whether this playlist is
// the lowestEnabledRendition.
!this.xhrOptions_.timeout ||
// Don't abort if we have no bandwidth information to estimate segment sizes
!this.playlist_.attributes.BANDWIDTH) {
return;
} // Wait at least 1 second since the first byte of data has been received before
// using the calculated bandwidth from the progress event to allow the bitrate
// to stabilize
if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
return;
}
const currentTime = this.currentTime_();
const measuredBandwidth = stats.bandwidth;
const segmentDuration = this.pendingSegment_.duration;
const requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
// if we are only left with less than 1 second when the request completes.
// A negative timeUntilRebuffer indicates we are already rebuffering
const timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
// is larger than the estimated time until the player runs out of forward buffer
if (requestTimeRemaining <= timeUntilRebuffer$1) {
return;
}
const switchCandidate = minRebufferMaxBandwidthSelector({
main: this.vhs_.playlists.main,
currentTime,
bandwidth: measuredBandwidth,
duration: this.duration_(),
segmentDuration,
timeUntilRebuffer: timeUntilRebuffer$1,
currentTimeline: this.currentTimeline_,
syncController: this.syncController_
});
if (!switchCandidate) {
return;
}
const rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
const timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
let minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
// potential round trip time of the new request so that we are not too aggressive
// with switching to a playlist that might save us a fraction of a second.
if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
minimumTimeSaving = 1;
}
if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
return;
} // set the bandwidth to that of the desired playlist being sure to scale by
// BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
// don't trigger a bandwidthupdate as the bandwidth is artificial
this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
this.trigger('earlyabort');
}
handleAbort_(segmentInfo) {
this.logger_(`Aborting ${segmentInfoString(segmentInfo)}`);
this.mediaRequestsAborted += 1;
}
/**
* XHR `progress` event handler
*
* @param {Event} event
* The XHR `progress` event
* @param {Object} simpleSegment
* A simplified segment object copy
* @private
*/
handleProgress_(event, simpleSegment) {
this.earlyAbortWhenNeeded_(simpleSegment.stats);
if (this.checkForAbort_(simpleSegment.requestId)) {
return;
}
this.trigger('progress');
}
handleTrackInfo_(simpleSegment, trackInfo) {
const {
hasAudio,
hasVideo
} = trackInfo;
const metadata = {
segmentInfo: segmentInfoPayload({
type: this.loaderType_,
segment: simpleSegment
}),
trackInfo: {
hasAudio,
hasVideo
}
};
this.trigger({
type: 'segmenttransmuxingtrackinfoavailable',
metadata
});
this.earlyAbortWhenNeeded_(simpleSegment.stats);
if (this.checkForAbort_(simpleSegment.requestId)) {
return;
}
if (this.checkForIllegalMediaSwitch(trackInfo)) {
return;
}
trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
// Guard against cases where we're not getting track info at all until we are
// certain that all streams will provide it.
if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
this.appendInitSegment_ = {
audio: true,
video: true
};
this.startingMediaInfo_ = trackInfo;
this.currentMediaInfo_ = trackInfo;
this.logger_('trackinfo update', trackInfo);
this.trigger('trackinfo');
} // trackinfo may cause an abort if the trackinfo
// causes a codec change to an unsupported codec.
if (this.checkForAbort_(simpleSegment.requestId)) {
return;
} // set trackinfo on the pending segment so that
// it can append.
this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info
if (this.hasEnoughInfoToAppend_()) {
this.processCallQueue_();
} else {
checkAndFixTimelines(this);
}
}
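/**
* Stores the reported start or end timing info for a media type on the pending
* segment, then processes any calls that were waiting on that timing info.
*
* @param {Object} simpleSegment
* A simplified segment object copy
* @param {string} mediaType
* Either 'audio' or 'video'
* @param {string} timeType
* Either 'start' or 'end'
* @param {number} time
* The reported time value
* @private
*/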
handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
this.earlyAbortWhenNeeded_(simpleSegment.stats);
if (this.checkForAbort_(simpleSegment.requestId)) {
return;
}
const segmentInfo = this.pendingSegment_;
const timingInfoProperty = timingInfoPropertyForMedia(mediaType);
segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
segmentInfo[timingInfoProperty][timeType] = time;
this.logger_(`timinginfo: ${mediaType} - ${timeType} - ${time}`); // check if any calls were waiting on the timing info
if (this.hasEnoughInfoToAppend_()) {
this.processCallQueue_();
} else {
checkAndFixTimelines(this);
}
}
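/**
* Handles caption data parsed from a segment. Waits until segment data has been
* appended (so the timestamp offset is known), then groups captions by track,
* clears any overlapping cues, and adds the caption cues to in-band text tracks.
*
* @param {Object} simpleSegment
* A simplified segment object copy
* @param {Object[]} captionData
* An array of parsed caption objects
* @private
*/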
handleCaptions_(simpleSegment, captionData) {
this.earlyAbortWhenNeeded_(simpleSegment.stats);
if (this.checkForAbort_(simpleSegment.requestId)) {
return;
} // This could only happen with fmp4 segments, but
// should still not happen in general
if (captionData.length === 0) {
this.logger_('SegmentLoader received no captions from a caption event');
return;
}
const segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
// can be adjusted by the timestamp offset
if (!segmentInfo.hasAppendedData_) {
this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
return;
}
const timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
const captionTracks = {}; // get total start/end and captions for each track/stream
captionData.forEach(caption => {
// caption.stream is actually a track name...
// set to the existing values in tracks or default values
captionTracks[caption.stream] = captionTracks[caption.stream] || {
// Infinity, as any other value will be less than this
startTime: Infinity,
captions: [],
// 0, as any other value will be more than this
endTime: 0
};
const captionTrack = captionTracks[caption.stream];
captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
captionTrack.captions.push(caption);
});
Object.keys(captionTracks).forEach(trackName => {
const {
startTime,
endTime,
captions
} = captionTracks[trackName];
const inbandTextTracks = this.inbandTextTracks_;
this.logger_(`adding cues from ${startTime} -> ${endTime} for ${trackName}`);
createCaptionsTrackIfNotExists(inbandTextTracks, this.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
// We do this because a rendition change that also changes the timescale for captions
// will result in captions being re-parsed for certain segments. If we add them again
// without clearing we will have two of the same captions visible.
removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
addCaptionData({
captionArray: captions,
inbandTextTracks,
timestampOffset
});
}); // Reset stored captions since we added parsed
// captions to a text track at this point
if (this.transmuxer_) {
this.transmuxer_.postMessage({
action: 'clearParsedMp4Captions'
});
}
}
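/**
* Handles ID3 metadata parsed from a segment. Queues the frames until segment data
* has been appended, then adds the metadata to the metadata text track.
*
* @param {Object} simpleSegment
* A simplified segment object copy
* @param {Object[]} id3Frames
* An array of parsed ID3 frames
* @param {string} dispatchType
* The dispatch type for the in-band metadata track
* @private
*/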
handleId3_(simpleSegment, id3Frames, dispatchType) {
this.earlyAbortWhenNeeded_(simpleSegment.stats);
if (this.checkForAbort_(simpleSegment.requestId)) {
return;
}
const segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
if (!segmentInfo.hasAppendedData_) {
this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
return;
}
this.addMetadataToTextTrack(dispatchType, id3Frames, this.duration_());
}
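/**
* Runs and clears any queued caption and ID3 handlers that were waiting on the
* first append and timestamp offset.
*
* @private
*/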
processMetadataQueue_() {
this.metadataQueue_.id3.forEach(fn => fn());
this.metadataQueue_.caption.forEach(fn => fn());
this.metadataQueue_.id3 = [];
this.metadataQueue_.caption = [];
}
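/**
* Runs and clears any calls that were queued while the loader was waiting on
* additional information (for example, track or timing info) before appending.
*
* @private
*/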
processCallQueue_() {
const callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
// functions may check the length of the load queue and default to pushing themselves
// back onto the queue.
this.callQueue_ = [];
callQueue.forEach(fun => fun());
}
processLoadQueue_() {
const loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
// functions may check the length of the load queue and default to pushing themselves
// back onto the queue.
this.loadQueue_ = [];
loadQueue.forEach(fun => fun());
}
/**
* Determines whether the loader has enough info to load the next segment.
*
* @return {boolean}
* Whether or not the loader has enough info to load the next segment
*/
hasEnoughInfoToLoad_() {
// Since primary timing goes by video, only the audio loader potentially needs to wait
// to load.
if (this.loaderType_ !== 'audio') {
return true;
}
const segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
// enough info to load.
if (!segmentInfo) {
return false;
} // The first segment can and should be loaded immediately so that source buffers are
// created together (before appending). Source buffer creation uses the presence of
// audio and video data to determine whether to create audio/video source buffers, and
// uses processed (transmuxed or parsed) media to determine the types required.
if (!this.getCurrentMediaInfo_()) {
return true;
}
if (
// Technically, instead of waiting to load a segment on timeline changes, a segment
// can be requested and downloaded and only wait before it is transmuxed or parsed.
// But in practice, there are a few reasons why it is better to wait until a loader
// is ready to append that segment before requesting and downloading:
//
// 1. Because audio and main loaders cross discontinuities together, if this loader
// is waiting for the other to catch up, then instead of requesting another
// segment and using up more bandwidth, by not yet loading, more bandwidth is
// allotted to the loader currently behind.
// 2. media-segment-request doesn't have to have logic to consider whether a segment
// is ready to be processed or not, isolating the queueing behavior to the loader.
// 3. The audio loader bases some of its segment properties on timing information
// provided by the main loader, meaning that, if the logic for waiting on
// processing was in media-segment-request, then it would also need to know how
// to re-generate the segment information after the main loader caught up.
shouldWaitForTimelineChange({
timelineChangeController: this.timelineChangeController_,
currentTimeline: this.currentTimeline_,
segmentTimeline: segmentInfo.timeline,
loaderType: this.loaderType_,
audioDisabled: this.audioDisabled_
})) {
return false;
}
return true;
}
getCurrentMediaInfo_(segmentInfo = this.pendingSegment_) {
return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;
}
getMediaInfo_(segmentInfo = this.pendingSegment_) {
return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;
}
getPendingSegmentPlaylist() {
return this.pendingSegment_ ? this.pendingSegment_.playlist : null;
}
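/**
* Determines whether the loader has enough info to append data for the pending
* segment: source buffers are ready, no remove or quota retry is outstanding,
* track and timing info are available, and no timeline change must be waited on.
*
* @return {boolean}
* Whether or not the loader has enough info to append the pending segment
* @private
*/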
hasEnoughInfoToAppend_() {
if (!this.sourceUpdater_.ready()) {
return false;
} // If content needs to be removed or the loader is waiting on an append reattempt,
// then no additional content should be appended until the prior append is resolved.
if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
return false;
}
const segmentInfo = this.pendingSegment_;
const trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or
// we do not have information on this specific
// segment yet
if (!segmentInfo || !trackInfo) {
return false;
}
const {
hasAudio,
hasVideo,
isMuxed
} = trackInfo;
if (hasVideo && !segmentInfo.videoTimingInfo) {
return false;
} // muxed content only relies on video timing information for now.
if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
return false;
} // we need to allow an append here even if we're moving to different timelines.
if (shouldWaitForTimelineChange({
timelineChangeController: this.timelineChangeController_,
currentTimeline: this.currentTimeline_,
segmentTimeline: segmentInfo.timeline,
loaderType: this.loaderType_,
audioDisabled: this.audioDisabled_
})) {
return false;
}
return true;
}
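/**
* Handles transmuxed or parsed segment data. Queues the call if an append can't
* proceed yet; otherwise updates timing state, caches init segment and key data,
* and hands the bytes off for appending to the source buffers.
*
* @param {Object} simpleSegment
* A simplified segment object copy
* @param {Object} result
* The transmuxer/parser result containing the data to append
* @private
*/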
handleData_(simpleSegment, result) {
this.earlyAbortWhenNeeded_(simpleSegment.stats);
if (this.checkForAbort_(simpleSegment.requestId)) {
return;
} // If there's anything in the call queue, then this data came later and should be
// executed after the calls currently queued.
if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
checkAndFixTimelines(this);
this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
return;
}
const segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time
this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats
this.updateMediaSecondsLoaded_(segmentInfo.part || segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
// logic may change behavior depending on the state, and changing state too early may
// inflate our estimates of bandwidth. In the future this should be re-examined to
// note more granular states.
// don't process and append data if the mediaSource is closed
if (this.mediaSource_.readyState === 'closed') {
return;
} // if this request included an initialization segment, save that data
// to the initSegment cache
if (simpleSegment.map) {
simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request
segmentInfo.segment.map = simpleSegment.map;
} // if this request included a segment key, save that data in the cache
if (simpleSegment.key) {
this.segmentKey(simpleSegment.key, true);
}
segmentInfo.isFmp4 = simpleSegment.isFmp4;
segmentInfo.timingInfo = segmentInfo.timingInfo || {};
if (segmentInfo.isFmp4) {
this.trigger('fmp4');
segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
} else {
const trackInfo = this.getCurrentMediaInfo_();
const useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
let firstVideoFrameTimeForData;
if (useVideoTimingInfo) {
firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
} // Segment loader knows more about segment timing than the transmuxer (in certain
// aspects), so make any changes required for a more accurate start time.
// Don't set the end time yet, as the segment may not be finished processing.
segmentInfo.timingInfo.start = this.trueSegmentStart_({
currentStart: segmentInfo.timingInfo.start,
playlist: segmentInfo.playlist,
mediaIndex: segmentInfo.mediaIndex,
currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
useVideoTimingInfo,
firstVideoFrameTimeForData,
videoTimingInfo: segmentInfo.videoTimingInfo,
audioTimingInfo: segmentInfo.audioTimingInfo
});
} // Init segments for audio and video only need to be appended in certain cases. Now
// that data is about to be appended, we can check the final cases to determine
// whether we should append an init segment.
this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
// as we use the start of the segment to offset the best guess (playlist provided)
// timestamp offset.
this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should
// be appended or not.
if (segmentInfo.isSyncRequest) {
// first save/update our timing info for this segment.
// this is what allows us to choose an accurate segment
// and the main reason we make a sync request.
this.updateTimingInfoEnd_(segmentInfo);
this.syncController_.saveSegmentTimingInfo({
segmentInfo,
shouldSaveTimelineMapping: this.loaderType_ === 'main'
});
const next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
// after taking into account its timing info, do not append it.
if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
this.logger_('sync segment was incorrect, not appending');
return;
} // otherwise append it like any other segment as our guess was correct.
this.logger_('sync segment was correct, appending');
} // Save some state so that in the future anything waiting on first append (and/or
// timestamp offset(s)) can process immediately. While the extra state isn't optimal,
// we need some notion of whether the timestamp offset or other relevant information
// has had a chance to be set.
segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.
this.processMetadataQueue_();
this.appendData_(segmentInfo, result);
}
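/**
* Marks init segments as needing to be re-appended when the timestamp offset has
* changed (main loader only) or when the playlist for the given media type has
* changed.
*
* @param {Object} segmentInfo
* The segment being processed
* @param {string} type
* Either 'audio' or 'video'
* @private
*/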
updateAppendInitSegmentStatus(segmentInfo, type) {
// alt audio doesn't manage timestamp offset
if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' &&
// in the case that we're handling partial data, we don't want to append an init
// segment for each chunk
!segmentInfo.changedTimestampOffset) {
// if the timestamp offset changed, the timeline may have changed, so we have to re-
// append init segments
this.appendInitSegment_ = {
audio: true,
video: true
};
}
if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
// make sure we append init segment on playlist changes, in case the media config
// changed
this.appendInitSegment_[type] = true;
}
}
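/**
* Returns the init segment data that should be prepended to the next append, if
* any, and updates the loader's init segment bookkeeping. A map-specified
* (EXT-X-MAP) init segment takes priority over a transmuxer-provided one.
*
* @return {Object|null}
* The init segment data to append, or null if no append is needed
* @private
*/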
getInitSegmentAndUpdateState_({
type,
initSegment,
map,
playlist
}) {
// "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
// (Section 3) required to parse the applicable Media Segments. It applies to every
// Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
// or until the end of the playlist."
// https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
if (map) {
const id = initSegmentId(map);
if (this.activeInitSegmentId_ === id) {
// don't need to re-append the init segment if the ID matches
return null;
} // a map-specified init segment takes priority over any transmuxed (or otherwise
// obtained) init segment
//
// this also caches the init segment for later use
initSegment = this.initSegmentForMap(map, true).bytes;
this.activeInitSegmentId_ = id;
} // We used to always prepend init segments for video, however, that shouldn't be
// necessary. Instead, we should only append on changes, similar to what we've always
// done for audio. This is more important (though may not be that important) for
// frame-by-frame appending for LHLS, simply because of the increased quantity of
// appends.
if (initSegment && this.appendInitSegment_[type]) {
// Make sure we track the playlist that we last used for the init segment, so that
// we can re-append the init segment in the event that we get data from a new
// playlist. Discontinuities and track changes are handled in other sections.
this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type until a change is necessary.
this.appendInitSegment_[type] = false; // we need to clear out the fmp4 active init segment id, since
// we are appending the muxer init segment
this.activeInitSegmentId_ = null;
return initSegment;
}
return null;
}
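/**
* Handles a QUOTA_EXCEEDED_ERR from a source buffer append. If there isn't enough
* back buffer to clear, the playlist is errored out; otherwise the back buffer is
* removed and the append is retried after a short delay.
*
* @private
*/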
handleQuotaExceededError_({
segmentInfo,
type,
bytes
}, error) {
const audioBuffered = this.sourceUpdater_.audioBuffered();
const videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
// should be cleared out during the buffer removals. However, log in case it helps
// debug.
if (audioBuffered.length > 1) {
this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
}
if (videoBuffered.length > 1) {
this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
}
const audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
const audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
const videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
const videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;
if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
// Can't remove enough buffer to make room for new segment (or the browser doesn't
// allow for appends of segments this size). In the future, it may be possible to
// split up the segment and append in pieces, but for now, error out this playlist
// in an attempt to switch to a more manageable rendition.
this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + `Appended byte length: ${bytes.byteLength}, ` + `audio buffer: ${timeRangesToArray(audioBuffered).join(', ')}, ` + `video buffer: ${timeRangesToArray(videoBuffered).join(', ')}, `);
this.error({
message: 'Quota exceeded error with append of a single segment of content',
excludeUntil: Infinity
});
this.trigger('error');
return;
} // To try to resolve the quota exceeded error, clear back buffer and retry. This means
// that the segment-loader should block on future events until this one is handled, so
// that it doesn't keep moving onto further segments. Adding the call to the call
// queue will prevent further appends until waitingOnRemove_ and
// quotaExceededErrorRetryTimeout_ are cleared.
//
// Note that this will only block the current loader. In the case of demuxed content,
// the other load may keep filling as fast as possible. In practice, this should be
// OK, as it is a rare case when either audio has a high enough bitrate to fill up a
// source buffer, or video fills without enough room for audio to append (and without
// the availability of clearing out seconds of back buffer to make room for audio).
// But it might still be good to handle this case in the future as a TODO.
this.waitingOnRemove_ = true;
this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
segmentInfo,
type,
bytes
}));
const currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
// before retrying.
const timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
this.logger_(`On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to ${timeToRemoveUntil}`);
this.remove(0, timeToRemoveUntil, () => {
this.logger_(`On QUOTA_EXCEEDED_ERR, retrying append in ${MIN_BACK_BUFFER}s`);
this.waitingOnRemove_ = false; // wait the length of time allotted in the back buffer to prevent wasted
// attempts (since we can't clear less than the minimum)
this.quotaExceededErrorRetryTimeout_ = window$1.setTimeout(() => {
this.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');
this.quotaExceededErrorRetryTimeout_ = null;
this.processCallQueue_();
}, MIN_BACK_BUFFER * 1000);
}, true);
}
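/**
* Error handler for source buffer appends. Routes quota exceeded errors to the
* retry flow and triggers an appenderror for anything else.
*
* @private
*/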
handleAppendError_({
segmentInfo,
type,
bytes
}, error) {
// if there's no error, nothing to do
if (!error) {
return;
}
if (error.code === QUOTA_EXCEEDED_ERR) {
this.handleQuotaExceededError_({
segmentInfo,
type,
bytes
}); // A quota exceeded error should be recoverable with a future re-append, so no need
// to trigger an append error.
return;
}
this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error); // If an append errors, we often can't recover.
// (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
//
// Trigger a special error so that it can be handled separately from normal,
// recoverable errors.
this.error({
message: `${type} append of ${bytes.length}b failed for segment ` + `#${segmentInfo.mediaIndex} in playlist ${segmentInfo.playlist.id}`,
metadata: {
errorType: videojs.Error.StreamingFailedToAppendSegment
}
});
this.trigger('appenderror');
}
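/**
* Concatenates the init segment (when present) with the segment data, triggers a
* segmentappendstart event, and queues the append on the source updater with an
* error handler attached.
*
* @private
*/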
appendToSourceBuffer_({
segmentInfo,
type,
initSegment,
data,
bytes
}) {
// If this is a re-append, bytes were already created and don't need to be recreated
if (!bytes) {
const segments = [data];
let byteLength = data.byteLength;
if (initSegment) {
// if the media initialization segment is changing, append it before the content
// segment
segments.unshift(initSegment);
byteLength += initSegment.byteLength;
} // Technically we should be OK appending the init segment separately, however, we
// haven't yet tested that, and prepending is how we have always done things.
bytes = concatSegments({
bytes: byteLength,
segments
});
}
const metadata = {
segmentInfo: segmentInfoPayload({
type: this.loaderType_,
segment: segmentInfo
})
};
this.trigger({
type: 'segmentappendstart',
metadata
});
this.sourceUpdater_.appendBuffer({
segmentInfo,
type,
bytes
}, this.handleAppendError_.bind(this, {
segmentInfo,
type,
bytes
}));
}
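/**
* Saves transmuxer-reported timing info (presentation and decode start/end times)
* on the pending segment for the given media type, ignoring stale request ids.
*
* @private
*/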
handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
return;
}
const segment = this.pendingSegment_.segment;
const timingInfoProperty = `${type}TimingInfo`;
if (!segment[timingInfoProperty]) {
segment[timingInfoProperty] = {};
}
segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
}
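/**
* Appends transmuxed or parsed data for a media type to the appropriate source
* buffer, prepending an init segment when one is required.
*
* @private
*/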
appendData_(segmentInfo, result) {
const {
type,
data
} = result;
if (!data || !data.byteLength) {
return;
}
if (type === 'audio' && this.audioDisabled_) {
return;
}
const initSegment = this.getInitSegmentAndUpdateState_({
type,
initSegment: result.initSegment,
playlist: segmentInfo.playlist,
map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
});
this.appendToSourceBuffer_({
segmentInfo,
type,
initSegment,
data
});
}
/**
* load a specific segment from a request into the buffer
*
* @private
*/
loadSegment_(segmentInfo) {
this.state = 'WAITING';
this.pendingSegment_ = segmentInfo;
this.trimBackBuffer_(segmentInfo);
if (typeof segmentInfo.timestampOffset === 'number') {
if (this.transmuxer_) {
this.transmuxer_.postMessage({
action: 'clearAllMp4Captions'
});
}
}
if (!this.hasEnoughInfoToLoad_()) {
checkAndFixTimelines(this);
this.loadQueue_.push(() => {
// regenerate the audioAppendStart, timestampOffset, etc as they
// may have changed since this function was added to the queue.
const options = _extends({}, segmentInfo, {
forceTimestampOffset: true
});
_extends(segmentInfo, this.generateSegmentInfo_(options));
this.isPendingTimestampOffset_ = false;
this.updateTransmuxerAndRequestSegment_(segmentInfo);
});
return;
}
this.updateTransmuxerAndRequestSegment_(segmentInfo);
}
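/**
* Resets the transmuxer when the timestamp offset requires it, builds the
* simplified segment object, and kicks off the media segment request.
*
* @private
*/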
updateTransmuxerAndRequestSegment_(segmentInfo) {
// We'll update the source buffer's timestamp offset once we have transmuxed data, but
// the transmuxer still needs to be updated before then.
//
// Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
// offset must be passed to the transmuxer for stream correcting adjustments.
if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared
segmentInfo.gopsToAlignWith = [];
this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh
this.transmuxer_.postMessage({
action: 'reset'
});
this.transmuxer_.postMessage({
action: 'setTimestampOffset',
timestampOffset: segmentInfo.timestampOffset
});
}
const simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
const isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
const isWalkingForward = this.mediaIndex !== null;
const isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ &&
// currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
// the first timeline
segmentInfo.timeline > 0;
const isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
this.logger_(`Requesting
${compactSegmentUrlDescription(segmentInfo.uri)}
${segmentInfoString(segmentInfo)}`); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
// then this init segment has never been seen before and should be appended.
//
// At this point the content type (audio/video or both) is not yet known, but it should be safe to set
// both to true and leave the decision of whether to append the init segment to append time.
if (simpleSegment.map && !simpleSegment.map.bytes) {
this.logger_('going to request init segment.');
this.appendInitSegment_ = {
video: true,
audio: true
};
}
segmentInfo.abortRequests = mediaSegmentRequest({
xhr: this.vhs_.xhr,
xhrOptions: this.xhrOptions_,
decryptionWorker: this.decrypter_,
segment: simpleSegment,
abortFn: this.handleAbort_.bind(this, segmentInfo),
progressFn: this.handleProgress_.bind(this),
trackInfoFn: this.handleTrackInfo_.bind(this),
timingInfoFn: this.handleTimingInfo_.bind(this),
videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
captionsFn: this.handleCaptions_.bind(this),
isEndOfTimeline,
endedTimelineFn: () => {
this.logger_('received endedtimeline callback');
},
id3Fn: this.handleId3_.bind(this),
dataFn: this.handleData_.bind(this),
doneFn: this.segmentRequestFinished_.bind(this),
onTransmuxerLog: ({
message,
level,
stream
}) => {
this.logger_(`${segmentInfoString(segmentInfo)} logged from transmuxer stream ${stream} as a ${level}: ${message}`);
},
triggerSegmentEventFn: ({
type,
segment,
keyInfo,
trackInfo,
timingInfo
}) => {
const segInfo = segmentInfoPayload({
segment
});
const metadata = {
segmentInfo: segInfo
}; // add other properties if necessary.
if (keyInfo) {
metadata.keyInfo = keyInfo;
}
if (trackInfo) {
metadata.trackInfo = trackInfo;
}
if (timingInfo) {
metadata.timingInfo = timingInfo;
}
this.trigger({
type,
metadata
});
}
});
}
/**
* trim the back buffer so that we don't have too much data
* in the source buffer
*
* @private
*
* @param {Object} segmentInfo - the current segment
*/
trimBackBuffer_(segmentInfo) {
const removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
// buffer and a very conservative "garbage collector"
// We manually clear out the old buffer to ensure
// we don't trigger the QuotaExceeded error
// on the source buffer during subsequent appends
if (removeToTime > 0) {
this.remove(0, removeToTime);
}
}
/**
* create a simplified copy of the segment object with just the
* information necessary to perform the XHR and decryption
*
* @private
*
* @param {Object} segmentInfo - the current segment
* @return {Object} a simplified segment object copy
*/
createSimplifiedSegmentObj_(segmentInfo) {
const segment = segmentInfo.segment;
const part = segmentInfo.part;
const isEncrypted = segmentInfo.segment.key || segmentInfo.segment.map && segmentInfo.segment.map.key;
const isMediaInitialization = segmentInfo.segment.map && !segmentInfo.segment.map.bytes;
const simpleSegment = {
resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
byterange: part ? part.byterange : segment.byterange,
requestId: segmentInfo.requestId,
transmuxer: segmentInfo.transmuxer,
audioAppendStart: segmentInfo.audioAppendStart,
gopsToAlignWith: segmentInfo.gopsToAlignWith,
part: segmentInfo.part,
type: this.loaderType_,
start: segmentInfo.startOfSegment,
duration: segmentInfo.duration,
isEncrypted,
isMediaInitialization
};
const previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
if (previousSegment && previousSegment.timeline === segment.timeline) {
// The baseStartTime of a segment is used to handle rollover when probing the TS
// segment to retrieve timing information. Since the probe only looks at the media's
// times (e.g., PTS and DTS values of the segment), and doesn't consider the
// player's time (e.g., player.currentTime()), baseStartTime should reflect the
// media time as well. transmuxedDecodeEnd represents the end time of a segment, in
// seconds of media time, so should be used here. The previous segment is used since
// the end of the previous segment should represent the beginning of the current
// segment, so long as they are on the same timeline.
if (previousSegment.videoTimingInfo) {
simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
} else if (previousSegment.audioTimingInfo) {
simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
}
}
if (segment.key) {
// if the media sequence is greater than 2^32, the IV will be incorrect
// assuming 10s segments, that would be about 1300 years
const iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
simpleSegment.key = this.segmentKey(segment.key);
simpleSegment.key.iv = iv;
}
if (segment.map) {
simpleSegment.map = this.initSegmentForMap(segment.map);
}
return simpleSegment;
}
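/**
* Records transfer stats (bytes received and round trip time) for a finished or
* aborted request so that overall media request metrics stay accurate.
*
* @private
*/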
saveTransferStats_(stats) {
// every request counts as a media request even if it has been aborted
// or canceled due to a timeout
this.mediaRequests += 1;
if (stats) {
this.mediaBytesTransferred += stats.bytesReceived;
this.mediaTransferDuration += stats.roundTripTime;
}
}
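/**
* Saves bandwidth and round trip stats from a completed request for ABR purposes,
* ignoring segments too short to produce a meaningful measurement.
*
* @private
*/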
saveBandwidthRelatedStats_(duration, stats) {
// byteLength will be used for throughput, and should be based on bytes received,
// which we only know at the end of the request and should reflect total bytes
// downloaded rather than just bytes processed from components of the segment
this.pendingSegment_.byteLength = stats.bytesReceived;
if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
this.logger_(`Ignoring segment's bandwidth because its duration of ${duration}` + ` is less than the min to record ${MIN_SEGMENT_DURATION_TO_SAVE_STATS}`);
return;
}
const metadata = {
bandwidthInfo: {
from: this.bandwidth,
to: stats.bandwidth
}
}; // player event with payload
this.trigger({
type: 'bandwidthupdated',
metadata
});
this.bandwidth = stats.bandwidth;
this.roundTrip = stats.roundTripTime;
}
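/**
* XHR timeout handler. Drops the measured bandwidth to a minimal value so that the
* ABR logic can switch away from the current rendition.
*
* @private
*/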
handleTimeout_() {
// although the VTT segment loader bandwidth isn't really used, it's good to
// maintain functionality between segment loaders
this.mediaRequestsTimedout += 1;
this.bandwidth = 1;
this.roundTrip = NaN;
this.trigger('bandwidthupdate');
this.trigger('timeout');
}
/**
* Handle the callback from the segmentRequest function and set the
* associated SegmentLoader state and errors if necessary
*
* @private
*/
segmentRequestFinished_(error, simpleSegment, result) {
// TODO handle special cases, e.g., muxed audio/video but only audio in the segment
// check the call queue directly since this function doesn't need to deal with any
// data, and can continue even if the source buffers are not set up and we didn't get
// any data from the segment
if (this.callQueue_.length) {
this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
return;
}
this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset
if (!this.pendingSegment_) {
return;
} // the request was aborted and the SegmentLoader has already started
// another request. this can happen when the timeout for an aborted
// request triggers due to a limitation in the XHR library
// do not count this as any sort of request or we risk double-counting
if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
return;
} // an error occurred from the active pendingSegment_ so reset everything
if (error) {
this.pendingSegment_ = null;
this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done
if (error.code === REQUEST_ERRORS.ABORTED) {
return;
}
this.pause(); // the error is really just that at least one of the requests timed out
// set the bandwidth to a very low value and trigger an ABR switch to
// take emergency action
if (error.code === REQUEST_ERRORS.TIMEOUT) {
this.handleTimeout_();
return;
} // if control-flow has arrived here, then the error is real
// emit an error event to exclude the current playlist
this.mediaRequestsErrored += 1;
this.error(error);
this.trigger('error');
return;
}
const segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
// generated for ABR purposes
this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
if (result.gopInfo) {
this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
} // Although we may have already started appending on progress, we shouldn't switch the
// state away from loading until we are officially done loading the segment data.
this.state = 'APPENDING'; // used for testing
this.trigger('appending');
this.waitForAppendsToComplete_(segmentInfo);
}
setTimeMapping_(timeline) {
const timelineMapping = this.syncController_.mappingForTimeline(timeline);
if (timelineMapping !== null) {
this.timeMapping_ = timelineMapping;
}
}
updateMediaSecondsLoaded_(segment) {
if (typeof segment.start === 'number' && typeof segment.end === 'number') {
this.mediaSecondsLoaded += segment.end - segment.start;
} else {
this.mediaSecondsLoaded += segment.duration;
}
}
shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
if (timestampOffset === null) {
return false;
} // note that we're potentially using the same timestamp offset for both video and
// audio
if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
return true;
}
if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
return true;
}
return false;
}
trueSegmentStart_({
currentStart,
playlist,
mediaIndex,
firstVideoFrameTimeForData,
currentVideoTimestampOffset,
useVideoTimingInfo,
videoTimingInfo,
audioTimingInfo
}) {
if (typeof currentStart !== 'undefined') {
// if start was set once, keep using it
return currentStart;
}
if (!useVideoTimingInfo) {
return audioTimingInfo.start;
}
const previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
// within that segment. Since the transmuxer maintains a cache of incomplete data
// and/or the last frame seen, the start time may reflect a frame that starts
// in the previous segment. Check for that case and ensure the start time is
// accurate for the segment.
if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
return firstVideoFrameTimeForData;
}
return videoTimingInfo.start;
}
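/**
* Determines how many source buffer appends must finish for the pending segment
* and registers callbacks to run once they have all completed. Segments with no
* data are treated as appended immediately.
*
* @private
*/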
waitForAppendsToComplete_(segmentInfo) {
const trackInfo = this.getCurrentMediaInfo_(segmentInfo);
if (!trackInfo) {
this.error({
message: 'No starting media returned, likely due to an unsupported media format.',
playlistExclusionDuration: Infinity
});
this.trigger('error');
return;
} // Although transmuxing is done, appends may not yet be finished. Throw a marker
// on each queue this loader is responsible for to ensure that the appends are
// complete.
const {
hasAudio,
hasVideo,
isMuxed
} = trackInfo;
const waitForVideo = this.loaderType_ === 'main' && hasVideo;
const waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
segmentInfo.waitingOnAppends = 0; // segments with no data
if (!segmentInfo.hasAppendedData_) {
if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
// When there's no audio or video data in the segment, there's no audio or video
// timing information.
//
// If there's no audio or video timing information, then the timestamp offset
// can't be adjusted to the appropriate value for the transmuxer and source
// buffers.
//
// Therefore, the next segment should be used to set the timestamp offset.
this.isPendingTimestampOffset_ = true;
} // override settings for metadata only segments
segmentInfo.timingInfo = {
start: 0
};
segmentInfo.waitingOnAppends++;
if (!this.isPendingTimestampOffset_) {
// update the timestampoffset
this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
// no video/audio data.
this.processMetadataQueue_();
} // append is "done" instantly with no data.
this.checkAppendsDone_(segmentInfo);
return;
} // Since source updater could call back synchronously, do the increments first.
if (waitForVideo) {
segmentInfo.waitingOnAppends++;
}
if (waitForAudio) {
segmentInfo.waitingOnAppends++;
}
if (waitForVideo) {
this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
}
if (waitForAudio) {
this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
}
}
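/**
* Callback run for each completed append of the pending segment; once all expected
* appends have finished, continues to handleAppendsDone_.
*
* @private
*/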
checkAppendsDone_(segmentInfo) {
if (this.checkForAbort_(segmentInfo.requestId)) {
return;
}
segmentInfo.waitingOnAppends--;
if (segmentInfo.waitingOnAppends === 0) {
this.handleAppendsDone_();
}
}
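/**
* Errors out the loader when the incoming track info represents an illegal media
* switch for this loader type.
*
* @return {boolean}
* Whether an illegal media switch was detected
* @private
*/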
checkForIllegalMediaSwitch(trackInfo) {
const illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);
if (illegalMediaSwitchError) {
this.error({
message: illegalMediaSwitchError,
playlistExclusionDuration: Infinity
});
this.trigger('error');
return true;
}
return false;
}
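/**
* Adjusts the source buffer timestamp offsets once the segment's start time is
* known. Only the main loader updates the offsets, and only once per segment.
*
* @private
*/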
updateSourceBufferTimestampOffset_(segmentInfo) {
if (segmentInfo.timestampOffset === null ||
// we don't yet have the start for whatever media type (video or audio) has
// priority, timing-wise, so we must wait
typeof segmentInfo.timingInfo.start !== 'number' ||
// already updated the timestamp offset for this segment
segmentInfo.changedTimestampOffset ||
// the alt audio loader should not be responsible for setting the timestamp offset
this.loaderType_ !== 'main') {
return;
}
let didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
// the timing info here comes from video. In the event that the audio is longer than
// the video, this will trim the start of the audio.
// This also trims any offset from 0 at the beginning of the media
segmentInfo.timestampOffset -= this.getSegmentStartTimeForTimestampOffsetCalculation_({
videoTimingInfo: segmentInfo.segment.videoTimingInfo,
audioTimingInfo: segmentInfo.segment.audioTimingInfo,
timingInfo: segmentInfo.timingInfo
}); // In the event that there are part segment downloads, each will try to update the
// timestamp offset. Retaining this bit of state prevents us from updating in the
// future (within the same segment), however, there may be a better way to handle it.
segmentInfo.changedTimestampOffset = true;
if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
didChange = true;
}
if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
didChange = true;
}
if (didChange) {
this.trigger('timestampoffset');
}
}
getSegmentStartTimeForTimestampOffsetCalculation_({
videoTimingInfo,
audioTimingInfo,
timingInfo
}) {
if (!this.useDtsForTimestampOffset_) {
return timingInfo.start;
}
if (videoTimingInfo && typeof videoTimingInfo.transmuxedDecodeStart === 'number') {
return videoTimingInfo.transmuxedDecodeStart;
} // handle audio only
if (audioTimingInfo && typeof audioTimingInfo.transmuxedDecodeStart === 'number') {
return audioTimingInfo.transmuxedDecodeStart;
} // handle content not transmuxed (e.g., MP4)
return timingInfo.start;
}
updateTimingInfoEnd_(segmentInfo) {
segmentInfo.timingInfo = segmentInfo.timingInfo || {};
const trackInfo = this.getMediaInfo_();
const useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
const prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
if (!prioritizedTimingInfo) {
return;
}
segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ?
// End time may not exist in a case where we aren't parsing the full segment (one
// current example is the case of fmp4), so use the rough duration to calculate an
// end time.
prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
}
/**
* callback to run when appendBuffer is finished. detects if we are
* in a good state to do things with the data we got, or if we need
* to wait for more
*
* @private
*/
handleAppendsDone_() {
// appendsdone can cause an abort
if (this.pendingSegment_) {
const metadata = {
segmentInfo: segmentInfoPayload({
type: this.loaderType_,
segment: this.pendingSegment_
})
};
this.trigger({
type: 'appendsdone',
metadata
});
}
if (!this.pendingSegment_) {
this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
// all appending cases?
if (!this.paused()) {
this.monitorBuffer_();
}
return;
}
const segmentInfo = this.pendingSegment_;
if (segmentInfo.part && segmentInfo.part.syncInfo) {
// low-latency flow
segmentInfo.part.syncInfo.markAppended();
} else if (segmentInfo.segment.syncInfo) {
// normal flow
segmentInfo.segment.syncInfo.markAppended();
} // Now that the end of the segment has been reached, we can set the end time. It's
// best to wait until all appends are done so we're sure that the primary media is
// finished (and we have its end time).
this.updateTimingInfoEnd_(segmentInfo);
if (this.shouldSaveSegmentTimingInfo_) {
// Timeline mappings should only be saved for the main loader. This is for multiple
// reasons:
//
// 1) Only one mapping is saved per timeline, meaning that if both the audio loader
// and the main loader try to save the timeline mapping, whichever comes later
// will overwrite the first. In theory this is OK, as the mappings should be the
// same, however, it breaks for (2)
// 2) In the event of a live stream, the initial live point will make for a somewhat
// arbitrary mapping. If audio and video streams are not perfectly in-sync, then
// the mapping will be off for one of the streams, dependent on which one was
// first saved (see (1)).
// 3) Primary timing goes by video in VHS, so the mapping should be video.
//
// Since the audio loader will wait for the main loader to load the first segment,
// the main loader will save the first timeline mapping, and ensure that there won't
// be a case where audio loads two segments without saving a mapping (thus leading
// to missing segment timing info).
this.syncController_.saveSegmentTimingInfo({
segmentInfo,
shouldSaveTimelineMapping: this.loaderType_ === 'main'
});
}
const segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
if (segmentDurationMessage) {
if (segmentDurationMessage.severity === 'warn') {
videojs.log.warn(segmentDurationMessage.message);
} else {
this.logger_(segmentDurationMessage.message);
}
}
this.recordThroughput_(segmentInfo);
this.pendingSegment_ = null;
this.state = 'READY';
if (segmentInfo.isSyncRequest) {
this.trigger('syncinfoupdate'); // if the sync request was not appended
// then it was not the correct segment.
// throw it away and use the data it gave us
// to get the correct one.
if (!segmentInfo.hasAppendedData_) {
this.logger_(`Throwing away un-appended sync request ${segmentInfoString(segmentInfo)}`);
return;
}
}
this.logger_(`Appended ${segmentInfoString(segmentInfo)}`);
this.addSegmentMetadataCue_(segmentInfo);
this.fetchAtBuffer_ = true;
if (this.currentTimeline_ !== segmentInfo.timeline) {
this.timelineChangeController_.lastTimelineChange({
type: this.loaderType_,
from: this.currentTimeline_,
to: segmentInfo.timeline
}); // If audio is not disabled, the main segment loader is responsible for updating
// the audio timeline as well. If the content is video only, this won't have any
// impact.
if (this.loaderType_ === 'main' && !this.audioDisabled_) {
this.timelineChangeController_.lastTimelineChange({
type: 'audio',
from: this.currentTimeline_,
to: segmentInfo.timeline
});
}
}
this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
// the following conditional otherwise it may consider this a bad "guess"
// and attempt to resync when the post-update seekable window and live
// point would mean that this was the perfect segment to fetch
this.trigger('syncinfoupdate');
const segment = segmentInfo.segment;
const part = segmentInfo.part;
const badSegmentGuess = segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3;
const badPartGuess = part && part.end && this.currentTime_() - part.end > segmentInfo.playlist.partTargetDuration * 3; // If we previously appended a segment/part that ends more than 3 part/targetDurations before
// the currentTime_, that means our conservative guess was too conservative.
// In that case, reset the loader state so that we try to use any information gained
// from the previous request to create a new, more accurate, sync-point.
if (badSegmentGuess || badPartGuess) {
this.logger_(`bad ${badSegmentGuess ? 'segment' : 'part'} ${segmentInfoString(segmentInfo)}`);
this.resetEverything();
return;
}
const isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
// and conservatively guess
if (isWalkingForward) {
this.trigger('bandwidthupdate');
}
this.trigger('progress');
this.mediaIndex = segmentInfo.mediaIndex;
this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
// buffer, end the stream. this ensures the "ended" event will
// fire if playback reaches that point.
if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
this.endOfStream();
} // used for testing
this.trigger('appended');
if (segmentInfo.hasAppendedData_) {
this.mediaAppends++;
}
if (!this.paused()) {
this.monitorBuffer_();
}
}
/**
* Records the current throughput of the decrypt, transmux, and append
* portion of the segment pipeline. `throughput.rate` is the cumulative
* moving average of the throughput. `throughput.count` is the number of
* data points in the average.
*
* @private
* @param {Object} segmentInfo the object returned by loadSegment
*/
recordThroughput_(segmentInfo) {
if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
this.logger_(`Ignoring segment's throughput because its duration of ${segmentInfo.duration}` + ` is less than the min to record ${MIN_SEGMENT_DURATION_TO_SAVE_STATS}`);
return;
}
const rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
// by zero in the case where the throughput is ridiculously high
const segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
const segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
// newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
}
/**
* Adds a cue to the segment-metadata track with some metadata information about the
* segment
*
* @private
* @param {Object} segmentInfo
* the object returned by loadSegment
* @method addSegmentMetadataCue_
*/
addSegmentMetadataCue_(segmentInfo) {
if (!this.segmentMetadataTrack_) {
return;
}
const segment = segmentInfo.segment;
const start = segment.start;
const end = segment.end; // Do not try adding the cue if the start and end times are invalid.
if (!finite(start) || !finite(end)) {
return;
}
removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
const Cue = window$1.WebKitDataCue || window$1.VTTCue;
const value = {
custom: segment.custom,
dateTimeObject: segment.dateTimeObject,
dateTimeString: segment.dateTimeString,
programDateTime: segment.programDateTime,
bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
resolution: segmentInfo.playlist.attributes.RESOLUTION,
codecs: segmentInfo.playlist.attributes.CODECS,
byteLength: segmentInfo.byteLength,
uri: segmentInfo.uri,
timeline: segmentInfo.timeline,
playlist: segmentInfo.playlist.id,
start,
end
};
const data = JSON.stringify(value);
const cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency between
// the differences of WebKitDataCue in safari and VTTCue in other browsers
cue.value = value;
this.segmentMetadataTrack_.addCue(cue);
}
}
function noop() {}
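/**
* Uppercase the first character of a string.
*
* @param {string} string
* The string to modify
* @return {string}
* The string with its first character uppercased, or the value unchanged if it
* is not a string
*/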
const toTitleCase = function (string) {
if (typeof string !== 'string') {
return string;
}
return string.replace(/./, w => w.toUpperCase());
};
/**
* @file source-updater.js
*/
const bufferTypes = ['video', 'audio'];
const updating = (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
};
const nextQueueIndexOfType = (type, queue) => {
for (let i = 0; i < queue.length; i++) {
const queueEntry = queue[i];
if (queueEntry.type === 'mediaSource') {
// If the next entry is a media source entry (uses multiple source buffers), block
// processing to allow it to go through first.
return null;
}
if (queueEntry.type === type) {
return i;
}
}
return null;
};
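/**
* Attempts to process the next queue entry for the given source buffer type. Media
* source entries require both source buffers to be free, and entries are skipped
* while the relevant source buffer is busy updating.
*
* @param {string} type
* 'audio', 'video', or 'mediaSource'
* @param {Object} sourceUpdater
* The source updater whose queue should be processed
*/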
const shiftQueue = (type, sourceUpdater) => {
if (sourceUpdater.queue.length === 0) {
return;
}
let queueIndex = 0;
let queueEntry = sourceUpdater.queue[queueIndex];
if (queueEntry.type === 'mediaSource') {
if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
sourceUpdater.queue.shift();
queueEntry.action(sourceUpdater);
if (queueEntry.doneFn) {
queueEntry.doneFn();
} // Only specific source buffer actions must wait for async updateend events. Media
// Source actions process synchronously. Therefore, both audio and video source
// buffers are now clear to process the next queue entries.
shiftQueue('audio', sourceUpdater);
shiftQueue('video', sourceUpdater);
} // Media Source actions require both source buffers, so if the media source action
// couldn't process yet (because one or both source buffers are busy), block other
// queue actions until both are available and the media source action can process.
return;
}
if (type === 'mediaSource') {
// If the queue was shifted by a media source action (this happens when pushing a
// media source action onto the queue), then it wasn't from an updateend event from an
// audio or video source buffer, so there's no change from previous state, and no
// processing should be done.
return;
} // Media source queue entries don't need to consider whether the source updater is
// started (i.e., source buffers are created) as they don't need the source buffers, but
// source buffer queue entries do.
if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || updating(type, sourceUpdater)) {
return;
}
if (queueEntry.type !== type) {
queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
if (queueIndex === null) {
// Either there's no queue entry that uses this source buffer type in the queue, or
// there's a media source queue entry before the next entry of this type, in which
// case wait for that action to process first.
return;
}
queueEntry = sourceUpdater.queue[queueIndex];
}
sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
//
// The queue pending operation must be set before the action is performed in the event
// that the action results in a synchronous event that is acted upon. For instance, if
// an exception is thrown that can be handled, it's possible that new actions will be
// appended to an empty queue and immediately executed, but would not have the correct
// pending information if this property was set after the action was performed.
sourceUpdater.queuePending[type] = queueEntry;
queueEntry.action(type, sourceUpdater);
if (!queueEntry.doneFn) {
// synchronous operation, process next entry
sourceUpdater.queuePending[type] = null;
shiftQueue(type, sourceUpdater);
return;
}
};
const cleanupBuffer = (type, sourceUpdater) => {
const buffer = sourceUpdater[`${type}Buffer`];
const titleType = toTitleCase(type);
if (!buffer) {
return;
}
buffer.removeEventListener('updateend', sourceUpdater[`on${titleType}UpdateEnd_`]);
buffer.removeEventListener('error', sourceUpdater[`on${titleType}Error_`]);
sourceUpdater.codecs[type] = null;
sourceUpdater[`${type}Buffer`] = null;
};
const inSourceBuffers = (mediaSource, sourceBuffer) => mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
const actions = {
appendBuffer: (bytes, segmentInfo, onError) => (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`]; // can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`Appending segment ${segmentInfo.mediaIndex}'s ${bytes.length} bytes to ${type}Buffer`);
try {
sourceBuffer.appendBuffer(bytes);
} catch (e) {
sourceUpdater.logger_(`Error with code ${e.code} ` + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + `when appending segment ${segmentInfo.mediaIndex} to ${type}Buffer`);
sourceUpdater.queuePending[type] = null;
onError(e);
}
},
remove: (start, end) => (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`]; // can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`Removing ${start} to ${end} from ${type}Buffer`);
try {
sourceBuffer.remove(start, end);
} catch (e) {
sourceUpdater.logger_(`Remove ${start} to ${end} from ${type}Buffer failed`);
}
},
timestampOffset: offset => (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`]; // can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`Setting ${type}timestampOffset to ${offset}`);
sourceBuffer.timestampOffset = offset;
},
callback: callback => (type, sourceUpdater) => {
callback();
},
endOfStream: error => sourceUpdater => {
if (sourceUpdater.mediaSource.readyState !== 'open') {
return;
}
sourceUpdater.logger_(`Calling mediaSource endOfStream(${error || ''})`);
try {
sourceUpdater.mediaSource.endOfStream(error);
} catch (e) {
videojs.log.warn('Failed to call media source endOfStream', e);
}
},
duration: duration => sourceUpdater => {
sourceUpdater.logger_(`Setting mediaSource duration to ${duration}`);
try {
sourceUpdater.mediaSource.duration = duration;
} catch (e) {
videojs.log.warn('Failed to set media source duration', e);
}
},
abort: () => (type, sourceUpdater) => {
if (sourceUpdater.mediaSource.readyState !== 'open') {
return;
}
const sourceBuffer = sourceUpdater[`${type}Buffer`]; // can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`calling abort on ${type}Buffer`);
try {
sourceBuffer.abort();
} catch (e) {
videojs.log.warn(`Failed to abort on ${type}Buffer`, e);
}
},
addSourceBuffer: (type, codec) => sourceUpdater => {
const titleType = toTitleCase(type);
const mime = getMimeForCodec(codec);
sourceUpdater.logger_(`Adding ${type}Buffer with codec ${codec} to mediaSource`);
const sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
sourceBuffer.addEventListener('updateend', sourceUpdater[`on${titleType}UpdateEnd_`]);
sourceBuffer.addEventListener('error', sourceUpdater[`on${titleType}Error_`]);
sourceUpdater.codecs[type] = codec;
sourceUpdater[`${type}Buffer`] = sourceBuffer;
},
removeSourceBuffer: type => sourceUpdater => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`Removing ${type}Buffer with codec ${sourceUpdater.codecs[type]} from mediaSource`);
try {
sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
} catch (e) {
videojs.log.warn(`Failed to removeSourceBuffer ${type}Buffer`, e);
}
},
changeType: codec => (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
const mime = getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
} // do not update codec if we don't need to.
// Only update if we change the codec base.
// For example, going from avc1.640028 to avc1.64001f does not require a changeType call.
const newCodecBase = codec.substring(0, codec.indexOf('.'));
const oldCodec = sourceUpdater.codecs[type];
const oldCodecBase = oldCodec.substring(0, oldCodec.indexOf('.'));
if (oldCodecBase === newCodecBase) {
return;
}
const metadata = {
codecsChangeInfo: {
from: oldCodec,
to: codec
}
};
sourceUpdater.trigger({
type: 'codecschange',
metadata
});
sourceUpdater.logger_(`changing ${type}Buffer codec from ${oldCodec} to ${codec}`); // check if change to the provided type is supported
try {
sourceBuffer.changeType(mime);
sourceUpdater.codecs[type] = codec;
} catch (e) {
metadata.errorType = videojs.Error.StreamingCodecsChangeError;
metadata.error = e;
e.metadata = metadata;
sourceUpdater.error_ = e;
sourceUpdater.trigger('error');
videojs.log.warn(`Failed to changeType on ${type}Buffer`, e);
}
}
};
const pushQueue = ({
type,
sourceUpdater,
action,
doneFn,
name
}) => {
sourceUpdater.queue.push({
type,
action,
doneFn,
name
});
shiftQueue(type, sourceUpdater);
};
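// Illustrative sketch (not part of the library): the action factories above return
// functions of the form (type, sourceUpdater) => {...}; pushQueue stores them together
// with an optional doneFn, and shiftQueue runs them once the relevant SourceBuffer is no
// longer updating. The name `myUpdater` below is hypothetical.
//
//   pushQueue({
//     type: 'video',
//     sourceUpdater: myUpdater,
//     action: actions.timestampOffset(10),
//     name: 'timestampOffset'
//   });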
const onUpdateend = (type, sourceUpdater) => e => {
// Although there should, in theory, be a pending action for any updateend received,
// there are some actions that may trigger updateend events without set definitions in
// the w3c spec. For instance, setting the duration on the media source may trigger
// updateend events on source buffers. This does not appear to be in the spec. As such,
// if we encounter an updateend without a corresponding pending action from our queue
// for that source buffer type, process the next action.
const bufferedRangesForType = sourceUpdater[`${type}Buffered`]();
const descriptiveString = bufferedRangesToString(bufferedRangesForType);
sourceUpdater.logger_(`received "updateend" event for ${type} Source Buffer: `, descriptiveString);
if (sourceUpdater.queuePending[type]) {
const doneFn = sourceUpdater.queuePending[type].doneFn;
sourceUpdater.queuePending[type] = null;
if (doneFn) {
// if there's an error, report it
doneFn(sourceUpdater[`${type}Error_`]);
}
}
shiftQueue(type, sourceUpdater);
};
/**
* A queue of callbacks to be serialized and applied when a
* MediaSource and its associated SourceBuffers are not in the
* updating state. It is used by the segment loader to update the
* underlying SourceBuffers when new data is loaded, for instance.
*
* @class SourceUpdater
* @param {MediaSource} mediaSource the MediaSource to create SourceBuffers on
*/
class SourceUpdater extends videojs.EventTarget {
constructor(mediaSource) {
super();
this.mediaSource = mediaSource;
this.sourceopenListener_ = () => shiftQueue('mediaSource', this);
this.mediaSource.addEventListener('sourceopen', this.sourceopenListener_);
this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0
this.audioTimestampOffset_ = 0;
this.videoTimestampOffset_ = 0;
this.queue = [];
this.queuePending = {
audio: null,
video: null
};
this.delayedAudioAppendQueue_ = [];
this.videoAppendQueued_ = false;
this.codecs = {};
this.onVideoUpdateEnd_ = onUpdateend('video', this);
this.onAudioUpdateEnd_ = onUpdateend('audio', this);
this.onVideoError_ = e => {
// used for debugging
this.videoError_ = e;
};
this.onAudioError_ = e => {
// used for debugging
this.audioError_ = e;
};
this.createdSourceBuffers_ = false;
this.initializedEme_ = false;
this.triggeredReady_ = false;
}
initializedEme() {
this.initializedEme_ = true;
this.triggerReady();
}
hasCreatedSourceBuffers() {
// if false, likely waiting on one of the segment loaders to get enough data to create
// source buffers
return this.createdSourceBuffers_;
}
hasInitializedAnyEme() {
return this.initializedEme_;
}
ready() {
return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
}
createSourceBuffers(codecs) {
if (this.hasCreatedSourceBuffers()) {
// already created them before
return;
} // the initial addOrChangeSourceBuffers will always result in
// two addSourceBuffer calls.
this.addOrChangeSourceBuffers(codecs);
this.createdSourceBuffers_ = true;
this.trigger('createdsourcebuffers');
this.triggerReady();
}
triggerReady() {
// only allow ready to be triggered once, this prevents the case
// where:
// 1. we trigger createdsourcebuffers
// 2. IE 11 synchronously initializes EME
// 3. the synchronous initialization causes us to trigger ready
// 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
if (this.ready() && !this.triggeredReady_) {
this.triggeredReady_ = true;
this.trigger('ready');
}
}
/**
* Add a type of source buffer to the media source.
*
* @param {string} type
* The type of source buffer to add.
*
* @param {string} codec
* The codec to add the source buffer with.
*/
addSourceBuffer(type, codec) {
pushQueue({
type: 'mediaSource',
sourceUpdater: this,
action: actions.addSourceBuffer(type, codec),
name: 'addSourceBuffer'
});
}
/**
* call abort on a source buffer.
*
* @param {string} type
* The type of source buffer to call abort on.
*/
abort(type) {
pushQueue({
type,
sourceUpdater: this,
action: actions.abort(type),
name: 'abort'
});
}
/**
* Call removeSourceBuffer and remove a specific type
* of source buffer on the mediaSource.
*
* @param {string} type
* The type of source buffer to remove.
*/
removeSourceBuffer(type) {
if (!this.canRemoveSourceBuffer()) {
videojs.log.error('removeSourceBuffer is not supported!');
return;
}
pushQueue({
type: 'mediaSource',
sourceUpdater: this,
action: actions.removeSourceBuffer(type),
name: 'removeSourceBuffer'
});
}
/**
* Whether or not the removeSourceBuffer function is supported
* on the mediaSource.
*
* @return {boolean}
* if removeSourceBuffer can be called.
*/
canRemoveSourceBuffer() {
// As of Firefox 83, removeSourceBuffer
// throws errors, so we report that it is not supported.
return !videojs.browser.IS_FIREFOX && window$1.MediaSource && window$1.MediaSource.prototype && typeof window$1.MediaSource.prototype.removeSourceBuffer === 'function';
}
/**
* Whether or not the changeType function is supported
* on our SourceBuffers.
*
* @return {boolean}
* if changeType can be called.
*/
static canChangeType() {
return window$1.SourceBuffer && window$1.SourceBuffer.prototype && typeof window$1.SourceBuffer.prototype.changeType === 'function';
}
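// Illustrative sketch (not part of the library): callers are expected to feature-detect
// before requesting a codec change. `updater` below is a hypothetical SourceUpdater.
//
//   if (SourceUpdater.canChangeType()) {
//     updater.changeType('video', 'avc1.64001f');
//   }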
/**
* Whether or not the changeType function is supported
* on our SourceBuffers.
*
* @return {boolean}
* if changeType can be called.
*/
canChangeType() {
return this.constructor.canChangeType();
}
/**
* Call the changeType function on a source buffer, given the code and type.
*
* @param {string} type
* The type of source buffer to call changeType on.
*
* @param {string} codec
* The codec string to change type with on the source buffer.
*/
changeType(type, codec) {
if (!this.canChangeType()) {
videojs.log.error('changeType is not supported!');
return;
}
pushQueue({
type,
sourceUpdater: this,
action: actions.changeType(codec),
name: 'changeType'
});
}
/**
* Add source buffers with a codec or, if they are already created,
* call changeType on the source buffers.
*
* @param {Object} codecs
* Codecs to switch to
*/
addOrChangeSourceBuffers(codecs) {
if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
}
Object.keys(codecs).forEach(type => {
const codec = codecs[type];
if (!this.hasCreatedSourceBuffers()) {
return this.addSourceBuffer(type, codec);
}
if (this.canChangeType()) {
this.changeType(type, codec);
}
});
}
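// Illustrative sketch (not part of the library): `codecs` is keyed by buffer type, so a
// typical call (codec strings below are hypothetical) looks like:
//
//   updater.addOrChangeSourceBuffers({
//     video: 'avc1.4d400d',
//     audio: 'mp4a.40.2'
//   });
//
// Before source buffers exist this adds one buffer per type; afterwards it falls back to
// changeType when the browser supports it.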
/**
* Queue an update to append an ArrayBuffer.
*
* @param {Object} options object containing the buffer type, bytes, and optional segmentInfo
* @param {Function} doneFn the function to call when done
* @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
*/
appendBuffer(options, doneFn) {
const {
segmentInfo,
type,
bytes
} = options;
this.processedAppend_ = true;
if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
this.delayedAudioAppendQueue_.push([options, doneFn]);
this.logger_(`delayed audio append of ${bytes.length} until video append`);
return;
} // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
// not be fired. This means that the queue will be blocked until the next action
// taken by the segment-loader. Provide a mechanism for segment-loader to handle
// these errors by calling the doneFn with the specific error.
const onError = doneFn;
pushQueue({
type,
sourceUpdater: this,
action: actions.appendBuffer(bytes, segmentInfo || {
mediaIndex: -1
}, onError),
doneFn,
name: 'appendBuffer'
});
if (type === 'video') {
this.videoAppendQueued_ = true;
if (!this.delayedAudioAppendQueue_.length) {
return;
}
const queue = this.delayedAudioAppendQueue_.slice();
this.logger_(`queuing delayed audio ${queue.length} appendBuffers`);
this.delayedAudioAppendQueue_.length = 0;
queue.forEach(que => {
this.appendBuffer.apply(this, que);
});
}
}
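// Illustrative sequence (not part of the library), with `updater` and the byte
// variables hypothetical:
//
//   updater.appendBuffer({ type: 'audio', bytes: audioBytes }, done); // parked in delayedAudioAppendQueue_
//   updater.appendBuffer({ type: 'video', bytes: videoBytes }, done); // queued, then replays the parked audio append
//
// This keeps audio data from landing before any video append has been queued.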
/**
* Get the audio buffer's buffered timerange.
*
* @return {TimeRange}
* The audio buffer's buffered time range
*/
audioBuffered() {
// no media source/source buffer or it isn't in the media sources
// source buffer list
if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
return createTimeRanges();
}
return this.audioBuffer.buffered ? this.audioBuffer.buffered : createTimeRanges();
}
/**
* Get the video buffer's buffered timerange.
*
* @return {TimeRange}
* The video buffer's buffered time range
*/
videoBuffered() {
// no media source/source buffer or it isn't in the media sources
// source buffer list
if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
return createTimeRanges();
}
return this.videoBuffer.buffered ? this.videoBuffer.buffered : createTimeRanges();
}
/**
* Get a combined video/audio buffer's buffered timerange.
*
* @return {TimeRange}
* the combined time range
*/
buffered() {
const video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
const audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
if (audio && !video) {
return this.audioBuffered();
}
if (video && !audio) {
return this.videoBuffered();
}
return bufferIntersection(this.audioBuffered(), this.videoBuffered());
}
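// Illustrative example (not part of the library), assuming bufferIntersection returns
// the overlap of the two range lists: audio buffered [0, 10] and video buffered [2, 12]
// yield a combined buffered() of [2, 10].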
/**
* Add a callback to the queue that will set duration on the mediaSource.
*
* @param {number} duration
* The duration to set
*
* @param {Function} [doneFn]
* function to run after duration has been set.
*/
setDuration(duration, doneFn = noop) {
// In order to set the duration on the media source, it's necessary to wait for all
// source buffers to no longer be updating. "If the updating attribute equals true on
// any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
// abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
pushQueue({
type: 'mediaSource',
sourceUpdater: this,
action: actions.duration(duration),
name: 'duration',
doneFn
});
}
/**
* Add a mediaSource endOfStream call to the queue
*
* @param {Error} [error]
* Call endOfStream with an error
*
* @param {Function} [doneFn]
* A function that should be called when the
* endOfStream call has finished.
*/
endOfStream(error = null, doneFn = noop) {
if (typeof error !== 'string') {
error = undefined;
} // In order to call endOfStream on the media source, it's necessary to wait for all
// source buffers to no longer be updating. "If the updating attribute equals true on
// any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
// abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
pushQueue({
type: 'mediaSource',
sourceUpdater: this,
action: actions.endOfStream(error),
name: 'endOfStream',
doneFn
});
}
/**
* Queue an update to remove a time range from the buffer.
*
* @param {number} start where to start the removal
* @param {number} end where to end the removal
* @param {Function} [done=noop] optional callback to be executed when the remove
* operation is complete
* @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
*/
removeAudio(start, end, done = noop) {
if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
done();
return;
}
pushQueue({
type: 'audio',
sourceUpdater: this,
action: actions.remove(start, end),
doneFn: done,
name: 'remove'
});
}
/**
* Queue an update to remove a time range from the buffer.
*
* @param {number} start where to start the removal
* @param {number} end where to end the removal
* @param {Function} [done=noop] optional callback to be executed when the remove
* operation is complete
* @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
*/
removeVideo(start, end, done = noop) {
if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
done();
return;
}
pushQueue({
type: 'video',
sourceUpdater: this,
action: actions.remove(start, end),
doneFn: done,
name: 'remove'
});
}
/**
* Whether the underlying sourceBuffer is updating or not
*
* @return {boolean} the updating status of the SourceBuffer
*/
updating() {
// the audio/video source buffer is updating
if (updating('audio', this) || updating('video', this)) {
return true;
}
return false;
}
/**
* Set/get the timestampoffset on the audio SourceBuffer
*
* @return {number} the timestamp offset
*/
audioTimestampOffset(offset) {
if (typeof offset !== 'undefined' && this.audioBuffer &&
// no point in updating if it's the same
this.audioTimestampOffset_ !== offset) {
pushQueue({
type: 'audio',
sourceUpdater: this,
action: actions.timestampOffset(offset),
name: 'timestampOffset'
});
this.audioTimestampOffset_ = offset;
}
return this.audioTimestampOffset_;
}
/**
* Set/get the timestampoffset on the video SourceBuffer
*
* @return {number} the timestamp offset
*/
videoTimestampOffset(offset) {
if (typeof offset !== 'undefined' && this.videoBuffer &&
// no point in updating if it's the same
this.videoTimestampOffset_ !== offset) {
pushQueue({
type: 'video',
sourceUpdater: this,
action: actions.timestampOffset(offset),
name: 'timestampOffset'
});
this.videoTimestampOffset_ = offset;
}
return this.videoTimestampOffset_;
}
/**
* Add a function to the queue that will be called
* when it is its turn to run in the audio queue.
*
* @param {Function} callback
* The callback to queue.
*/
audioQueueCallback(callback) {
if (!this.audioBuffer) {
return;
}
pushQueue({
type: 'audio',
sourceUpdater: this,
action: actions.callback(callback),
name: 'callback'
});
}
/**
* Add a function to the queue that will be called
* when it is its turn to run in the video queue.
*
* @param {Function} callback
* The callback to queue.
*/
videoQueueCallback(callback) {
if (!this.videoBuffer) {
return;
}
pushQueue({
type: 'video',
sourceUpdater: this,
action: actions.callback(callback),
name: 'callback'
});
}
/**
* dispose of the source updater and the underlying sourceBuffer
*/
dispose() {
this.trigger('dispose');
bufferTypes.forEach(type => {
this.abort(type);
if (this.canRemoveSourceBuffer()) {
this.removeSourceBuffer(type);
} else {
this[`${type}QueueCallback`](() => cleanupBuffer(type, this));
}
});
this.videoAppendQueued_ = false;
this.delayedAudioAppendQueue_.length = 0;
if (this.sourceopenListener_) {
this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
}
this.off();
}
}
const uint8ToUtf8 = uintArray => decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
const bufferToHexString = buffer => {
const uInt8Buffer = new Uint8Array(buffer);
return Array.from(uInt8Buffer).map(byte => byte.toString(16).padStart(2, '0')).join('');
};
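// Illustrative examples (not part of the library):
//
//   uint8ToUtf8(new Uint8Array([104, 105]));                // 'hi'
//   bufferToHexString(new Uint8Array([255, 1, 16]).buffer); // 'ff0110'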
/**
* @file vtt-segment-loader.js
*/
const VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(char => char.charCodeAt(0)));
class NoVttJsError extends Error {
constructor() {
super('Trying to parse received VTT cues, but there is no WebVTT. Make sure vtt.js is loaded.');
}
}
/**
* An object that manages segment loading and appending.
*
* @class VTTSegmentLoader
* @param {Object} options required and optional options
* @extends videojs.EventTarget
*/
class VTTSegmentLoader extends SegmentLoader {
constructor(settings, options = {}) {
super(settings, options); // SegmentLoader requires a MediaSource be specified or it will throw an error;
// however, VTTSegmentLoader has no need of a media source, so delete the reference
this.mediaSource_ = null;
this.subtitlesTrack_ = null;
this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks;
this.loadVttJs = settings.loadVttJs; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
// the sync controller leads to improper behavior.
this.shouldSaveSegmentTimingInfo_ = false;
}
createTransmuxer_() {
// don't need to transmux any subtitles
return null;
}
/**
* Indicates which time ranges are buffered
*
* @return {TimeRange}
* TimeRange object representing the current buffered ranges
*/
buffered_() {
if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
return createTimeRanges();
}
const cues = this.subtitlesTrack_.cues;
const start = cues[0].startTime;
const end = cues[cues.length - 1].startTime;
return createTimeRanges([[start, end]]);
}
/**
* Gets and sets init segment for the provided map
*
* @param {Object} map
* The map object representing the init segment to get or set
* @param {boolean=} set
* If true, the init segment for the provided map should be saved
* @return {Object}
* map object for desired init segment
*/
initSegmentForMap(map, set = false) {
if (!map) {
return null;
}
const id = initSegmentId(map);
let storedMap = this.initSegments_[id];
if (set && !storedMap && map.bytes) {
// append WebVTT line terminators to the media initialization segment if it exists
// to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
// requires two or more WebVTT line terminators between the WebVTT header and the
// rest of the file
const combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
const combinedSegment = new Uint8Array(combinedByteLength);
combinedSegment.set(map.bytes);
combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
this.initSegments_[id] = storedMap = {
resolvedUri: map.resolvedUri,
byterange: map.byterange,
bytes: combinedSegment
};
}
return storedMap || map;
}
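// Illustrative example (not part of the library): for a header init segment whose bytes
// are "WEBVTT", the stored map bytes become "WEBVTT\n\n", so the header is always
// separated from the cue payload parsed after it by a blank line, as the WebVTT spec
// requires.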
/**
* Returns true if all configuration required for loading is present, otherwise false.
*
* @return {boolean} True if all the configuration is ready for loading
* @private
*/
couldBeginLoading_() {
return this.playlist_ && this.subtitlesTrack_ && !this.paused();
}
/**
* Once all the starting parameters have been specified, begin
* operation. This method should only be invoked from the INIT
* state.
*
* @private
*/
init_() {
this.state = 'READY';
this.resetEverything();
return this.monitorBuffer_();
}
/**
* Set a subtitle track on the segment loader to add subtitles to
*
* @param {TextTrack=} track
* The text track to add loaded subtitles to
* @return {TextTrack}
* Returns the subtitles track
*/
track(track) {
if (typeof track === 'undefined') {
return this.subtitlesTrack_;
}
this.subtitlesTrack_ = track; // if we were unpaused but waiting for a sourceUpdater, start
// buffering now
if (this.state === 'INIT' && this.couldBeginLoading_()) {
this.init_();
}
return this.subtitlesTrack_;
}
/**
* Remove any data in the source buffer between start and end times
*
* @param {number} start - the start time of the region to remove from the buffer
* @param {number} end - the end time of the region to remove from the buffer
*/
remove(start, end) {
removeCuesFromTrack(start, end, this.subtitlesTrack_);
}
/**
* fill the buffer with segments unless the sourceBuffers are
* currently updating
*
* Note: this function should only ever be called by monitorBuffer_
* and never directly
*
* @private
*/
fillBuffer_() {
// see if we need to begin loading immediately
const segmentInfo = this.chooseNextRequest_();
if (!segmentInfo) {
return;
}
if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
// We don't have the timestamp offset that we need to sync subtitles.
// Rerun on a timestamp offset or user interaction.
const checkTimestampOffset = () => {
this.state = 'READY';
if (!this.paused()) {
// if not paused, queue a buffer check as soon as possible
this.monitorBuffer_();
}
};
this.syncController_.one('timestampoffset', checkTimestampOffset);
this.state = 'WAITING_ON_TIMELINE';
return;
}
this.loadSegment_(segmentInfo);
} // never set a timestamp offset for vtt segments.
timestampOffsetForSegment_() {
return null;
}
chooseNextRequest_() {
return this.skipEmptySegments_(super.chooseNextRequest_());
}
/**
* Prevents the segment loader from requesting segments we know contain no subtitles
* by walking forward until we find the next segment that is not known to be
* empty.
*
* @param {Object} segmentInfo
* a segment info object that describes the current segment
* @return {Object}
* a segment info object that describes the current segment
*/
skipEmptySegments_(segmentInfo) {
while (segmentInfo && segmentInfo.segment.empty) {
// stop at the last possible segmentInfo
if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
segmentInfo = null;
break;
}
segmentInfo = this.generateSegmentInfo_({
playlist: segmentInfo.playlist,
mediaIndex: segmentInfo.mediaIndex + 1,
startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
isSyncRequest: segmentInfo.isSyncRequest
});
}
return segmentInfo;
}
stopForError(error) {
this.error(error);
this.state = 'READY';
this.pause();
this.trigger('error');
}
/**
* append a decrypted segment to the SourceBuffer through a SourceUpdater
*
* @private
*/
segmentRequestFinished_(error, simpleSegment, result) {
if (!this.subtitlesTrack_) {
this.state = 'READY';
return;
}
this.saveTransferStats_(simpleSegment.stats); // the request was aborted
if (!this.pendingSegment_) {
this.state = 'READY';
this.mediaRequestsAborted += 1;
return;
}
if (error) {
if (error.code === REQUEST_ERRORS.TIMEOUT) {
this.handleTimeout_();
}
if (error.code === REQUEST_ERRORS.ABORTED) {
this.mediaRequestsAborted += 1;
} else {
this.mediaRequestsErrored += 1;
}
this.stopForError(error);
return;
}
const segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
// maintain functionality between segment loaders
this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats); // if this request included a segment key, save that data in the cache
if (simpleSegment.key) {
this.segmentKey(simpleSegment.key, true);
}
this.state = 'APPENDING'; // used for tests
this.trigger('appending');
const segment = segmentInfo.segment;
if (segment.map) {
segment.map.bytes = simpleSegment.map.bytes;
}
segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded, otherwise load it and wait until it has finished loading
if (typeof window$1.WebVTT !== 'function' && typeof this.loadVttJs === 'function') {
this.state = 'WAITING_ON_VTTJS'; // should be fine to call multiple times
// script will be loaded once but multiple listeners will be added to the queue, which is expected.
this.loadVttJs().then(() => this.segmentRequestFinished_(error, simpleSegment, result), () => this.stopForError({
message: 'Error loading vtt.js'
}));
return;
}
segment.requested = true;
try {
this.parseVTTCues_(segmentInfo);
} catch (e) {
this.stopForError({
message: e.message,
metadata: {
errorType: videojs.Error.StreamingVttParserError,
error: e
}
});
return;
}
this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
if (segmentInfo.cues.length) {
segmentInfo.timingInfo = {
start: segmentInfo.cues[0].startTime,
end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
};
} else {
segmentInfo.timingInfo = {
start: segmentInfo.startOfSegment,
end: segmentInfo.startOfSegment + segmentInfo.duration
};
}
if (segmentInfo.isSyncRequest) {
this.trigger('syncinfoupdate');
this.pendingSegment_ = null;
this.state = 'READY';
return;
}
segmentInfo.byteLength = segmentInfo.bytes.byteLength;
this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
// the subtitle track
segmentInfo.cues.forEach(cue => {
this.subtitlesTrack_.addCue(this.featuresNativeTextTracks_ ? new window$1.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
}); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
// cues to have identical time-intervals, but if the text is also identical
// we can safely assume it is a duplicate that can be removed (ex. when a cue
// "overlaps" VTT segments)
removeDuplicateCuesFromTrack(this.subtitlesTrack_);
this.handleAppendsDone_();
}
handleData_() {// noop as we shouldn't be getting video/audio data captions
// that we do not support here.
}
updateTimingInfoEnd_() {// noop
}
/**
* Uses the WebVTT parser to parse the segment response
*
* @throws NoVttJsError
*
* @param {Object} segmentInfo
* a segment info object that describes the current segment
* @private
*/
parseVTTCues_(segmentInfo) {
let decoder;
let decodeBytesToString = false;
if (typeof window$1.WebVTT !== 'function') {
// caller is responsible for exception handling.
throw new NoVttJsError();
}
if (typeof window$1.TextDecoder === 'function') {
decoder = new window$1.TextDecoder('utf8');
} else {
decoder = window$1.WebVTT.StringDecoder();
decodeBytesToString = true;
}
const parser = new window$1.WebVTT.Parser(window$1, window$1.vttjs, decoder);
segmentInfo.cues = [];
segmentInfo.timestampmap = {
MPEGTS: 0,
LOCAL: 0
};
parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
parser.ontimestampmap = map => {
segmentInfo.timestampmap = map;
};
parser.onparsingerror = error => {
videojs.log.warn('Error encountered when parsing cues: ' + error.message);
};
if (segmentInfo.segment.map) {
let mapData = segmentInfo.segment.map.bytes;
if (decodeBytesToString) {
mapData = uint8ToUtf8(mapData);
}
parser.parse(mapData);
}
let segmentData = segmentInfo.bytes;
if (decodeBytesToString) {
segmentData = uint8ToUtf8(segmentData);
}
parser.parse(segmentData);
parser.flush();
}
/**
* Updates the start and end times of any cues parsed by the WebVTT parser using
* the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
* from the SyncController
*
* @param {Object} segmentInfo
* a segment info object that describes the current segment
* @param {Object} mappingObj
* object containing a mapping from TS to media time
* @param {Object} playlist
* the playlist object containing the segment
* @private
*/
updateTimeMapping_(segmentInfo, mappingObj, playlist) {
const segment = segmentInfo.segment;
if (!mappingObj) {
// If the sync controller does not have a mapping of TS to Media Time for the
// timeline, then we don't have enough information to update the cue
// start/end times
return;
}
if (!segmentInfo.cues.length) {
// If there are no cues, we also do not have enough information to figure out
// segment timing. Mark that the segment contains no cues so we don't re-request
// an empty segment.
segment.empty = true;
return;
}
const {
MPEGTS,
LOCAL
} = segmentInfo.timestampmap;
/**
* From the spec:
* The MPEGTS media timestamp MUST use a 90KHz timescale,
* even when non-WebVTT Media Segments use a different timescale.
*/
const mpegTsInSeconds = MPEGTS / ONE_SECOND_IN_TS;
const diff = mpegTsInSeconds - LOCAL + mappingObj.mapping;
segmentInfo.cues.forEach(cue => {
const duration = cue.endTime - cue.startTime;
const startTime = MPEGTS === 0 ? cue.startTime + diff : this.handleRollover_(cue.startTime + diff, mappingObj.time);
cue.startTime = Math.max(startTime, 0);
cue.endTime = Math.max(startTime + duration, 0);
});
if (!playlist.syncInfo) {
const firstStart = segmentInfo.cues[0].startTime;
const lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
playlist.syncInfo = {
mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
time: Math.min(firstStart, lastStart - segment.duration)
};
}
}
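// Worked example (illustrative, not part of the library), assuming ONE_SECOND_IN_TS is
// 90000 and no PES rollover: with X-TIMESTAMP-MAP MPEGTS=900000 (10s), LOCAL=0 and a
// timeline mapping of -4, diff = 10 - 0 + (-4) = 6, so a cue spanning 1s-3s in the VTT
// file is shifted to 7s-9s of player time.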
/**
* MPEG-TS PES timestamps are limited to 2^33.
* Once they reach 2^33, they roll over to 0.
* mux.js handles PES timestamp rollover for the following scenarios:
* [forward rollover(right)] ->
* PES timestamps monotonically increase, and once they reach 2^33, they roll over to 0
* [backward rollover(left)] ->
* we seek back to a position before the rollover.
*
* According to the HLS SPEC:
* When synchronizing WebVTT with PES timestamps, clients SHOULD account
* for cases where the 33-bit PES timestamps have wrapped and the WebVTT
* cue times have not. When the PES timestamp wraps, the WebVTT Segment
* SHOULD have a X-TIMESTAMP-MAP header that maps the current WebVTT
* time to the new (low valued) PES timestamp.
*
* So we want to handle rollover here and align VTT Cue start/end time to the player's time.
*/
handleRollover_(value, reference) {
if (reference === null) {
return value;
}
let valueIn90khz = value * ONE_SECOND_IN_TS;
const referenceIn90khz = reference * ONE_SECOND_IN_TS;
let offset;
if (referenceIn90khz < valueIn90khz) {
// - 2^33
offset = -8589934592;
} else {
// + 2^33
offset = 8589934592;
} // distance(value - reference) > 2^32
while (Math.abs(valueIn90khz - referenceIn90khz) > 4294967296) {
valueIn90khz += offset;
}
return valueIn90khz / ONE_SECOND_IN_TS;
}
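// Worked example (illustrative, not part of the library): if the reference time is
// ~95000s (close to the 2^33-tick PES limit of ~95443.7s) and a cue time of 2s arrives
// after the PES clock has wrapped, 2^33 ticks (8589934592) are added once, mapping the
// cue to roughly 95445.7s so it stays continuous with the reference.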
}
/**
* @file ad-cue-tags.js
*/
/**
* Searches for an ad cue that overlaps with the given mediaTime
*
* @param {Object} track
* the track to find the cue for
*
* @param {number} mediaTime
* the time to find the cue at
*
* @return {Object|null}
* the found cue or null
*/
const findAdCue = function (track, mediaTime) {
const cues = track.cues;
for (let i = 0; i < cues.length; i++) {
const cue = cues[i];
if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
return cue;
}
}
return null;
};
const updateAdCues = function (media, track, offset = 0) {
if (!media.segments) {
return;
}
let mediaTime = offset;
let cue;
for (let i = 0; i < media.segments.length; i++) {
const segment = media.segments[i];
if (!cue) {
// Since the cues will span for at least the segment duration, adding a fudge
// factor of half segment duration will prevent duplicate cues from being
// created when timing info is not exact (e.g. cue start time initialized
// at 10.006677, but next call mediaTime is 10.003332 )
cue = findAdCue(track, mediaTime + segment.duration / 2);
}
if (cue) {
if ('cueIn' in segment) {
// Found a CUE-IN so end the cue
cue.endTime = mediaTime;
cue.adEndTime = mediaTime;
mediaTime += segment.duration;
cue = null;
continue;
}
if (mediaTime < cue.endTime) {
// Already processed this mediaTime for this cue
mediaTime += segment.duration;
continue;
} // otherwise extend cue until a CUE-IN is found
cue.endTime += segment.duration;
} else {
if ('cueOut' in segment) {
cue = new window$1.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
cue.adStartTime = mediaTime; // Assumes tag format to be
// #EXT-X-CUE-OUT:30
cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
track.addCue(cue);
}
if ('cueOutCont' in segment) {
// Entered into the middle of an ad cue
// Assumes tag format to be
// #EXT-X-CUE-OUT-CONT:10/30
const [adOffset, adTotal] = segment.cueOutCont.split('/').map(parseFloat);
cue = new window$1.VTTCue(mediaTime, mediaTime + segment.duration, '');
cue.adStartTime = mediaTime - adOffset;
cue.adEndTime = cue.adStartTime + adTotal;
track.addCue(cue);
}
}
mediaTime += segment.duration;
}
};
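// Illustrative playlist fragments (not part of the library): a segment carrying
// `cueOut: '30'` opens a 30 second ad cue starting at the segment's media time, while a
// segment carrying `cueOutCont: '10/30'` (10 seconds into a 30 second break) creates a
// cue whose adStartTime is mediaTime - 10 and adEndTime is adStartTime + 30; a later
// segment with `cueIn` closes the open cue at its media time.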
class SyncInfo {
/**
* @param {number} start - media sequence start
* @param {number} end - media sequence end
* @param {number} segmentIndex - index for associated segment
* @param {number|null} [partIndex] - index for associated part
* @param {boolean} [appended] - appended indicator
*
*/
constructor({
start,
end,
segmentIndex,
partIndex = null,
appended = false
}) {
this.start_ = start;
this.end_ = end;
this.segmentIndex_ = segmentIndex;
this.partIndex_ = partIndex;
this.appended_ = appended;
}
isInRange(targetTime) {
return targetTime >= this.start && targetTime < this.end;
}
markAppended() {
this.appended_ = true;
}
resetAppendedStatus() {
this.appended_ = false;
}
get isAppended() {
return this.appended_;
}
get start() {
return this.start_;
}
get end() {
return this.end_;
}
get segmentIndex() {
return this.segmentIndex_;
}
get partIndex() {
return this.partIndex_;
}
}
class SyncInfoData {
/**
*
* @param {SyncInfo} segmentSyncInfo - sync info for a given segment
* @param {Array} [partsSyncInfo] - sync infos for a list of parts for a given segment
*/
constructor(segmentSyncInfo, partsSyncInfo = []) {
this.segmentSyncInfo_ = segmentSyncInfo;
this.partsSyncInfo_ = partsSyncInfo;
}
get segmentSyncInfo() {
return this.segmentSyncInfo_;
}
get partsSyncInfo() {
return this.partsSyncInfo_;
}
get hasPartsSyncInfo() {
return this.partsSyncInfo_.length > 0;
}
resetAppendStatus() {
this.segmentSyncInfo_.resetAppendedStatus();
this.partsSyncInfo_.forEach(partSyncInfo => partSyncInfo.resetAppendedStatus());
}
}
class MediaSequenceSync {
constructor() {
/**
* @type {Map}
* @protected
*/
this.storage_ = new Map();
this.diagnostics_ = '';
this.isReliable_ = false;
this.start_ = -Infinity;
this.end_ = Infinity;
}
get start() {
return this.start_;
}
get end() {
return this.end_;
}
get diagnostics() {
return this.diagnostics_;
}
get isReliable() {
return this.isReliable_;
}
resetAppendedStatus() {
this.storage_.forEach(syncInfoData => syncInfoData.resetAppendStatus());
}
/**
* update sync storage
*
* @param {Object} playlist
* @param {number} currentTime
*
* @return {void}
*/
update(playlist, currentTime) {
const {
mediaSequence,
segments
} = playlist;
this.isReliable_ = this.isReliablePlaylist_(mediaSequence, segments);
if (!this.isReliable_) {
return;
}
return this.updateStorage_(segments, mediaSequence, this.calculateBaseTime_(mediaSequence, currentTime));
}
/**
* @param {number} targetTime
* @return {SyncInfo|null}
*/
getSyncInfoForTime(targetTime) {
for (const {
segmentSyncInfo,
partsSyncInfo
} of this.storage_.values()) {
// Normal segment flow:
if (!partsSyncInfo.length) {
if (segmentSyncInfo.isInRange(targetTime)) {
return segmentSyncInfo;
}
} else {
// Low latency flow:
for (const partSyncInfo of partsSyncInfo) {
if (partSyncInfo.isInRange(targetTime)) {
return partSyncInfo;
}
}
}
}
return null;
}
getSyncInfoForMediaSequence(mediaSequence) {
return this.storage_.get(mediaSequence);
}
updateStorage_(segments, startingMediaSequence, startingTime) {
const newStorage = new Map();
let newDiagnostics = '\n';
let currentStart = startingTime;
let currentMediaSequence = startingMediaSequence;
this.start_ = currentStart;
segments.forEach((segment, segmentIndex) => {
const prevSyncInfoData = this.storage_.get(currentMediaSequence);
const segmentStart = currentStart;
const segmentEnd = segmentStart + segment.duration;
const segmentIsAppended = Boolean(prevSyncInfoData && prevSyncInfoData.segmentSyncInfo && prevSyncInfoData.segmentSyncInfo.isAppended);
const segmentSyncInfo = new SyncInfo({
start: segmentStart,
end: segmentEnd,
appended: segmentIsAppended,
segmentIndex
});
segment.syncInfo = segmentSyncInfo;
let currentPartStart = currentStart;
const partsSyncInfo = (segment.parts || []).map((part, partIndex) => {
const partStart = currentPartStart;
const partEnd = currentPartStart + part.duration;
const partIsAppended = Boolean(prevSyncInfoData && prevSyncInfoData.partsSyncInfo && prevSyncInfoData.partsSyncInfo[partIndex] && prevSyncInfoData.partsSyncInfo[partIndex].isAppended);
const partSyncInfo = new SyncInfo({
start: partStart,
end: partEnd,
appended: partIsAppended,
segmentIndex,
partIndex
});
currentPartStart = partEnd;
newDiagnostics += `Media Sequence: ${currentMediaSequence}.${partIndex} | Range: ${partStart} --> ${partEnd} | Appended: ${partIsAppended}\n`;
part.syncInfo = partSyncInfo;
return partSyncInfo;
});
newStorage.set(currentMediaSequence, new SyncInfoData(segmentSyncInfo, partsSyncInfo));
newDiagnostics += `${compactSegmentUrlDescription(segment.resolvedUri)} | Media Sequence: ${currentMediaSequence} | Range: ${segmentStart} --> ${segmentEnd} | Appended: ${segmentIsAppended}\n`;
currentMediaSequence++;
currentStart = segmentEnd;
});
this.end_ = currentStart;
this.storage_ = newStorage;
this.diagnostics_ = newDiagnostics;
}
calculateBaseTime_(mediaSequence, fallback) {
if (!this.storage_.size) {
// Initial setup flow.
return 0;
}
if (this.storage_.has(mediaSequence)) {
// Normal flow.
return this.storage_.get(mediaSequence).segmentSyncInfo.start;
} // Fallback flow.
// There is a gap between last recorded playlist and a new one received.
return fallback;
}
isReliablePlaylist_(mediaSequence, segments) {
return mediaSequence !== undefined && mediaSequence !== null && Array.isArray(segments) && segments.length;
}
}
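// Illustrative sketch (not part of the library); playlist values are hypothetical. On
// the first update the base time is 0, so three 6 second segments starting at media
// sequence 100 produce ranges [0,6), [6,12) and [12,18):
//
//   const sync = new MediaSequenceSync();
//   const seg = n => ({ duration: 6, resolvedUri: `https://example.com/${n}.ts` });
//   sync.update({ mediaSequence: 100, segments: [seg(100), seg(101), seg(102)] }, 0);
//   sync.getSyncInfoForTime(7); // SyncInfo for the second segment (segmentIndex 1)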
class DependantMediaSequenceSync extends MediaSequenceSync {
constructor(parent) {
super();
this.parent_ = parent;
}
calculateBaseTime_(mediaSequence, fallback) {
if (!this.storage_.size) {
const info = this.parent_.getSyncInfoForMediaSequence(mediaSequence);
if (info) {
return info.segmentSyncInfo.start;
}
return 0;
}
return super.calculateBaseTime_(mediaSequence, fallback);
}
}
/**
* @file sync-controller.js
*/
// Used when synchronizing expired playlist segments.
// The max media sequence diff is 48 hours of live stream
// content with two second segments. Anything larger than that
// will likely be invalid.
const MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;
const syncPointStrategies = [
// Stategy "VOD": Handle the VOD-case where the sync-point is *always*
// the equivalence display-time 0 === segment-index 0
{
name: 'VOD',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
if (duration !== Infinity) {
const syncPoint = {
time: 0,
segmentIndex: 0,
partIndex: null
};
return syncPoint;
}
return null;
}
}, {
name: 'MediaSequence',
/**
* run media sequence strategy
*
* @param {SyncController} syncController
* @param {Object} playlist
* @param {number} duration
* @param {number} currentTimeline
* @param {number} currentTime
* @param {string} type
*/
run: (syncController, playlist, duration, currentTimeline, currentTime, type) => {
const mediaSequenceSync = syncController.getMediaSequenceSync(type);
if (!mediaSequenceSync) {
return null;
}
if (!mediaSequenceSync.isReliable) {
return null;
}
const syncInfo = mediaSequenceSync.getSyncInfoForTime(currentTime);
if (!syncInfo) {
return null;
}
return {
time: syncInfo.start,
partIndex: syncInfo.partIndex,
segmentIndex: syncInfo.segmentIndex
};
}
},
// Stategy "ProgramDateTime": We have a program-date-time tag in this playlist
{
name: 'ProgramDateTime',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
return null;
}
let syncPoint = null;
let lastDistance = null;
const partsAndSegments = getPartsAndSegments(playlist);
currentTime = currentTime || 0;
for (let i = 0; i < partsAndSegments.length; i++) {
// start from the end and loop backwards for live
// or start from the front and loop forwards for non-live
const index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
const partAndSegment = partsAndSegments[index];
const segment = partAndSegment.segment;
const datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];
if (!datetimeMapping || !segment.dateTimeObject) {
continue;
}
const segmentTime = segment.dateTimeObject.getTime() / 1000;
let start = segmentTime + datetimeMapping; // take part duration into account.
if (segment.parts && typeof partAndSegment.partIndex === 'number') {
for (let z = 0; z < partAndSegment.partIndex; z++) {
start += segment.parts[z].duration;
}
}
const distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
// currentTime and can stop looking for better candidates
if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
break;
}
lastDistance = distance;
syncPoint = {
time: start,
segmentIndex: partAndSegment.segmentIndex,
partIndex: partAndSegment.partIndex
};
}
return syncPoint;
}
},
// Stategy "Segment": We have a known time mapping for a timeline and a
// segment in the current timeline with timing data
{
name: 'Segment',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
let syncPoint = null;
let lastDistance = null;
currentTime = currentTime || 0;
const partsAndSegments = getPartsAndSegments(playlist);
for (let i = 0; i < partsAndSegments.length; i++) {
// start from the end and loop backwards for live
// or start from the front and loop forwards for non-live
const index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
const partAndSegment = partsAndSegments[index];
const segment = partAndSegment.segment;
const start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;
if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
const distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
// currentTime and can stop looking for better candidates
if (lastDistance !== null && lastDistance < distance) {
break;
}
if (!syncPoint || lastDistance === null || lastDistance >= distance) {
lastDistance = distance;
syncPoint = {
time: start,
segmentIndex: partAndSegment.segmentIndex,
partIndex: partAndSegment.partIndex
};
}
}
}
return syncPoint;
}
},
// Stategy "Discontinuity": We have a discontinuity with a known
// display-time
{
name: 'Discontinuity',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
let syncPoint = null;
currentTime = currentTime || 0;
if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
let lastDistance = null;
for (let i = 0; i < playlist.discontinuityStarts.length; i++) {
const segmentIndex = playlist.discontinuityStarts[i];
const discontinuity = playlist.discontinuitySequence + i + 1;
const discontinuitySync = syncController.discontinuities[discontinuity];
if (discontinuitySync) {
const distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
// currentTime and can stop looking for better candidates
if (lastDistance !== null && lastDistance < distance) {
break;
}
if (!syncPoint || lastDistance === null || lastDistance >= distance) {
lastDistance = distance;
syncPoint = {
time: discontinuitySync.time,
segmentIndex,
partIndex: null
};
}
}
}
}
return syncPoint;
}
},
// Stategy "Playlist": We have a playlist with a known mapping of
// segment index to display time
{
name: 'Playlist',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
if (playlist.syncInfo) {
const syncPoint = {
time: playlist.syncInfo.time,
segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
partIndex: null
};
return syncPoint;
}
return null;
}
}];
class SyncController extends videojs.EventTarget {
constructor(options = {}) {
super(); // ...for syncing across variants
this.timelines = [];
this.discontinuities = [];
this.timelineToDatetimeMappings = {}; // TODO: this map should only be available for HLS, since only HLS has MediaSequence.
// For some reason this map also helps with syncing quality switches for MPEG-DASH.
// Moreover, if we disable this map for MPEG-DASH, quality switching will be broken.
// MPEG-DASH should have its own separate sync strategy.
const main = new MediaSequenceSync();
const audio = new DependantMediaSequenceSync(main);
const vtt = new DependantMediaSequenceSync(main);
this.mediaSequenceStorage_ = {
main,
audio,
vtt
};
this.logger_ = logger('SyncController');
}
/**
*
* @param {string} loaderType
* @return {MediaSequenceSync|null}
*/
getMediaSequenceSync(loaderType) {
return this.mediaSequenceStorage_[loaderType] || null;
}
/**
* Find a sync-point for the playlist specified
*
* A sync-point is defined as a known mapping from display-time to
* a segment-index in the current playlist.
*
* @param {Playlist} playlist
* The playlist that needs a sync-point
* @param {number} duration
* Duration of the MediaSource (Infinite if playing a live source)
* @param {number} currentTimeline
* The last timeline from which a segment was loaded
* @param {number} currentTime
* Current player's time
* @param {string} type
* Segment loader type
* @return {Object}
* A sync-point object
*/
getSyncPoint(playlist, duration, currentTimeline, currentTime, type) {
// Always use VOD sync point for VOD
if (duration !== Infinity) {
const vodSyncPointStrategy = syncPointStrategies.find(({
name
}) => name === 'VOD');
return vodSyncPointStrategy.run(this, playlist, duration);
}
const syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime, type);
if (!syncPoints.length) {
// Signal that we need to attempt to get a sync-point manually
// by fetching a segment in the playlist and constructing
// a sync-point from that information
return null;
} // If we have exact match just return it instead of finding the nearest distance
for (const syncPointInfo of syncPoints) {
const {
syncPoint,
strategy
} = syncPointInfo;
const {
segmentIndex,
time
} = syncPoint;
if (segmentIndex < 0) {
continue;
}
const selectedSegment = playlist.segments[segmentIndex];
const start = time;
const end = start + selectedSegment.duration;
this.logger_(`Strategy: ${strategy}. Current time: ${currentTime}. Selected segment: ${segmentIndex}. Time: [${start} -> ${end}]`);
if (currentTime >= start && currentTime < end) {
this.logger_('Found sync point with exact match: ', syncPoint);
return syncPoint;
}
} // Now find the sync-point that is closest to the currentTime because
// that should result in the most accurate guess about which segment
// to fetch
return this.selectSyncPoint_(syncPoints, {
key: 'time',
value: currentTime
});
}
/**
* Calculate the amount of time that has expired off the playlist during playback
*
* @param {Playlist} playlist
* Playlist object to calculate expired from
* @param {number} duration
* Duration of the MediaSource (Infinity if playing a live source)
* @return {number|null}
* The amount of time that has expired off the playlist during playback. Null
* if no sync-points for the playlist can be found.
*/
getExpiredTime(playlist, duration) {
if (!playlist || !playlist.segments) {
return null;
}
const syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
if (!syncPoints.length) {
return null;
}
const syncPoint = this.selectSyncPoint_(syncPoints, {
key: 'segmentIndex',
value: 0
}); // If the sync-point is beyond the start of the playlist, we want to subtract the
// duration from index 0 to syncPoint.segmentIndex instead of adding.
if (syncPoint.segmentIndex > 0) {
syncPoint.time *= -1;
}
return Math.abs(syncPoint.time + sumDurations({
defaultDuration: playlist.targetDuration,
durationList: playlist.segments,
startIndex: syncPoint.segmentIndex,
endIndex: 0
}));
}
/**
* Runs each sync-point strategy and returns a list of sync-points returned by the
* strategies
*
* @private
* @param {Playlist} playlist
* The playlist that needs a sync-point
* @param {number} duration
* Duration of the MediaSource (Infinity if playing a live source)
* @param {number} currentTimeline
* The last timeline from which a segment was loaded
* @param {number} currentTime
* Current player's time
* @param {string} type
* Segment loader type
* @return {Array}
* A list of sync-point objects
*/
runStrategies_(playlist, duration, currentTimeline, currentTime, type) {
const syncPoints = []; // Try to find a sync-point by utilizing various strategies...
for (let i = 0; i < syncPointStrategies.length; i++) {
const strategy = syncPointStrategies[i];
const syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime, type);
if (syncPoint) {
syncPoint.strategy = strategy.name;
syncPoints.push({
strategy: strategy.name,
syncPoint
});
}
}
return syncPoints;
}
/**
* Selects the sync-point nearest the specified target
*
* @private
* @param {Array} syncPoints
* List of sync-points to select from
* @param {Object} target
* Object specifying the property and value we are targeting
* @param {string} target.key
* Specifies the property to target. Must be either 'time' or 'segmentIndex'
* @param {number} target.value
* The value to target for the specified key.
* @return {Object}
* The sync-point nearest the target
*/
selectSyncPoint_(syncPoints, target) {
let bestSyncPoint = syncPoints[0].syncPoint;
let bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
let bestStrategy = syncPoints[0].strategy;
for (let i = 1; i < syncPoints.length; i++) {
const newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
if (newDistance < bestDistance) {
bestDistance = newDistance;
bestSyncPoint = syncPoints[i].syncPoint;
bestStrategy = syncPoints[i].strategy;
}
}
this.logger_(`syncPoint for [${target.key}: ${target.value}] chosen with strategy` + ` [${bestStrategy}]: [time:${bestSyncPoint.time},` + ` segmentIndex:${bestSyncPoint.segmentIndex}` + (typeof bestSyncPoint.partIndex === 'number' ? `,partIndex:${bestSyncPoint.partIndex}` : '') + ']');
return bestSyncPoint;
}
/**
* Save any meta-data present on the segments when segments leave
* the live window to the playlist to allow for synchronization at the
* playlist level later.
*
* @param {Playlist} oldPlaylist - The previous active playlist
* @param {Playlist} newPlaylist - The updated and most current playlist
*/
saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
const mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gaps
if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
videojs.log.warn(`Not saving expired segment info. Media sequence gap ${mediaSequenceDiff} is too large.`);
return;
} // When a segment expires from the playlist and it has a start time
// save that information as a possible sync-point reference in future
for (let i = mediaSequenceDiff - 1; i >= 0; i--) {
const lastRemovedSegment = oldPlaylist.segments[i];
if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
newPlaylist.syncInfo = {
mediaSequence: oldPlaylist.mediaSequence + i,
time: lastRemovedSegment.start
};
this.logger_(`playlist refresh sync: [time:${newPlaylist.syncInfo.time},` + ` mediaSequence: ${newPlaylist.syncInfo.mediaSequence}]`);
this.trigger('syncinfoupdate');
break;
}
}
}
/**
* Save the mapping from playlist's ProgramDateTime to display. This should only happen
* before segments start to load.
*
* @param {Playlist} playlist - The currently active playlist
*/
setDateTimeMappingForStart(playlist) {
// It's possible for the playlist to be updated before playback starts, meaning time
// zero is not yet set. If, during these playlist refreshes, a discontinuity is
// crossed, then the old time zero mapping (for the prior timeline) would be retained
// unless the mappings are cleared.
this.timelineToDatetimeMappings = {};
if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
const firstSegment = playlist.segments[0];
const playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
}
}
/**
* Calculates and saves timeline mappings, playlist sync info, and segment timing values
* based on the latest timing information.
*
* @param {Object} options
* Options object
* @param {SegmentInfo} options.segmentInfo
* The current active request information
* @param {boolean} options.shouldSaveTimelineMapping
* If there's a timeline change, determines if the timeline mapping should be
* saved for timeline mapping and program date time mappings.
*/
saveSegmentTimingInfo({
segmentInfo,
shouldSaveTimelineMapping
}) {
const didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
const segment = segmentInfo.segment;
if (didCalculateSegmentTimeMapping) {
this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
// now with segment timing information
if (!segmentInfo.playlist.syncInfo) {
segmentInfo.playlist.syncInfo = {
mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
time: segment.start
};
}
}
const dateTime = segment.dateTimeObject;
if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
}
}
timestampOffsetForTimeline(timeline) {
if (typeof this.timelines[timeline] === 'undefined') {
return null;
}
return this.timelines[timeline].time;
}
mappingForTimeline(timeline) {
if (typeof this.timelines[timeline] === 'undefined') {
return null;
}
return this.timelines[timeline].mapping;
}
/**
* Use the "media time" for a segment to generate a mapping to "display time" and
* save that display time to the segment.
*
* @private
* @param {SegmentInfo} segmentInfo
* The current active request information
* @param {Object} timingInfo
* The start and end time of the current segment in "media time"
* @param {boolean} shouldSaveTimelineMapping
* If there's a timeline change, determines if the timeline mapping should be
* saved in timelines.
* @return {boolean}
* Returns false if segment time mapping could not be calculated
*/
calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
// TODO: remove side effects
const segment = segmentInfo.segment;
const part = segmentInfo.part;
let mappingObj = this.timelines[segmentInfo.timeline];
let start;
let end;
if (typeof segmentInfo.timestampOffset === 'number') {
mappingObj = {
time: segmentInfo.startOfSegment,
mapping: segmentInfo.startOfSegment - timingInfo.start
};
if (shouldSaveTimelineMapping) {
this.timelines[segmentInfo.timeline] = mappingObj;
this.trigger('timestampoffset');
this.logger_(`time mapping for timeline ${segmentInfo.timeline}: ` + `[time: ${mappingObj.time}] [mapping: ${mappingObj.mapping}]`);
}
start = segmentInfo.startOfSegment;
end = timingInfo.end + mappingObj.mapping;
} else if (mappingObj) {
start = timingInfo.start + mappingObj.mapping;
end = timingInfo.end + mappingObj.mapping;
} else {
return false;
}
if (part) {
part.start = start;
part.end = end;
} // If we don't have a segment start yet or the start value we got
// is less than our current segment.start value, save a new start value.
// We have to do this because parts will have segment timing info saved
// multiple times and we want segment start to be the earliest part start
// value for that segment.
if (!segment.start || start < segment.start) {
segment.start = start;
}
segment.end = end;
return true;
}
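// Worked example (illustrative, not part of the library): when a timestampOffset is set
// and startOfSegment is 0 while the segment's media-time timingInfo.start is 110.5, the
// saved mapping is 0 - 110.5 = -110.5; a later segment on the same timeline with
// timingInfo [116.5, 122.5] then maps to display time [6, 12].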
/**
* Each time we have a discontinuity in the playlist, attempt to calculate the location
* in display time of the start of the discontinuity and save that. We also save an accuracy
* value so that we save values with the most accuracy (closest to 0.)
*
* @private
* @param {SegmentInfo} segmentInfo - The current active request information
*/
saveDiscontinuitySyncInfo_(segmentInfo) {
const playlist = segmentInfo.playlist;
const segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
// the range starts, and its accuracy is 0 (greater accuracy values
// mean more approximation)
if (segment.discontinuity) {
this.discontinuities[segment.timeline] = {
time: segment.start,
accuracy: 0
};
} else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
// Search for future discontinuities that we can provide better timing
// information for and save that information for sync purposes
for (let i = 0; i < playlist.discontinuityStarts.length; i++) {
const segmentIndex = playlist.discontinuityStarts[i];
const discontinuity = playlist.discontinuitySequence + i + 1;
const mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
const accuracy = Math.abs(mediaIndexDiff);
if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
let time;
if (mediaIndexDiff < 0) {
time = segment.start - sumDurations({
defaultDuration: playlist.targetDuration,
durationList: playlist.segments,
startIndex: segmentInfo.mediaIndex,
endIndex: segmentIndex
});
} else {
time = segment.end + sumDurations({
defaultDuration: playlist.targetDuration,
durationList: playlist.segments,
startIndex: segmentInfo.mediaIndex + 1,
endIndex: segmentIndex
});
}
this.discontinuities[discontinuity] = {
time,
accuracy
};
}
}
}
}
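/**
* A worked example of the accuracy bookkeeping above (the values are assumed):
* with playlist.discontinuityStarts = [5], playlist.discontinuitySequence = 0 and a
* just-downloaded segment at mediaIndex 2, the discontinuity number is 0 + 0 + 1 = 1
* and its accuracy is |5 - 2| = 3. Its start time is estimated as segment.end plus the
* durations of the segments between the current one and the discontinuity (indices 3
* and 4), and is only overwritten later by an estimate with a smaller accuracy value.
*/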
dispose() {
this.trigger('dispose');
this.off();
}
}
/**
* The TimelineChangeController acts as a source for segment loaders to listen for and
* keep track of latest and pending timeline changes. This is useful to ensure proper
* sync, as each loader may need to make a consideration for what timeline the other
* loader is on before making changes which could impact the other loader's media.
*
* @class TimelineChangeController
* @extends videojs.EventTarget
*/
class TimelineChangeController extends videojs.EventTarget {
constructor() {
super();
this.pendingTimelineChanges_ = {};
this.lastTimelineChanges_ = {};
}
clearPendingTimelineChange(type) {
this.pendingTimelineChanges_[type] = null;
this.trigger('pendingtimelinechange');
}
pendingTimelineChange({
type,
from,
to
}) {
if (typeof from === 'number' && typeof to === 'number') {
this.pendingTimelineChanges_[type] = {
type,
from,
to
};
this.trigger('pendingtimelinechange');
}
return this.pendingTimelineChanges_[type];
}
lastTimelineChange({
type,
from,
to
}) {
if (typeof from === 'number' && typeof to === 'number') {
this.lastTimelineChanges_[type] = {
type,
from,
to
};
delete this.pendingTimelineChanges_[type];
const metadata = {
timelineChangeInfo: {
from,
to
}
};
this.trigger({
type: 'timelinechange',
metadata
});
}
return this.lastTimelineChanges_[type];
}
dispose() {
this.trigger('dispose');
this.pendingTimelineChanges_ = {};
this.lastTimelineChanges_ = {};
this.off();
}
}
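/**
* A minimal usage sketch of the controller above (the timeline numbers are assumed):
*
* @example
* const timelineChangeController = new TimelineChangeController();
* // a loader announces that it intends to cross from timeline 0 to timeline 1
* timelineChangeController.pendingTimelineChange({ type: 'main', from: 0, to: 1 });
* // once the change has been appended, record it; this also clears the pending entry
* timelineChangeController.lastTimelineChange({ type: 'main', from: 0, to: 1 });
* timelineChangeController.lastTimelineChange({ type: 'main' }); // => { type: 'main', from: 0, to: 1 }
*/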
/* rollup-plugin-worker-factory start for worker!/home/runner/work/http-streaming/http-streaming/src/decrypter-worker.js */
const workerCode = transform(getWorkerString(function () {
/**
* @file stream.js
*/
/**
* A lightweight readable stream implementation that handles event dispatching.
*
* @class Stream
*/
var Stream = /*#__PURE__*/function () {
function Stream() {
this.listeners = {};
}
/**
* Add a listener for a specified event type.
*
* @param {string} type the event name
* @param {Function} listener the callback to be invoked when an event of
* the specified type occurs
*/
var _proto = Stream.prototype;
_proto.on = function on(type, listener) {
if (!this.listeners[type]) {
this.listeners[type] = [];
}
this.listeners[type].push(listener);
}
/**
* Remove a listener for a specified event type.
*
* @param {string} type the event name
* @param {Function} listener a function previously registered for this
* type of event through `on`
* @return {boolean} whether the listener was found and removed
*/;
_proto.off = function off(type, listener) {
if (!this.listeners[type]) {
return false;
}
var index = this.listeners[type].indexOf(listener); // TODO: which is better?
// In Video.js we slice listener functions
// on trigger so that it does not mess up the order
// while we loop through.
//
// Here we slice on off so that the loop in trigger
* can continue using its old reference to loop without
// messing up the order.
this.listeners[type] = this.listeners[type].slice(0);
this.listeners[type].splice(index, 1);
return index > -1;
}
/**
* Trigger an event of the specified type on this stream. Any additional
* arguments to this function are passed as parameters to event listeners.
*
* @param {string} type the event name
*/;
_proto.trigger = function trigger(type) {
var callbacks = this.listeners[type];
if (!callbacks) {
return;
} // Slicing the arguments on every invocation of this method
// can add a significant amount of overhead. Avoid the
// intermediate object creation for the common case of a
// single callback argument
if (arguments.length === 2) {
var length = callbacks.length;
for (var i = 0; i < length; ++i) {
callbacks[i].call(this, arguments[1]);
}
} else {
var args = Array.prototype.slice.call(arguments, 1);
var _length = callbacks.length;
for (var _i = 0; _i < _length; ++_i) {
callbacks[_i].apply(this, args);
}
}
}
/**
* Destroys the stream and cleans up.
*/;
_proto.dispose = function dispose() {
this.listeners = {};
}
/**
* Forwards all `data` events on this stream to the destination stream. The
* destination stream should provide a method `push` to receive the data
* events as they arrive.
*
* @param {Stream} destination the stream that will receive all `data` events
* @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
*/;
_proto.pipe = function pipe(destination) {
this.on('data', function (data) {
destination.push(data);
});
};
return Stream;
}();
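/**
* A minimal usage sketch of the Stream above (the handler and payload are assumed):
*
* @example
* var source = new Stream();
* var sink = { push: function (data) { console.log('received', data); } };
* source.pipe(sink); // forwards every 'data' event to sink.push
* source.trigger('data', 123); // logs: received 123
*/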
/*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
/**
* Returns the subarray of a Uint8Array without PKCS#7 padding.
*
* @param {Uint8Array} padded unencrypted bytes that have been padded
* @return {Uint8Array} the unpadded bytes
* @see http://tools.ietf.org/html/rfc5652
*/
function unpad(padded) {
return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
}
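/**
* A worked example of the unpadding above (the bytes are assumed): a 16-byte block
* holding 13 bytes of plaintext is padded with three trailing bytes of value 3, so
* the final byte tells us how many bytes to strip.
*
* @example
* var padded = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 3, 3, 3]);
* unpad(padded); // => a Uint8Array view of the first 13 bytes
*/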
/*! @name aes-decrypter @version 4.0.1 @license Apache-2.0 */
/**
* @file aes.js
*
* This file contains an adaptation of the AES decryption algorithm
* from the Stanford JavaScript Cryptography Library. That work is
* covered by the following copyright and permissions notice:
*
* Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the authors.
*/
/**
* Expand the S-box tables.
*
* @private
*/
const precompute = function () {
const tables = [[[], [], [], [], []], [[], [], [], [], []]];
const encTable = tables[0];
const decTable = tables[1];
const sbox = encTable[4];
const sboxInv = decTable[4];
let i;
let x;
let xInv;
const d = [];
const th = [];
let x2;
let x4;
let x8;
let s;
let tEnc;
let tDec; // Compute double and third tables
for (i = 0; i < 256; i++) {
th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
}
for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
// Compute sbox
s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
s = s >> 8 ^ s & 255 ^ 99;
sbox[x] = s;
sboxInv[s] = x; // Compute MixColumns
x8 = d[x4 = d[x2 = d[x]]];
tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
tEnc = d[s] * 0x101 ^ s * 0x1010100;
for (i = 0; i < 4; i++) {
encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
}
} // Compactify. Considerable speedup on Firefox.
for (i = 0; i < 5; i++) {
encTable[i] = encTable[i].slice(0);
decTable[i] = decTable[i].slice(0);
}
return tables;
};
let aesTables = null;
/**
* Schedule out an AES key for both encryption and decryption. This
* is a low-level class. Use a cipher mode to do bulk encryption.
*
* @class AES
* @param {Array} key The key as an array of 4, 6 or 8 words.
*/
class AES {
constructor(key) {
/**
* The expanded S-box and inverse S-box tables. These will be computed
* on the client so that we don't have to send them down the wire.
*
* There are two tables, _tables[0] is for encryption and
* _tables[1] is for decryption.
*
* The first 4 sub-tables are the expanded S-box with MixColumns. The
* last (_tables[0|1][4]) is the S-box itself.
*
* @private
*/
// if we have yet to precompute the S-box tables
// do so now
if (!aesTables) {
aesTables = precompute();
} // then make a copy of that object for use
this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
let i;
let j;
let tmp;
const sbox = this._tables[0][4];
const decTable = this._tables[1];
const keyLen = key.length;
let rcon = 1;
if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
throw new Error('Invalid aes key size');
}
const encKey = key.slice(0);
const decKey = [];
this._key = [encKey, decKey]; // schedule encryption keys
for (i = keyLen; i < 4 * keyLen + 28; i++) {
tmp = encKey[i - 1]; // apply sbox
if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
if (i % keyLen === 0) {
tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
rcon = rcon << 1 ^ (rcon >> 7) * 283;
}
}
encKey[i] = encKey[i - keyLen] ^ tmp;
} // schedule decryption keys
for (j = 0; i; j++, i--) {
tmp = encKey[j & 3 ? i : i - 4];
if (i <= 4 || j < 4) {
decKey[j] = tmp;
} else {
decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
}
}
}
/**
* Decrypt 16 bytes, specified as four 32-bit words.
*
* @param {number} encrypted0 the first word to decrypt
* @param {number} encrypted1 the second word to decrypt
* @param {number} encrypted2 the third word to decrypt
* @param {number} encrypted3 the fourth word to decrypt
* @param {Int32Array} out the array to write the decrypted words
* into
* @param {number} offset the offset into the output array to start
* writing results; the plaintext words are written into `out` rather
* than returned
*/
decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
const key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
let a = encrypted0 ^ key[0];
let b = encrypted3 ^ key[1];
let c = encrypted2 ^ key[2];
let d = encrypted1 ^ key[3];
let a2;
let b2;
let c2; // key.length === 2 ?
const nInnerRounds = key.length / 4 - 2;
let i;
let kIndex = 4;
const table = this._tables[1]; // load up the tables
const table0 = table[0];
const table1 = table[1];
const table2 = table[2];
const table3 = table[3];
const sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
for (i = 0; i < nInnerRounds; i++) {
a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
kIndex += 4;
a = a2;
b = b2;
c = c2;
} // Last round.
for (i = 0; i < 4; i++) {
out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
a2 = a;
a = b;
b = c;
c = d;
d = a2;
}
}
}
/**
* @file async-stream.js
*/
/**
* A wrapper around the Stream class to use setTimeout
* and run stream "jobs" asynchronously
*
* @class AsyncStream
* @extends Stream
*/
class AsyncStream extends Stream {
constructor() {
super(Stream);
this.jobs = [];
this.delay = 1;
this.timeout_ = null;
}
/**
* process an async job
*
* @private
*/
processJob_() {
this.jobs.shift()();
if (this.jobs.length) {
this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
} else {
this.timeout_ = null;
}
}
/**
* push a job into the stream
*
* @param {Function} job the job to push into the stream
*/
push(job) {
this.jobs.push(job);
if (!this.timeout_) {
this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
}
}
}
/**
* @file decrypter.js
*
* An asynchronous implementation of AES-128 CBC decryption with
* PKCS#7 padding.
*/
/**
* Convert network-order (big-endian) bytes into their little-endian
* representation.
*/
const ntoh = function (word) {
return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
};
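/**
* A worked example of the byte swap above (the input word is assumed). Bitwise math
* in JavaScript produces a signed 32-bit result, so the unsigned shift below is only
* there to print the hex digits.
*
* @example
* (ntoh(0x11223344) >>> 0).toString(16); // => '44332211'
*/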
/**
* Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
*
* @param {Uint8Array} encrypted the encrypted bytes
* @param {Uint32Array} key the bytes of the decryption key
* @param {Uint32Array} initVector the initialization vector (IV) to
* use for the first round of CBC.
* @return {Uint8Array} the decrypted bytes
*
* @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
* @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
* @see https://tools.ietf.org/html/rfc2315
*/
const decrypt = function (encrypted, key, initVector) {
// word-level access to the encrypted bytes
const encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
const decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
const decrypted = new Uint8Array(encrypted.byteLength);
const decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
// decrypted data
let init0;
let init1;
let init2;
let init3;
let encrypted0;
let encrypted1;
let encrypted2;
let encrypted3; // iteration variable
let wordIx; // pull out the words of the IV to ensure we don't modify the
// passed-in reference and easier access
init0 = initVector[0];
init1 = initVector[1];
init2 = initVector[2];
init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
// to each decrypted block
for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
// convert big-endian (network order) words into little-endian
// (javascript order)
encrypted0 = ntoh(encrypted32[wordIx]);
encrypted1 = ntoh(encrypted32[wordIx + 1]);
encrypted2 = ntoh(encrypted32[wordIx + 2]);
encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
// plaintext
decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
init0 = encrypted0;
init1 = encrypted1;
init2 = encrypted2;
init3 = encrypted3;
}
return decrypted;
};
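/**
* A minimal usage sketch of the decrypt function above (the variables are assumed,
* not values from this bundle):
*
* @example
* // encryptedBytes: a Uint8Array of ciphertext whose length is a multiple of 16
* // keyWords, ivWords: Uint32Arrays of four big-endian words each (128-bit key and IV)
* var paddedPlaintext = decrypt(encryptedBytes, keyWords, ivWords);
* var plaintext = unpad(paddedPlaintext); // decrypt() leaves the PKCS#7 padding in place
*/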
/**
* The `Decrypter` class that manages decryption of AES
* data through `AsyncStream` objects and the `decrypt`
* function
*
* @param {Uint8Array} encrypted the encrypted bytes
* @param {Uint32Array} key the bytes of the decryption key
* @param {Uint32Array} initVector the initialization vector (IV) to use for the first round of CBC
* @param {Function} done the function to run when done
* @class Decrypter
*/
class Decrypter {
constructor(encrypted, key, initVector, done) {
const step = Decrypter.STEP;
const encrypted32 = new Int32Array(encrypted.buffer);
const decrypted = new Uint8Array(encrypted.byteLength);
let i = 0;
this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously
this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
for (i = step; i < encrypted32.length; i += step) {
initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
} // invoke the done() callback when everything is finished
this.asyncStream_.push(function () {
// remove pkcs#7 padding from the decrypted bytes
done(null, unpad(decrypted));
});
}
/**
* a getter for step, the maximum number of bytes to process at one time
*
* @return {number} the value of step (32000)
*/
static get STEP() {
// 4 * 8000;
return 32000;
}
/**
* @private
*/
decryptChunk_(encrypted, key, initVector, decrypted) {
return function () {
const bytes = decrypt(encrypted, key, initVector);
decrypted.set(bytes, encrypted.byteOffset);
};
}
}
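/**
* A minimal usage sketch of the Decrypter above (the variable names are assumed):
*
* @example
* new Decrypter(encryptedBytes, keyWords, ivWords, function (err, decryptedBytes) {
*   // err is always null; decryptedBytes is a Uint8Array with the PKCS#7 padding removed
* });
*/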
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
var win;
if (typeof window !== "undefined") {
win = window;
} else if (typeof commonjsGlobal !== "undefined") {
win = commonjsGlobal;
} else if (typeof self !== "undefined") {
win = self;
} else {
win = {};
}
var window_1 = win;
var isArrayBufferView = function isArrayBufferView(obj) {
if (typeof ArrayBuffer.isView === 'function') {
return ArrayBuffer.isView(obj);
}
return obj && obj.buffer instanceof ArrayBuffer;
};
var BigInt = window_1.BigInt || Number;
[BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
(function () {
var a = new Uint16Array([0xFFCC]);
var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);
if (b[0] === 0xFF) {
return 'big';
}
if (b[0] === 0xCC) {
return 'little';
}
return 'unknown';
})();
/**
* Creates an object for sending to a web worker, modifying properties that are TypedArrays
* into a new object with separated properties for the buffer, byteOffset, and byteLength.
*
* @param {Object} message
* Object of properties and values to send to the web worker
* @return {Object}
* Modified message with TypedArray values expanded
* @function createTransferableMessage
*/
const createTransferableMessage = function (message) {
const transferable = {};
Object.keys(message).forEach(key => {
const value = message[key];
if (isArrayBufferView(value)) {
transferable[key] = {
bytes: value.buffer,
byteOffset: value.byteOffset,
byteLength: value.byteLength
};
} else {
transferable[key] = value;
}
});
return transferable;
};
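/**
* A worked example of the expansion above (the message contents are assumed):
*
* @example
* var bytes = new Uint8Array([1, 2, 3]);
* createTransferableMessage({ source: 'main', encrypted: bytes });
* // => { source: 'main',
* //      encrypted: { bytes: <ArrayBuffer>, byteOffset: 0, byteLength: 3 } }
*/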
/* global self */
/**
* Our web worker interface so that things can talk to aes-decrypter
* that will be running in a web worker. The scope is passed to this by
* webworkify.
*/
self.onmessage = function (event) {
const data = event.data;
const encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
const key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
const iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
/* eslint-disable no-new, handle-callback-err */
new Decrypter(encrypted, key, iv, function (err, bytes) {
self.postMessage(createTransferableMessage({
source: data.source,
decrypted: bytes
}), [bytes.buffer]);
});
/* eslint-enable */
};
}));
var Decrypter = factory(workerCode);
/* rollup-plugin-worker-factory end for worker!/home/runner/work/http-streaming/http-streaming/src/decrypter-worker.js */
/**
* Convert the properties of an HLS track into an audioTrackKind.
*
* @private
*/
const audioTrackKind_ = properties => {
let kind = properties.default ? 'main' : 'alternative';
if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
kind = 'main-desc';
}
return kind;
};
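/**
* A worked example of the mapping above (the properties are assumed):
*
* @example
* audioTrackKind_({ default: true }); // => 'main'
* audioTrackKind_({ default: false }); // => 'alternative'
* audioTrackKind_({
*   default: true,
*   characteristics: 'public.accessibility.describes-video'
* }); // => 'main-desc'
*/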
/**
* Pause provided segment loader and playlist loader if active
*
* @param {SegmentLoader} segmentLoader
* SegmentLoader to pause
* @param {Object} mediaType
* Active media type
* @function stopLoaders
*/
const stopLoaders = (segmentLoader, mediaType) => {
segmentLoader.abort();
segmentLoader.pause();
if (mediaType && mediaType.activePlaylistLoader) {
mediaType.activePlaylistLoader.pause();
mediaType.activePlaylistLoader = null;
}
};
/**
* Start loading provided segment loader and playlist loader
*
* @param {PlaylistLoader} playlistLoader
* PlaylistLoader to start loading
* @param {Object} mediaType
* Active media type
* @function startLoaders
*/
const startLoaders = (playlistLoader, mediaType) => {
// Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
// playlist loader
mediaType.activePlaylistLoader = playlistLoader;
playlistLoader.load();
};
/**
* Returns a function to be called when the media group changes. It performs a
* non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
* change of group is merely a rendition switch of the same content at another encoding,
* rather than a change of content, such as switching audio from English to Spanish.
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Handler for a non-destructive resync of SegmentLoader when the active media
* group changes.
* @function onGroupChanged
*/
const onGroupChanged = (type, settings) => () => {
const {
segmentLoaders: {
[type]: segmentLoader,
main: mainSegmentLoader
},
mediaTypes: {
[type]: mediaType
}
} = settings;
const activeTrack = mediaType.activeTrack();
const activeGroup = mediaType.getActiveGroup();
const previousActiveLoader = mediaType.activePlaylistLoader;
const lastGroup = mediaType.lastGroup_; // the group did not change, do nothing
if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
return;
}
mediaType.lastGroup_ = activeGroup;
mediaType.lastTrack_ = activeTrack;
stopLoaders(segmentLoader, mediaType);
if (!activeGroup || activeGroup.isMainPlaylist) {
// there is no group active or active group is a main playlist and won't change
return;
}
if (!activeGroup.playlistLoader) {
if (previousActiveLoader) {
// The previous group had a playlist loader but the new active group does not
// this means we are switching from demuxed to muxed audio. In this case we want to
// do a destructive reset of the main segment loader and not restart the audio
// loaders.
mainSegmentLoader.resetEverything();
}
return;
} // Non-destructive resync
segmentLoader.resyncLoader();
startLoaders(activeGroup.playlistLoader, mediaType);
};
const onGroupChanging = (type, settings) => () => {
const {
segmentLoaders: {
[type]: segmentLoader
},
mediaTypes: {
[type]: mediaType
}
} = settings;
mediaType.lastGroup_ = null;
segmentLoader.abort();
segmentLoader.pause();
};
/**
* Returns a function to be called when the media track changes. It performs a
* destructive reset of the SegmentLoader to ensure we start loading as close to
* currentTime as possible.
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Handler for a destructive reset of SegmentLoader when the active media
* track changes.
* @function onTrackChanged
*/
const onTrackChanged = (type, settings) => () => {
const {
mainPlaylistLoader,
segmentLoaders: {
[type]: segmentLoader,
main: mainSegmentLoader
},
mediaTypes: {
[type]: mediaType
}
} = settings;
const activeTrack = mediaType.activeTrack();
const activeGroup = mediaType.getActiveGroup();
const previousActiveLoader = mediaType.activePlaylistLoader;
const lastTrack = mediaType.lastTrack_; // track did not change, do nothing
if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
return;
}
mediaType.lastGroup_ = activeGroup;
mediaType.lastTrack_ = activeTrack;
stopLoaders(segmentLoader, mediaType);
if (!activeGroup) {
// there is no group active so we do not want to restart loaders
return;
}
if (activeGroup.isMainPlaylist) {
// track did not change, do nothing
if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
return;
}
const pc = settings.vhs.playlistController_;
const newPlaylist = pc.selectPlaylist(); // media will not change, do nothing
if (pc.media() === newPlaylist) {
return;
}
mediaType.logger_(`track change. Switching main audio from ${lastTrack.id} to ${activeTrack.id}`);
mainPlaylistLoader.pause();
mainSegmentLoader.resetEverything();
pc.fastQualityChange_(newPlaylist);
return;
}
if (type === 'AUDIO') {
if (!activeGroup.playlistLoader) {
// when switching from demuxed audio/video to muxed audio/video (noted by no
// playlist loader for the audio group), we want to do a destructive reset of the
// main segment loader and not restart the audio loaders
mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
// it should be stopped
mainSegmentLoader.resetEverything();
return;
} // although the segment loader is an audio segment loader, call the setAudio
// function to ensure it is prepared to re-append the init segment (or handle other
// config changes)
segmentLoader.setAudio(true);
mainSegmentLoader.setAudio(false);
}
if (previousActiveLoader === activeGroup.playlistLoader) {
// Nothing has actually changed. This can happen because track change events can fire
// multiple times for a "single" change. One for enabling the new active track, and
// one for disabling the track that was active
startLoaders(activeGroup.playlistLoader, mediaType);
return;
}
if (segmentLoader.track) {
// For WebVTT, set the new text track in the segmentloader
segmentLoader.track(activeTrack);
} // destructive reset
segmentLoader.resetEverything();
startLoaders(activeGroup.playlistLoader, mediaType);
};
const onError = {
/**
* Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
* an error.
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Error handler. Logs warning (or error if the playlist is excluded) to
* console and switches back to default audio track.
* @function onError.AUDIO
*/
AUDIO: (type, settings) => () => {
const {
mediaTypes: {
[type]: mediaType
},
excludePlaylist
} = settings; // switch back to default audio track
const activeTrack = mediaType.activeTrack();
const activeGroup = mediaType.activeGroup();
const id = (activeGroup.filter(group => group.default)[0] || activeGroup[0]).id;
const defaultTrack = mediaType.tracks[id];
if (activeTrack === defaultTrack) {
// Default track encountered an error. All we can do now is exclude the current
// rendition and hope another will switch audio groups
excludePlaylist({
error: {
message: 'Problem encountered loading the default audio track.'
}
});
return;
}
videojs.log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
for (const trackId in mediaType.tracks) {
mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
}
mediaType.onTrackChanged();
},
/**
* Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
* an error.
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Error handler. Logs warning to console and disables the active subtitle track
* @function onError.SUBTITLES
*/
SUBTITLES: (type, settings) => () => {
const {
mediaTypes: {
[type]: mediaType
}
} = settings;
videojs.log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
const track = mediaType.activeTrack();
if (track) {
track.mode = 'disabled';
}
mediaType.onTrackChanged();
}
};
const setupListeners = {
/**
* Setup event listeners for audio playlist loader
*
* @param {string} type
* MediaGroup type
* @param {PlaylistLoader|null} playlistLoader
* PlaylistLoader to register listeners on
* @param {Object} settings
* Object containing required information for media groups
* @function setupListeners.AUDIO
*/
AUDIO: (type, playlistLoader, settings) => {
if (!playlistLoader) {
// no playlist loader means audio will be muxed with the video
return;
}
const {
tech,
requestOptions,
segmentLoaders: {
[type]: segmentLoader
}
} = settings;
playlistLoader.on('loadedmetadata', () => {
const media = playlistLoader.media();
segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
// permits, start downloading segments
if (!tech.paused() || media.endList && tech.preload() !== 'none') {
segmentLoader.load();
}
});
playlistLoader.on('loadedplaylist', () => {
segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
if (!tech.paused()) {
segmentLoader.load();
}
});
playlistLoader.on('error', onError[type](type, settings));
},
/**
* Setup event listeners for subtitle playlist loader
*
* @param {string} type
* MediaGroup type
* @param {PlaylistLoader|null} playlistLoader
* PlaylistLoader to register listeners on
* @param {Object} settings
* Object containing required information for media groups
* @function setupListeners.SUBTITLES
*/
SUBTITLES: (type, playlistLoader, settings) => {
const {
tech,
requestOptions,
segmentLoaders: {
[type]: segmentLoader
},
mediaTypes: {
[type]: mediaType
}
} = settings;
playlistLoader.on('loadedmetadata', () => {
const media = playlistLoader.media();
segmentLoader.playlist(media, requestOptions);
segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
// permits, start downloading segments
if (!tech.paused() || media.endList && tech.preload() !== 'none') {
segmentLoader.load();
}
});
playlistLoader.on('loadedplaylist', () => {
segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
if (!tech.paused()) {
segmentLoader.load();
}
});
playlistLoader.on('error', onError[type](type, settings));
}
};
const initialize = {
/**
* Setup PlaylistLoaders and AudioTracks for the audio groups
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @function initialize.AUDIO
*/
'AUDIO': (type, settings) => {
const {
vhs,
sourceType,
segmentLoaders: {
[type]: segmentLoader
},
requestOptions,
main: {
mediaGroups
},
mediaTypes: {
[type]: {
groups,
tracks,
logger_
}
},
mainPlaylistLoader
} = settings;
const audioOnlyMain = isAudioOnly(mainPlaylistLoader.main); // force a default if we have none
if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
mediaGroups[type] = {
main: {
default: {
default: true
}
}
};
if (audioOnlyMain) {
mediaGroups[type].main.default.playlists = mainPlaylistLoader.main.playlists;
}
}
for (const groupId in mediaGroups[type]) {
if (!groups[groupId]) {
groups[groupId] = [];
}
for (const variantLabel in mediaGroups[type][groupId]) {
let properties = mediaGroups[type][groupId][variantLabel];
let playlistLoader;
if (audioOnlyMain) {
logger_(`AUDIO group '${groupId}' label '${variantLabel}' is a main playlist`);
properties.isMainPlaylist = true;
playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
// use the resolved media playlist object
} else if (sourceType === 'vhs-json' && properties.playlists) {
playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
} else if (properties.resolvedUri) {
playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists
// should we even have properties.playlists in this check.
} else if (properties.playlists && sourceType === 'dash') {
playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, mainPlaylistLoader);
} else {
// no resolvedUri means the audio is muxed with the video when using this
// audio track
playlistLoader = null;
}
properties = merge({
id: variantLabel,
playlistLoader
}, properties);
setupListeners[type](type, properties.playlistLoader, settings);
groups[groupId].push(properties);
if (typeof tracks[variantLabel] === 'undefined') {
const track = new videojs.AudioTrack({
id: variantLabel,
kind: audioTrackKind_(properties),
enabled: false,
language: properties.language,
default: properties.default,
label: variantLabel
});
tracks[variantLabel] = track;
}
}
} // setup single error event handler for the segment loader
segmentLoader.on('error', onError[type](type, settings));
},
/**
* Setup PlaylistLoaders and TextTracks for the subtitle groups
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @function initialize.SUBTITLES
*/
'SUBTITLES': (type, settings) => {
const {
tech,
vhs,
sourceType,
segmentLoaders: {
[type]: segmentLoader
},
requestOptions,
main: {
mediaGroups
},
mediaTypes: {
[type]: {
groups,
tracks
}
},
mainPlaylistLoader
} = settings;
for (const groupId in mediaGroups[type]) {
if (!groups[groupId]) {
groups[groupId] = [];
}
for (const variantLabel in mediaGroups[type][groupId]) {
if (!vhs.options_.useForcedSubtitles && mediaGroups[type][groupId][variantLabel].forced) {
// Subtitle playlists with the forced attribute are not selectable in Safari.
// According to Apple's HLS Authoring Specification:
// If content has forced subtitles and regular subtitles in a given language,
// the regular subtitles track in that language MUST contain both the forced
// subtitles and the regular subtitles for that language.
// Because of this requirement and that Safari does not add forced subtitles,
// forced subtitles are skipped here to maintain consistent experience across
// all platforms
continue;
}
let properties = mediaGroups[type][groupId][variantLabel];
let playlistLoader;
if (sourceType === 'hls') {
playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
} else if (sourceType === 'dash') {
const playlists = properties.playlists.filter(p => p.excludeUntil !== Infinity);
if (!playlists.length) {
return;
}
playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, mainPlaylistLoader);
} else if (sourceType === 'vhs-json') {
playlistLoader = new PlaylistLoader(
// if the vhs-json object included the media playlist, use the media playlist
// as provided, otherwise use the resolved URI to load the playlist
properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
}
properties = merge({
id: variantLabel,
playlistLoader
}, properties);
setupListeners[type](type, properties.playlistLoader, settings);
groups[groupId].push(properties);
if (typeof tracks[variantLabel] === 'undefined') {
const track = tech.addRemoteTextTrack({
id: variantLabel,
kind: 'subtitles',
default: properties.default && properties.autoselect,
language: properties.language,
label: variantLabel
}, false).track;
tracks[variantLabel] = track;
}
}
} // setup single error event handler for the segment loader
segmentLoader.on('error', onError[type](type, settings));
},
/**
* Setup TextTracks for the closed-caption groups
*
* @param {String} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @function initialize['CLOSED-CAPTIONS']
*/
'CLOSED-CAPTIONS': (type, settings) => {
const {
tech,
main: {
mediaGroups
},
mediaTypes: {
[type]: {
groups,
tracks
}
}
} = settings;
for (const groupId in mediaGroups[type]) {
if (!groups[groupId]) {
groups[groupId] = [];
}
for (const variantLabel in mediaGroups[type][groupId]) {
const properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services
if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
continue;
}
const captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
let newProps = {
label: variantLabel,
language: properties.language,
instreamId: properties.instreamId,
default: properties.default && properties.autoselect
};
if (captionServices[newProps.instreamId]) {
newProps = merge(newProps, captionServices[newProps.instreamId]);
}
if (newProps.default === undefined) {
delete newProps.default;
} // No PlaylistLoader is required for Closed-Captions because the captions are
// embedded within the video stream
groups[groupId].push(merge({
id: variantLabel
}, properties));
if (typeof tracks[variantLabel] === 'undefined') {
const track = tech.addRemoteTextTrack({
id: newProps.instreamId,
kind: 'captions',
default: newProps.default,
language: newProps.language,
label: newProps.label
}, false).track;
tracks[variantLabel] = track;
}
}
}
}
};
const groupMatch = (list, media) => {
for (let i = 0; i < list.length; i++) {
if (playlistMatch(media, list[i])) {
return true;
}
if (list[i].playlists && groupMatch(list[i].playlists, media)) {
return true;
}
}
return false;
};
/**
* Returns a function used to get the active group of the provided type
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Function that returns the active media group for the provided type. Takes an
* optional parameter {TextTrack} track. If no track is provided, a list of all
* variants in the group is returned; otherwise, the variant corresponding to the
* provided track is returned.
* @function activeGroup
*/
const activeGroup = (type, settings) => track => {
const {
mainPlaylistLoader,
mediaTypes: {
[type]: {
groups
}
}
} = settings;
const media = mainPlaylistLoader.media();
if (!media) {
return null;
}
let variants = null; // set variants to the group named by the active media playlist's attribute
if (media.attributes[type]) {
variants = groups[media.attributes[type]];
}
const groupKeys = Object.keys(groups);
if (!variants) {
// find the mainPlaylistLoader media
// that is in a media group if we are dealing
// with audio only
if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.main)) {
for (let i = 0; i < groupKeys.length; i++) {
const groupPropertyList = groups[groupKeys[i]];
if (groupMatch(groupPropertyList, media)) {
variants = groupPropertyList;
break;
}
} // use the main group if it exists
} else if (groups.main) {
variants = groups.main; // only one group, use that one
} else if (groupKeys.length === 1) {
variants = groups[groupKeys[0]];
}
}
if (typeof track === 'undefined') {
return variants;
}
if (track === null || !variants) {
// An active track was specified so a corresponding group is expected. track === null
// means no track is currently active so there is no corresponding group
return null;
}
return variants.filter(props => props.id === track.id)[0] || null;
};
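/**
* A worked example of the lookup above (the manifest values are assumed): if the active
* media playlist has attributes.AUDIO === 'aud1', the returned function uses the variants
* in groups['aud1']; calling it with an audio track whose id is 'en' then yields the
* single variant in that group whose id is 'en', or null if none matches.
*/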
const activeTrack = {
/**
* Returns a function used to get the active track of type provided
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Function that returns the active media track for the provided type. Returns
* null if no track is active
* @function activeTrack.AUDIO
*/
AUDIO: (type, settings) => () => {
const {
mediaTypes: {
[type]: {
tracks
}
}
} = settings;
for (const id in tracks) {
if (tracks[id].enabled) {
return tracks[id];
}
}
return null;
},
/**
* Returns a function used to get the active track of type provided
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Function that returns the active media track for the provided type. Returns
* null if no track is active
* @function activeTrack.SUBTITLES
*/
SUBTITLES: (type, settings) => () => {
const {
mediaTypes: {
[type]: {
tracks
}
}
} = settings;
for (const id in tracks) {
if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
return tracks[id];
}
}
return null;
}
};
const getActiveGroup = (type, {
mediaTypes
}) => () => {
const activeTrack_ = mediaTypes[type].activeTrack();
if (!activeTrack_) {
return null;
}
return mediaTypes[type].activeGroup(activeTrack_);
};
/**
* Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
* Closed-Captions) specified in the main manifest.
*
* @param {Object} settings
* Object containing required information for setting up the media groups
* @param {Tech} settings.tech
* The tech of the player
* @param {Object} settings.requestOptions
* XHR request options used by the segment loaders
* @param {PlaylistLoader} settings.mainPlaylistLoader
* PlaylistLoader for the main source
* @param {VhsHandler} settings.vhs
* VHS SourceHandler
* @param {Object} settings.main
* The parsed main manifest
* @param {Object} settings.mediaTypes
* Object to store the loaders, tracks, and utility methods for each media type
* @param {Function} settings.excludePlaylist
* Excludes the current rendition and forces a rendition switch.
* @function setupMediaGroups
*/
const setupMediaGroups = settings => {
['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(type => {
initialize[type](type, settings);
});
const {
mediaTypes,
mainPlaylistLoader,
tech,
vhs,
segmentLoaders: {
['AUDIO']: audioSegmentLoader,
main: mainSegmentLoader
}
} = settings; // setup active group and track getters and change event handlers
['AUDIO', 'SUBTITLES'].forEach(type => {
mediaTypes[type].activeGroup = activeGroup(type, settings);
mediaTypes[type].activeTrack = activeTrack[type](type, settings);
mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
}); // DO NOT enable the default subtitle or caption track.
// DO enable the default audio track
const audioGroup = mediaTypes.AUDIO.activeGroup();
if (audioGroup) {
const groupId = (audioGroup.filter(group => group.default)[0] || audioGroup[0]).id;
mediaTypes.AUDIO.tracks[groupId].enabled = true;
mediaTypes.AUDIO.onGroupChanged();
mediaTypes.AUDIO.onTrackChanged();
const activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
// track is changed, but needs to be handled here since the track may not be considered
// changed on the first call to onTrackChanged
if (!activeAudioGroup.playlistLoader) {
// either audio is muxed with video or the stream is audio only
mainSegmentLoader.setAudio(true);
} else {
// audio is demuxed
mainSegmentLoader.setAudio(false);
audioSegmentLoader.setAudio(true);
}
}
mainPlaylistLoader.on('mediachange', () => {
['AUDIO', 'SUBTITLES'].forEach(type => mediaTypes[type].onGroupChanged());
});
mainPlaylistLoader.on('mediachanging', () => {
['AUDIO', 'SUBTITLES'].forEach(type => mediaTypes[type].onGroupChanging());
}); // custom audio track change event handler for usage event
const onAudioTrackChanged = () => {
mediaTypes.AUDIO.onTrackChanged();
tech.trigger({
type: 'usage',
name: 'vhs-audio-change'
});
};
tech.audioTracks().addEventListener('change', onAudioTrackChanged);
tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
vhs.on('dispose', () => {
tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
}); // clear existing audio tracks and add the ones we just created
tech.clearTracks('audio');
for (const id in mediaTypes.AUDIO.tracks) {
tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
}
};
/**
* Creates a skeleton object used to store the loaders, tracks, and utility methods for each
* media type
*
* @return {Object}
* Object to store the loaders, tracks, and utility methods for each media type
* @function createMediaTypes
*/
const createMediaTypes = () => {
const mediaTypes = {};
['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(type => {
mediaTypes[type] = {
groups: {},
tracks: {},
activePlaylistLoader: null,
activeGroup: noop,
activeTrack: noop,
getActiveGroup: noop,
onGroupChanged: noop,
onTrackChanged: noop,
lastTrack_: null,
logger_: logger(`MediaGroups[${type}]`)
};
});
return mediaTypes;
};
/**
* A utility class for setting properties and maintaining the state of the content steering manifest.
*
* Content Steering manifest format:
* VERSION: number (required) currently only version 1 is supported.
* TTL: number in seconds (optional) until the next content steering manifest reload.
* RELOAD-URI: string (optional) uri to fetch the next content steering manifest.
* SERVICE-LOCATION-PRIORITY or PATHWAY-PRIORITY: a non-empty array of unique string values.
* PATHWAY-CLONES: array (optional) (HLS only) pathway clone objects to copy from other playlists.
*/
class SteeringManifest {
constructor() {
this.priority_ = [];
this.pathwayClones_ = new Map();
}
set version(number) {
// Only version 1 is currently supported for both DASH and HLS.
if (number === 1) {
this.version_ = number;
}
}
set ttl(seconds) {
// TTL = time-to-live, default = 300 seconds.
this.ttl_ = seconds || 300;
}
set reloadUri(uri) {
if (uri) {
// reload URI can be relative to the previous reloadUri.
this.reloadUri_ = resolveUrl(this.reloadUri_, uri);
}
}
set priority(array) {
// priority must be non-empty and unique values.
if (array && array.length) {
this.priority_ = array;
}
}
set pathwayClones(array) {
// pathwayClones must be non-empty.
if (array && array.length) {
this.pathwayClones_ = new Map(array.map(clone => [clone.ID, clone]));
}
}
get version() {
return this.version_;
}
get ttl() {
return this.ttl_;
}
get reloadUri() {
return this.reloadUri_;
}
get priority() {
return this.priority_;
}
get pathwayClones() {
return this.pathwayClones_;
}
}
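/**
* A minimal usage sketch of the setters above (the values are assumed):
*
* @example
* const manifest = new SteeringManifest();
* manifest.version = 1; // only version 1 is accepted
* manifest.ttl = 0; // falls back to the 300 second default
* manifest.priority = ['cdn-a', 'cdn-b']; // kept because it is a non-empty array
* manifest.ttl; // => 300
*/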
/**
* This class represents a content steering manifest and associated state. See both HLS and DASH specifications.
* HLS: https://developer.apple.com/streaming/HLSContentSteeringSpecification.pdf and
* https://datatracker.ietf.org/doc/draft-pantos-hls-rfc8216bis/ section 4.4.6.6.
* DASH: https://dashif.org/docs/DASH-IF-CTS-00XX-Content-Steering-Community-Review.pdf
*
* @param {function} xhr for making a network request from the browser.
* @param {function} bandwidth for fetching the current bandwidth from the main segment loader.
*/
class ContentSteeringController extends videojs.EventTarget {
constructor(xhr, bandwidth) {
super();
this.currentPathway = null;
this.defaultPathway = null;
this.queryBeforeStart = false;
this.availablePathways_ = new Set();
this.steeringManifest = new SteeringManifest();
this.proxyServerUrl_ = null;
this.manifestType_ = null;
this.ttlTimeout_ = null;
this.request_ = null;
this.currentPathwayClones = new Map();
this.nextPathwayClones = new Map();
this.excludedSteeringManifestURLs = new Set();
this.logger_ = logger('Content Steering');
this.xhr_ = xhr;
this.getBandwidth_ = bandwidth;
}
/**
* Assigns the content steering tag properties to the steering controller
*
* @param {string} baseUrl the baseURL from the main manifest for resolving the steering manifest url
* @param {Object} steeringTag the content steering tag from the main manifest
*/
assignTagProperties(baseUrl, steeringTag) {
this.manifestType_ = steeringTag.serverUri ? 'HLS' : 'DASH'; // serverUri is HLS, serverURL is DASH
const steeringUri = steeringTag.serverUri || steeringTag.serverURL;
if (!steeringUri) {
this.logger_(`steering manifest URL is ${steeringUri}, cannot request steering manifest.`);
this.trigger('error');
return;
} // Content steering manifests can be encoded as a data URI. We can decode, parse and return early if that's the case.
if (steeringUri.startsWith('data:')) {
this.decodeDataUriManifest_(steeringUri.substring(steeringUri.indexOf(',') + 1));
return;
} // reloadUri is the resolution of the main manifest URL and steering URL.
this.steeringManifest.reloadUri = resolveUrl(baseUrl, steeringUri); // pathwayId is HLS, defaultServiceLocation is DASH
this.defaultPathway = steeringTag.pathwayId || steeringTag.defaultServiceLocation; // currently only DASH supports the following properties on tags.
this.queryBeforeStart = steeringTag.queryBeforeStart;
this.proxyServerUrl_ = steeringTag.proxyServerURL; // trigger a steering event if we have a pathway from the content steering tag.
// this tells VHS which segment pathway to start with.
// If queryBeforeStart is true we need to wait for the steering manifest response.
if (this.defaultPathway && !this.queryBeforeStart) {
this.trigger('content-steering');
}
}
/**
* Requests the content steering manifest and parses the response. This should only be called after
* assignTagProperties was called with a content steering tag.
*
* @param {boolean} initial When true, the request is made with the unmodified reloadUri
* rather than a URI that accounts for steering parameters and exclusions.
* This scenario should only happen once, on initialization.
*/
requestSteeringManifest(initial) {
const reloadUri = this.steeringManifest.reloadUri;
if (!reloadUri) {
return;
} // We currently don't support passing MPD query parameters directly to the content steering URL as this requires
// ExtUrlQueryInfo tag support. See the DASH content steering spec section 8.1.
// This request URI accounts for manifest URIs that have been excluded.
const uri = initial ? reloadUri : this.getRequestURI(reloadUri); // If there are no valid manifest URIs, we should stop content steering.
if (!uri) {
this.logger_('No valid content steering manifest URIs. Stopping content steering.');
this.trigger('error');
this.dispose();
return;
}
const metadata = {
contentSteeringInfo: {
uri
}
};
this.trigger({
type: 'contentsteeringloadstart',
metadata
});
this.request_ = this.xhr_({
uri,
requestType: 'content-steering-manifest'
}, (error, errorInfo) => {
if (error) {
// If the client receives HTTP 410 Gone in response to a manifest request,
// it MUST NOT issue another request for that URI for the remainder of the
// playback session. It MAY continue to use the most-recently obtained set
// of Pathways.
if (errorInfo.status === 410) {
this.logger_(`manifest request 410 ${error}.`);
this.logger_(`There will be no more content steering requests to ${uri} this session.`);
this.excludedSteeringManifestURLs.add(uri);
return;
} // If the client receives HTTP 429 Too Many Requests with a Retry-After
// header in response to a manifest request, it SHOULD wait until the time
// specified by the Retry-After header to reissue the request.
if (errorInfo.status === 429) {
const retrySeconds = errorInfo.responseHeaders['retry-after'];
this.logger_(`manifest request 429 ${error}.`);
this.logger_(`content steering will retry in ${retrySeconds} seconds.`);
this.startTTLTimeout_(parseInt(retrySeconds, 10));
return;
} // If the Steering Manifest cannot be loaded and parsed correctly, the
// client SHOULD continue to use the previous values and attempt to reload
// it after waiting for the previously-specified TTL (or 5 minutes if
// none).
this.logger_(`manifest failed to load ${error}.`);
this.startTTLTimeout_();
return;
}
this.trigger({
type: 'contentsteeringloadcomplete',
metadata
});
let steeringManifestJson;
try {
steeringManifestJson = JSON.parse(this.request_.responseText);
} catch (parseError) {
const errorMetadata = {
errorType: videojs.Error.StreamingContentSteeringParserError,
error: parseError
};
this.trigger({
type: 'error',
metadata: errorMetadata
});
}
this.assignSteeringProperties_(steeringManifestJson);
const parsedMetadata = {
contentSteeringInfo: metadata.contentSteeringInfo,
contentSteeringManifest: {
version: this.steeringManifest.version,
reloadUri: this.steeringManifest.reloadUri,
priority: this.steeringManifest.priority
}
};
this.trigger({
type: 'contentsteeringparsed',
metadata: parsedMetadata
});
this.startTTLTimeout_();
});
}
/**
* Set the proxy server URL and add the steering manifest url as a URI encoded parameter.
*
* @param {string} steeringUrl the steering manifest url
* @return {string} the proxy server url with the steering manifest url and steering parameters set
*/
setProxyServerUrl_(steeringUrl) {
const steeringUrlObject = new window$1.URL(steeringUrl);
const proxyServerUrlObject = new window$1.URL(this.proxyServerUrl_);
proxyServerUrlObject.searchParams.set('url', encodeURI(steeringUrlObject.toString()));
return this.setSteeringParams_(proxyServerUrlObject.toString());
}
/**
* Decodes and parses the data uri encoded steering manifest
*
* @param {string} dataUri the data uri to be decoded and parsed.
*/
decodeDataUriManifest_(dataUri) {
const steeringManifestJson = JSON.parse(window$1.atob(dataUri));
this.assignSteeringProperties_(steeringManifestJson);
}
/**
* Set the HLS or DASH content steering manifest request query parameters. For example:
* _HLS_pathway and _HLS_throughput for HLS,
* _DASH_pathway and _DASH_throughput for DASH.
*
* @param {string} url the url to add content steering query parameters to.
* @return {string} a new url as a string with the added steering query parameters.
*/
setSteeringParams_(url) {
const urlObject = new window$1.URL(url);
const path = this.getPathway();
const networkThroughput = this.getBandwidth_();
if (path) {
const pathwayKey = `_${this.manifestType_}_pathway`;
urlObject.searchParams.set(pathwayKey, path);
}
if (networkThroughput) {
const throughputKey = `_${this.manifestType_}_throughput`;
urlObject.searchParams.set(throughputKey, networkThroughput);
}
return urlObject.toString();
}
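/**
* A worked example of the query parameters above (the pathway, bandwidth and URL are
* assumed): with manifestType_ === 'HLS', getPathway() returning 'cdn-a' and
* getBandwidth_() returning 500000, a url of 'https://example.com/steering' becomes
* 'https://example.com/steering?_HLS_pathway=cdn-a&_HLS_throughput=500000'.
*/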
/**
* Assigns the current steering manifest properties to the SteeringManifest object
*
* @param {Object} steeringJson the raw JSON steering manifest
*/
assignSteeringProperties_(steeringJson) {
this.steeringManifest.version = steeringJson.VERSION;
if (!this.steeringManifest.version) {
this.logger_(`manifest version is ${steeringJson.VERSION}, which is not supported.`);
this.trigger('error');
return;
}
this.steeringManifest.ttl = steeringJson.TTL;
this.steeringManifest.reloadUri = steeringJson['RELOAD-URI']; // HLS = PATHWAY-PRIORITY required. DASH = SERVICE-LOCATION-PRIORITY optional
this.steeringManifest.priority = steeringJson['PATHWAY-PRIORITY'] || steeringJson['SERVICE-LOCATION-PRIORITY']; // Pathway clones to be created/updated in HLS.
// See section 7.2 https://datatracker.ietf.org/doc/draft-pantos-hls-rfc8216bis/
this.steeringManifest.pathwayClones = steeringJson['PATHWAY-CLONES'];
this.nextPathwayClones = this.steeringManifest.pathwayClones; // 1. apply first pathway from the array.
// 2. if first pathway doesn't exist in manifest, try next pathway.
// a. if all pathways are exhausted, ignore the steering manifest priority.
// 3. if segments fail from an established pathway, try all variants/renditions, then exclude the failed pathway.
// a. exclude a pathway for a minimum of the last TTL duration. Meaning, from the next steering response,
// the excluded pathway will be ignored.
// See excludePathway usage in excludePlaylist().
// If there are no available pathways, we need to stop content steering.
if (!this.availablePathways_.size) {
this.logger_('There are no available pathways for content steering. Ending content steering.');
this.trigger('error');
this.dispose();
}
const chooseNextPathway = pathwaysByPriority => {
for (const path of pathwaysByPriority) {
if (this.availablePathways_.has(path)) {
return path;
}
} // If no pathway matches, ignore the manifest and choose the first available.
return [...this.availablePathways_][0];
};
const nextPathway = chooseNextPathway(this.steeringManifest.priority);
if (this.currentPathway !== nextPathway) {
this.currentPathway = nextPathway;
this.trigger('content-steering');
}
}
/**
* Returns the pathway to use for steering decisions
*
* @return {string} returns the current pathway or the default
*/
getPathway() {
return this.currentPathway || this.defaultPathway;
}
/**
* Chooses the manifest request URI based on proxy URIs and server URLs.
* Also accounts for exclusion on certain manifest URIs.
*
* @param {string} reloadUri the base uri before parameters
*
* @return {string} the final URI for the request to the manifest server.
*/
getRequestURI(reloadUri) {
if (!reloadUri) {
return null;
}
const isExcluded = uri => this.excludedSteeringManifestURLs.has(uri);
if (this.proxyServerUrl_) {
const proxyURI = this.setProxyServerUrl_(reloadUri);
if (!isExcluded(proxyURI)) {
return proxyURI;
}
}
const steeringURI = this.setSteeringParams_(reloadUri);
if (!isExcluded(steeringURI)) {
return steeringURI;
} // Return nothing if all valid manifest URIs are excluded.
return null;
}
/**
* Start the timeout for re-requesting the steering manifest at the TTL interval.
*
* @param {number} ttl time in seconds of the timeout. Defaults to the
* ttl interval in the steering manifest
*/
startTTLTimeout_(ttl = this.steeringManifest.ttl) {
// 300 (5 minutes) is the default value.
const ttlMS = ttl * 1000;
this.ttlTimeout_ = window$1.setTimeout(() => {
this.requestSteeringManifest();
}, ttlMS);
}
/**
* Clear the TTL timeout if necessary.
*/
clearTTLTimeout_() {
window$1.clearTimeout(this.ttlTimeout_);
this.ttlTimeout_ = null;
}
/**
* aborts any current steering xhr and sets the current request object to null
*/
abort() {
if (this.request_) {
this.request_.abort();
}
this.request_ = null;
}
/**
* aborts steering requests, clears the TTL timeout, and resets all properties.
*/
dispose() {
this.off('content-steering');
this.off('error');
this.abort();
this.clearTTLTimeout_();
this.currentPathway = null;
this.defaultPathway = null;
this.queryBeforeStart = null;
this.proxyServerUrl_ = null;
this.manifestType_ = null;
this.ttlTimeout_ = null;
this.request_ = null;
this.excludedSteeringManifestURLs = new Set();
this.availablePathways_ = new Set();
this.steeringManifest = new SteeringManifest();
}
/**
* adds a pathway to the available pathways set
*
* @param {string} pathway the pathway string to add
*/
addAvailablePathway(pathway) {
if (pathway) {
this.availablePathways_.add(pathway);
}
}
/**
* Clears all pathways from the available pathways set
*/
clearAvailablePathways() {
this.availablePathways_.clear();
}
/**
* Removes a pathway from the available pathways set.
*/
excludePathway(pathway) {
return this.availablePathways_.delete(pathway);
}
/**
* Checks the refreshed DASH manifest content steering tag for changes.
*
* @param {string} baseURL the base URL used to resolve the new tag's serverURL
* @param {Object} newTag the new content steering tag to check for changes
* @return {boolean} whether the new tag's values differ from the current steering state
*/
didDASHTagChange(baseURL, newTag) {
return !newTag && this.steeringManifest.reloadUri || newTag && (resolveUrl(baseURL, newTag.serverURL) !== this.steeringManifest.reloadUri || newTag.defaultServiceLocation !== this.defaultPathway || newTag.queryBeforeStart !== this.queryBeforeStart || newTag.proxyServerURL !== this.proxyServerUrl_);
}
getAvailablePathways() {
return this.availablePathways_;
}
}
const ABORT_EARLY_EXCLUSION_SECONDS = 10;
let Vhs$1; // SegmentLoader stats that need to have each loader's
// values summed to calculate the final value
const loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];
const sumLoaderStat = function (stat) {
return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
};
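/*
* Illustrative sketch (not part of the library): each name in loaderStats is bound
* to the PlaylistController in its constructor, so that, for example,
*
* this.mediaBytesTransferred_ = sumLoaderStat.bind(this, 'mediaBytesTransferred');
* this.mediaBytesTransferred_(); // audio loader value + main loader value
*
* mirrors the loaderStats.forEach(...) wiring below.
*/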
const shouldSwitchToMedia = function ({
currentPlaylist,
buffered,
currentTime,
nextPlaylist,
bufferLowWaterLine,
bufferHighWaterLine,
duration,
bufferBasedABR,
log
}) {
// we have no other playlist to switch to
if (!nextPlaylist) {
videojs.log.warn('We received no playlist to switch to. Please check your stream.');
return false;
}
const sharedLogLine = `allowing switch ${currentPlaylist && currentPlaylist.id || 'null'} -> ${nextPlaylist.id}`;
if (!currentPlaylist) {
log(`${sharedLogLine} as current playlist is not set`);
return true;
} // no need to switch if playlist is the same
if (nextPlaylist.id === currentPlaylist.id) {
return false;
} // determine if current time is in a buffered range.
const isBuffered = Boolean(findRange(buffered, currentTime).length); // If the playlist is live, then we want to not take low water line into account.
// This is because in LIVE, the player plays 3 segments from the end of the
// playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
// in those segments, a viewer will never experience a rendition upswitch.
if (!currentPlaylist.endList) {
// For LLHLS live streams, don't switch renditions before playback has started, as it almost
// doubles the time to first playback.
if (!isBuffered && typeof currentPlaylist.partTargetDuration === 'number') {
log(`not ${sharedLogLine} as current playlist is live llhls, but currentTime isn't in buffered.`);
return false;
}
log(`${sharedLogLine} as current playlist is live`);
return true;
}
const forwardBuffer = timeAheadOf(buffered, currentTime);
const maxBufferLowWaterLine = bufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
// duration is below the max potential low water line
if (duration < maxBufferLowWaterLine) {
log(`${sharedLogLine} as duration < max low water line (${duration} < ${maxBufferLowWaterLine})`);
return true;
}
const nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
const currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
// we can switch down
if (nextBandwidth < currBandwidth && (!bufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
let logLine = `${sharedLogLine} as next bandwidth < current bandwidth (${nextBandwidth} < ${currBandwidth})`;
if (bufferBasedABR) {
logLine += ` and forwardBuffer < bufferHighWaterLine (${forwardBuffer} < ${bufferHighWaterLine})`;
}
log(logLine);
return true;
} // and if our buffer is higher than the low water line,
// we can switch up
if ((!bufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
let logLine = `${sharedLogLine} as forwardBuffer >= bufferLowWaterLine (${forwardBuffer} >= ${bufferLowWaterLine})`;
if (bufferBasedABR) {
logLine += ` and next bandwidth > current bandwidth (${nextBandwidth} > ${currBandwidth})`;
}
log(logLine);
return true;
}
log(`not ${sharedLogLine} as no switching criteria met`);
return false;
};
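/*
* Illustrative sketch (not part of the library): a hypothetical VOD call that hits
* the up-switch branch. Assuming the buffered range extends far enough past
* currentTime that the forward buffer clears bufferLowWaterLine, this would log the
* shared line and return true:
*
* shouldSwitchToMedia({
*   currentPlaylist: { id: 'low', endList: true, attributes: { BANDWIDTH: 500000 } },
*   nextPlaylist: { id: 'high', attributes: { BANDWIDTH: 2000000 } },
*   buffered, // TimeRanges covering currentTime and beyond
*   currentTime: 10,
*   bufferLowWaterLine: 16,
*   bufferHighWaterLine: 30,
*   duration: 600,
*   bufferBasedABR: false,
*   log: console.log
* });
*/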
/**
* The main playlist controller controls all interactions
* between playlists and segment loaders. At this time this mainly
* involves a main playlist and a series of audio playlists
* if they are available.
*
* @class PlaylistController
* @extends videojs.EventTarget
*/
class PlaylistController extends videojs.EventTarget {
constructor(options) {
super();
const {
src,
withCredentials,
tech,
bandwidth,
externVhs,
useCueTags,
playlistExclusionDuration,
enableLowInitialPlaylist,
sourceType,
cacheEncryptionKeys,
bufferBasedABR,
leastPixelDiffSelector,
captionServices
} = options;
if (!src) {
throw new Error('A non-empty playlist URL or JSON manifest string is required');
}
let {
maxPlaylistRetries
} = options;
if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
maxPlaylistRetries = Infinity;
}
Vhs$1 = externVhs;
this.bufferBasedABR = Boolean(bufferBasedABR);
this.leastPixelDiffSelector = Boolean(leastPixelDiffSelector);
this.withCredentials = withCredentials;
this.tech_ = tech;
this.vhs_ = tech.vhs;
this.player_ = options.player_;
this.sourceType_ = sourceType;
this.useCueTags_ = useCueTags;
this.playlistExclusionDuration = playlistExclusionDuration;
this.maxPlaylistRetries = maxPlaylistRetries;
this.enableLowInitialPlaylist = enableLowInitialPlaylist;
if (this.useCueTags_) {
this.cueTagsTrack_ = this.tech_.addTextTrack('metadata', 'ad-cues');
this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
}
this.requestOptions_ = {
withCredentials,
maxPlaylistRetries,
timeout: null
};
this.on('error', this.pauseLoading);
this.mediaTypes_ = createMediaTypes();
this.mediaSource = new window$1.MediaSource();
this.handleDurationChange_ = this.handleDurationChange_.bind(this);
this.handleSourceOpen_ = this.handleSourceOpen_.bind(this);
this.handleSourceEnded_ = this.handleSourceEnded_.bind(this);
this.mediaSource.addEventListener('durationchange', this.handleDurationChange_); // load the media source into the player
this.mediaSource.addEventListener('sourceopen', this.handleSourceOpen_);
this.mediaSource.addEventListener('sourceended', this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
// everything, and the MediaSource should not be detached without a proper disposal
this.seekable_ = createTimeRanges();
this.hasPlayed_ = false;
this.syncController_ = new SyncController(options);
this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
kind: 'metadata',
label: 'segment-metadata'
}, false).track;
this.decrypter_ = new Decrypter();
this.sourceUpdater_ = new SourceUpdater(this.mediaSource);
this.inbandTextTracks_ = {};
this.timelineChangeController_ = new TimelineChangeController();
this.keyStatusMap_ = new Map();
const segmentLoaderSettings = {
vhs: this.vhs_,
parse708captions: options.parse708captions,
useDtsForTimestampOffset: options.useDtsForTimestampOffset,
captionServices,
mediaSource: this.mediaSource,
currentTime: this.tech_.currentTime.bind(this.tech_),
seekable: () => this.seekable(),
seeking: () => this.tech_.seeking(),
duration: () => this.duration(),
hasPlayed: () => this.hasPlayed_,
goalBufferLength: () => this.goalBufferLength(),
bandwidth,
syncController: this.syncController_,
decrypter: this.decrypter_,
sourceType: this.sourceType_,
inbandTextTracks: this.inbandTextTracks_,
cacheEncryptionKeys,
sourceUpdater: this.sourceUpdater_,
timelineChangeController: this.timelineChangeController_,
exactManifestTimings: options.exactManifestTimings,
addMetadataToTextTrack: this.addMetadataToTextTrack.bind(this)
}; // The source type check not only determines whether a special DASH playlist loader
// should be used, but also covers the case where the provided src is a vhs-json
// manifest object (instead of a URL). In the case of vhs-json, the default
// PlaylistLoader should be used.
this.mainPlaylistLoader_ = this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, this.vhs_, merge(this.requestOptions_, {
addMetadataToTextTrack: this.addMetadataToTextTrack.bind(this)
})) : new PlaylistLoader(src, this.vhs_, merge(this.requestOptions_, {
addDateRangesToTextTrack: this.addDateRangesToTextTrack_.bind(this)
}));
this.setupMainPlaylistLoaderListeners_(); // setup segment loaders
// combined audio/video or just video when alternate audio track is selected
this.mainSegmentLoader_ = new SegmentLoader(merge(segmentLoaderSettings, {
segmentMetadataTrack: this.segmentMetadataTrack_,
loaderType: 'main'
}), options); // alternate audio track
this.audioSegmentLoader_ = new SegmentLoader(merge(segmentLoaderSettings, {
loaderType: 'audio'
}), options);
this.subtitleSegmentLoader_ = new VTTSegmentLoader(merge(segmentLoaderSettings, {
loaderType: 'vtt',
featuresNativeTextTracks: this.tech_.featuresNativeTextTracks,
loadVttJs: () => new Promise((resolve, reject) => {
function onLoad() {
tech.off('vttjserror', onError);
resolve();
}
function onError() {
tech.off('vttjsloaded', onLoad);
reject();
}
tech.one('vttjsloaded', onLoad);
tech.one('vttjserror', onError); // safe to call multiple times, script will be loaded only once:
tech.addWebVttScript_();
})
}), options);
const getBandwidth = () => {
return this.mainSegmentLoader_.bandwidth;
};
this.contentSteeringController_ = new ContentSteeringController(this.vhs_.xhr, getBandwidth);
this.setupSegmentLoaderListeners_();
if (this.bufferBasedABR) {
this.mainPlaylistLoader_.one('loadedplaylist', () => this.startABRTimer_());
this.tech_.on('pause', () => this.stopABRTimer_());
this.tech_.on('play', () => this.startABRTimer_());
} // Create SegmentLoader stat-getters
// mediaRequests_
// mediaRequestsAborted_
// mediaRequestsTimedout_
// mediaRequestsErrored_
// mediaTransferDuration_
// mediaBytesTransferred_
// mediaAppends_
loaderStats.forEach(stat => {
this[stat + '_'] = sumLoaderStat.bind(this, stat);
});
this.logger_ = logger('pc');
this.triggeredFmp4Usage = false;
if (this.tech_.preload() === 'none') {
this.loadOnPlay_ = () => {
this.loadOnPlay_ = null;
this.mainPlaylistLoader_.load();
};
this.tech_.one('play', this.loadOnPlay_);
} else {
this.mainPlaylistLoader_.load();
}
this.timeToLoadedData__ = -1;
this.mainAppendsToLoadedData__ = -1;
this.audioAppendsToLoadedData__ = -1;
const event = this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)
this.tech_.one(event, () => {
const timeToLoadedDataStart = Date.now();
this.tech_.one('loadeddata', () => {
this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
this.mainAppendsToLoadedData__ = this.mainSegmentLoader_.mediaAppends;
this.audioAppendsToLoadedData__ = this.audioSegmentLoader_.mediaAppends;
});
});
}
mainAppendsToLoadedData_() {
return this.mainAppendsToLoadedData__;
}
audioAppendsToLoadedData_() {
return this.audioAppendsToLoadedData__;
}
appendsToLoadedData_() {
const main = this.mainAppendsToLoadedData_();
const audio = this.audioAppendsToLoadedData_();
if (main === -1 || audio === -1) {
return -1;
}
return main + audio;
}
timeToLoadedData_() {
return this.timeToLoadedData__;
}
/**
* Run selectPlaylist and switch to the new playlist if we should
*
* @param {string} [reason=abr] a reason for why the ABR check is made
* @private
*/
checkABR_(reason = 'abr') {
const nextPlaylist = this.selectPlaylist();
if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {
this.switchMedia_(nextPlaylist, reason);
}
}
switchMedia_(playlist, cause, delay) {
const oldMedia = this.media();
const oldId = oldMedia && (oldMedia.id || oldMedia.uri);
const newId = playlist && (playlist.id || playlist.uri);
if (oldId && oldId !== newId) {
this.logger_(`switch media ${oldId} -> ${newId} from ${cause}`);
const metadata = {
renditionInfo: {
id: newId,
bandwidth: playlist.attributes.BANDWIDTH,
resolution: playlist.attributes.RESOLUTION,
codecs: playlist.attributes.CODECS
},
cause
};
this.trigger({
type: 'renditionselected',
metadata
});
this.tech_.trigger({
type: 'usage',
name: `vhs-rendition-change-${cause}`
});
}
this.mainPlaylistLoader_.media(playlist, delay);
}
/**
* A function that ensures we switch our playlists inside of `mediaTypes`
* to match the current `serviceLocation` provided by the contentSteering controller.
* We want to check media types of `AUDIO`, `SUBTITLES`, and `CLOSED-CAPTIONS`.
*
* This should only be called on a DASH playback scenario while using content steering.
* This is necessary due to differences in how media in HLS manifests is generally tied to
* a video playlist, whereas in DASH that is not always the case.
*/
switchMediaForDASHContentSteering_() {
['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(type => {
const mediaType = this.mediaTypes_[type];
const activeGroup = mediaType ? mediaType.activeGroup() : null;
const pathway = this.contentSteeringController_.getPathway();
if (activeGroup && pathway) {
// activeGroup can be an array or a single group
const mediaPlaylists = activeGroup.length ? activeGroup[0].playlists : activeGroup.playlists;
const dashMediaPlaylists = mediaPlaylists.filter(p => p.attributes.serviceLocation === pathway); // Switch the current active playlist to the correct CDN
if (dashMediaPlaylists.length) {
this.mediaTypes_[type].activePlaylistLoader.media(dashMediaPlaylists[0]);
}
}
});
}
/**
* Start a timer that periodically calls checkABR_
*
* @private
*/
startABRTimer_() {
this.stopABRTimer_();
this.abrTimer_ = window$1.setInterval(() => this.checkABR_(), 250);
}
/**
* Stop the timer that periodically calls checkABR_
*
* @private
*/
stopABRTimer_() {
// if we're scrubbing, we don't need to pause.
// This getter will be added to Video.js in version 7.11.
if (this.tech_.scrubbing && this.tech_.scrubbing()) {
return;
}
window$1.clearInterval(this.abrTimer_);
this.abrTimer_ = null;
}
/**
* Get a list of playlists for the currently selected audio playlist
*
* @return {Array} the array of audio playlists
*/
getAudioTrackPlaylists_() {
const main = this.main();
const defaultPlaylists = main && main.playlists || []; // if we don't have any audio groups then we can only
// assume that the audio tracks are contained in main
// playlist array, use that or an empty array.
if (!main || !main.mediaGroups || !main.mediaGroups.AUDIO) {
return defaultPlaylists;
}
const AUDIO = main.mediaGroups.AUDIO;
const groupKeys = Object.keys(AUDIO);
let track; // get the current active track
if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from main if mediaTypes_ isn't setup yet
} else {
// default group is `main` or just the first group.
const defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
for (const label in defaultGroup) {
if (defaultGroup[label].default) {
track = {
label
};
break;
}
}
} // no active track no playlists.
if (!track) {
return defaultPlaylists;
}
const playlists = []; // get all of the playlists that are possible for the
// active track.
for (const group in AUDIO) {
if (AUDIO[group][track.label]) {
const properties = AUDIO[group][track.label];
if (properties.playlists && properties.playlists.length) {
playlists.push.apply(playlists, properties.playlists);
} else if (properties.uri) {
playlists.push(properties);
} else if (main.playlists.length) {
// if an audio group does not have a uri
// see if we have main playlists that use it as a group.
// if we do then add those to the playlists list.
for (let i = 0; i < main.playlists.length; i++) {
const playlist = main.playlists[i];
if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
playlists.push(playlist);
}
}
}
}
}
if (!playlists.length) {
return defaultPlaylists;
}
return playlists;
}
/**
* Register event handlers on the main playlist loader. A helper
* function for construction time.
*
* @private
*/
setupMainPlaylistLoaderListeners_() {
this.mainPlaylistLoader_.on('loadedmetadata', () => {
const media = this.mainPlaylistLoader_.media();
const requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
// timeout the request.
if (isLowestEnabledRendition(this.mainPlaylistLoader_.main, this.mainPlaylistLoader_.media())) {
this.requestOptions_.timeout = 0;
} else {
this.requestOptions_.timeout = requestTimeout;
} // if this isn't a live video and preload permits, start
// downloading segments
if (media.endList && this.tech_.preload() !== 'none') {
this.mainSegmentLoader_.playlist(media, this.requestOptions_);
this.mainSegmentLoader_.load();
}
setupMediaGroups({
sourceType: this.sourceType_,
segmentLoaders: {
AUDIO: this.audioSegmentLoader_,
SUBTITLES: this.subtitleSegmentLoader_,
main: this.mainSegmentLoader_
},
tech: this.tech_,
requestOptions: this.requestOptions_,
mainPlaylistLoader: this.mainPlaylistLoader_,
vhs: this.vhs_,
main: this.main(),
mediaTypes: this.mediaTypes_,
excludePlaylist: this.excludePlaylist.bind(this)
});
this.triggerPresenceUsage_(this.main(), media);
this.setupFirstPlay();
if (!this.mediaTypes_.AUDIO.activePlaylistLoader || this.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
this.trigger('selectedinitialmedia');
} else {
// We must wait for the active audio playlist loader to
// finish setting up before triggering this event so the
// representations API and EME setup is correct
this.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', () => {
this.trigger('selectedinitialmedia');
});
}
});
this.mainPlaylistLoader_.on('loadedplaylist', () => {
if (this.loadOnPlay_) {
this.tech_.off('play', this.loadOnPlay_);
}
let updatedPlaylist = this.mainPlaylistLoader_.media();
if (!updatedPlaylist) {
// Add content steering listeners on first load and init.
this.attachContentSteeringListeners_();
this.initContentSteeringController_(); // exclude any variants that are not supported by the browser before selecting
// an initial media as the playlist selectors do not consider browser support
this.excludeUnsupportedVariants_();
let selectedMedia;
if (this.enableLowInitialPlaylist) {
selectedMedia = this.selectInitialPlaylist();
}
if (!selectedMedia) {
selectedMedia = this.selectPlaylist();
}
if (!selectedMedia || !this.shouldSwitchToMedia_(selectedMedia)) {
return;
}
this.initialMedia_ = selectedMedia;
this.switchMedia_(this.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
// fire again since the playlist will be requested. In the case of vhs-json
// (where the manifest object is provided as the source), when the media
// playlist's `segments` list is already available, a media playlist won't be
// requested, and loadedplaylist won't fire again, so the playlist handler must be
// called on its own here.
const haveJsonSource = this.sourceType_ === 'vhs-json' && this.initialMedia_.segments;
if (!haveJsonSource) {
return;
}
updatedPlaylist = this.initialMedia_;
}
this.handleUpdatedMediaPlaylist(updatedPlaylist);
});
this.mainPlaylistLoader_.on('error', () => {
const error = this.mainPlaylistLoader_.error;
this.excludePlaylist({
playlistToExclude: error.playlist,
error
});
});
this.mainPlaylistLoader_.on('mediachanging', () => {
this.mainSegmentLoader_.abort();
this.mainSegmentLoader_.pause();
});
this.mainPlaylistLoader_.on('mediachange', () => {
const media = this.mainPlaylistLoader_.media();
const requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
// timeout the request.
if (isLowestEnabledRendition(this.mainPlaylistLoader_.main, this.mainPlaylistLoader_.media())) {
this.requestOptions_.timeout = 0;
} else {
this.requestOptions_.timeout = requestTimeout;
}
if (this.sourceType_ === 'dash') {
// we don't want to re-request the same hls playlist right after it was changed
this.mainPlaylistLoader_.load();
} // TODO: Create a new event on the PlaylistLoader that signals
// that the segments have changed in some way and use that to
// update the SegmentLoader instead of doing it twice here and
// on `loadedplaylist`
this.mainSegmentLoader_.pause();
this.mainSegmentLoader_.playlist(media, this.requestOptions_);
if (this.waitingForFastQualityPlaylistReceived_) {
this.runFastQualitySwitch_();
} else {
this.mainSegmentLoader_.load();
}
this.tech_.trigger({
type: 'mediachange',
bubbles: true
});
});
this.mainPlaylistLoader_.on('playlistunchanged', () => {
const updatedPlaylist = this.mainPlaylistLoader_.media(); // ignore unchanged playlists that have already been
// excluded for not-changing. We likely just have a really slowly updating
// playlist.
if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
return;
}
const playlistOutdated = this.stuckAtPlaylistEnd_(updatedPlaylist);
if (playlistOutdated) {
// Playlist has stopped updating and we're stuck at its end. Try to
// exclude it and switch to another playlist in the hope that that
// one is updating (and give the player a chance to re-adjust to the
// safe live point).
this.excludePlaylist({
error: {
message: 'Playlist no longer updating.',
reason: 'playlist-unchanged'
}
}); // useful for monitoring QoS
this.tech_.trigger('playliststuck');
}
});
this.mainPlaylistLoader_.on('renditiondisabled', () => {
this.tech_.trigger({
type: 'usage',
name: 'vhs-rendition-disabled'
});
});
this.mainPlaylistLoader_.on('renditionenabled', () => {
this.tech_.trigger({
type: 'usage',
name: 'vhs-rendition-enabled'
});
});
const playlistLoaderEvents = ['manifestrequeststart', 'manifestrequestcomplete', 'manifestparsestart', 'manifestparsecomplete', 'playlistrequeststart', 'playlistrequestcomplete', 'playlistparsestart', 'playlistparsecomplete', 'renditiondisabled', 'renditionenabled'];
playlistLoaderEvents.forEach(eventName => {
this.mainPlaylistLoader_.on(eventName, metadata => {
// trigger directly on the player to ensure early events are fired.
this.player_.trigger(_extends({}, metadata));
});
});
}
/**
* Given an updated media playlist (whether it was loaded for the first time, or
* refreshed for live playlists), update any relevant properties and state to reflect
* changes in the media that should be accounted for (e.g., cues and duration).
*
* @param {Object} updatedPlaylist the updated media playlist object
*
* @private
*/
handleUpdatedMediaPlaylist(updatedPlaylist) {
if (this.useCueTags_) {
this.updateAdCues_(updatedPlaylist);
} // TODO: Create a new event on the PlaylistLoader that signals
// that the segments have changed in some way and use that to
// update the SegmentLoader instead of doing it twice here and
// on `mediachange`
this.mainSegmentLoader_.pause();
this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
if (this.waitingForFastQualityPlaylistReceived_) {
this.runFastQualitySwitch_();
}
this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
// as it is possible that it was temporarily stopped while waiting for
// a playlist (e.g., in case the playlist errored and we re-requested it).
if (!this.tech_.paused()) {
this.mainSegmentLoader_.load();
if (this.audioSegmentLoader_) {
this.audioSegmentLoader_.load();
}
}
}
/**
* A helper function for triggering presence usage events once per source
*
* @private
*/
triggerPresenceUsage_(main, media) {
const mediaGroups = main.mediaGroups || {};
let defaultDemuxed = true;
const audioGroupKeys = Object.keys(mediaGroups.AUDIO);
for (const mediaGroup in mediaGroups.AUDIO) {
for (const label in mediaGroups.AUDIO[mediaGroup]) {
const properties = mediaGroups.AUDIO[mediaGroup][label];
if (!properties.uri) {
defaultDemuxed = false;
}
}
}
if (defaultDemuxed) {
this.tech_.trigger({
type: 'usage',
name: 'vhs-demuxed'
});
}
if (Object.keys(mediaGroups.SUBTITLES).length) {
this.tech_.trigger({
type: 'usage',
name: 'vhs-webvtt'
});
}
if (Vhs$1.Playlist.isAes(media)) {
this.tech_.trigger({
type: 'usage',
name: 'vhs-aes'
});
}
if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
this.tech_.trigger({
type: 'usage',
name: 'vhs-alternate-audio'
});
}
if (this.useCueTags_) {
this.tech_.trigger({
type: 'usage',
name: 'vhs-playlist-cue-tags'
});
}
}
shouldSwitchToMedia_(nextPlaylist) {
const currentPlaylist = this.mainPlaylistLoader_.media() || this.mainPlaylistLoader_.pendingMedia_;
const currentTime = this.tech_.currentTime();
const bufferLowWaterLine = this.bufferLowWaterLine();
const bufferHighWaterLine = this.bufferHighWaterLine();
const buffered = this.tech_.buffered();
return shouldSwitchToMedia({
buffered,
currentTime,
currentPlaylist,
nextPlaylist,
bufferLowWaterLine,
bufferHighWaterLine,
duration: this.duration(),
bufferBasedABR: this.bufferBasedABR,
log: this.logger_
});
}
/**
* Register event handlers on the segment loaders. A helper function
* for construction time.
*
* @private
*/
setupSegmentLoaderListeners_() {
this.mainSegmentLoader_.on('bandwidthupdate', () => {
// Whether or not buffer based ABR or another ABR is used, on a bandwidth change it's
// useful to check to see if a rendition switch should be made.
this.checkABR_('bandwidthupdate');
this.tech_.trigger('bandwidthupdate');
});
this.mainSegmentLoader_.on('timeout', () => {
if (this.bufferBasedABR) {
// If a rendition change is needed, then it would've been done on `bandwidthupdate`.
// Here the only consideration is that for buffer based ABR there's no guarantee
// of an immediate switch (since the bandwidth is averaged with a timeout
// bandwidth value of 1), so force a load on the segment loader to keep it going.
this.mainSegmentLoader_.load();
}
}); // `progress` events are not reliable enough of a bandwidth measure to trigger buffer
// based ABR.
if (!this.bufferBasedABR) {
this.mainSegmentLoader_.on('progress', () => {
this.trigger('progress');
});
}
this.mainSegmentLoader_.on('error', () => {
const error = this.mainSegmentLoader_.error();
this.excludePlaylist({
playlistToExclude: error.playlist,
error
});
});
this.mainSegmentLoader_.on('appenderror', () => {
this.error = this.mainSegmentLoader_.error_;
this.trigger('error');
});
this.mainSegmentLoader_.on('syncinfoupdate', () => {
this.onSyncInfoUpdate_();
});
this.mainSegmentLoader_.on('timestampoffset', () => {
this.tech_.trigger({
type: 'usage',
name: 'vhs-timestamp-offset'
});
});
this.audioSegmentLoader_.on('syncinfoupdate', () => {
this.onSyncInfoUpdate_();
});
this.audioSegmentLoader_.on('appenderror', () => {
this.error = this.audioSegmentLoader_.error_;
this.trigger('error');
});
this.mainSegmentLoader_.on('ended', () => {
this.logger_('main segment loader ended');
this.onEndOfStream();
}); // In DASH, there is the possibility of the video segment and the audio segment
// at a current time to be on different timelines. When this occurs, the player
// forwards playback to a point where these two segment types are back on the same
// timeline. This time will be just after the end of the audio segment that is on
// a previous timeline.
if (this.sourceType_ === 'dash') {
this.timelineChangeController_.on('audioTimelineBehind', () => {
const segmentInfo = this.audioSegmentLoader_.pendingSegment_;
if (!segmentInfo || !segmentInfo.segment || !segmentInfo.segment.syncInfo) {
return;
} // Update the current time to just after the faulty audio segment.
// This moves playback to a spot where both audio and video segments
// are on the same timeline.
const newTime = segmentInfo.segment.syncInfo.end + 0.01;
this.tech_.setCurrentTime(newTime);
});
}
this.mainSegmentLoader_.on('earlyabort', event => {
// never try to early abort with the new ABR algorithm
if (this.bufferBasedABR) {
return;
}
this.delegateLoaders_('all', ['abort']);
this.excludePlaylist({
error: {
message: 'Aborted early because there isn\'t enough bandwidth to complete ' + 'the request without rebuffering.'
},
playlistExclusionDuration: ABORT_EARLY_EXCLUSION_SECONDS
});
});
const updateCodecs = () => {
if (!this.sourceUpdater_.hasCreatedSourceBuffers()) {
return this.tryToCreateSourceBuffers_();
}
const codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
if (!codecs) {
return;
}
this.sourceUpdater_.addOrChangeSourceBuffers(codecs);
};
this.mainSegmentLoader_.on('trackinfo', updateCodecs);
this.audioSegmentLoader_.on('trackinfo', updateCodecs);
this.mainSegmentLoader_.on('fmp4', () => {
if (!this.triggeredFmp4Usage) {
this.tech_.trigger({
type: 'usage',
name: 'vhs-fmp4'
});
this.triggeredFmp4Usage = true;
}
});
this.audioSegmentLoader_.on('fmp4', () => {
if (!this.triggeredFmp4Usage) {
this.tech_.trigger({
type: 'usage',
name: 'vhs-fmp4'
});
this.triggeredFmp4Usage = true;
}
});
this.audioSegmentLoader_.on('ended', () => {
this.logger_('audioSegmentLoader ended');
this.onEndOfStream();
});
const segmentLoaderEvents = ['segmentselected', 'segmentloadstart', 'segmentloaded', 'segmentkeyloadstart', 'segmentkeyloadcomplete', 'segmentdecryptionstart', 'segmentdecryptioncomplete', 'segmenttransmuxingstart', 'segmenttransmuxingcomplete', 'segmenttransmuxingtrackinfoavailable', 'segmenttransmuxingtiminginfoavailable', 'segmentappendstart', 'appendsdone', 'bandwidthupdated', 'timelinechange', 'codecschange'];
segmentLoaderEvents.forEach(eventName => {
this.mainSegmentLoader_.on(eventName, metadata => {
this.player_.trigger(_extends({}, metadata));
});
this.audioSegmentLoader_.on(eventName, metadata => {
this.player_.trigger(_extends({}, metadata));
});
this.subtitleSegmentLoader_.on(eventName, metadata => {
this.player_.trigger(_extends({}, metadata));
});
});
}
mediaSecondsLoaded_() {
// sum the seconds loaded by both segment loaders (the original Math.max wrapper
// around the single summed value was a no-op)
return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded;
}
/**
* Call load on our SegmentLoaders
*/
load() {
this.mainSegmentLoader_.load();
if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
this.audioSegmentLoader_.load();
}
if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
this.subtitleSegmentLoader_.load();
}
}
/**
* Re-tune playback quality level for the current player
* conditions. This method will perform destructive actions like removing
* already buffered content in order to readjust the currently active
* playlist quickly. This is good for manual quality changes
*
* @private
*/
fastQualityChange_(media = this.selectPlaylist()) {
if (media && media === this.mainPlaylistLoader_.media()) {
this.logger_('skipping fastQualityChange because new media is same as old');
return;
}
this.switchMedia_(media, 'fast-quality'); // Wait for the new playlist before resetting the loaders; resetting immediately
// would race the fastQuality call and restart loading from the previous playlist's segments because the new playlist has not been received yet.
this.waitingForFastQualityPlaylistReceived_ = true;
}
runFastQualitySwitch_() {
this.waitingForFastQualityPlaylistReceived_ = false; // Delete all buffered data to allow an immediate quality switch.
this.mainSegmentLoader_.pause();
this.mainSegmentLoader_.resetEverything(() => {
this.mainSegmentLoader_.load();
}); // don't need to reset audio as it is reset when media changes
}
/**
* Begin playback.
*/
play() {
if (this.setupFirstPlay()) {
return;
}
if (this.tech_.ended()) {
this.tech_.setCurrentTime(0);
}
if (this.hasPlayed_) {
this.load();
}
const seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
// seek forward to the live point
if (this.tech_.duration() === Infinity) {
if (this.tech_.currentTime() < seekable.start(0)) {
return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
}
}
}
/**
* Seek to the latest media position if this is a live video and the
* player and video are loaded and initialized.
*/
setupFirstPlay() {
const media = this.mainPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
// If 1) there is no active media
// 2) the player is paused
// 3) the first play has already been setup
// then exit early
if (!media || this.tech_.paused() || this.hasPlayed_) {
return false;
} // when the video is a live stream and/or has a start time
if (!media.endList || media.start) {
const seekable = this.seekable();
if (!seekable.length) {
// without a seekable range, the player cannot seek to begin buffering at the
// live or start point
return false;
}
const seekableEnd = seekable.end(0);
let startPoint = seekableEnd;
if (media.start) {
const offset = media.start.timeOffset;
if (offset < 0) {
startPoint = Math.max(seekableEnd + offset, seekable.start(0));
} else {
startPoint = Math.min(seekableEnd, offset);
}
} // trigger firstplay to inform the source handler to ignore the next seek event
this.trigger('firstplay'); // seek to the live point
this.tech_.setCurrentTime(startPoint);
}
this.hasPlayed_ = true; // we can begin loading now that everything is ready
this.load();
return true;
}
/**
* handle the sourceopen event on the MediaSource
*
* @private
*/
handleSourceOpen_() {
// Only attempt to create the source buffer if none already exist.
// handleSourceOpen is also called when we are "re-opening" a source buffer
// after `endOfStream` has been called (in response to a seek for instance)
this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
// code in video.js but is required because play() must be invoked
// *after* the media source has opened.
if (this.tech_.autoplay()) {
const playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
// on browsers which return a promise
if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
playPromise.then(null, e => {});
}
}
this.trigger('sourceopen');
}
/**
* handle the sourceended event on the MediaSource
*
* @private
*/
handleSourceEnded_() {
if (!this.inbandTextTracks_.metadataTrack_) {
return;
}
const cues = this.inbandTextTracks_.metadataTrack_.cues;
if (!cues || !cues.length) {
return;
}
const duration = this.duration();
cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
}
/**
* handle the durationchange event on the MediaSource
*
* @private
*/
handleDurationChange_() {
this.tech_.trigger('durationchange');
}
/**
* Calls endOfStream on the media source when all active stream types have called
* endOfStream
*
* @private
*/
onEndOfStream() {
let isEndOfStream = this.mainSegmentLoader_.ended_;
if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
const mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is active
if (!mainMediaInfo || mainMediaInfo.hasVideo) {
// if we do not know if the main segment loader contains video yet or if we
// definitively know the main segment loader contains video, then we need to wait
// for both main and audio segment loaders to call endOfStream
isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
} else {
// otherwise just rely on the audio loader
isEndOfStream = this.audioSegmentLoader_.ended_;
}
}
if (!isEndOfStream) {
return;
}
this.stopABRTimer_();
this.sourceUpdater_.endOfStream();
}
/**
* Check if a playlist has stopped being updated
*
* @param {Object} playlist the media playlist object
* @return {boolean} whether the playlist has stopped being updated or not
*/
stuckAtPlaylistEnd_(playlist) {
const seekable = this.seekable();
if (!seekable.length) {
// playlist doesn't have enough information to determine whether we are stuck
return false;
}
const expired = this.syncController_.getExpiredTime(playlist, this.duration());
if (expired === null) {
return false;
} // does not use the safe live end to calculate playlist end, since we
// don't want to say we are stuck while there is still content
const absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
const currentTime = this.tech_.currentTime();
const buffered = this.tech_.buffered();
if (!buffered.length) {
// return true if the playhead reached the absolute end of the playlist
return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
}
const bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
// end of playlist
return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
}
/**
* Exclude a playlist for a set amount of time, making it unavailable for selection by
* the rendition selection algorithm, then force a new playlist (rendition) selection.
*
* @param {Object=} playlistToExclude
* the playlist to exclude, defaults to the currently selected playlist
* @param {Object=} error
* an optional error
* @param {number=} playlistExclusionDuration
* an optional number of seconds to exclude the playlist
*/
excludePlaylist({
playlistToExclude = this.mainPlaylistLoader_.media(),
error = {},
playlistExclusionDuration
}) {
// If the `error` was generated by the playlist loader, it will contain
// the playlist we were trying to load (but failed) and that should be
// excluded instead of the currently selected playlist which is likely
// out-of-date in this scenario
playlistToExclude = playlistToExclude || this.mainPlaylistLoader_.media();
playlistExclusionDuration = playlistExclusionDuration || error.playlistExclusionDuration || this.playlistExclusionDuration; // If there is no current playlist, then an error occurred while we were
// trying to load the main OR while we were disposing of the tech
if (!playlistToExclude) {
this.error = error;
if (this.mediaSource.readyState !== 'open') {
this.trigger('error');
} else {
this.sourceUpdater_.endOfStream('network');
}
return;
}
playlistToExclude.playlistErrors_++;
const playlists = this.mainPlaylistLoader_.main.playlists;
const enabledPlaylists = playlists.filter(isEnabled);
const isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === playlistToExclude; // Don't exclude the only playlist unless it was excluded
// forever
if (playlists.length === 1 && playlistExclusionDuration !== Infinity) {
videojs.log.warn(`Problem encountered with playlist ${playlistToExclude.id}. ` + 'Trying again since it is the only playlist.');
this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay
return this.mainPlaylistLoader_.load(isFinalRendition);
}
if (isFinalRendition) {
// If we're content steering, try other pathways.
if (this.main().contentSteering) {
const pathway = this.pathwayAttribute_(playlistToExclude); // Ignore at least 1 steering manifest refresh.
const reIncludeDelay = this.contentSteeringController_.steeringManifest.ttl * 1000;
this.contentSteeringController_.excludePathway(pathway);
this.excludeThenChangePathway_();
setTimeout(() => {
this.contentSteeringController_.addAvailablePathway(pathway);
}, reIncludeDelay);
return;
} // Since we're on the final non-excluded playlist, and we're about to exclude
// it, instead of erring the player or retrying this playlist, clear out the current
// exclusion list. This allows other playlists to be attempted in case any have been
// fixed.
let reincluded = false;
playlists.forEach(playlist => {
// skip current playlist which is about to be excluded
if (playlist === playlistToExclude) {
return;
}
const excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.
if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
reincluded = true;
delete playlist.excludeUntil;
}
});
if (reincluded) {
videojs.log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
// playlist. This is needed for users relying on the retryplaylist event to catch a
// case where the player might be stuck and looping through "dead" playlists.
this.tech_.trigger('retryplaylist');
}
} // Exclude this playlist
let excludeUntil;
if (playlistToExclude.playlistErrors_ > this.maxPlaylistRetries) {
excludeUntil = Infinity;
} else {
excludeUntil = Date.now() + playlistExclusionDuration * 1000;
}
playlistToExclude.excludeUntil = excludeUntil;
if (error.reason) {
playlistToExclude.lastExcludeReason_ = error.reason;
}
this.tech_.trigger('excludeplaylist');
this.tech_.trigger({
type: 'usage',
name: 'vhs-rendition-excluded'
}); // TODO: only load a new playlist if we're excluding the current playlist
// If this function was called with a playlist that's not the current active playlist
// (e.g., media().id !== playlistToExclude.id),
// then a new playlist should not be selected and loaded, as there's nothing wrong with the current playlist.
const nextPlaylist = this.selectPlaylist();
if (!nextPlaylist) {
this.error = 'Playback cannot continue. No available working or supported playlists.';
this.trigger('error');
return;
}
const logFn = error.internal ? this.logger_ : videojs.log.warn;
const errorMessage = error.message ? ' ' + error.message : '';
logFn(`${error.internal ? 'Internal problem' : 'Problem'} encountered with playlist ${playlistToExclude.id}.` + `${errorMessage} Switching to playlist ${nextPlaylist.id}.`); // if audio group changed reset audio loaders
if (nextPlaylist.attributes.AUDIO !== playlistToExclude.attributes.AUDIO) {
this.delegateLoaders_('audio', ['abort', 'pause']);
} // if subtitle group changed reset subtitle loaders
if (nextPlaylist.attributes.SUBTITLES !== playlistToExclude.attributes.SUBTITLES) {
this.delegateLoaders_('subtitle', ['abort', 'pause']);
}
this.delegateLoaders_('main', ['abort', 'pause']);
const delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
const shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last refresh is sooner than half targetDuration
return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
}
/**
* Pause all segment/playlist loaders
*/
pauseLoading() {
this.delegateLoaders_('all', ['abort', 'pause']);
this.stopABRTimer_();
}
/**
* Call a set of functions in order on playlist loaders, segment loaders,
* or both types of loaders.
*
* @param {string} filter
* Filter loaders that should call fnNames using a string. Can be:
* * all - run on all loaders
* * audio - run on all audio loaders
* * subtitle - run on all subtitle loaders
* * main - run on the main loaders
*
* @param {Array|string} fnNames
* A string or array of function names to call.
*/
delegateLoaders_(filter, fnNames) {
const loaders = [];
const dontFilterPlaylist = filter === 'all';
if (dontFilterPlaylist || filter === 'main') {
loaders.push(this.mainPlaylistLoader_);
}
const mediaTypes = [];
if (dontFilterPlaylist || filter === 'audio') {
mediaTypes.push('AUDIO');
}
if (dontFilterPlaylist || filter === 'subtitle') {
mediaTypes.push('CLOSED-CAPTIONS');
mediaTypes.push('SUBTITLES');
}
mediaTypes.forEach(mediaType => {
const loader = this.mediaTypes_[mediaType] && this.mediaTypes_[mediaType].activePlaylistLoader;
if (loader) {
loaders.push(loader);
}
});
['main', 'audio', 'subtitle'].forEach(name => {
const loader = this[`${name}SegmentLoader_`];
if (loader && (filter === name || filter === 'all')) {
loaders.push(loader);
}
});
loaders.forEach(loader => fnNames.forEach(fnName => {
if (typeof loader[fnName] === 'function') {
loader[fnName]();
}
}));
}
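/*
* Illustrative sketch (not part of the library): delegateLoaders_ fans a list of
* method names out to the matching playlist and segment loaders, e.g.
*
* this.delegateLoaders_('audio', ['abort', 'pause']); // audio loaders only
* this.delegateLoaders_('all', ['abort', 'pause']);   // every loader
*
* which mirrors the calls made from excludePlaylist() and pauseLoading() above.
*/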
/**
* set the current time on all segment loaders
*
* @param {number} currentTime the current time to set
* @return {number} the current time
*/
setCurrentTime(currentTime) {
const buffered = findRange(this.tech_.buffered(), currentTime);
if (!(this.mainPlaylistLoader_ && this.mainPlaylistLoader_.media())) {
// return immediately if the metadata is not ready yet
return 0;
} // it's clearly an edge-case but don't throw an error if asked to
// seek within an empty playlist
if (!this.mainPlaylistLoader_.media().segments) {
return 0;
} // if the seek location is already buffered, continue buffering as usual
if (buffered && buffered.length) {
return currentTime;
} // cancel outstanding requests so we begin buffering at the new
// location
this.mainSegmentLoader_.pause();
this.mainSegmentLoader_.resetEverything();
if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
this.audioSegmentLoader_.pause();
this.audioSegmentLoader_.resetEverything();
}
if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
this.subtitleSegmentLoader_.pause();
this.subtitleSegmentLoader_.resetEverything();
} // start segment loader loading in case they are paused
this.load();
}
/**
* get the current duration
*
* @return {number} the duration
*/
duration() {
if (!this.mainPlaylistLoader_) {
return 0;
}
const media = this.mainPlaylistLoader_.media();
if (!media) {
// no playlists loaded yet, so can't determine a duration
return 0;
} // Don't rely on the media source for duration in the case of a live playlist since
// setting the native MediaSource's duration to infinity ends up with consequences to
// seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
//
// This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
// however, few browsers have support for setLiveSeekableRange()
// https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
//
// Until a time when the duration of the media source can be set to infinity, and a
// seekable range specified across browsers, just return Infinity.
if (!media.endList) {
return Infinity;
} // Since this is a VOD video, it is safe to rely on the media source's duration (if
// available). If it's not available, fall back to a playlist-calculated estimate.
if (this.mediaSource) {
return this.mediaSource.duration;
}
return Vhs$1.Playlist.duration(media);
}
/**
* check the seekable range
*
* @return {TimeRange} the seekable range
*/
seekable() {
return this.seekable_;
}
onSyncInfoUpdate_() {
let audioSeekable; // TODO check for creation of both source buffers before updating seekable
//
// A fix was made to this function where a check for
// this.sourceUpdater_.hasCreatedSourceBuffers
// was added to ensure that both source buffers were created before seekable was
// updated. However, it originally had a bug where it was checking for a true and
// returning early instead of checking for false. Setting it to check for false to
// return early though created other issues. A call to play() would check for seekable
// end without verifying that a seekable range was present. In addition, even checking
// for that didn't solve some issues, as handleFirstPlay is sometimes worked around
// due to a media update calling load on the segment loaders, skipping a seek to live,
// thereby starting live streams at the beginning of the stream rather than at the end.
//
// This conditional should be fixed to wait for the creation of two source buffers at
// the same time as the other sections of code are fixed to properly seek to live and
// not throw an error due to checking for a seekable end when no seekable range exists.
//
// For now, fall back to the older behavior, with the understanding that the seekable
// range may not be completely correct, leading to a suboptimal initial live point.
if (!this.mainPlaylistLoader_) {
return;
}
let media = this.mainPlaylistLoader_.media();
if (!media) {
return;
}
let expired = this.syncController_.getExpiredTime(media, this.duration());
if (expired === null) {
// not enough information to update seekable
return;
}
const main = this.mainPlaylistLoader_.main;
const mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(main, media));
if (mainSeekable.length === 0) {
return;
}
if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
expired = this.syncController_.getExpiredTime(media, this.duration());
if (expired === null) {
return;
}
audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(main, media));
if (audioSeekable.length === 0) {
return;
}
}
let oldEnd;
let oldStart;
if (this.seekable_ && this.seekable_.length) {
oldEnd = this.seekable_.end(0);
oldStart = this.seekable_.start(0);
}
if (!audioSeekable) {
// seekable has been calculated based on buffering video data so it
// can be returned directly
this.seekable_ = mainSeekable;
} else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
// seekables are pretty far off, rely on main
this.seekable_ = mainSeekable;
} else {
this.seekable_ = createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
} // seekable is the same as last time
if (this.seekable_ && this.seekable_.length) {
if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
return;
}
}
this.logger_(`seekable updated [${printableRange(this.seekable_)}]`);
const metadata = {
seekableRanges: this.seekable_
};
this.trigger({
type: 'seekablerangeschanged',
metadata
});
this.tech_.trigger('seekablechanged');
}
/**
* Update the player duration
*/
updateDuration(isLive) {
if (this.updateDuration_) {
this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
this.updateDuration_ = null;
}
if (this.mediaSource.readyState !== 'open') {
this.updateDuration_ = this.updateDuration.bind(this, isLive);
this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
return;
}
if (isLive) {
const seekable = this.seekable();
if (!seekable.length) {
return;
} // Even in the case of a live playlist, the native MediaSource's duration should not
// be set to Infinity (even though this would be expected for a live playlist), since
// setting the native MediaSource's duration to infinity ends up with consequences to
// seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
//
// This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
// however, few browsers have support for setLiveSeekableRange()
// https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
//
// Until a time when the duration of the media source can be set to infinity, and a
// seekable range specified across browsers, the duration should be greater than or
// equal to the last possible seekable value.
// MediaSource duration starts as NaN
// It is possible (and probable) that this case will never be reached for many
// sources, since the MediaSource reports duration as the highest value without
// accounting for timestamp offset. For example, if the timestamp offset is -100 and
// we buffered times 0 to 100 with real times of 100 to 200, even though current
// time will be between 0 and 100, the native media source may report the duration
// as 200. However, since we report duration separate from the media source (as
// Infinity), and as long as the native media source duration value is greater than
// our reported seekable range, seeks will work as expected. The large number as
// duration for live is actually a strategy used by some players to work around the
// issue of live seekable ranges cited above.
if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
}
return;
}
const buffered = this.tech_.buffered();
let duration = Vhs$1.Playlist.duration(this.mainPlaylistLoader_.media());
if (buffered.length > 0) {
duration = Math.max(duration, buffered.end(buffered.length - 1));
}
if (this.mediaSource.duration !== duration) {
this.sourceUpdater_.setDuration(duration);
}
}
/**
* dispose of the PlaylistController and everything
* that it controls
*/
dispose() {
this.trigger('dispose');
this.decrypter_.terminate();
this.mainPlaylistLoader_.dispose();
this.mainSegmentLoader_.dispose();
this.contentSteeringController_.dispose();
this.keyStatusMap_.clear();
if (this.loadOnPlay_) {
this.tech_.off('play', this.loadOnPlay_);
}
['AUDIO', 'SUBTITLES'].forEach(type => {
const groups = this.mediaTypes_[type].groups;
for (const id in groups) {
groups[id].forEach(group => {
if (group.playlistLoader) {
group.playlistLoader.dispose();
}
});
}
});
this.audioSegmentLoader_.dispose();
this.subtitleSegmentLoader_.dispose();
this.sourceUpdater_.dispose();
this.timelineChangeController_.dispose();
this.stopABRTimer_();
if (this.updateDuration_) {
this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
}
this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_); // load the media source into the player
this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
this.off();
}
/**
* return the main playlist object if we have one
*
* @return {Object} the main playlist object that we parsed
*/
main() {
return this.mainPlaylistLoader_.main;
}
/**
* return the currently selected playlist
*
* @return {Object} the currently selected playlist object that we parsed
*/
media() {
// playlist loader will not return media if it has not been fully loaded
return this.mainPlaylistLoader_.media() || this.initialMedia_;
}
areMediaTypesKnown_() {
const usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
const hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_(); // if we are not using an audio loader, then we have audio media info
// otherwise check on the segment loader.
const hasAudioMediaInfo = !usingAudioLoader ? true : !!this.audioSegmentLoader_.getCurrentMediaInfo_(); // one or both loaders have not loaded sufficiently to get codecs
if (!hasMainMediaInfo || !hasAudioMediaInfo) {
return false;
}
return true;
} // find from and to for codec switch event
getCodecsOrExclude_() {
const media = {
main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
};
const playlist = this.mainSegmentLoader_.getPendingSegmentPlaylist() || this.media(); // set "main" media equal to video
media.video = media.main;
const playlistCodecs = codecsForPlaylist(this.main(), playlist);
const codecs = {};
const usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
if (media.main.hasVideo) {
codecs.video = playlistCodecs.video || media.main.videoCodec || DEFAULT_VIDEO_CODEC;
}
if (media.main.isMuxed) {
codecs.video += `,${playlistCodecs.audio || media.main.audioCodec || DEFAULT_AUDIO_CODEC}`;
}
if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below
media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
} // no codecs, no playback.
if (!codecs.audio && !codecs.video) {
this.excludePlaylist({
playlistToExclude: playlist,
error: {
message: 'Could not determine codecs for playlist.'
},
playlistExclusionDuration: Infinity
});
return;
} // fmp4 relies on browser support, while ts relies on muxer support
const supportFunction = (isFmp4, codec) => isFmp4 ? browserSupportsCodec(codec) : muxerSupportsCodec(codec);
const unsupportedCodecs = {};
let unsupportedAudio;
['video', 'audio'].forEach(function (type) {
if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
const supporter = media[type].isFmp4 ? 'browser' : 'muxer';
unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
unsupportedCodecs[supporter].push(codecs[type]);
if (type === 'audio') {
unsupportedAudio = supporter;
}
}
});
if (usingAudioLoader && unsupportedAudio && playlist.attributes.AUDIO) {
const audioGroup = playlist.attributes.AUDIO;
this.main().playlists.forEach(variant => {
const variantAudioGroup = variant.attributes && variant.attributes.AUDIO;
if (variantAudioGroup === audioGroup && variant !== playlist) {
variant.excludeUntil = Infinity;
}
});
this.logger_(`excluding audio group ${audioGroup} as ${unsupportedAudio} does not support codec(s): "${codecs.audio}"`);
} // if we have any unsupported codecs exclude this playlist.
if (Object.keys(unsupportedCodecs).length) {
const message = Object.keys(unsupportedCodecs).reduce((acc, supporter) => {
if (acc) {
acc += ', ';
}
acc += `${supporter} does not support codec(s): "${unsupportedCodecs[supporter].join(',')}"`;
return acc;
}, '') + '.';
this.excludePlaylist({
playlistToExclude: playlist,
error: {
internal: true,
message
},
playlistExclusionDuration: Infinity
});
return;
} // check if codec switching is happening
if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
const switchMessages = [];
['video', 'audio'].forEach(type => {
const newCodec = (parseCodecs(this.sourceUpdater_.codecs[type] || '')[0] || {}).type;
const oldCodec = (parseCodecs(codecs[type] || '')[0] || {}).type;
if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
switchMessages.push(`"${this.sourceUpdater_.codecs[type]}" -> "${codecs[type]}"`);
}
});
if (switchMessages.length) {
this.excludePlaylist({
playlistToExclude: playlist,
error: {
message: `Codec switching not supported: ${switchMessages.join(', ')}.`,
internal: true
},
playlistExclusionDuration: Infinity
});
return;
}
} // TODO: when using the muxer shouldn't we just return
// the codecs that the muxer outputs?
return codecs;
}
/**
* Create source buffers and exclude any incompatible renditions.
*
* @private
*/
tryToCreateSourceBuffers_() {
// media source is not ready yet or sourceBuffers are already
// created.
if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
return;
}
if (!this.areMediaTypesKnown_()) {
return;
}
const codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
if (!codecs) {
return;
}
this.sourceUpdater_.createSourceBuffers(codecs);
const codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
this.excludeIncompatibleVariants_(codecString);
}
/**
* Excludes playlists with codecs that are unsupported by the muxer and browser.
*/
excludeUnsupportedVariants_() {
const playlists = this.main().playlists;
const ids = []; // TODO: why don't we have a property to loop through all
// playlists? Why did we ever mix indexes and keys?
Object.keys(playlists).forEach(key => {
const variant = playlists[key]; // check if we already processed this playlist.
if (ids.indexOf(variant.id) !== -1) {
return;
}
ids.push(variant.id);
const codecs = codecsForPlaylist(this.main(), variant);
const unsupported = [];
if (codecs.audio && !muxerSupportsCodec(codecs.audio) && !browserSupportsCodec(codecs.audio)) {
unsupported.push(`audio codec ${codecs.audio}`);
}
if (codecs.video && !muxerSupportsCodec(codecs.video) && !browserSupportsCodec(codecs.video)) {
unsupported.push(`video codec ${codecs.video}`);
}
if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
unsupported.push(`text codec ${codecs.text}`);
}
if (unsupported.length) {
variant.excludeUntil = Infinity;
this.logger_(`excluding ${variant.id} for unsupported: ${unsupported.join(', ')}`);
}
});
}
/**
* Exclude playlists that are known to be codec or
* stream-incompatible with the SourceBuffer configuration. For
* instance, Media Source Extensions would cause the video element to
* stall waiting for video data if you switched from a variant with
* video and audio to an audio-only one.
*
* @param {string} codecString the codec string for the current set of
* SourceBuffers. Variants in the current main playlist that do not
* appear to have codec or stream configurations compatible with it
* will be excluded from the default playlist selection algorithm
* indefinitely.
* @private
*/
excludeIncompatibleVariants_(codecString) {
const ids = [];
const playlists = this.main().playlists;
const codecs = unwrapCodecList(parseCodecs(codecString));
const codecCount_ = codecCount(codecs);
const videoDetails = codecs.video && parseCodecs(codecs.video)[0] || null;
const audioDetails = codecs.audio && parseCodecs(codecs.audio)[0] || null;
Object.keys(playlists).forEach(key => {
const variant = playlists[key]; // check if we already processed this playlist.
// or if it is already excluded forever.
if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
return;
}
ids.push(variant.id);
const exclusionReasons = []; // get codecs from the playlist for this variant
const variantCodecs = codecsForPlaylist(this.mainPlaylistLoader_.main, variant);
const variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
// variant is incompatible. Wait for mux.js to probe
if (!variantCodecs.audio && !variantCodecs.video) {
return;
} // TODO: we can support this by removing the
// old media source and creating a new one, but it will take some work.
// The number of streams cannot change
if (variantCodecCount !== codecCount_) {
exclusionReasons.push(`codec count "${variantCodecCount}" !== "${codecCount_}"`);
} // only exclude playlists by codec change, if codecs cannot switch
// during playback.
if (!this.sourceUpdater_.canChangeType()) {
const variantVideoDetails = variantCodecs.video && parseCodecs(variantCodecs.video)[0] || null;
const variantAudioDetails = variantCodecs.audio && parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change
if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
exclusionReasons.push(`video codec "${variantVideoDetails.type}" !== "${videoDetails.type}"`);
} // the audio codec cannot change
if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
exclusionReasons.push(`audio codec "${variantAudioDetails.type}" !== "${audioDetails.type}"`);
}
}
if (exclusionReasons.length) {
variant.excludeUntil = Infinity;
this.logger_(`excluding ${variant.id}: ${exclusionReasons.join(' && ')}`);
}
});
}
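// Illustrative example (an editor-added sketch, not part of the runtime logic): with
// SourceBuffers created for
//   codecString = 'avc1.4d400d,mp4a.40.2'   // one video + one audio stream
// the checks above would exclude, for example:
//   * an audio-only variant                       -> codec count "1" !== "2"
//   * a variant with CODECS="hvc1.1.6.L93.90,mp4a.40.2" when canChangeType() is false
//                                                 -> video codec "hvc1" !== "avc1"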
updateAdCues_(media) {
let offset = 0;
const seekable = this.seekable();
if (seekable.length) {
offset = seekable.start(0);
}
updateAdCues(media, this.cueTagsTrack_, offset);
}
/**
* Calculates the desired forward buffer length based on current time
*
* @return {number} Desired forward buffer length in seconds
*/
goalBufferLength() {
const currentTime = this.tech_.currentTime();
const initial = Config.GOAL_BUFFER_LENGTH;
const rate = Config.GOAL_BUFFER_LENGTH_RATE;
const max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
return Math.min(initial + currentTime * rate, max);
}
/**
* Calculates the desired buffer low water line based on current time
*
* @return {number} Desired buffer low water line in seconds
*/
bufferLowWaterLine() {
const currentTime = this.tech_.currentTime();
const initial = Config.BUFFER_LOW_WATER_LINE;
const rate = Config.BUFFER_LOW_WATER_LINE_RATE;
const max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
const newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
return Math.min(initial + currentTime * rate, this.bufferBasedABR ? newMax : max);
}
bufferHighWaterLine() {
return Config.BUFFER_HIGH_WATER_LINE;
}
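// Worked example (illustrative; assumes the default Config values of
// GOAL_BUFFER_LENGTH = 30, GOAL_BUFFER_LENGTH_RATE = 1 and MAX_GOAL_BUFFER_LENGTH = 60):
//   at currentTime = 0,  goalBufferLength() === Math.min(30 + 0 * 1, 60)  === 30
//   at currentTime = 20, goalBufferLength() === Math.min(30 + 20 * 1, 60) === 50
//   at currentTime = 60, goalBufferLength() === Math.min(30 + 60 * 1, 60) === 60 (capped)
// bufferLowWaterLine() grows the same way from BUFFER_LOW_WATER_LINE toward its max.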
addDateRangesToTextTrack_(dateRanges) {
createMetadataTrackIfNotExists(this.inbandTextTracks_, 'com.apple.streaming', this.tech_);
addDateRangeMetadata({
inbandTextTracks: this.inbandTextTracks_,
dateRanges
});
}
addMetadataToTextTrack(dispatchType, metadataArray, videoDuration) {
const timestampOffset = this.sourceUpdater_.videoBuffer ? this.sourceUpdater_.videoTimestampOffset() : this.sourceUpdater_.audioTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
// audio/video source with a metadata track, and an alt audio with a metadata track.
// However, this probably won't happen, and if it does it can be handled then.
createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.tech_);
addMetadata({
inbandTextTracks: this.inbandTextTracks_,
metadataArray,
timestampOffset,
videoDuration
});
}
/**
* Utility for getting the pathway or service location from an HLS or DASH playlist.
*
* @param {Object} playlist the playlist to get the pathway from.
* @return {string} the pathway attribute of the playlist
*/
pathwayAttribute_(playlist) {
return playlist.attributes['PATHWAY-ID'] || playlist.attributes.serviceLocation;
}
/**
* Initialize available pathways and apply the tag properties.
*/
initContentSteeringController_() {
const main = this.main();
if (!main.contentSteering) {
return;
}
for (const playlist of main.playlists) {
this.contentSteeringController_.addAvailablePathway(this.pathwayAttribute_(playlist));
}
this.contentSteeringController_.assignTagProperties(main.uri, main.contentSteering); // request the steering manifest immediately if queryBeforeStart is set.
if (this.contentSteeringController_.queryBeforeStart) {
// When queryBeforeStart is true, initial request should omit steering parameters.
this.contentSteeringController_.requestSteeringManifest(true);
return;
} // otherwise start content steering after playback starts
this.tech_.one('canplay', () => {
this.contentSteeringController_.requestSteeringManifest();
});
}
/**
* Reset the content steering controller and re-init.
*/
resetContentSteeringController_() {
this.contentSteeringController_.clearAvailablePathways();
this.contentSteeringController_.dispose();
this.initContentSteeringController_();
}
/**
* Attaches the listeners for content steering.
*/
attachContentSteeringListeners_() {
this.contentSteeringController_.on('content-steering', this.excludeThenChangePathway_.bind(this));
const contentSteeringEvents = ['contentsteeringloadstart', 'contentsteeringloadcomplete', 'contentsteeringparsed'];
contentSteeringEvents.forEach(eventName => {
this.contentSteeringController_.on(eventName, metadata => {
this.trigger(_extends({}, metadata));
});
});
if (this.sourceType_ === 'dash') {
this.mainPlaylistLoader_.on('loadedplaylist', () => {
const main = this.main(); // check if steering tag or pathways changed.
const didDashTagChange = this.contentSteeringController_.didDASHTagChange(main.uri, main.contentSteering);
const didPathwaysChange = () => {
const availablePathways = this.contentSteeringController_.getAvailablePathways();
const newPathways = [];
for (const playlist of main.playlists) {
const serviceLocation = playlist.attributes.serviceLocation;
if (serviceLocation) {
newPathways.push(serviceLocation);
if (!availablePathways.has(serviceLocation)) {
return true;
}
}
} // If we have no new serviceLocations and previously had availablePathways
if (!newPathways.length && availablePathways.size) {
return true;
}
return false;
};
if (didDashTagChange || didPathwaysChange()) {
this.resetContentSteeringController_();
}
});
}
}
/**
* Simple exclude and change playlist logic for content steering.
*/
excludeThenChangePathway_() {
const currentPathway = this.contentSteeringController_.getPathway();
if (!currentPathway) {
return;
}
this.handlePathwayClones_();
const main = this.main();
const playlists = main.playlists;
const ids = new Set();
let didEnablePlaylists = false;
Object.keys(playlists).forEach(key => {
const variant = playlists[key];
const pathwayId = this.pathwayAttribute_(variant);
const differentPathwayId = pathwayId && currentPathway !== pathwayId;
const steeringExclusion = variant.excludeUntil === Infinity && variant.lastExcludeReason_ === 'content-steering';
if (steeringExclusion && !differentPathwayId) {
delete variant.excludeUntil;
delete variant.lastExcludeReason_;
didEnablePlaylists = true;
}
const noExcludeUntil = !variant.excludeUntil && variant.excludeUntil !== Infinity;
const shouldExclude = !ids.has(variant.id) && differentPathwayId && noExcludeUntil;
if (!shouldExclude) {
return;
}
ids.add(variant.id);
variant.excludeUntil = Infinity;
variant.lastExcludeReason_ = 'content-steering'; // TODO: kind of spammy, maybe move this.
this.logger_(`excluding ${variant.id} for ${variant.lastExcludeReason_}`);
});
if (this.contentSteeringController_.manifestType_ === 'DASH') {
Object.keys(this.mediaTypes_).forEach(key => {
const type = this.mediaTypes_[key];
if (type.activePlaylistLoader) {
const currentPlaylist = type.activePlaylistLoader.media_; // Check if the current media playlist matches the current CDN
if (currentPlaylist && currentPlaylist.attributes.serviceLocation !== currentPathway) {
didEnablePlaylists = true;
}
}
});
}
if (didEnablePlaylists) {
this.changeSegmentPathway_();
}
}
/**
* Add, update, or delete playlists and media groups for
* the pathway clones for HLS Content Steering.
*
* See https://datatracker.ietf.org/doc/draft-pantos-hls-rfc8216bis/
*
* NOTE: Pathway cloning does not currently support the `PER_VARIANT_URIS` and
* `PER_RENDITION_URIS` as we do not handle `STABLE-VARIANT-ID` or
* `STABLE-RENDITION-ID` values.
*/
handlePathwayClones_() {
const main = this.main();
const playlists = main.playlists;
const currentPathwayClones = this.contentSteeringController_.currentPathwayClones;
const nextPathwayClones = this.contentSteeringController_.nextPathwayClones;
const hasClones = currentPathwayClones && currentPathwayClones.size || nextPathwayClones && nextPathwayClones.size;
if (!hasClones) {
return;
}
for (const [id, clone] of currentPathwayClones.entries()) {
const newClone = nextPathwayClones.get(id); // Delete the old pathway clone.
if (!newClone) {
this.mainPlaylistLoader_.updateOrDeleteClone(clone);
this.contentSteeringController_.excludePathway(id);
}
}
for (const [id, clone] of nextPathwayClones.entries()) {
const oldClone = currentPathwayClones.get(id); // Create a new pathway if it is a new pathway clone object.
if (!oldClone) {
const playlistsToClone = playlists.filter(p => {
return p.attributes['PATHWAY-ID'] === clone['BASE-ID'];
});
playlistsToClone.forEach(p => {
this.mainPlaylistLoader_.addClonePathway(clone, p);
});
this.contentSteeringController_.addAvailablePathway(id);
continue;
} // There have not been changes to the pathway clone object, so skip.
if (this.equalPathwayClones_(oldClone, clone)) {
continue;
} // Update a preexisting cloned pathway.
// True is set for the update flag.
this.mainPlaylistLoader_.updateOrDeleteClone(clone, true);
this.contentSteeringController_.addAvailablePathway(id);
} // Deep copy contents of next to current pathways.
this.contentSteeringController_.currentPathwayClones = new Map(JSON.parse(JSON.stringify([...nextPathwayClones])));
}
/**
* Determines whether two pathway clone objects are equivalent.
*
* @param {Object} a The first pathway clone object.
* @param {Object} b The second pathway clone object.
* @return {boolean} True if the pathway clone objects are equal, false otherwise.
*/
equalPathwayClones_(a, b) {
if (a['BASE-ID'] !== b['BASE-ID'] || a.ID !== b.ID || a['URI-REPLACEMENT'].HOST !== b['URI-REPLACEMENT'].HOST) {
return false;
}
const aParams = a['URI-REPLACEMENT'].PARAMS;
const bParams = b['URI-REPLACEMENT'].PARAMS; // We need to iterate through both lists of params because one could be
// missing a parameter that the other has.
for (const p in aParams) {
if (aParams[p] !== bParams[p]) {
return false;
}
}
for (const p in bParams) {
if (aParams[p] !== bParams[p]) {
return false;
}
}
return true;
}
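// Illustrative shape (hypothetical values based on the HLS content steering pathway
// cloning draft, not taken from this file): a clone object compared above might look like
//   { 'BASE-ID': 'cdn-a', ID: 'cdn-b', 'URI-REPLACEMENT': { HOST: 'b.example.com', PARAMS: { token: 'abc' } } }
// A change to BASE-ID, ID, HOST or any PARAMS value makes the clones unequal, which
// causes handlePathwayClones_ above to call updateOrDeleteClone() for that pathway.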
/**
* Changes the current playlists for audio, video and subtitles after a new pathway
* is chosen from content steering.
*/
changeSegmentPathway_() {
const nextPlaylist = this.selectPlaylist();
this.pauseLoading(); // Switch audio and text track playlists if necessary in DASH
if (this.contentSteeringController_.manifestType_ === 'DASH') {
this.switchMediaForDASHContentSteering_();
}
this.switchMedia_(nextPlaylist, 'content-steering');
}
/**
* Iterates through the playlists, checks each playlist's keyId set against the
* keyStatusMap, and only enables playlists that have a usable key. If a playlist
* has no keyId it is left enabled by default.
*/
excludeNonUsablePlaylistsByKeyId_() {
if (!this.mainPlaylistLoader_ || !this.mainPlaylistLoader_.main) {
return;
}
let nonUsableKeyStatusCount = 0;
const NON_USABLE = 'non-usable';
this.mainPlaylistLoader_.main.playlists.forEach(playlist => {
const keyIdSet = this.mainPlaylistLoader_.getKeyIdSet(playlist); // If the playlist doesn't have key IDs, let's not exclude it.
if (!keyIdSet || !keyIdSet.size) {
return;
}
keyIdSet.forEach(key => {
const USABLE = 'usable';
const hasUsableKeyStatus = this.keyStatusMap_.has(key) && this.keyStatusMap_.get(key) === USABLE;
const nonUsableExclusion = playlist.lastExcludeReason_ === NON_USABLE && playlist.excludeUntil === Infinity;
if (!hasUsableKeyStatus) {
// Only exclude playlists that haven't already been excluded as non-usable.
if (playlist.excludeUntil !== Infinity && playlist.lastExcludeReason_ !== NON_USABLE) {
playlist.excludeUntil = Infinity;
playlist.lastExcludeReason_ = NON_USABLE;
this.logger_(`excluding playlist ${playlist.id} because the key ID ${key} doesn't exist in the keyStatusMap or is not ${USABLE}`);
} // count all nonUsableKeyStatus
nonUsableKeyStatusCount++;
} else if (hasUsableKeyStatus && nonUsableExclusion) {
delete playlist.excludeUntil;
delete playlist.lastExcludeReason_;
this.logger_(`enabling playlist ${playlist.id} because key ID ${key} is ${USABLE}`);
}
});
}); // If for whatever reason every playlist has a non-usable key status, let's try re-including the SD renditions as a failsafe.
if (nonUsableKeyStatusCount >= this.mainPlaylistLoader_.main.playlists.length) {
this.mainPlaylistLoader_.main.playlists.forEach(playlist => {
const isNonHD = playlist && playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height < 720;
const excludedForNonUsableKey = playlist.excludeUntil === Infinity && playlist.lastExcludeReason_ === NON_USABLE;
if (isNonHD && excludedForNonUsableKey) {
// Only delete the excludeUntil so we don't try and re-exclude these playlists.
delete playlist.excludeUntil;
videojs.log.warn(`enabling non-HD playlist ${playlist.id} because all playlists were excluded due to ${NON_USABLE} key IDs`);
}
});
}
}
/**
* Adds a key status to the keyStatusMap, converting the key ID to a hex string if necessary.
*
* @param {any} keyId the keyId to add a status for
* @param {string} status the status of the keyId
*/
addKeyStatus_(keyId, status) {
const isString = typeof keyId === 'string';
const keyIdHexString = isString ? keyId : bufferToHexString(keyId);
const formattedKeyIdString = keyIdHexString.slice(0, 32).toLowerCase();
this.logger_(`KeyStatus '${status}' with key ID ${formattedKeyIdString} added to the keyStatusMap`);
this.keyStatusMap_.set(formattedKeyIdString, status);
}
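// Illustrative call (hypothetical values): a CDM usually reports the key ID as a
// 16-byte typed array, which is stored as a 32-character lowercase hex key, e.g.
//   this.addKeyStatus_(new Uint8Array(16).fill(0xab), 'usable');
//   // keyStatusMap_ now maps 'abababababababababababababababab' -> 'usable'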
/**
* Utility function for adding key status to the keyStatusMap and filtering usable encrypted playlists.
*
* @param {any} keyId the keyId from the keystatuschange event
* @param {string} status the key status string
*/
updatePlaylistByKeyStatus(keyId, status) {
this.addKeyStatus_(keyId, status);
if (!this.waitingForFastQualityPlaylistReceived_) {
this.excludeNonUsableThenChangePlaylist_();
} // Listen to loadedplaylist with a single listener and check for new contentProtection elements when a playlist is updated.
// Cache the bound handler so that off() removes the same function reference that on() added, keeping this a single listener.
this.boundExcludeNonUsableThenChangePlaylist_ = this.boundExcludeNonUsableThenChangePlaylist_ || this.excludeNonUsableThenChangePlaylist_.bind(this);
this.mainPlaylistLoader_.off('loadedplaylist', this.boundExcludeNonUsableThenChangePlaylist_);
this.mainPlaylistLoader_.on('loadedplaylist', this.boundExcludeNonUsableThenChangePlaylist_);
}
excludeNonUsableThenChangePlaylist_() {
this.excludeNonUsablePlaylistsByKeyId_();
this.fastQualityChange_();
}
}
/**
* Returns a function that acts as the Enable/disable playlist function.
*
* @param {PlaylistLoader} loader - The main playlist loader
* @param {string} playlistID - id of the playlist
* @param {Function} changePlaylistFn - A function to be called after a
* playlist's enabled-state has been changed. Will NOT be called if a
* playlist's enabled-state is unchanged
* @param {boolean=} enable - Value to set the playlist enabled-state to
* or if undefined returns the current enabled-state for the playlist
* @return {Function} Function for setting/getting enabled
*/
const enableFunction = (loader, playlistID, changePlaylistFn) => enable => {
const playlist = loader.main.playlists[playlistID];
const incompatible = isIncompatible(playlist);
const currentlyEnabled = isEnabled(playlist);
if (typeof enable === 'undefined') {
return currentlyEnabled;
}
if (enable) {
delete playlist.disabled;
} else {
playlist.disabled = true;
}
const metadata = {
renditionInfo: {
id: playlistID,
bandwidth: playlist.attributes.BANDWIDTH,
resolution: playlist.attributes.RESOLUTION,
codecs: playlist.attributes.CODECS
},
cause: 'fast-quality'
};
if (enable !== currentlyEnabled && !incompatible) {
// Ensure the outside world knows about our changes
changePlaylistFn(playlist);
if (enable) {
loader.trigger({
type: 'renditionenabled',
metadata
});
} else {
loader.trigger({
type: 'renditiondisabled',
metadata
});
}
}
return enable;
};
/**
* The representation object encapsulates the publicly visible information
* in a media playlist along with a setter/getter-type function (enabled)
* for changing the enabled-state of a particular playlist entry
*
* @class Representation
*/
class Representation {
constructor(vhsHandler, playlist, id) {
const {
playlistController_: pc
} = vhsHandler;
const qualityChangeFunction = pc.fastQualityChange_.bind(pc); // some playlist attributes are optional
if (playlist.attributes) {
const resolution = playlist.attributes.RESOLUTION;
this.width = resolution && resolution.width;
this.height = resolution && resolution.height;
this.bandwidth = playlist.attributes.BANDWIDTH;
this.frameRate = playlist.attributes['FRAME-RATE'];
}
this.codecs = codecsForPlaylist(pc.main(), playlist);
this.playlist = playlist; // The id is simply the ordinality of the media playlist
// within the main playlist
this.id = id; // Partially-apply the enableFunction to create a playlist-
// specific variant
this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
}
}
/**
* A mixin function that adds the `representations` api to an instance
* of the VhsHandler class
*
* @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
* representation API into
*/
const renditionSelectionMixin = function (vhsHandler) {
// Add a single API-specific function to the VhsHandler instance
vhsHandler.representations = () => {
const main = vhsHandler.playlistController_.main();
const playlists = isAudioOnly(main) ? vhsHandler.playlistController_.getAudioTrackPlaylists_() : main.playlists;
if (!playlists) {
return [];
}
return playlists.filter(media => !isIncompatible(media)).map((e, i) => new Representation(vhsHandler, e, e.id));
};
};
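// Illustrative usage sketch (assumes access to the VHS handler via the tech, as in the
// getSource default below; the 720p cutoff is an arbitrary example value):
//   const representations = player.tech({ IWillNotUseThisInPlugins: true }).vhs.representations();
//   representations.forEach(rep => {
//     // keep only renditions at or above 720p enabled
//     rep.enabled(rep.height >= 720);
//   });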
/**
* @file playback-watcher.js
*
* Playback starts, and now my watch begins. It shall not end until my death. I shall
* take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
* and win no glory. I shall live and die at my post. I am the corrector of the underflow.
* I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
* my life and honor to the Playback Watch, for this Player and all the Players to come.
*/
const timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
/**
* @class PlaybackWatcher
*/
class PlaybackWatcher extends videojs.EventTarget {
/**
* Represents a PlaybackWatcher object.
*
* @class
* @param {Object} options an object that includes the tech and settings
*/
constructor(options) {
super();
this.playlistController_ = options.playlistController;
this.tech_ = options.tech;
this.seekable = options.seekable;
this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
this.media = options.media;
this.playedRanges_ = [];
this.consecutiveUpdates = 0;
this.lastRecordedTime = null;
this.checkCurrentTimeTimeout_ = null;
this.logger_ = logger('PlaybackWatcher');
this.logger_('initialize');
const playHandler = () => this.monitorCurrentTime_();
const canPlayHandler = () => this.monitorCurrentTime_();
const waitingHandler = () => this.techWaiting_();
const cancelTimerHandler = () => this.resetTimeUpdate_();
const pc = this.playlistController_;
const loaderTypes = ['main', 'subtitle', 'audio'];
const loaderChecks = {};
loaderTypes.forEach(type => {
loaderChecks[type] = {
reset: () => this.resetSegmentDownloads_(type),
updateend: () => this.checkSegmentDownloads_(type)
};
pc[`${type}SegmentLoader_`].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
// isn't changing we want to reset. We cannot assume that the new rendition
// will also be stalled, until after new appends.
pc[`${type}SegmentLoader_`].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
// This prevents one-segment playlists (single vtt or single segment content)
// from being detected as stalling, as the buffer will not change in those cases, since
// the buffer is the entire video duration.
this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
});
/**
* We check if a seek was into a gap through the following steps:
* 1. We get a seeking event and we do not get a seeked event. This means that
* a seek was attempted but not completed.
* 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
* removed everything from our buffer and appended a segment, and should be ready
* to check for gaps.
*/
const setSeekingHandlers = fn => {
['main', 'audio'].forEach(type => {
pc[`${type}SegmentLoader_`][fn]('appended', this.seekingAppendCheck_);
});
};
this.seekingAppendCheck_ = () => {
if (this.fixesBadSeeks_()) {
this.consecutiveUpdates = 0;
this.lastRecordedTime = this.tech_.currentTime();
setSeekingHandlers('off');
}
};
this.clearSeekingAppendCheck_ = () => setSeekingHandlers('off');
this.watchForBadSeeking_ = () => {
this.clearSeekingAppendCheck_();
setSeekingHandlers('on');
};
this.tech_.on('seeked', this.clearSeekingAppendCheck_);
this.tech_.on('seeking', this.watchForBadSeeking_);
this.tech_.on('waiting', waitingHandler);
this.tech_.on(timerCancelEvents, cancelTimerHandler);
this.tech_.on('canplay', canPlayHandler);
/*
An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
is surfaced in one of two ways:
1) The `waiting` event is fired before the player has buffered content, making it impossible
to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
we can check if playback is stalled due to a gap, and skip the gap if necessary.
2) A source with a gap at the beginning of the stream is loaded programmatically while the player
is in a playing state. To catch this case, it's important that our one-time play listener is set up
even if the player is in a playing state
*/
this.tech_.one('play', playHandler); // Define the dispose function to clean up our events
this.dispose = () => {
this.clearSeekingAppendCheck_();
this.logger_('dispose');
this.tech_.off('waiting', waitingHandler);
this.tech_.off(timerCancelEvents, cancelTimerHandler);
this.tech_.off('canplay', canPlayHandler);
this.tech_.off('play', playHandler);
this.tech_.off('seeking', this.watchForBadSeeking_);
this.tech_.off('seeked', this.clearSeekingAppendCheck_);
loaderTypes.forEach(type => {
pc[`${type}SegmentLoader_`].off('appendsdone', loaderChecks[type].updateend);
pc[`${type}SegmentLoader_`].off('playlistupdate', loaderChecks[type].reset);
this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
});
if (this.checkCurrentTimeTimeout_) {
window$1.clearTimeout(this.checkCurrentTimeTimeout_);
}
this.resetTimeUpdate_();
};
}
/**
* Periodically check current time to see if playback stopped
*
* @private
*/
monitorCurrentTime_() {
this.checkCurrentTime_();
if (this.checkCurrentTimeTimeout_) {
window$1.clearTimeout(this.checkCurrentTimeTimeout_);
} // 42ms ~= one frame at 24 fps; 250ms is what WebKit uses; Firefox uses 15ms
this.checkCurrentTimeTimeout_ = window$1.setTimeout(this.monitorCurrentTime_.bind(this), 250);
}
/**
* Reset stalled download stats for a specific type of loader
*
* @param {string} type
* The segment loader type to check.
*
* @listens SegmentLoader#playlistupdate
* @listens Tech#seeking
* @listens Tech#seeked
*/
resetSegmentDownloads_(type) {
const loader = this.playlistController_[`${type}SegmentLoader_`];
if (this[`${type}StalledDownloads_`] > 0) {
this.logger_(`resetting possible stalled download count for ${type} loader`);
}
this[`${type}StalledDownloads_`] = 0;
this[`${type}Buffered_`] = loader.buffered_();
}
/**
* Checks on every segment `appendsdone` to see
* if segment appends are making progress. If they are not,
* and we are still downloading bytes, we exclude the playlist.
*
* @param {string} type
* The segment loader type to check.
*
* @listens SegmentLoader#appendsdone
*/
checkSegmentDownloads_(type) {
const pc = this.playlistController_;
const loader = pc[`${type}SegmentLoader_`];
const buffered = loader.buffered_();
const isBufferedDifferent = isRangeDifferent(this[`${type}Buffered_`], buffered);
this[`${type}Buffered_`] = buffered; // if another watcher is going to fix the issue or
// the buffered value for this loader changed
// appends are working
if (isBufferedDifferent) {
const metadata = {
bufferedRanges: buffered
};
pc.trigger({
type: 'bufferedrangeschanged',
metadata
});
this.resetSegmentDownloads_(type);
return;
}
this[`${type}StalledDownloads_`]++;
this.logger_(`found #${this[`${type}StalledDownloads_`]} ${type} appends that did not increase buffer (possible stalled download)`, {
playlistId: loader.playlist_ && loader.playlist_.id,
buffered: timeRangesToArray(buffered)
}); // after 10 possibly stalled appends with no reset, exclude
if (this[`${type}StalledDownloads_`] < 10) {
return;
}
this.logger_(`${type} loader stalled download exclusion`);
this.resetSegmentDownloads_(type);
this.tech_.trigger({
type: 'usage',
name: `vhs-${type}-download-exclusion`
});
if (type === 'subtitle') {
return;
} // TODO: should we exclude audio tracks rather than main tracks
// when type is audio?
pc.excludePlaylist({
error: {
message: `Excessive ${type} segment downloading detected.`
},
playlistExclusionDuration: Infinity
});
}
/**
* The purpose of this function is to emulate the "waiting" event on
* browsers that do not emit it when they are waiting for more
* data to continue playback
*
* @private
*/
checkCurrentTime_() {
if (this.tech_.paused() || this.tech_.seeking()) {
return;
}
const currentTime = this.tech_.currentTime();
const buffered = this.tech_.buffered();
if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
// If current time is at the end of the final buffered region, then any playback
// stall is most likely caused by buffering in a low bandwidth environment. The tech
// should fire a `waiting` event in this scenario, but due to browser and tech
// inconsistencies it may not. Calling `techWaiting_` here allows us to simulate
// responding to a native `waiting` event when the tech fails to emit one.
return this.techWaiting_();
}
if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
this.consecutiveUpdates++;
this.waiting_();
} else if (currentTime === this.lastRecordedTime) {
this.consecutiveUpdates++;
} else {
this.playedRanges_.push(createTimeRanges([this.lastRecordedTime, currentTime]));
const metadata = {
playedRanges: this.playedRanges_
};
this.playlistController_.trigger({
type: 'playedrangeschanged',
metadata
});
this.consecutiveUpdates = 0;
this.lastRecordedTime = currentTime;
}
}
/**
* Resets the 'timeupdate' mechanism designed to detect that we are stalled
*
* @private
*/
resetTimeUpdate_() {
this.consecutiveUpdates = 0;
}
/**
* Fixes situations where there's a bad seek
*
* @return {boolean} whether an action was taken to fix the seek
* @private
*/
fixesBadSeeks_() {
const seeking = this.tech_.seeking();
if (!seeking) {
return false;
} // TODO: It's possible that these seekable checks should be moved out of this function
// and into a function that runs on seekablechange. It's also possible that we only need
// afterSeekableWindow as the buffered check at the bottom is good enough to handle before
// seekable range.
const seekable = this.seekable();
const currentTime = this.tech_.currentTime();
const isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
let seekTo;
if (isAfterSeekableRange) {
const seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
seekTo = seekableEnd;
}
if (this.beforeSeekableWindow_(seekable, currentTime)) {
const seekableStart = seekable.start(0); // sync to the beginning of the live window
// provide a buffer of .1 seconds to handle rounding/imprecise numbers
seekTo = seekableStart + (
// if the playlist is too short and the seekable range is an exact time (can
// happen in live with a 3 segment playlist), then don't use a time delta
seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
}
if (typeof seekTo !== 'undefined') {
this.logger_(`Trying to seek outside of seekable at time ${currentTime} with ` + `seekable range ${printableRange(seekable)}. Seeking to ` + `${seekTo}.`);
this.tech_.setCurrentTime(seekTo);
return true;
}
const sourceUpdater = this.playlistController_.sourceUpdater_;
const buffered = this.tech_.buffered();
const audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
const videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null;
const media = this.media(); // verify that at least two segment durations or one part duration have been
// appended before checking for a gap.
const minAppendedDuration = media.partTargetDuration ? media.partTargetDuration : (media.targetDuration - TIME_FUDGE_FACTOR) * 2;
const bufferedToCheck = [audioBuffered, videoBuffered];
for (let i = 0; i < bufferedToCheck.length; i++) {
// skip null buffered
if (!bufferedToCheck[i]) {
continue;
}
const timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if we are less than two video/audio segment durations or one part
// duration behind we haven't appended enough to call this a bad seek.
if (timeAhead < minAppendedDuration) {
return false;
}
}
const nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
// to seek over the gap
if (nextRange.length === 0) {
return false;
}
seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
this.logger_(`Buffered region starts (${nextRange.start(0)}) ` + ` just beyond seek point (${currentTime}). Seeking to ${seekTo}.`);
this.tech_.setCurrentTime(seekTo);
return true;
}
/**
* Handler for situations when we determine the player is waiting.
*
* @private
*/
waiting_() {
if (this.techWaiting_()) {
return;
} // All tech waiting checks failed. Use last resort correction
const currentTime = this.tech_.currentTime();
const buffered = this.tech_.buffered();
const currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
// region with no indication that anything is amiss (seen in Firefox). Seeking to
// currentTime is usually enough to kickstart the player. This checks that the player
// is currently within a buffered region before attempting a corrective seek.
// Chrome does not appear to continue `timeupdate` events after a `waiting` event
// until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
// make sure there is ~3 seconds of forward buffer before taking any corrective action
// to avoid triggering an `unknownwaiting` event when the network is slow.
if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
this.resetTimeUpdate_();
this.tech_.setCurrentTime(currentTime);
this.logger_(`Stopped at ${currentTime} while inside a buffered region ` + `[${currentRange.start(0)} -> ${currentRange.end(0)}]. Attempting to resume ` + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
this.tech_.trigger({
type: 'usage',
name: 'vhs-unknown-waiting'
});
return;
}
}
/**
* Handler for situations when the tech fires a `waiting` event
*
* @return {boolean}
* True if an action was taken (or none was needed) to correct the waiting. False if no
* checks passed
* @private
*/
techWaiting_() {
const seekable = this.seekable();
const currentTime = this.tech_.currentTime();
if (this.tech_.seeking()) {
// Tech is seeking or already waiting on another action, no action needed
return true;
}
if (this.beforeSeekableWindow_(seekable, currentTime)) {
const livePoint = seekable.end(seekable.length - 1);
this.logger_(`Fell out of live window at time ${currentTime}. Seeking to ` + `live point (seekable end) ${livePoint}`);
this.resetTimeUpdate_();
this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS
this.tech_.trigger({
type: 'usage',
name: 'vhs-live-resync'
});
return true;
}
const sourceUpdater = this.tech_.vhs.playlistController_.sourceUpdater_;
const buffered = this.tech_.buffered();
const videoUnderflow = this.videoUnderflow_({
audioBuffered: sourceUpdater.audioBuffered(),
videoBuffered: sourceUpdater.videoBuffered(),
currentTime
});
if (videoUnderflow) {
// Even though the video underflowed and was stuck in a gap, the audio overplayed
// the gap, leading currentTime into a buffered range. Seeking to currentTime
// allows the video to catch up to the audio position without losing any audio
// (only suffering ~3 seconds of frozen video and a pause in audio playback).
this.resetTimeUpdate_();
this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS
this.tech_.trigger({
type: 'usage',
name: 'vhs-video-underflow'
});
return true;
}
const nextRange = findNextRange(buffered, currentTime); // check for gap
if (nextRange.length > 0) {
this.logger_(`Stopped at ${currentTime} and seeking to ${nextRange.start(0)}`);
this.resetTimeUpdate_();
this.skipTheGap_(currentTime);
return true;
} // All checks failed. Returning false to indicate failure to correct waiting
return false;
}
afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow = false) {
if (!seekable.length) {
// we can't make a solid case if there's no seekable, default to false
return false;
}
let allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
const isLive = !playlist.endList;
const isLLHLS = typeof playlist.partTargetDuration === 'number';
if (isLive && (isLLHLS || allowSeeksWithinUnsafeLiveWindow)) {
allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
}
if (currentTime > allowedEnd) {
return true;
}
return false;
}
beforeSeekableWindow_(seekable, currentTime) {
if (seekable.length &&
// can't fall before 0 and 0 seekable start identifies VOD stream
seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
return true;
}
return false;
}
videoUnderflow_({
videoBuffered,
audioBuffered,
currentTime
}) {
// audio only content will not have video underflow :)
if (!videoBuffered) {
return;
}
let gap; // find a gap in demuxed content.
if (videoBuffered.length && audioBuffered.length) {
// in Chrome audio will continue to play for ~3s when we run out of video
// so we have to check that the video buffer did have some buffer in the
// past.
const lastVideoRange = findRange(videoBuffered, currentTime - 3);
const videoRange = findRange(videoBuffered, currentTime);
const audioRange = findRange(audioBuffered, currentTime);
if (audioRange.length && !videoRange.length && lastVideoRange.length) {
gap = {
start: lastVideoRange.end(0),
end: audioRange.end(0)
};
} // find a gap in muxed content.
} else {
const nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
// stuck in a gap due to video underflow.
if (!nextRange.length) {
gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
}
}
if (gap) {
this.logger_(`Encountered a gap in video from ${gap.start} to ${gap.end}. ` + `Seeking to current time ${currentTime}`);
return true;
}
return false;
}
/**
* Timer callback. If playback still has not proceeded, then we seek
* to the start of the next buffered region.
*
* @private
*/
skipTheGap_(scheduledCurrentTime) {
const buffered = this.tech_.buffered();
const currentTime = this.tech_.currentTime();
const nextRange = findNextRange(buffered, currentTime);
this.resetTimeUpdate_();
if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
return;
}
this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
const metadata = {
gapInfo: {
from: currentTime,
to: nextRange.start(0)
}
};
this.playlistController_.trigger({
type: 'gapjumped',
metadata
});
this.tech_.trigger({
type: 'usage',
name: 'vhs-gap-skip'
});
}
gapFromVideoUnderflow_(buffered, currentTime) {
// At least in Chrome, if there is a gap in the video buffer, the audio will continue
// playing for ~3 seconds after the video gap starts. This is done to account for
// video buffer underflow/underrun (note that this is not done when there is audio
// buffer underflow/underrun -- in that case the video will stop as soon as it
// encounters the gap, as audio stalls are more noticeable/jarring to a user than
// video stalls). The player's time will reflect the playthrough of audio, so the
// time will appear as if we are in a buffered region, even if we are stuck in a
// "gap."
//
// Example:
// video buffer: 0 => 10.1, 10.2 => 20
// audio buffer: 0 => 20
// overall buffer: 0 => 10.1, 10.2 => 20
// current time: 13
//
// Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
// however, the audio continued playing until it reached ~3 seconds past the gap
// (13 seconds), at which point it stops as well. Since current time is past the
// gap, findNextRange will return no ranges.
//
// To check for this issue, we see if there is a gap that starts somewhere within
// a 3 second range (3 seconds +/- 1 second) back from our current time.
const gaps = findGaps(buffered);
for (let i = 0; i < gaps.length; i++) {
const start = gaps.start(i);
const end = gaps.end(i); // the gap starts no more than 4 seconds back
if (currentTime - start < 4 && currentTime - start > 2) {
return {
start,
end
};
}
}
return null;
}
}
const defaultOptions = {
errorInterval: 30,
getSource(next) {
const tech = this.tech({
IWillNotUseThisInPlugins: true
});
const sourceObj = tech.currentSource_ || this.currentSource();
return next(sourceObj);
}
};
/**
* Main entry point for the plugin
*
* @param {Player} player a reference to a videojs Player instance
* @param {Object} [options] an object with plugin options
* @private
*/
const initPlugin = function (player, options) {
let lastCalled = 0;
let seekTo = 0;
const localOptions = merge(defaultOptions, options);
player.ready(() => {
player.trigger({
type: 'usage',
name: 'vhs-error-reload-initialized'
});
});
/**
* Player modifications to perform that must wait until `loadedmetadata`
* has been triggered
*
* @private
*/
const loadedMetadataHandler = function () {
if (seekTo) {
player.currentTime(seekTo);
}
};
/**
* Set the source on the player element, play, and seek if necessary
*
* @param {Object} sourceObj An object specifying the source url and mime-type to play
* @private
*/
const setSource = function (sourceObj) {
if (sourceObj === null || sourceObj === undefined) {
return;
}
seekTo = player.duration() !== Infinity && player.currentTime() || 0;
player.one('loadedmetadata', loadedMetadataHandler);
player.src(sourceObj);
player.trigger({
type: 'usage',
name: 'vhs-error-reload'
});
player.play();
};
/**
* Attempt to get a source from either the built-in getSource function
* or a custom function provided via the options
*
* @private
*/
const errorHandler = function () {
// Do not attempt to reload the source if a source-reload occurred before
// 'errorInterval' time has elapsed since the last source-reload
if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
player.trigger({
type: 'usage',
name: 'vhs-error-reload-canceled'
});
return;
}
if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
videojs.log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
return;
}
lastCalled = Date.now();
return localOptions.getSource.call(player, setSource);
};
/**
* Unbind any event handlers that were bound by the plugin
*
* @private
*/
const cleanupEvents = function () {
player.off('loadedmetadata', loadedMetadataHandler);
player.off('error', errorHandler);
player.off('dispose', cleanupEvents);
};
/**
* Cleanup before re-initializing the plugin
*
* @param {Object} [newOptions] an object with plugin options
* @private
*/
const reinitPlugin = function (newOptions) {
cleanupEvents();
initPlugin(player, newOptions);
};
player.on('error', errorHandler);
player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup before
// initializing the plugin
player.reloadSourceOnError = reinitPlugin;
};
/**
* Reload the source when an error is detected as long as there
* wasn't an error previously within the last 30 seconds
*
* @param {Object} [options] an object with plugin options
*/
const reloadSourceOnError = function (options) {
initPlugin(this, options);
};
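// Illustrative usage sketch (the options shown map to the plugin defaults above; the
// replacement source URL is hypothetical):
//   player.reloadSourceOnError({
//     // wait at least 10 seconds between reload attempts
//     errorInterval: 10,
//     // optionally supply a replacement source instead of re-using the current one
//     getSource(reload) {
//       reload({ src: 'https://example.com/stream/index.m3u8', type: 'application/x-mpegURL' });
//     }
//   });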
var version$4 = "3.13.3";
var version$3 = "7.0.3";
var version$2 = "1.3.0";
var version$1 = "7.1.0";
var version = "4.0.1";
const Vhs = {
PlaylistLoader,
Playlist,
utils,
STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
lastBandwidthSelector,
movingAverageBandwidthSelector,
comparePlaylistBandwidth,
comparePlaylistResolution,
xhr: xhrFactory()
}; // Define getter/setters for config properties
Object.keys(Config).forEach(prop => {
Object.defineProperty(Vhs, prop, {
get() {
videojs.log.warn(`using Vhs.${prop} is UNSAFE be sure you know what you are doing`);
return Config[prop];
},
set(value) {
videojs.log.warn(`using Vhs.${prop} is UNSAFE be sure you know what you are doing`);
if (typeof value !== 'number' || value < 0) {
videojs.log.warn(`value of Vhs.${prop} must be greater than or equal to 0`);
return;
}
Config[prop] = value;
}
});
});
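// Illustrative usage sketch (GOAL_BUFFER_LENGTH is assumed to be one of the Config keys
// wrapped above; the same pattern applies to any other Config key, and the Vhs object is
// commonly reached as videojs.Vhs once it is registered):
//   Vhs.GOAL_BUFFER_LENGTH;      // logs the UNSAFE warning and returns the current value
//   Vhs.GOAL_BUFFER_LENGTH = 60; // logs the warning, validates the number, then updates Config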
const LOCAL_STORAGE_KEY = 'videojs-vhs';
/**
* Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
*
* @param {QualityLevelList} qualityLevels The QualityLevelList to update.
* @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
* @function handleVhsMediaChange
*/
const handleVhsMediaChange = function (qualityLevels, playlistLoader) {
const newPlaylist = playlistLoader.media();
let selectedIndex = -1;
for (let i = 0; i < qualityLevels.length; i++) {
if (qualityLevels[i].id === newPlaylist.id) {
selectedIndex = i;
break;
}
}
qualityLevels.selectedIndex_ = selectedIndex;
qualityLevels.trigger({
selectedIndex,
type: 'change'
});
};
/**
* Adds quality levels to list once playlist metadata is available
*
* @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
* @param {Object} vhs Vhs object to listen to for media events.
* @function handleVhsLoadedMetadata
*/
const handleVhsLoadedMetadata = function (qualityLevels, vhs) {
vhs.representations().forEach(rep => {
qualityLevels.addQualityLevel(rep);
});
handleVhsMediaChange(qualityLevels, vhs.playlists);
}; // VHS is a source handler, not a tech. Make sure attempts to use it
// as one do not cause exceptions.
Vhs.canPlaySource = function () {
return videojs.log.warn('VHS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
};
const emeKeySystems = (keySystemOptions, mainPlaylist, audioPlaylist) => {
if (!keySystemOptions) {
return keySystemOptions;
}
let codecs = {};
if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));
}
if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
codecs.audio = audioPlaylist.attributes.CODECS;
}
const videoContentType = getMimeForCodec(codecs.video);
const audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist
const keySystemContentTypes = {};
for (const keySystem in keySystemOptions) {
keySystemContentTypes[keySystem] = {};
if (audioContentType) {
keySystemContentTypes[keySystem].audioContentType = audioContentType;
}
if (videoContentType) {
keySystemContentTypes[keySystem].videoContentType = videoContentType;
} // Default to using the video playlist's PSSH even though they may be different, as
// videojs-contrib-eme will only accept one in the options.
//
// This shouldn't be an issue for most cases as early initialization will handle all
// unique PSSH values, and if they aren't, then encrypted events should have the
// specific information needed for the unique license.
if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
} // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
// so we need to prevent overwriting the URL entirely
if (typeof keySystemOptions[keySystem] === 'string') {
keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
}
}
return merge(keySystemOptions, keySystemContentTypes);
};
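// Illustrative transformation (key system name, URL and codecs are hypothetical): given
//   keySystemOptions = { 'com.widevine.alpha': 'https://example.com/license' }
// and a main playlist with CODECS="avc1.4d401f,mp4a.40.2", emeKeySystems returns roughly
//   { 'com.widevine.alpha': {
//       url: 'https://example.com/license',
//       audioContentType: 'audio/mp4;codecs="mp4a.40.2"',
//       videoContentType: 'video/mp4;codecs="avc1.4d401f"'
//   } }
// plus a pssh entry when the playlist's contentProtection provides one.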
/**
* @typedef {Object} KeySystems
*
* keySystems configuration for https://github.com/videojs/videojs-contrib-eme
* Note: not all options are listed here.
*
* @property {Uint8Array} [pssh]
* Protection System Specific Header
*/
/**
* Goes through all the playlists and collects an array of KeySystems options objects
* containing each playlist's keySystems and their pssh values, if available.
*
* @param {Object[]} playlists
* The playlists to look through
* @param {string[]} keySystems
* The keySystems to collect pssh values for
*
* @return {KeySystems[]}
* An array of KeySystems objects containing available key systems and their
* pssh values
*/
const getAllPsshKeySystemsOptions = (playlists, keySystems) => {
return playlists.reduce((keySystemsArr, playlist) => {
if (!playlist.contentProtection) {
return keySystemsArr;
}
const keySystemsOptions = keySystems.reduce((keySystemsObj, keySystem) => {
const keySystemOptions = playlist.contentProtection[keySystem];
if (keySystemOptions && keySystemOptions.pssh) {
keySystemsObj[keySystem] = {
pssh: keySystemOptions.pssh
};
}
return keySystemsObj;
}, {});
if (Object.keys(keySystemsOptions).length) {
keySystemsArr.push(keySystemsOptions);
}
return keySystemsArr;
}, []);
};
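// Illustrative result shape (hypothetical values): for two playlists that carry Widevine
// content protection with pssh data, the reducer above yields
//   [{ 'com.widevine.alpha': { pssh: /* Uint8Array */ } },
//    { 'com.widevine.alpha': { pssh: /* Uint8Array */ } }]
// Playlists without contentProtection, or without a pssh value, contribute nothing.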
/**
* Returns a promise that waits for the
* [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
*
* Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
* browsers.
*
* As per the above ticket, this is particularly important for Chrome, where, if
* unencrypted content is appended before encrypted content and the key session has not
* been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
* during playback.
*
* @param {Object} player
* The player instance
* @param {Object[]} sourceKeySystems
* The key systems options from the player source
* @param {Object} [audioMedia]
* The active audio media playlist (optional)
* @param {Object[]} mainPlaylists
* The playlists found on the main playlist object
*
* @return {Object}
* Promise that resolves when the key session has been created
*/
const waitForKeySessionCreation = ({
player,
sourceKeySystems,
audioMedia,
mainPlaylists
}) => {
if (!player.eme.initializeMediaKeys) {
return Promise.resolve();
} // TODO should all audio PSSH values be initialized for DRM?
//
// All unique video rendition pssh values are initialized for DRM, but here only
// the initial audio playlist license is initialized. In theory, an encrypted
// event should be fired if the user switches to an alternative audio playlist
// where a license is required, but this case hasn't yet been tested. In addition, there
// may be many alternate audio playlists unlikely to be used (e.g., multiple different
// languages).
const playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
const keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
const initializationFinishedPromises = [];
const keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
// only place where it should not be deduped is for ms-prefixed APIs, but
// the existence of modern EME APIs in addition to
// ms-prefixed APIs on Edge should prevent this from being a concern.
// initializeMediaKeys also won't use the webkit-prefixed APIs.
keySystemsOptionsArr.forEach(keySystemsOptions => {
keySessionCreatedPromises.push(new Promise((resolve, reject) => {
player.tech_.one('keysessioncreated', resolve);
}));
initializationFinishedPromises.push(new Promise((resolve, reject) => {
player.eme.initializeMediaKeys({
keySystems: keySystemsOptions
}, err => {
if (err) {
reject(err);
return;
}
resolve();
});
}));
}); // The reasons Promise.race is chosen over Promise.any:
//
// * Promise.any is only available in Safari 14+.
// * None of these promises are expected to reject. If they do reject, it might be
// better here for the race to surface the rejection, rather than mask it by using
// Promise.any.
return Promise.race([
// If a session was previously created, these will all finish resolving without
// creating a new session, otherwise it will take until the end of all license
// requests, which is why the key session check is used (to make setup much faster).
Promise.all(initializationFinishedPromises),
// Once a single session is created, the browser knows DRM will be used.
Promise.race(keySessionCreatedPromises)]);
};
/**
* If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
* there are keySystems on the source, sets up source options to prepare the source for
* eme.
*
* @param {Object} player
* The player instance
* @param {Object[]} sourceKeySystems
* The key systems options from the player source
* @param {Object} media
* The active media playlist
* @param {Object} [audioMedia]
* The active audio media playlist (optional)
*
* @return {boolean}
* Whether or not options were configured and EME is available
*/
const setupEmeOptions = ({
player,
sourceKeySystems,
media,
audioMedia
}) => {
const sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
if (!sourceOptions) {
return false;
}
player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
// do nothing.
if (sourceOptions && !player.eme) {
videojs.log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
return false;
}
return true;
};
const getVhsLocalStorage = () => {
if (!window$1.localStorage) {
return null;
}
const storedObject = window$1.localStorage.getItem(LOCAL_STORAGE_KEY);
if (!storedObject) {
return null;
}
try {
return JSON.parse(storedObject);
} catch (e) {
// someone may have tampered with the value
return null;
}
};
const updateVhsLocalStorage = options => {
if (!window$1.localStorage) {
return false;
}
let objectToStore = getVhsLocalStorage();
objectToStore = objectToStore ? merge(objectToStore, options) : options;
try {
window$1.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
} catch (e) {
// Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
// storage is set to 0).
// https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
// No need to perform any operation.
return false;
}
return objectToStore;
};
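/*
 * Illustrative sketch only (not executed as part of this bundle): when the
 * `useBandwidthFromLocalStorage` VHS option is enabled, the bandwidth and
 * throughput values persisted above are read back on the next session to seed
 * initial rendition selection. The element id 'my-video' is a placeholder.
 *
 *   const player = videojs('my-video', {
 *     html5: {
 *       vhs: {
 *         // opt in to persisting and restoring bandwidth/throughput estimates
 *         useBandwidthFromLocalStorage: true
 *       }
 *     }
 *   });
 */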
/**
* Parses VHS-supported media types from data URIs. See
* https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
* for information on data URIs.
*
* @param {string} dataUri
* The data URI
*
* @return {string|Object}
* The parsed object/string, or the original string if no supported media type
* was found
*/
const expandDataUri = dataUri => {
if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
} // no known case for this data URI, return the string as-is
return dataUri;
};
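/*
 * Illustrative sketch only (not executed as part of this bundle): a manifest
 * object can be handed to the player directly using the data URI form handled
 * above. `manifestObject` is a placeholder for an already-parsed manifest.
 *
 *   player.src({
 *     src: 'data:application/vnd.videojs.vhs+json,' + JSON.stringify(manifestObject),
 *     type: 'application/vnd.videojs.vhs+json'
 *   });
 */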
/**
* Adds a request hook to an xhr object
*
* @param {Object} xhr object to add the onRequest hook to
* @param {function} callback hook function for an xhr request
*/
const addOnRequestHook = (xhr, callback) => {
if (!xhr._requestCallbackSet) {
xhr._requestCallbackSet = new Set();
}
xhr._requestCallbackSet.add(callback);
};
/**
* Adds a response hook to an xhr object
*
* @param {Object} xhr object to add the onResponse hook to
* @param {function} callback hook function for an xhr response
*/
const addOnResponseHook = (xhr, callback) => {
if (!xhr._responseCallbackSet) {
xhr._responseCallbackSet = new Set();
}
xhr._responseCallbackSet.add(callback);
};
/**
* Removes a request hook on an xhr object, deletes the onRequest set if empty.
*
* @param {Object} xhr object to remove the onRequest hook from
* @param {function} callback hook function to remove
*/
const removeOnRequestHook = (xhr, callback) => {
if (!xhr._requestCallbackSet) {
return;
}
xhr._requestCallbackSet.delete(callback);
if (!xhr._requestCallbackSet.size) {
delete xhr._requestCallbackSet;
}
};
/**
* Removes a response hook on an xhr object, deletes the onResponse set if empty.
*
* @param {Object} xhr object to remove the onResponse hook from
* @param {function} callback hook function to remove
*/
const removeOnResponseHook = (xhr, callback) => {
if (!xhr._responseCallbackSet) {
return;
}
xhr._responseCallbackSet.delete(callback);
if (!xhr._responseCallbackSet.size) {
delete xhr._responseCallbackSet;
}
};
/**
* Whether the browser has built-in HLS support.
*/
Vhs.supportsNativeHls = function () {
if (!document$1 || !document$1.createElement) {
return false;
}
const video = document$1.createElement('video'); // native HLS is definitely not supported if HTML5 video isn't
if (!videojs.getTech('Html5').isSupported()) {
return false;
} // HLS manifests can go by many mime-types
const canPlay = [
// Apple sanctioned
'application/vnd.apple.mpegurl',
// Apple sanctioned for backwards compatibility
'audio/mpegurl',
// Very common
'audio/x-mpegurl',
// Very common
'application/x-mpegurl',
// Included for completeness
'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
return canPlay.some(function (canItPlay) {
return /maybe|probably/i.test(video.canPlayType(canItPlay));
});
}();
Vhs.supportsNativeDash = function () {
if (!document$1 || !document$1.createElement || !videojs.getTech('Html5').isSupported()) {
return false;
}
return /maybe|probably/i.test(document$1.createElement('video').canPlayType('application/dash+xml'));
}();
Vhs.supportsTypeNatively = type => {
if (type === 'hls') {
return Vhs.supportsNativeHls;
}
if (type === 'dash') {
return Vhs.supportsNativeDash;
}
return false;
};
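/*
 * Illustrative sketch only (not executed as part of this bundle): integrators
 * can consult these flags to see whether the browser's built-in playback would
 * be used instead of MSE-based playback (barring overrideNative).
 *
 *   if (videojs.Vhs.supportsTypeNatively('hls')) {
 *     // e.g. Safari / iOS: native HLS playback is available
 *   }
 */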
/**
* VHS is a source handler, not a tech. Make sure attempts to use it
* as one do not cause exceptions.
*/
Vhs.isSupported = function () {
return videojs.log.warn('VHS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
};
/**
* A global function for setting an onRequest hook
*
* @param {function} callback for request modification
*/
Vhs.xhr.onRequest = function (callback) {
addOnRequestHook(Vhs.xhr, callback);
};
/**
* A global function for setting an onResponse hook
*
* @param {function} callback for response data retrieval
*/
Vhs.xhr.onResponse = function (callback) {
addOnResponseHook(Vhs.xhr, callback);
};
/**
* Deletes a global onRequest callback if it exists
*
* @param {function} callback to delete from the global set
*/
Vhs.xhr.offRequest = function (callback) {
removeOnRequestHook(Vhs.xhr, callback);
};
/**
* Deletes a global onResponse callback if it exists
*
* @param {function} callback to delete from the global set
*/
Vhs.xhr.offResponse = function (callback) {
removeOnResponseHook(Vhs.xhr, callback);
};
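/*
 * Illustrative sketch only (not executed as part of this bundle): a global
 * request hook receives the xhr request options for every VHS request and is
 * expected to return them; the query parameter added here is a placeholder.
 *
 *   const tokenHook = (options) => {
 *     options.uri = options.uri + '?token=placeholder';
 *     return options;
 *   };
 *   videojs.Vhs.xhr.onRequest(tokenHook);
 *   // later, remove it again:
 *   videojs.Vhs.xhr.offRequest(tokenHook);
 */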
const Component = videojs.getComponent('Component');
/**
* The Vhs Handler object, where we orchestrate all of the parts
* of VHS to interact with video.js
*
* @class VhsHandler
* @extends videojs.Component
* @param {Object} source the source object
* @param {Tech} tech the parent tech object
* @param {Object} options optional and required options
*/
class VhsHandler extends Component {
constructor(source, tech, options) {
super(tech, options.vhs); // if a tech level `initialBandwidth` option was passed
// use that over the VHS level `bandwidth` option
if (typeof options.initialBandwidth === 'number') {
this.options_.bandwidth = options.initialBandwidth;
}
this.logger_ = logger('VhsHandler'); // we need access to the player in some cases,
// so, get it from Video.js via the `playerId`
if (tech.options_ && tech.options_.playerId) {
const _player = videojs.getPlayer(tech.options_.playerId);
this.player_ = _player;
}
this.tech_ = tech;
this.source_ = source;
this.stats = {};
this.ignoreNextSeekingEvent_ = false;
this.setOptions_();
if (this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
tech.overrideNativeAudioTracks(true);
tech.overrideNativeVideoTracks(true);
} else if (this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
// overriding native VHS only works if audio tracks have been emulated
// error early if we're misconfigured
throw new Error('Overriding native VHS requires emulated tracks. ' + 'See https://git.io/vMpjB');
} // listen for fullscreenchange events for this player so that we
// can adjust our quality selection quickly
this.on(document$1, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], event => {
const fullscreenElement = document$1.fullscreenElement || document$1.webkitFullscreenElement || document$1.mozFullScreenElement || document$1.msFullscreenElement;
if (fullscreenElement && fullscreenElement.contains(this.tech_.el())) {
this.playlistController_.fastQualityChange_();
} else {
// When leaving fullscreen, since the in page pixel dimensions should be smaller
// than full screen, see if there should be a rendition switch down to preserve
// bandwidth.
this.playlistController_.checkABR_();
}
});
this.on(this.tech_, 'seeking', function () {
if (this.ignoreNextSeekingEvent_) {
this.ignoreNextSeekingEvent_ = false;
return;
}
this.setCurrentTime(this.tech_.currentTime());
});
this.on(this.tech_, 'error', function () {
// verify that the error was real and that the playlist controller
// has been created before pausing loading.
if (this.tech_.error() && this.playlistController_) {
this.playlistController_.pauseLoading();
}
});
this.on(this.tech_, 'play', this.play);
}
/**
* Set VHS options based on options from configuration, as well as partial
* options to be passed at a later time.
*
* @param {Object} options A partial chunk of config options
*/
setOptions_(options = {}) {
this.options_ = merge(this.options_, options); // defaults
this.options_.withCredentials = this.options_.withCredentials || false;
this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
this.options_.useForcedSubtitles = this.options_.useForcedSubtitles || false;
this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;
this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;
this.options_.customTagParsers = this.options_.customTagParsers || [];
this.options_.customTagMappers = this.options_.customTagMappers || [];
this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
this.options_.llhls = this.options_.llhls === false ? false : true;
this.options_.bufferBasedABR = this.options_.bufferBasedABR || false;
if (typeof this.options_.playlistExclusionDuration !== 'number') {
this.options_.playlistExclusionDuration = 60;
}
if (typeof this.options_.bandwidth !== 'number') {
if (this.options_.useBandwidthFromLocalStorage) {
const storedObject = getVhsLocalStorage();
if (storedObject && storedObject.bandwidth) {
this.options_.bandwidth = storedObject.bandwidth;
this.tech_.trigger({
type: 'usage',
name: 'vhs-bandwidth-from-local-storage'
});
}
if (storedObject && storedObject.throughput) {
this.options_.throughput = storedObject.throughput;
this.tech_.trigger({
type: 'usage',
name: 'vhs-throughput-from-local-storage'
});
}
}
} // if bandwidth was not set by options or pulled from local storage, start playlist
// selection at a reasonable bandwidth
if (typeof this.options_.bandwidth !== 'number') {
this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
} // enableLowInitialPlaylist is only honored while the bandwidth is still at its
// initial default; an explicitly configured bandwidth takes precedence over it
this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
['withCredentials', 'useDevicePixelRatio', 'customPixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'customTagParsers', 'customTagMappers', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'bufferBasedABR', 'liveRangeSafeTimeDelta', 'llhls', 'useForcedSubtitles', 'useNetworkInformationApi', 'useDtsForTimestampOffset', 'exactManifestTimings', 'leastPixelDiffSelector'].forEach(option => {
if (typeof this.source_[option] !== 'undefined') {
this.options_[option] = this.source_[option];
}
});
this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
const customPixelRatio = this.options_.customPixelRatio; // Ensure the custom pixel ratio is a number greater than or equal to 0
if (typeof customPixelRatio === 'number' && customPixelRatio >= 0) {
this.customPixelRatio = customPixelRatio;
}
} // alias for public method to set options
setOptions(options = {}) {
this.setOptions_(options);
}
/**
* called when player.src gets called; handles a new source
*
* @param {Object} src the source object to handle
* @param {string} type the source type (e.g. a mime type string)
*/
src(src, type) {
// do nothing if the src is falsey
if (!src) {
return;
}
this.setOptions_(); // add main playlist controller options
this.options_.src = expandDataUri(this.source_.src);
this.options_.tech = this.tech_;
this.options_.externVhs = Vhs;
this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
this.options_.seekTo = time => {
this.tech_.setCurrentTime(time);
}; // pass player to allow for player level eventing on construction.
this.options_.player_ = this.player_;
this.playlistController_ = new PlaylistController(this.options_);
const playbackWatcherOptions = merge({
liveRangeSafeTimeDelta: SAFE_TIME_DELTA
}, this.options_, {
seekable: () => this.seekable(),
media: () => this.playlistController_.media(),
playlistController: this.playlistController_
});
this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
this.attachStreamingEventListeners_();
this.playlistController_.on('error', () => {
const player = videojs.players[this.tech_.options_.playerId];
let error = this.playlistController_.error;
if (typeof error === 'object' && !error.code) {
error.code = 3;
} else if (typeof error === 'string') {
error = {
message: error,
code: 3
};
}
player.error(error);
});
const defaultSelector = this.options_.bufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
// compatibility with < v2
this.playlistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
this.playlistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
this.playlists = this.playlistController_.mainPlaylistLoader_;
this.mediaSource = this.playlistController_.mediaSource; // Proxy assignment of some properties to the main playlist
// controller. Using a custom property for backwards compatibility
// with < v2
Object.defineProperties(this, {
selectPlaylist: {
get() {
return this.playlistController_.selectPlaylist;
},
set(selectPlaylist) {
this.playlistController_.selectPlaylist = selectPlaylist.bind(this);
}
},
throughput: {
get() {
return this.playlistController_.mainSegmentLoader_.throughput.rate;
},
set(throughput) {
this.playlistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
// for the cumulative average
this.playlistController_.mainSegmentLoader_.throughput.count = 1;
}
},
bandwidth: {
get() {
let playerBandwidthEst = this.playlistController_.mainSegmentLoader_.bandwidth;
const networkInformation = window$1.navigator.connection || window$1.navigator.mozConnection || window$1.navigator.webkitConnection;
const tenMbpsAsBitsPerSecond = 10e6;
if (this.options_.useNetworkInformationApi && networkInformation) {
// downlink returns Mbps
// https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlink
const networkInfoBandwidthEstBitsPerSec = networkInformation.downlink * 1000 * 1000; // downlink maxes out at 10 Mbps. In the event that both networkInformationApi and the player
// estimate a bandwidth greater than 10 Mbps, use the larger of the two estimates to ensure that
// high quality streams are not filtered out.
if (networkInfoBandwidthEstBitsPerSec >= tenMbpsAsBitsPerSecond && playerBandwidthEst >= tenMbpsAsBitsPerSecond) {
playerBandwidthEst = Math.max(playerBandwidthEst, networkInfoBandwidthEstBitsPerSec);
} else {
playerBandwidthEst = networkInfoBandwidthEstBitsPerSec;
}
}
return playerBandwidthEst;
},
set(bandwidth) {
this.playlistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
// `count` is set to zero so that the current value of `rate` isn't included
// in the cumulative average
this.playlistController_.mainSegmentLoader_.throughput = {
rate: 0,
count: 0
};
}
},
/**
* `systemBandwidth` is a combination of two serial processes' bit-rates. The first
* is the network bitrate provided by `bandwidth` and the second is the bitrate of
* the entire process after that - decryption, transmuxing, and appending - provided
* by `throughput`.
*
* Since the two processes are serial, the overall system bandwidth is given by:
* sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
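*
* For example (illustrative numbers only): with bandwidth = 20,000,000 b/s and
* throughput = 30,000,000 b/s,
* sysBandwidth = 1 / (1/20e6 + 1/30e6) = 12,000,000 b/s,
* i.e. the combined rate is always lower than either stage on its own.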
*/
systemBandwidth: {
get() {
const invBandwidth = 1 / (this.bandwidth || 1);
let invThroughput;
if (this.throughput > 0) {
invThroughput = 1 / this.throughput;
} else {
invThroughput = 0;
}
const systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
return systemBitrate;
},
set() {
videojs.log.error('The "systemBandwidth" property is read-only');
}
}
});
if (this.options_.bandwidth) {
this.bandwidth = this.options_.bandwidth;
}
if (this.options_.throughput) {
this.throughput = this.options_.throughput;
}
Object.defineProperties(this.stats, {
bandwidth: {
get: () => this.bandwidth || 0,
enumerable: true
},
mediaRequests: {
get: () => this.playlistController_.mediaRequests_() || 0,
enumerable: true
},
mediaRequestsAborted: {
get: () => this.playlistController_.mediaRequestsAborted_() || 0,
enumerable: true
},
mediaRequestsTimedout: {
get: () => this.playlistController_.mediaRequestsTimedout_() || 0,
enumerable: true
},
mediaRequestsErrored: {
get: () => this.playlistController_.mediaRequestsErrored_() || 0,
enumerable: true
},
mediaTransferDuration: {
get: () => this.playlistController_.mediaTransferDuration_() || 0,
enumerable: true
},
mediaBytesTransferred: {
get: () => this.playlistController_.mediaBytesTransferred_() || 0,
enumerable: true
},
mediaSecondsLoaded: {
get: () => this.playlistController_.mediaSecondsLoaded_() || 0,
enumerable: true
},
mediaAppends: {
get: () => this.playlistController_.mediaAppends_() || 0,
enumerable: true
},
mainAppendsToLoadedData: {
get: () => this.playlistController_.mainAppendsToLoadedData_() || 0,
enumerable: true
},
audioAppendsToLoadedData: {
get: () => this.playlistController_.audioAppendsToLoadedData_() || 0,
enumerable: true
},
appendsToLoadedData: {
get: () => this.playlistController_.appendsToLoadedData_() || 0,
enumerable: true
},
timeToLoadedData: {
get: () => this.playlistController_.timeToLoadedData_() || 0,
enumerable: true
},
buffered: {
get: () => timeRangesToArray(this.tech_.buffered()),
enumerable: true
},
currentTime: {
get: () => this.tech_.currentTime(),
enumerable: true
},
currentSource: {
get: () => this.tech_.currentSource_,
enumerable: true
},
currentTech: {
get: () => this.tech_.name_,
enumerable: true
},
duration: {
get: () => this.tech_.duration(),
enumerable: true
},
main: {
get: () => this.playlists.main,
enumerable: true
},
playerDimensions: {
get: () => this.tech_.currentDimensions(),
enumerable: true
},
seekable: {
get: () => timeRangesToArray(this.tech_.seekable()),
enumerable: true
},
timestamp: {
get: () => Date.now(),
enumerable: true
},
videoPlaybackQuality: {
get: () => this.tech_.getVideoPlaybackQuality(),
enumerable: true
}
});
this.tech_.one('canplay', this.playlistController_.setupFirstPlay.bind(this.playlistController_));
this.tech_.on('bandwidthupdate', () => {
if (this.options_.useBandwidthFromLocalStorage) {
updateVhsLocalStorage({
bandwidth: this.bandwidth,
throughput: Math.round(this.throughput)
});
}
});
this.playlistController_.on('selectedinitialmedia', () => {
// Add the manual rendition mix-in to VhsHandler
renditionSelectionMixin(this);
});
this.playlistController_.sourceUpdater_.on('createdsourcebuffers', () => {
this.setupEme_();
}); // the bandwidth of the primary segment loader is our best
// estimate of overall bandwidth
this.on(this.playlistController_, 'progress', function () {
this.tech_.trigger('progress');
}); // In the live case, we need to ignore the very first `seeking` event since
// that will be the result of the seek-to-live behavior
this.on(this.playlistController_, 'firstplay', function () {
this.ignoreNextSeekingEvent_ = true;
});
this.setupQualityLevels_(); // do nothing if the tech has been disposed already
// this can occur if someone sets the src in player.ready(), for instance
if (!this.tech_.el()) {
return;
}
this.mediaSourceUrl_ = window$1.URL.createObjectURL(this.playlistController_.mediaSource);
this.tech_.src(this.mediaSourceUrl_);
}
createKeySessions_() {
const audioPlaylistLoader = this.playlistController_.mediaTypes_.AUDIO.activePlaylistLoader;
this.logger_('waiting for EME key session creation');
waitForKeySessionCreation({
player: this.player_,
sourceKeySystems: this.source_.keySystems,
audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
mainPlaylists: this.playlists.main.playlists
}).then(() => {
this.logger_('created EME key session');
this.playlistController_.sourceUpdater_.initializedEme();
}).catch(err => {
this.logger_('error while creating EME key session', err);
this.player_.error({
message: 'Failed to initialize media keys for EME',
code: 3
});
});
}
handleWaitingForKey_() {
// If waitingforkey is fired, it's possible that the data that's necessary to retrieve
// the key is in the manifest. While this should've happened on initial source load, it
// may happen again in live streams where the keys change, and the manifest info
// reflects the update.
//
// Because videojs-contrib-eme compares the PSSH data we send to that of PSSH data it's
// already requested keys for, we don't have to worry about this generating extraneous
// requests.
this.logger_('waitingforkey fired, attempting to create any new key sessions');
this.createKeySessions_();
}
/**
* If necessary and EME is available, sets up EME options and waits for key session
* creation.
*
* This function also updates the source updater so that it can be used, as for some
* browsers, EME must be configured before content is appended (if appending unencrypted
* content before encrypted content).
*/
setupEme_() {
const audioPlaylistLoader = this.playlistController_.mediaTypes_.AUDIO.activePlaylistLoader;
const didSetupEmeOptions = setupEmeOptions({
player: this.player_,
sourceKeySystems: this.source_.keySystems,
media: this.playlists.media(),
audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
});
this.player_.tech_.on('keystatuschange', e => {
this.playlistController_.updatePlaylistByKeyStatus(e.keyId, e.status);
});
this.handleWaitingForKey_ = this.handleWaitingForKey_.bind(this);
this.player_.tech_.on('waitingforkey', this.handleWaitingForKey_);
if (!didSetupEmeOptions) {
// If EME options were not set up, we've done all we could to initialize EME.
this.playlistController_.sourceUpdater_.initializedEme();
return;
}
this.createKeySessions_();
}
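/*
 * Illustrative sketch only (not executed as part of this bundle): setupEme_
 * relies on `keySystems` being present on the source object and on the
 * videojs-contrib-eme plugin having been initialized. The manifest URL and
 * license server URL below are placeholders.
 *
 *   player.eme(); // initialize videojs-contrib-eme
 *   player.src({
 *     src: 'https://example.com/encrypted.mpd',
 *     type: 'application/dash+xml',
 *     keySystems: {
 *       'com.widevine.alpha': '<license-server-url>'
 *     }
 *   });
 */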
/**
* Initializes the quality levels and sets listeners to update them.
*
* @method setupQualityLevels_
* @private
*/
setupQualityLevels_() {
const player = videojs.players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
// or qualityLevels_ listeners have already been setup, do nothing.
if (!player || !player.qualityLevels || this.qualityLevels_) {
return;
}
this.qualityLevels_ = player.qualityLevels();
this.playlistController_.on('selectedinitialmedia', () => {
handleVhsLoadedMetadata(this.qualityLevels_, this);
});
this.playlists.on('mediachange', () => {
handleVhsMediaChange(this.qualityLevels_, this.playlists);
});
}
/**
* return the version
*/
static version() {
return {
'@videojs/http-streaming': version$4,
'mux.js': version$3,
'mpd-parser': version$2,
'm3u8-parser': version$1,
'aes-decrypter': version
};
}
/**
* return the version
*/
version() {
return this.constructor.version();
}
canChangeType() {
return SourceUpdater.canChangeType();
}
/**
* Begin playing the video.
*/
play() {
this.playlistController_.play();
}
/**
* a wrapper around the function in PlaylistController
*/
setCurrentTime(currentTime) {
this.playlistController_.setCurrentTime(currentTime);
}
/**
* a wrapper around the function in PlaylistController
*/
duration() {
return this.playlistController_.duration();
}
/**
* a wrapper around the function in PlaylistController
*/
seekable() {
return this.playlistController_.seekable();
}
/**
* Abort all outstanding work and cleanup.
*/
dispose() {
if (this.playbackWatcher_) {
this.playbackWatcher_.dispose();
}
if (this.playlistController_) {
this.playlistController_.dispose();
}
if (this.qualityLevels_) {
this.qualityLevels_.dispose();
}
if (this.tech_ && this.tech_.vhs) {
delete this.tech_.vhs;
}
if (this.mediaSourceUrl_ && window$1.URL.revokeObjectURL) {
window$1.URL.revokeObjectURL(this.mediaSourceUrl_);
this.mediaSourceUrl_ = null;
}
if (this.tech_) {
this.tech_.off('waitingforkey', this.handleWaitingForKey_);
}
super.dispose();
}
convertToProgramTime(time, callback) {
return getProgramTime({
playlist: this.playlistController_.media(),
time,
callback
});
} // the player must be playing before calling this
seekToProgramTime(programTime, callback, pauseAfterSeek = true, retryCount = 2) {
return seekToProgramTime({
programTime,
playlist: this.playlistController_.media(),
retryCount,
pauseAfterSeek,
seekTo: this.options_.seekTo,
tech: this.options_.tech,
callback
});
}
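/*
 * Illustrative sketch only (not executed as part of this bundle): both program
 * time helpers operate on the currently active media playlist, and, as noted
 * above, playback must already have started before seekToProgramTime is
 * called. The timestamp and callback are placeholders.
 *
 *   const onSeeked = function (err) {
 *     if (err) {
 *       videojs.log.warn('could not seek to program time', err);
 *     }
 *   };
 *   player.tech().vhs.seekToProgramTime('2024-01-01T00:00:30.000Z', onSeeked);
 */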
/**
* Adds the onRequest, onResponse, offRequest and offResponse functions
* to the VhsHandler xhr Object.
*/
setupXhrHooks_() {
/**
* A player function for setting an onRequest hook
*
* @param {function} callback for request modification
*/
this.xhr.onRequest = callback => {
addOnRequestHook(this.xhr, callback);
};
/**
* A player function for setting an onResponse hook
*
* @param {function} callback for response data retrieval
*/
this.xhr.onResponse = callback => {
addOnResponseHook(this.xhr, callback);
};
/**
* Deletes a player onRequest callback if it exists
*
* @param {function} callback to delete from the player set
*/
this.xhr.offRequest = callback => {
removeOnRequestHook(this.xhr, callback);
};
/**
* Deletes a player onResponse callback if it exists
*
* @param {function} callback to delete from the player set
*/
this.xhr.offResponse = callback => {
removeOnResponseHook(this.xhr, callback);
}; // Trigger an event on the player to notify the user that vhs is ready to set xhr hooks.
// This allows hooks to be set before the source is set to vhs when handleSource is called.
this.player_.trigger('xhr-hooks-ready');
}
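/*
 * Illustrative sketch only (not executed as part of this bundle): the
 * 'xhr-hooks-ready' event triggered above signals that per-player hooks can be
 * registered, even before the VHS source has finished loading. The header
 * name and value used here are placeholders.
 *
 *   player.on('xhr-hooks-ready', () => {
 *     player.tech().vhs.xhr.onRequest((options) => {
 *       options.headers = Object.assign({}, options.headers, { 'X-Placeholder': 'value' });
 *       return options;
 *     });
 *   });
 */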
attachStreamingEventListeners_() {
const playlistControllerEvents = ['seekablerangeschanged', 'bufferedrangeschanged', 'contentsteeringloadstart', 'contentsteeringloadcomplete', 'contentsteeringparsed'];
const playbackWatcher = ['gapjumped', 'playedrangeschanged']; // re-emit streaming events and payloads on the player.
playlistControllerEvents.forEach(eventName => {
this.playlistController_.on(eventName, metadata => {
this.player_.trigger(_extends({}, metadata));
});
});
playbackWatcher.forEach(eventName => {
this.playbackWatcher_.on(eventName, metadata => {
this.player_.trigger(_extends({}, metadata));
});
});
}
}
/**
* The Source Handler object, which informs video.js what additional
* MIME types are supported and sets up playback. It is registered
* automatically to the appropriate tech based on the capabilities of
* the browser it is running in. It is not necessary to use or modify
* this object in normal usage.
*/
const VhsSourceHandler = {
name: 'videojs-http-streaming',
VERSION: version$4,
canHandleSource(srcObj, options = {}) {
const localOptions = merge(videojs.options, options);
return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
},
handleSource(source, tech, options = {}) {
const localOptions = merge(videojs.options, options);
tech.vhs = new VhsHandler(source, tech, localOptions);
tech.vhs.xhr = xhrFactory();
tech.vhs.setupXhrHooks_();
tech.vhs.src(source.src, source.type);
return tech.vhs;
},
canPlayType(type, options) {
const simpleType = simpleTypeFromSourceType(type);
if (!simpleType) {
return '';
}
const overrideNative = VhsSourceHandler.getOverrideNative(options);
const supportsTypeNatively = Vhs.supportsTypeNatively(simpleType);
const canUseMsePlayback = !supportsTypeNatively || overrideNative;
return canUseMsePlayback ? 'maybe' : '';
},
getOverrideNative(options = {}) {
const {
vhs = {}
} = options;
const defaultOverrideNative = !(videojs.browser.IS_ANY_SAFARI || videojs.browser.IS_IOS);
const {
overrideNative = defaultOverrideNative
} = vhs;
return overrideNative;
}
};
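/*
 * Illustrative sketch only (not executed as part of this bundle): on browsers
 * with native HLS support (Safari / iOS), VHS steps aside by default, per
 * getOverrideNative above. Forcing MSE-based playback is an opt-in; the
 * element id 'my-video' is a placeholder.
 *
 *   const player = videojs('my-video', {
 *     html5: {
 *       vhs: {
 *         overrideNative: true
 *       },
 *       // emulated tracks are required when overriding native playback
 *       // (see the check in the VhsHandler constructor)
 *       nativeAudioTracks: false,
 *       nativeVideoTracks: false
 *     }
 *   });
 */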
/**
* Check to see if the native MediaSource object exists and supports
* an MP4 container with both H.264 video and AAC-LC audio.
*
* @return {boolean} if native media sources are supported
*/
const supportsNativeMediaSources = () => {
return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
}; // register source handlers with the appropriate techs
if (supportsNativeMediaSources()) {
videojs.getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
}
videojs.VhsHandler = VhsHandler;
videojs.VhsSourceHandler = VhsSourceHandler;
videojs.Vhs = Vhs;
if (!videojs.use) {
videojs.registerComponent('Vhs', Vhs);
}
videojs.options.vhs = videojs.options.vhs || {};
if (!videojs.getPlugin || !videojs.getPlugin('reloadSourceOnError')) {
videojs.registerPlugin('reloadSourceOnError', reloadSourceOnError);
}
export { videojs as default };