diff --git a/frontend/config/mediacms.config.pages.js b/frontend/config/mediacms.config.pages.js
index 56b35df..3d58e75 100644
--- a/frontend/config/mediacms.config.pages.js
+++ b/frontend/config/mediacms.config.pages.js
@@ -20,11 +20,11 @@ const formatPage = (page) => {
? templates.renderPageContent({ page: { id: pageContentId, component: page.component } })
: undefined;
const headLinks = [
- { rel: 'preload', href: './static/lib/video-js/7.7.5/video.min.js', as: 'script' },
+ { rel: 'preload', href: './static/lib/video-js/7.20.2/video.min.js', as: 'script' },
...(page.headLinks ? page.headLinks : []),
];
const bodyScripts = [
- { src: './static/lib/video-js/7.7.5/video.min.js' },
+ { src: './static/lib/video-js/7.20.2/video.min.js' },
...(page.bodyScripts ? page.bodyScripts : []),
];
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index b322c8d..46f733f 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -1091,7 +1091,6 @@
"version": "7.14.6",
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.14.6.tgz",
"integrity": "sha512-/PCB2uJ7oM44tz8YhC4Z/6PeOKXp4K588f+5M3clr1M4zbqztlo0XEfJ2LEzj/FgwfgGcIdl8n7YYjTCI0BYwg==",
- "dev": true,
"requires": {
"regenerator-runtime": "^0.13.4"
}
@@ -5154,6 +5153,14 @@
"ipaddr.js": "^1.9.0"
}
},
+ "invariant": {
+ "version": "2.2.4",
+ "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz",
+ "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==",
+ "requires": {
+ "loose-envify": "^1.0.0"
+ }
+ },
"ip": {
"version": "1.1.5",
"resolved": "https://registry.npmjs.org/ip/-/ip-1.1.5.tgz",
@@ -7664,7 +7671,6 @@
"version": "15.7.2",
"resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz",
"integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==",
- "dev": true,
"requires": {
"loose-envify": "^1.4.0",
"object-assign": "^4.1.1",
@@ -7841,8 +7847,28 @@
"react-is": {
"version": "16.13.1",
"resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz",
- "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==",
- "dev": true
+ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="
+ },
+ "react-mentions": {
+ "version": "4.4.7",
+ "resolved": "https://registry.npmjs.org/react-mentions/-/react-mentions-4.4.7.tgz",
+ "integrity": "sha512-VNriu2h/uOB+RS0mwZgPG2Vf+UtdDvRh5zbXa2TNc1WqacKuNDgTdhlbo9LEOZRBxRzIeTUYQmYJ7p9M9rDHqQ==",
+ "requires": {
+ "@babel/runtime": "7.4.5",
+ "invariant": "^2.2.4",
+ "prop-types": "^15.5.8",
+ "substyle": "^9.1.0"
+ },
+ "dependencies": {
+ "@babel/runtime": {
+ "version": "7.4.5",
+ "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.4.5.tgz",
+ "integrity": "sha512-TuI4qpWZP6lGOGIuGWtp9sPluqYICmbk8T/1vpSysqJxRPkudh/ofFWyqdcMsDf2s7KvDL4/YHgKyvcS3g9CJQ==",
+ "requires": {
+ "regenerator-runtime": "^0.13.2"
+ }
+ }
+ }
},
"read-cache": {
"version": "1.0.0",
@@ -7956,8 +7982,7 @@
"regenerator-runtime": {
"version": "0.13.7",
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz",
- "integrity": "sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew==",
- "dev": true
+ "integrity": "sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew=="
},
"regenerator-transform": {
"version": "0.14.5",
@@ -9233,6 +9258,15 @@
"postcss-selector-parser": "^6.0.4"
}
},
+ "substyle": {
+ "version": "9.4.1",
+ "resolved": "https://registry.npmjs.org/substyle/-/substyle-9.4.1.tgz",
+ "integrity": "sha512-VOngeq/W1/UkxiGzeqVvDbGDPM8XgUyJVWjrqeh+GgKqspEPiLYndK+XRcsKUHM5Muz/++1ctJ1QCF/OqRiKWA==",
+ "requires": {
+ "@babel/runtime": "^7.3.4",
+ "invariant": "^2.2.4"
+ }
+ },
"supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
diff --git a/frontend/src/static/js/components/video-player/VideoPlayer.scss b/frontend/src/static/js/components/video-player/VideoPlayer.scss
index e58f2ba..b5436ca 100755
--- a/frontend/src/static/js/components/video-player/VideoPlayer.scss
+++ b/frontend/src/static/js/components/video-player/VideoPlayer.scss
@@ -1,5 +1,5 @@
@use "sass:math";
-@import '../../../lib/video-js/7.7.5/video-js.min.css';
+@import '../../../lib/video-js/7.20.2/video-js.min.css';
@import '../../../css/includes/_variables.scss';
@keyframes up-next-circle-countdown {
diff --git a/frontend/src/static/lib/video-js/7.7.5/video-js.css b/frontend/src/static/lib/video-js/7.20.2/video-js.css
old mode 100755
new mode 100644
similarity index 87%
rename from frontend/src/static/lib/video-js/7.7.5/video-js.css
rename to frontend/src/static/lib/video-js/7.20.2/video-js.css
index 5b0e3b9..60432a2
--- a/frontend/src/static/lib/video-js/7.7.5/video-js.css
+++ b/frontend/src/static/lib/video-js/7.20.2/video-js.css
@@ -379,21 +379,38 @@
.video-js.vjs-fluid,
.video-js.vjs-16-9,
-.video-js.vjs-4-3 {
+.video-js.vjs-4-3,
+.video-js.vjs-9-16,
+.video-js.vjs-1-1 {
width: 100%;
max-width: 100%;
+}
+
+.video-js.vjs-fluid:not(.vjs-audio-only-mode),
+.video-js.vjs-16-9:not(.vjs-audio-only-mode),
+.video-js.vjs-4-3:not(.vjs-audio-only-mode),
+.video-js.vjs-9-16:not(.vjs-audio-only-mode),
+.video-js.vjs-1-1:not(.vjs-audio-only-mode) {
height: 0;
}
-.video-js.vjs-16-9 {
+.video-js.vjs-16-9:not(.vjs-audio-only-mode) {
padding-top: 56.25%;
}
-.video-js.vjs-4-3 {
+.video-js.vjs-4-3:not(.vjs-audio-only-mode) {
padding-top: 75%;
}
-.video-js.vjs-fill {
+.video-js.vjs-9-16:not(.vjs-audio-only-mode) {
+ padding-top: 177.7777777778%;
+}
+
+.video-js.vjs-1-1:not(.vjs-audio-only-mode) {
+ padding-top: 100%;
+}
+
+.video-js.vjs-fill:not(.vjs-audio-only-mode) {
width: 100%;
height: 100%;
}
@@ -406,6 +423,10 @@
height: 100%;
}
+.video-js.vjs-audio-only-mode .vjs-tech {
+ display: none;
+}
+
body.vjs-full-window {
padding: 0;
margin: 0;
@@ -422,7 +443,7 @@ body.vjs-full-window {
right: 0;
}
-.video-js.vjs-fullscreen {
+.video-js.vjs-fullscreen:not(.vjs-ios-native-fs) {
width: 100% !important;
height: 100% !important;
padding-top: 0 !important;
@@ -451,8 +472,8 @@ body.vjs-full-window {
.vjs-lock-showing {
display: block !important;
- opacity: 1;
- visibility: visible;
+ opacity: 1 !important;
+ visibility: visible !important;
}
.vjs-no-js {
@@ -614,6 +635,11 @@ body.vjs-full-window {
color: #2B333F;
}
+.video-js .vjs-menu *:not(.vjs-selected):focus:not(:focus-visible),
+.js-focus-visible .vjs-menu *:not(.vjs-selected):focus:not(.focus-visible) {
+ background: none;
+}
+
.vjs-menu li.vjs-menu-title {
text-align: center;
text-transform: uppercase;
@@ -736,7 +762,8 @@ body.vjs-full-window {
background-color: rgba(43, 51, 63, 0.7);
}
-.vjs-has-started .vjs-control-bar {
+.vjs-has-started .vjs-control-bar,
+.vjs-audio-only-mode .vjs-control-bar {
display: flex;
visibility: visible;
opacity: 1;
@@ -746,6 +773,7 @@ body.vjs-full-window {
.vjs-has-started.vjs-user-inactive.vjs-playing .vjs-control-bar {
visibility: visible;
opacity: 0;
+ pointer-events: none;
transition: visibility 1s, opacity 1s;
}
@@ -755,9 +783,11 @@ body.vjs-full-window {
display: none !important;
}
-.vjs-audio.vjs-has-started.vjs-user-inactive.vjs-playing .vjs-control-bar {
+.vjs-audio.vjs-has-started.vjs-user-inactive.vjs-playing .vjs-control-bar,
+.vjs-audio-only-mode.vjs-has-started.vjs-user-inactive.vjs-playing .vjs-control-bar {
opacity: 1;
visibility: visible;
+ pointer-events: auto;
}
.vjs-has-started.vjs-no-flex .vjs-control-bar {
@@ -774,18 +804,28 @@ body.vjs-full-window {
flex: none;
}
+.video-js .vjs-control.vjs-visible-text {
+ width: auto;
+ padding-left: 1em;
+ padding-right: 1em;
+}
+
.vjs-button > .vjs-icon-placeholder:before {
font-size: 1.8em;
line-height: 1.67;
}
+.vjs-button > .vjs-icon-placeholder {
+ display: block;
+}
+
.video-js .vjs-control:focus:before,
.video-js .vjs-control:hover:before,
.video-js .vjs-control:focus {
text-shadow: 0em 0em 1em white;
}
-.video-js .vjs-control-text {
+.video-js *:not(.vjs-visible-text) > .vjs-control-text {
border: 0;
clip: rect(0 0 0 0);
height: 1px;
@@ -1074,6 +1114,7 @@ body.vjs-full-window {
.video-js .vjs-volume-level:before {
position: absolute;
font-size: 0.9em;
+ z-index: 1;
}
.vjs-slider-vertical .vjs-volume-level {
@@ -1082,6 +1123,7 @@ body.vjs-full-window {
.vjs-slider-vertical .vjs-volume-level:before {
top: -0.5em;
left: -0.3em;
+ z-index: 1;
}
.vjs-slider-horizontal .vjs-volume-level {
@@ -1116,6 +1158,77 @@ body.vjs-full-window {
left: -2em;
}
+.video-js .vjs-volume-tooltip {
+ background-color: #fff;
+ background-color: rgba(255, 255, 255, 0.8);
+ border-radius: 0.3em;
+ color: #000;
+ float: right;
+ font-family: Arial, Helvetica, sans-serif;
+ font-size: 1em;
+ padding: 6px 8px 8px 8px;
+ pointer-events: none;
+ position: absolute;
+ top: -3.4em;
+ visibility: hidden;
+ z-index: 1;
+}
+
+.video-js .vjs-volume-control:hover .vjs-volume-tooltip,
+.video-js .vjs-volume-control:hover .vjs-progress-holder:focus .vjs-volume-tooltip {
+ display: block;
+ font-size: 1em;
+ visibility: visible;
+}
+
+.video-js .vjs-volume-vertical:hover .vjs-volume-tooltip,
+.video-js .vjs-volume-vertical:hover .vjs-progress-holder:focus .vjs-volume-tooltip {
+ left: 1em;
+ top: -12px;
+}
+
+.video-js .vjs-volume-control.disabled:hover .vjs-volume-tooltip {
+ font-size: 1em;
+}
+
+.video-js .vjs-volume-control .vjs-mouse-display {
+ display: none;
+ position: absolute;
+ width: 100%;
+ height: 1px;
+ background-color: #000;
+ z-index: 1;
+}
+
+.video-js .vjs-volume-horizontal .vjs-mouse-display {
+ width: 1px;
+ height: 100%;
+}
+
+.vjs-no-flex .vjs-volume-control .vjs-mouse-display {
+ z-index: 0;
+}
+
+.video-js .vjs-volume-control:hover .vjs-mouse-display {
+ display: block;
+}
+
+.video-js.vjs-user-inactive .vjs-volume-control .vjs-mouse-display {
+ visibility: hidden;
+ opacity: 0;
+ transition: visibility 1s, opacity 1s;
+}
+
+.video-js.vjs-user-inactive.vjs-no-flex .vjs-volume-control .vjs-mouse-display {
+ display: none;
+}
+
+.vjs-mouse-display .vjs-volume-tooltip {
+ color: #fff;
+ background-color: #000;
+ background-color: rgba(0, 0, 0, 0.8);
+}
+
.vjs-poster {
display: inline-block;
vertical-align: middle;
@@ -1134,18 +1247,16 @@ body.vjs-full-window {
height: 100%;
}
-.vjs-has-started .vjs-poster {
- display: none;
-}
-
-.vjs-audio.vjs-has-started .vjs-poster {
- display: block;
-}
-
+.vjs-has-started .vjs-poster,
.vjs-using-native-controls .vjs-poster {
display: none;
}
+.vjs-audio.vjs-has-started .vjs-poster,
+.vjs-has-started.vjs-audio-poster-mode .vjs-poster {
+ display: block;
+}
+
.video-js .vjs-live-control {
display: flex;
align-items: flex-start;
@@ -1166,6 +1277,7 @@ body.vjs-full-window {
}
.video-js .vjs-seek-to-live-control {
+ align-items: center;
cursor: pointer;
flex: none;
display: inline-flex;
@@ -1252,6 +1364,7 @@ body.vjs-full-window {
pointer-events: none;
}
+.video-js.vjs-controls-disabled .vjs-text-track-display,
.video-js.vjs-user-inactive.vjs-playing .vjs-text-track-display {
bottom: 1em;
}
@@ -1278,6 +1391,7 @@ video::-webkit-media-text-track-display {
transform: translateY(-3em);
}
+.video-js.vjs-controls-disabled video::-webkit-media-text-track-display,
.video-js.vjs-user-inactive.vjs-playing video::-webkit-media-text-track-display {
transform: translateY(-1.5em);
}
@@ -1286,10 +1400,18 @@ video::-webkit-media-text-track-display {
cursor: pointer;
flex: none;
}
+.video-js.vjs-audio-only-mode .vjs-picture-in-picture-control {
+ display: none;
+}
+
.video-js .vjs-fullscreen-control {
cursor: pointer;
flex: none;
}
+.video-js.vjs-audio-only-mode .vjs-fullscreen-control {
+ display: none;
+}
+
.vjs-playback-rate > .vjs-menu-button,
.vjs-playback-rate .vjs-playback-rate-value {
position: absolute;
@@ -1445,10 +1567,18 @@ video::-webkit-media-text-track-display {
border-top-color: #73859f;
}
}
+.video-js.vjs-audio-only-mode .vjs-captions-button {
+ display: none;
+}
+
.vjs-chapters-button .vjs-menu ul {
width: 24em;
}
+.video-js.vjs-audio-only-mode .vjs-descriptions-button {
+ display: none;
+}
+
.video-js .vjs-subs-caps-button + .vjs-menu .vjs-captions-menu-item .vjs-menu-item-text .vjs-icon-placeholder {
vertical-align: middle;
display: inline-block;
@@ -1462,6 +1592,10 @@ video::-webkit-media-text-track-display {
line-height: inherit;
}
+.video-js.vjs-audio-only-mode .vjs-subs-caps-button {
+ display: none;
+}
+
.video-js .vjs-audio-button + .vjs-menu .vjs-main-desc-menu-item .vjs-menu-item-text .vjs-icon-placeholder {
vertical-align: middle;
display: inline-block;
@@ -1475,62 +1609,38 @@ video::-webkit-media-text-track-display {
line-height: inherit;
}
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-current-time,
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-time-divider,
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-duration,
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-remaining-time,
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-playback-rate,
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-chapters-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-descriptions-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-captions-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-subtitles-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-audio-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-volume-control, .video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-current-time,
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-time-divider,
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-duration,
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-remaining-time,
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-playback-rate,
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-chapters-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-descriptions-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-captions-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-subtitles-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-audio-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-volume-control, .video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-current-time,
-.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-time-divider,
-.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-duration,
-.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-remaining-time,
-.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-playback-rate,
-.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-chapters-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-descriptions-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-captions-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-subtitles-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-audio-button,
-.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-volume-control {
+.video-js.vjs-layout-small .vjs-current-time,
+.video-js.vjs-layout-small .vjs-time-divider,
+.video-js.vjs-layout-small .vjs-duration,
+.video-js.vjs-layout-small .vjs-remaining-time,
+.video-js.vjs-layout-small .vjs-playback-rate,
+.video-js.vjs-layout-small .vjs-volume-control, .video-js.vjs-layout-x-small .vjs-current-time,
+.video-js.vjs-layout-x-small .vjs-time-divider,
+.video-js.vjs-layout-x-small .vjs-duration,
+.video-js.vjs-layout-x-small .vjs-remaining-time,
+.video-js.vjs-layout-x-small .vjs-playback-rate,
+.video-js.vjs-layout-x-small .vjs-volume-control, .video-js.vjs-layout-tiny .vjs-current-time,
+.video-js.vjs-layout-tiny .vjs-time-divider,
+.video-js.vjs-layout-tiny .vjs-duration,
+.video-js.vjs-layout-tiny .vjs-remaining-time,
+.video-js.vjs-layout-tiny .vjs-playback-rate,
+.video-js.vjs-layout-tiny .vjs-volume-control {
display: none;
}
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal:hover,
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal:active,
-.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active, .video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal:hover,
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal:active,
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active, .video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-volume-panel.vjs-volume-panel-horizontal:hover,
-.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-volume-panel.vjs-volume-panel-horizontal:active,
-.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active {
+.video-js.vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal:hover, .video-js.vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal:active, .video-js.vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active, .video-js.vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-hover, .video-js.vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal:hover, .video-js.vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal:active, .video-js.vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active, .video-js.vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-hover, .video-js.vjs-layout-tiny .vjs-volume-panel.vjs-volume-panel-horizontal:hover, .video-js.vjs-layout-tiny .vjs-volume-panel.vjs-volume-panel-horizontal:active, .video-js.vjs-layout-tiny .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active, .video-js.vjs-layout-tiny .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-hover {
width: auto;
width: initial;
}
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small:not(.vjs-liveui) .vjs-subs-caps-button, .video-js:not(.vjs-fullscreen).vjs-layout-x-small:not(.vjs-live) .vjs-subs-caps-button, .video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-subs-caps-button {
+.video-js.vjs-layout-x-small .vjs-progress-control, .video-js.vjs-layout-tiny .vjs-progress-control {
display: none;
}
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small.vjs-liveui .vjs-custom-control-spacer, .video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-custom-control-spacer {
+.video-js.vjs-layout-x-small .vjs-custom-control-spacer {
flex: auto;
display: block;
}
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small.vjs-liveui.vjs-no-flex .vjs-custom-control-spacer, .video-js:not(.vjs-fullscreen).vjs-layout-tiny.vjs-no-flex .vjs-custom-control-spacer {
+.video-js.vjs-layout-x-small.vjs-no-flex .vjs-custom-control-spacer {
width: auto;
}
-.video-js:not(.vjs-fullscreen).vjs-layout-x-small.vjs-liveui .vjs-progress-control, .video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-progress-control {
- display: none;
-}
.vjs-modal-dialog.vjs-text-track-settings {
background-color: #2B333F;
@@ -1653,11 +1763,8 @@ video::-webkit-media-text-track-display {
.js-focus-visible .video-js *:focus:not(.focus-visible) {
outline: none;
- background: none;
}
-.video-js *:focus:not(:focus-visible),
-.video-js .vjs-menu *:focus:not(:focus-visible) {
+.video-js *:focus:not(:focus-visible) {
outline: none;
- background: none;
}
diff --git a/frontend/src/static/lib/video-js/7.20.2/video-js.min.css b/frontend/src/static/lib/video-js/7.20.2/video-js.min.css
new file mode 100644
index 0000000..8026ccf
--- /dev/null
+++ b/frontend/src/static/lib/video-js/7.20.2/video-js.min.css
@@ -0,0 +1 @@
+@charset "UTF-8";.video-js .vjs-big-play-button .vjs-icon-placeholder:before,.video-js .vjs-modal-dialog,.vjs-button>.vjs-icon-placeholder:before,.vjs-modal-dialog .vjs-modal-dialog-content{position:absolute;top:0;left:0;width:100%;height:100%}.video-js .vjs-big-play-button .vjs-icon-placeholder:before,.vjs-button>.vjs-icon-placeholder:before{text-align:center}@font-face{font-family:VideoJS;src:url(data:application/font-woff;charset=utf-8;base64,d09GRgABAAAAABDkAAsAAAAAG6gAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABHU1VCAAABCAAAADsAAABUIIslek9TLzIAAAFEAAAAPgAAAFZRiV3hY21hcAAAAYQAAADaAAADPv749/pnbHlmAAACYAAAC3AAABHQZg6OcWhlYWQAAA3QAAAAKwAAADYZw251aGhlYQAADfwAAAAdAAAAJA+RCLFobXR4AAAOHAAAABMAAACM744AAGxvY2EAAA4wAAAASAAAAEhF6kqubWF4cAAADngAAAAfAAAAIAE0AIFuYW1lAAAOmAAAASUAAAIK1cf1oHBvc3QAAA/AAAABJAAAAdPExYuNeJxjYGRgYOBiMGCwY2BycfMJYeDLSSzJY5BiYGGAAJA8MpsxJzM9kYEDxgPKsYBpDiBmg4gCACY7BUgAeJxjYGS7wTiBgZWBgaWQ5RkDA8MvCM0cwxDOeI6BgYmBlZkBKwhIc01hcPjI+FGJHcRdyA4RZgQRADK3CxEAAHic7dFZbsMgAEXRS0ycyZnnOeG7y+qC8pU1dHusIOXxuoxaOlwZYWQB0Aea4quIEN4E9LzKbKjzDeM6H/mua6Lmc/p8yhg0lvdYx15ZG8uOLQOGjMp3EzqmzJizYMmKNRu27Nhz4MiJMxeu3Ljz4Ekqm7T8P52G8PP3lnTOVk++Z6iN6QZzNN1F7ptuN7eGOjDUoaGODHVsuvU8MdTO9Hd5aqgzQ50b6sJQl4a6MtS1oW4MdWuoO0PdG+rBUI+GejLUs6FeDPVqqDdDvRvqw1CfhpqM9At0iFLaAAB4nJ1YDXBTVRZ+5/22TUlJ8we0pHlJm7RJf5O8F2j6EymlSPkpxaL8U2xpa3DKj0CBhc2IW4eWKSokIoLsuMqssM64f+jA4HSdWXXXscBq67IOs3FXZ1ZYWVyRFdo899yXtIBQZ90k7717zz3v3HPPOfd854YCCj9cL9dL0RQFOqCbGJnrHb5EayiKIWN8iA/hWBblo6hUWm8TtCDwE80WMJus/irwyxOdxeB0MDb14VNJHnXYoLLSl6FfCUYO9nYPTA8Epg9090LprfbBbZ2hY0UlJUXHQp3/vtWkS6EBv8+rPMq5u9692f/dNxJNiqwC1xPE9TCUgCsSdQWgE3XQD25lkG4CN2xmTcOXWBOyser6RN6KnGbKSbmQ3+d0OI1m2W8QzLLkI2sykrWAgJJEtA8vGGW/2Q+CmT3n8zS9wZwu2DCvtuZKZN3xkrLh36yCZuUomQSqGpY8t/25VfHVhw8z4ebGBtfLb0ya9PCaDc+8dGTvk2dsh6z7WzvowlXKUSWo9MJ15a3KrEP2loOr2Ojhw6iW6hf2BDdEccQvZGpaAy7YovSwq8kr7HGllxpd71rkS6G0Sf11sl9OvMK1+jwPPODxjUwkOim9CU3ix1wNjXDfmJSEn618Bs6lpWwUpU+8PCqLMY650zjq8VhCIP17NEKTx3eaLL+s5Pi6yJWaWjTHLR1jYzPSV9VF/6Ojdb/1kO3Mk3uhHC0x6gc1BjlKQ+nQFxTYdaJkZ7ySVxLBbhR1dsboNXp1tCYKW2LRaEzpYcIx2BKNxaL0ZaUnSqfFoiNhHKR/GkX6PWUSAaJelQaqZL1EpoHNsajSEyPSoJ9IjhIxTdjHLmwZvhRDOiFTY/YeQnvrVZmiTQtGncECXtFTBZLOVwwMRgoXHAkXzMzPn1nAJJ8jYSbMDaqN2waGLzNhih/bZynUBMpIWSg7VYi7DRx2m8ALkIdRCJwI6ArJx2EI8kaDWeTQKeAFk9fjl/1AvwktjQ1P7NjyMGQyfd4vjipX6M/i52D7Cq80kqlcxEcGXRr/FEcgs0u5uGgB4VWuMFfpdn2Re6Hi3PqzmxWKsz6+ae2Pn9hXXw/fqM859UiGC0oKYYILJBqJrsn1Z1E5qOs9rQCiUQRREjm8yJcbHF5cUJufX1vAHlefw0XgUoboS3ETfQlTxBC4SOtuE8VPRJTBSCQSjZCpk7Gqzu+masaZ2y7Zjehho4F3g82BNDkAHpORG4+OCS+f6JTPmtRn/PH1kch6d04sp7AQb25aQ/pqUyXeQ8vrebG8OYQdXOQ+585u0sdW9rqalzRURiJ+9F4MweRFrKUjl1GUYhH1A27WOHw5cTFSFPMo9EeUIGnQTZHIaJ7AHLaOKsOODaNF9jkBjYG2QEsQ2xjMUAx2bBEbeTBWMHwskBjngq56S/yfgkBnWBa4K9sqKtq2t1UI8S9He5XuBRbawAdatrQEAi30Aks2+LM8WeCbalVZkWNylvJ+dqJnzVb+OHlSoKW8nPCP7Rd+CcZ2DdWAGqJ2CBFOphgywFFCFBNtfAbGtNPBCwxvygHeYMZMY9ZboBqwq/pVrsbgN5tkv152ODlbMfiqwGMBgxa4Exz3QhovRIUp6acqZmQzRq0ypDXS2TPLT02YIkQETnOE445oOGxOmXAqUJNNG7XgupMjPq2ua9asrj5yY/yuKteO1Kx0YNJTufrirLe1mZnat7OL6rnUdCWenpW6I8mAnbsY8KWs1PuSovCW9A/Z25PQ24a7cNOqgmTkLmBMgh4THgc4b9k2IVv1/g/F5nGljwPLfOgHAzJzh45V/4+WenTzmMtR5Z7us2Tys909UHqrPY7KbckoxRvRHhmVc3cJGE97uml0R1S0jdULVl7EvZtDFVBF35N9cEdjpgmAiOlFZ+Dtoh93+D3zzHr8RRNZQhnCNMNbcegOvpEwZoL+06cJQ07h+th3fZ/7PVbVC6ngTAV/KoLFuO6+2KFcU651gEb5ugPSIb1D+Xp8V4+k3sEIGnw5mYe4If4k1lFYr6SCzmM2EQ8iWtmwjnBI9kTwe1TlfAmXh7H02by9fW2gsjKwtv0aaURKil4OdV7rDL1MXIFNrhdxohcZXYTnq47WisrKitaObbf5+yvkLi5J6lCNZZ+B6GC38VNBZBDidSS/+mSvh6s+srgC8pyKMvDtt+de3c9fU76ZPfuM8ud4Kv0fyP/LqfepMT/3oZxSqpZaTa1DaQYLY8TFsHYbWYsPoRhRWfL5eSSQbhUGgGC3YLbVMk6PitTFNGpAsNrC6D1VNBKgBHMejaiu
RWEWGgsSDBTJjqWIl8kJLlsaLJ2tXDr6xGfT85bM2Q06a46x2HTgvdnV8z5YDy/27J4zt6x2VtkzjoYpkq36kaBr4eQSg7tyiVweWubXZugtadl58ydapfbORfKsDTuZ0OBgx4cfdjCf5tbWNITnL120fdOi1RV1C3uKGzNdwYLcMvZ3BxoPyTOCD1XvXTp7U10gWCVmTV9b3r2z0SkGWovb2hp9I89O8a2smlyaO8muMU+dRmtzp60IzAoFpjLr1n388boLyf0dRvxhsHZ0qbWqDkwqvvpkj4l0fY6EIXRi5sQSrAvsVYwXRy4qJ2EVtD1AN7a0HWth9ymvL1xc3WTUKK/TAHA/bXDVtVWfOMfuGxGZv4Ln/jVr9jc3j1yMv0tndmyt9Vq88Y9gH1wtLX3KWjot5++jWHgAoZZkQ14wGQ20Fli71UmKJAy4xKMSTGbVdybW7FDDAut9XpD5AzWrYO7zQ8qffqF8+Ynd/clrHcdyxGy3a/3+mfNnzC/cBsveTjnTvXf1o6vzOlZw7WtqtdmPK/Errz/6NNtD72zmNOZfbmYdTGHfoofqI79Oc+R2n1lrnL6pOm0Up7kwxhTW12Amm7WYkXR2qYrF2AmgmbAsxZjwy1xpg/m1Je2vrp8v/nz2xpmlBg4E9hrMU341wVpTOh/OfmGvAnra8q6uctr60ZQHV3Q+WMQJykMj8ZsWn2QBOmmHMB+m5pDIpTFonYigiaKAhGEiAHF7EliVnQkjoLVIMPtJpBKHYd3A8GYH9jJzrWwmHx5Qjp7vDAX0suGRym1vtm/9W1/HyR8vczfMs6Sk8DSv855/5dlX9oQq52hT8syyp2rx5Id17IAyAM3wIjQPMOHzytEB64q6D5zT91yNbnx3V/nqnd017S9Y0605k3izoXLpsxde2n38yoOV9s1LcjwzNjbdX6asnBVaBj/6/DwKwPkpcqbDG7BnsXoSqWnUAmottYF6jMSdVyYZh3zVXCjwTiwwHH6sGuRiEHQGzuRX6whZkp123oy1BWE2mEfJ/tvIRtM4ZM5bDXiMsPMaAKOTyc5uL57rqyyc5y5JE5pm1i2S2iUX0CcaQ6lC6Zog7JqSqZmYlosl2K6pwNA84zRnQW6SaALYZQGW5lhCtU/W34N6o+bKfZ8cf3/Cl/+iTX3wBzpOY4mRkeNf3rptycGSshQWgGbYt5jFc2e0+DglIrwl6DVWQ7BuwaJ3Xk1J4VL5urnLl/Wf+gHU/hZoZdKNym6lG+I34FaNeZKcSpJIo2IeCVvpdsDGfKvzJnAwmeD37Ow65ZWwSowpgwX5T69s/rB55dP5BcpgDKFV8p7q2sn/1uc93bVzT/w6UrCqDTWvfCq/oCD/qZXNoUj8BL5Kp6GU017frfNXkAtiiyf/SOCEeLqnd8R/Ql9GlCRfctS6k5chvIBuQ1zCCjoCHL2DHNHIXxMJ3kQeO8lbsUXONeSfA5EjcG6/E+KdhN4bP04vBhdi883+BFBzQbxFbvZzQeY9LNBZc0FNfn5NwfDn6rCTnTw6R8o+gfpf5hCom33cRuiTlss3KHmZjD+BPN+5gXuA2ziS/Q73mLxUkpbKN/eqwz5uK0X9F3h2d1V4nGNgZGBgAOJd776+iue3+crAzc4AAje5Bfcg0xz9YHEOBiYQBQA8FQlFAHicY2BkYGBnAAGOPgaG//85+hkYGVCBMgBGGwNYAAAAeJxjYGBgYB8EmKOPgQEAQ04BfgAAAAAAAA4AaAB+AMwA4AECAUIBbAGYAcICGAJYArQC4AMwA7AD3gQwBJYE3AUkBWYFigYgBmYGtAbqB1gIEghYCG4IhAi2COh4nGNgZGBgUGYoZWBnAAEmIOYCQgaG/2A+AwAYCQG2AHicXZBNaoNAGIZfE5PQCKFQ2lUps2oXBfOzzAESyDKBQJdGR2NQR3QSSE/QE/QEPUUPUHqsvsrXjTMw83zPvPMNCuAWP3DQDAejdm1GjzwS7pMmwi75XngAD4/CQ/oX4TFe4Qt7uMMbOzjuDc0EmXCP/C7cJ38Iu+RP4QEe8CU8pP8WHmOPX2EPz87TPo202ey2OjlnQSXV/6arOjWFmvszMWtd6CqwOlKHq6ovycLaWMWVydXKFFZnmVFlZU46tP7R2nI5ncbi/dDkfDtFBA2DDXbYkhKc+V0Bqs5Zt9JM1HQGBRTm/EezTmZNKtpcAMs9Yu6AK9caF76zoLWIWcfMGOSkVduvSWechqZsz040Ib2PY3urxBJTzriT95lipz+TN1fmAAAAeJxtkMl2wjAMRfOAhABlKm2h80C3+ajgCKKDY6cegP59TYBzukAL+z1Zsq8ctaJTTKPrsUQLbXQQI0EXKXroY4AbDDHCGBNMcYsZ7nCPB8yxwCOe8IwXvOIN7/jAJ76wxHfUqWX+OzgumWAjJMV17i0Ndlr6irLKO+qftdT7i6y4uFSUvCknay+lFYZIZaQcmfH/xIFdYn98bqhra1aKTM/6lWMnyaYirx1rFUQZFBkb2zJUtoXeJCeg0WnLtHeSFc3OtrnozNwqi0TkSpBMDB1nSde5oJXW23hTS2/T0LilglXX7dmFVxLnq5U0vYATHFk3zX3BOisoQHNDFDeZnqKDy9hRNawN7Vh727hFzcJ5c8TILrKZfH7tIPxAFP0BpLeJPA==) format("woff");font-weight:400;font-style:normal}.video-js .vjs-big-play-button .vjs-icon-placeholder:before,.video-js .vjs-play-control .vjs-icon-placeholder,.vjs-icon-play{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-big-play-button .vjs-icon-placeholder:before,.video-js .vjs-play-control .vjs-icon-placeholder:before,.vjs-icon-play:before{content:"\f101"}.vjs-icon-play-circle{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-play-circle:before{content:"\f102"}.video-js .vjs-play-control.vjs-playing .vjs-icon-placeholder,.vjs-icon-pause{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-play-control.vjs-playing .vjs-icon-placeholder:before,.vjs-icon-pause:before{content:"\f103"}.video-js .vjs-mute-control.vjs-vol-0 
.vjs-icon-placeholder,.vjs-icon-volume-mute{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-mute-control.vjs-vol-0 .vjs-icon-placeholder:before,.vjs-icon-volume-mute:before{content:"\f104"}.video-js .vjs-mute-control.vjs-vol-1 .vjs-icon-placeholder,.vjs-icon-volume-low{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-mute-control.vjs-vol-1 .vjs-icon-placeholder:before,.vjs-icon-volume-low:before{content:"\f105"}.video-js .vjs-mute-control.vjs-vol-2 .vjs-icon-placeholder,.vjs-icon-volume-mid{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-mute-control.vjs-vol-2 .vjs-icon-placeholder:before,.vjs-icon-volume-mid:before{content:"\f106"}.video-js .vjs-mute-control .vjs-icon-placeholder,.vjs-icon-volume-high{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-mute-control .vjs-icon-placeholder:before,.vjs-icon-volume-high:before{content:"\f107"}.video-js .vjs-fullscreen-control .vjs-icon-placeholder,.vjs-icon-fullscreen-enter{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-fullscreen-control .vjs-icon-placeholder:before,.vjs-icon-fullscreen-enter:before{content:"\f108"}.video-js.vjs-fullscreen .vjs-fullscreen-control .vjs-icon-placeholder,.vjs-icon-fullscreen-exit{font-family:VideoJS;font-weight:400;font-style:normal}.video-js.vjs-fullscreen .vjs-fullscreen-control .vjs-icon-placeholder:before,.vjs-icon-fullscreen-exit:before{content:"\f109"}.vjs-icon-square{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-square:before{content:"\f10a"}.vjs-icon-spinner{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-spinner:before{content:"\f10b"}.video-js .vjs-subs-caps-button .vjs-icon-placeholder,.video-js .vjs-subtitles-button .vjs-icon-placeholder,.video-js.video-js:lang(en-AU) .vjs-subs-caps-button .vjs-icon-placeholder,.video-js.video-js:lang(en-GB) .vjs-subs-caps-button .vjs-icon-placeholder,.video-js.video-js:lang(en-IE) .vjs-subs-caps-button .vjs-icon-placeholder,.video-js.video-js:lang(en-NZ) .vjs-subs-caps-button .vjs-icon-placeholder,.vjs-icon-subtitles{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-subs-caps-button .vjs-icon-placeholder:before,.video-js .vjs-subtitles-button .vjs-icon-placeholder:before,.video-js.video-js:lang(en-AU) .vjs-subs-caps-button .vjs-icon-placeholder:before,.video-js.video-js:lang(en-GB) .vjs-subs-caps-button .vjs-icon-placeholder:before,.video-js.video-js:lang(en-IE) .vjs-subs-caps-button .vjs-icon-placeholder:before,.video-js.video-js:lang(en-NZ) .vjs-subs-caps-button .vjs-icon-placeholder:before,.vjs-icon-subtitles:before{content:"\f10c"}.video-js .vjs-captions-button .vjs-icon-placeholder,.video-js:lang(en) .vjs-subs-caps-button .vjs-icon-placeholder,.video-js:lang(fr-CA) .vjs-subs-caps-button .vjs-icon-placeholder,.vjs-icon-captions{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-captions-button .vjs-icon-placeholder:before,.video-js:lang(en) .vjs-subs-caps-button .vjs-icon-placeholder:before,.video-js:lang(fr-CA) .vjs-subs-caps-button .vjs-icon-placeholder:before,.vjs-icon-captions:before{content:"\f10d"}.video-js .vjs-chapters-button .vjs-icon-placeholder,.vjs-icon-chapters{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-chapters-button 
.vjs-icon-placeholder:before,.vjs-icon-chapters:before{content:"\f10e"}.vjs-icon-share{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-share:before{content:"\f10f"}.vjs-icon-cog{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-cog:before{content:"\f110"}.video-js .vjs-play-progress,.video-js .vjs-volume-level,.vjs-icon-circle,.vjs-seek-to-live-control .vjs-icon-placeholder{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-play-progress:before,.video-js .vjs-volume-level:before,.vjs-icon-circle:before,.vjs-seek-to-live-control .vjs-icon-placeholder:before{content:"\f111"}.vjs-icon-circle-outline{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-circle-outline:before{content:"\f112"}.vjs-icon-circle-inner-circle{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-circle-inner-circle:before{content:"\f113"}.vjs-icon-hd{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-hd:before{content:"\f114"}.video-js .vjs-control.vjs-close-button .vjs-icon-placeholder,.vjs-icon-cancel{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-control.vjs-close-button .vjs-icon-placeholder:before,.vjs-icon-cancel:before{content:"\f115"}.video-js .vjs-play-control.vjs-ended .vjs-icon-placeholder,.vjs-icon-replay{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-play-control.vjs-ended .vjs-icon-placeholder:before,.vjs-icon-replay:before{content:"\f116"}.vjs-icon-facebook{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-facebook:before{content:"\f117"}.vjs-icon-gplus{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-gplus:before{content:"\f118"}.vjs-icon-linkedin{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-linkedin:before{content:"\f119"}.vjs-icon-twitter{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-twitter:before{content:"\f11a"}.vjs-icon-tumblr{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-tumblr:before{content:"\f11b"}.vjs-icon-pinterest{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-pinterest:before{content:"\f11c"}.video-js .vjs-descriptions-button .vjs-icon-placeholder,.vjs-icon-audio-description{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-descriptions-button .vjs-icon-placeholder:before,.vjs-icon-audio-description:before{content:"\f11d"}.video-js .vjs-audio-button .vjs-icon-placeholder,.vjs-icon-audio{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-audio-button .vjs-icon-placeholder:before,.vjs-icon-audio:before{content:"\f11e"}.vjs-icon-next-item{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-next-item:before{content:"\f11f"}.vjs-icon-previous-item{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-previous-item:before{content:"\f120"}.video-js .vjs-picture-in-picture-control .vjs-icon-placeholder,.vjs-icon-picture-in-picture-enter{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-picture-in-picture-control .vjs-icon-placeholder:before,.vjs-icon-picture-in-picture-enter:before{content:"\f121"}.video-js.vjs-picture-in-picture .vjs-picture-in-picture-control .vjs-icon-placeholder,.vjs-icon-picture-in-picture-exit{font-family:VideoJS;font-weight:400;font-style:normal}.video-js.vjs-picture-in-picture .vjs-picture-in-picture-control 
.vjs-icon-placeholder:before,.vjs-icon-picture-in-picture-exit:before{content:"\f122"}.video-js{display:block;vertical-align:top;box-sizing:border-box;color:#fff;background-color:#000;position:relative;padding:0;font-size:10px;line-height:1;font-weight:400;font-style:normal;font-family:Arial,Helvetica,sans-serif;word-break:initial}.video-js:-moz-full-screen{position:absolute}.video-js:-webkit-full-screen{width:100%!important;height:100%!important}.video-js[tabindex="-1"]{outline:0}.video-js *,.video-js :after,.video-js :before{box-sizing:inherit}.video-js ul{font-family:inherit;font-size:inherit;line-height:inherit;list-style-position:outside;margin-left:0;margin-right:0;margin-top:0;margin-bottom:0}.video-js.vjs-1-1,.video-js.vjs-16-9,.video-js.vjs-4-3,.video-js.vjs-9-16,.video-js.vjs-fluid{width:100%;max-width:100%}.video-js.vjs-1-1:not(.vjs-audio-only-mode),.video-js.vjs-16-9:not(.vjs-audio-only-mode),.video-js.vjs-4-3:not(.vjs-audio-only-mode),.video-js.vjs-9-16:not(.vjs-audio-only-mode),.video-js.vjs-fluid:not(.vjs-audio-only-mode){height:0}.video-js.vjs-16-9:not(.vjs-audio-only-mode){padding-top:56.25%}.video-js.vjs-4-3:not(.vjs-audio-only-mode){padding-top:75%}.video-js.vjs-9-16:not(.vjs-audio-only-mode){padding-top:177.7777777778%}.video-js.vjs-1-1:not(.vjs-audio-only-mode){padding-top:100%}.video-js.vjs-fill:not(.vjs-audio-only-mode){width:100%;height:100%}.video-js .vjs-tech{position:absolute;top:0;left:0;width:100%;height:100%}.video-js.vjs-audio-only-mode .vjs-tech{display:none}body.vjs-full-window{padding:0;margin:0;height:100%}.vjs-full-window .video-js.vjs-fullscreen{position:fixed;overflow:hidden;z-index:1000;left:0;top:0;bottom:0;right:0}.video-js.vjs-fullscreen:not(.vjs-ios-native-fs){width:100%!important;height:100%!important;padding-top:0!important}.video-js.vjs-fullscreen.vjs-user-inactive{cursor:none}.vjs-hidden{display:none!important}.vjs-disabled{opacity:.5;cursor:default}.video-js .vjs-offscreen{height:1px;left:-9999px;position:absolute;top:0;width:1px}.vjs-lock-showing{display:block!important;opacity:1!important;visibility:visible!important}.vjs-no-js{padding:20px;color:#fff;background-color:#000;font-size:18px;font-family:Arial,Helvetica,sans-serif;text-align:center;width:300px;height:150px;margin:0 auto}.vjs-no-js a,.vjs-no-js a:visited{color:#66a8cc}.video-js .vjs-big-play-button{font-size:3em;line-height:1.5em;height:1.63332em;width:3em;display:block;position:absolute;top:10px;left:10px;padding:0;cursor:pointer;opacity:1;border:.06666em solid #fff;background-color:#2b333f;background-color:rgba(43,51,63,.7);border-radius:.3em;transition:all .4s}.vjs-big-play-centered .vjs-big-play-button{top:50%;left:50%;margin-top:-.81666em;margin-left:-1.5em}.video-js .vjs-big-play-button:focus,.video-js:hover .vjs-big-play-button{border-color:#fff;background-color:#73859f;background-color:rgba(115,133,159,.5);transition:all 0s}.vjs-controls-disabled .vjs-big-play-button,.vjs-error .vjs-big-play-button,.vjs-has-started .vjs-big-play-button,.vjs-using-native-controls .vjs-big-play-button{display:none}.vjs-has-started.vjs-paused.vjs-show-big-play-button-on-pause .vjs-big-play-button{display:block}.video-js button{background:0 0;border:none;color:inherit;display:inline-block;font-size:inherit;line-height:inherit;text-transform:none;text-decoration:none;transition:none;-webkit-appearance:none;-moz-appearance:none;appearance:none}.vjs-control .vjs-button{width:100%;height:100%}.video-js 
.vjs-control.vjs-close-button{cursor:pointer;height:3em;position:absolute;right:0;top:.5em;z-index:2}.video-js .vjs-modal-dialog{background:rgba(0,0,0,.8);background:linear-gradient(180deg,rgba(0,0,0,.8),rgba(255,255,255,0));overflow:auto}.video-js .vjs-modal-dialog>*{box-sizing:border-box}.vjs-modal-dialog .vjs-modal-dialog-content{font-size:1.2em;line-height:1.5;padding:20px 24px;z-index:1}.vjs-menu-button{cursor:pointer}.vjs-menu-button.vjs-disabled{cursor:default}.vjs-workinghover .vjs-menu-button.vjs-disabled:hover .vjs-menu{display:none}.vjs-menu .vjs-menu-content{display:block;padding:0;margin:0;font-family:Arial,Helvetica,sans-serif;overflow:auto}.vjs-menu .vjs-menu-content>*{box-sizing:border-box}.vjs-scrubbing .vjs-control.vjs-menu-button:hover .vjs-menu{display:none}.vjs-menu li{list-style:none;margin:0;padding:.2em 0;line-height:1.4em;font-size:1.2em;text-align:center;text-transform:lowercase}.js-focus-visible .vjs-menu li.vjs-menu-item:hover,.vjs-menu li.vjs-menu-item:focus,.vjs-menu li.vjs-menu-item:hover{background-color:#73859f;background-color:rgba(115,133,159,.5)}.js-focus-visible .vjs-menu li.vjs-selected:hover,.vjs-menu li.vjs-selected,.vjs-menu li.vjs-selected:focus,.vjs-menu li.vjs-selected:hover{background-color:#fff;color:#2b333f}.js-focus-visible .vjs-menu :not(.vjs-selected):focus:not(.focus-visible),.video-js .vjs-menu :not(.vjs-selected):focus:not(:focus-visible){background:0 0}.vjs-menu li.vjs-menu-title{text-align:center;text-transform:uppercase;font-size:1em;line-height:2em;padding:0;margin:0 0 .3em 0;font-weight:700;cursor:default}.vjs-menu-button-popup .vjs-menu{display:none;position:absolute;bottom:0;width:10em;left:-3em;height:0;margin-bottom:1.5em;border-top-color:rgba(43,51,63,.7)}.vjs-menu-button-popup .vjs-menu .vjs-menu-content{background-color:#2b333f;background-color:rgba(43,51,63,.7);position:absolute;width:100%;bottom:1.5em;max-height:15em}.vjs-layout-tiny .vjs-menu-button-popup .vjs-menu .vjs-menu-content,.vjs-layout-x-small .vjs-menu-button-popup .vjs-menu .vjs-menu-content{max-height:5em}.vjs-layout-small .vjs-menu-button-popup .vjs-menu .vjs-menu-content{max-height:10em}.vjs-layout-medium .vjs-menu-button-popup .vjs-menu .vjs-menu-content{max-height:14em}.vjs-layout-huge .vjs-menu-button-popup .vjs-menu .vjs-menu-content,.vjs-layout-large .vjs-menu-button-popup .vjs-menu .vjs-menu-content,.vjs-layout-x-large .vjs-menu-button-popup .vjs-menu .vjs-menu-content{max-height:25em}.vjs-menu-button-popup .vjs-menu.vjs-lock-showing,.vjs-workinghover .vjs-menu-button-popup.vjs-hover .vjs-menu{display:block}.video-js .vjs-menu-button-inline{transition:all .4s;overflow:hidden}.video-js .vjs-menu-button-inline:before{width:2.222222222em}.video-js .vjs-menu-button-inline.vjs-slider-active,.video-js .vjs-menu-button-inline:focus,.video-js .vjs-menu-button-inline:hover,.video-js.vjs-no-flex .vjs-menu-button-inline{width:12em}.vjs-menu-button-inline .vjs-menu{opacity:0;height:100%;width:auto;position:absolute;left:4em;top:0;padding:0;margin:0;transition:all .4s}.vjs-menu-button-inline.vjs-slider-active .vjs-menu,.vjs-menu-button-inline:focus .vjs-menu,.vjs-menu-button-inline:hover .vjs-menu{display:block;opacity:1}.vjs-no-flex .vjs-menu-button-inline .vjs-menu{display:block;opacity:1;position:relative;width:auto}.vjs-no-flex .vjs-menu-button-inline.vjs-slider-active .vjs-menu,.vjs-no-flex .vjs-menu-button-inline:focus .vjs-menu,.vjs-no-flex .vjs-menu-button-inline:hover .vjs-menu{width:auto}.vjs-menu-button-inline 
.vjs-menu-content{width:auto;height:100%;margin:0;overflow:hidden}.video-js .vjs-control-bar{display:none;width:100%;position:absolute;bottom:0;left:0;right:0;height:3em;background-color:#2b333f;background-color:rgba(43,51,63,.7)}.vjs-audio-only-mode .vjs-control-bar,.vjs-has-started .vjs-control-bar{display:flex;visibility:visible;opacity:1;transition:visibility .1s,opacity .1s}.vjs-has-started.vjs-user-inactive.vjs-playing .vjs-control-bar{visibility:visible;opacity:0;pointer-events:none;transition:visibility 1s,opacity 1s}.vjs-controls-disabled .vjs-control-bar,.vjs-error .vjs-control-bar,.vjs-using-native-controls .vjs-control-bar{display:none!important}.vjs-audio-only-mode.vjs-has-started.vjs-user-inactive.vjs-playing .vjs-control-bar,.vjs-audio.vjs-has-started.vjs-user-inactive.vjs-playing .vjs-control-bar{opacity:1;visibility:visible;pointer-events:auto}.vjs-has-started.vjs-no-flex .vjs-control-bar{display:table}.video-js .vjs-control{position:relative;text-align:center;margin:0;padding:0;height:100%;width:4em;flex:none}.video-js .vjs-control.vjs-visible-text{width:auto;padding-left:1em;padding-right:1em}.vjs-button>.vjs-icon-placeholder:before{font-size:1.8em;line-height:1.67}.vjs-button>.vjs-icon-placeholder{display:block}.video-js .vjs-control:focus,.video-js .vjs-control:focus:before,.video-js .vjs-control:hover:before{text-shadow:0 0 1em #fff}.video-js :not(.vjs-visible-text)>.vjs-control-text{border:0;clip:rect(0 0 0 0);height:1px;overflow:hidden;padding:0;position:absolute;width:1px}.vjs-no-flex .vjs-control{display:table-cell;vertical-align:middle}.video-js .vjs-custom-control-spacer{display:none}.video-js .vjs-progress-control{cursor:pointer;flex:auto;display:flex;align-items:center;min-width:4em;touch-action:none}.video-js .vjs-progress-control.disabled{cursor:default}.vjs-live .vjs-progress-control{display:none}.vjs-liveui .vjs-progress-control{display:flex;align-items:center}.vjs-no-flex .vjs-progress-control{width:auto}.video-js .vjs-progress-holder{flex:auto;transition:all .2s;height:.3em}.video-js .vjs-progress-control .vjs-progress-holder{margin:0 10px}.video-js .vjs-progress-control:hover .vjs-progress-holder{font-size:1.6666666667em}.video-js .vjs-progress-control:hover .vjs-progress-holder.disabled{font-size:1em}.video-js .vjs-progress-holder .vjs-load-progress,.video-js .vjs-progress-holder .vjs-load-progress div,.video-js .vjs-progress-holder .vjs-play-progress{position:absolute;display:block;height:100%;margin:0;padding:0;width:0}.video-js .vjs-play-progress{background-color:#fff}.video-js .vjs-play-progress:before{font-size:.9em;position:absolute;right:-.5em;top:-.3333333333em;z-index:1}.video-js .vjs-load-progress{background:rgba(115,133,159,.5)}.video-js .vjs-load-progress div{background:rgba(115,133,159,.75)}.video-js .vjs-time-tooltip{background-color:#fff;background-color:rgba(255,255,255,.8);border-radius:.3em;color:#000;float:right;font-family:Arial,Helvetica,sans-serif;font-size:1em;padding:6px 8px 8px 8px;pointer-events:none;position:absolute;top:-3.4em;visibility:hidden;z-index:1}.video-js .vjs-progress-holder:focus .vjs-time-tooltip{display:none}.video-js .vjs-progress-control:hover .vjs-progress-holder:focus .vjs-time-tooltip,.video-js .vjs-progress-control:hover .vjs-time-tooltip{display:block;font-size:.6em;visibility:visible}.video-js .vjs-progress-control.disabled:hover .vjs-time-tooltip{font-size:1em}.video-js .vjs-progress-control 
.vjs-mouse-display{display:none;position:absolute;width:1px;height:100%;background-color:#000;z-index:1}.vjs-no-flex .vjs-progress-control .vjs-mouse-display{z-index:0}.video-js .vjs-progress-control:hover .vjs-mouse-display{display:block}.video-js.vjs-user-inactive .vjs-progress-control .vjs-mouse-display{visibility:hidden;opacity:0;transition:visibility 1s,opacity 1s}.video-js.vjs-user-inactive.vjs-no-flex .vjs-progress-control .vjs-mouse-display{display:none}.vjs-mouse-display .vjs-time-tooltip{color:#fff;background-color:#000;background-color:rgba(0,0,0,.8)}.video-js .vjs-slider{position:relative;cursor:pointer;padding:0;margin:0 .45em 0 .45em;-webkit-touch-callout:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;background-color:#73859f;background-color:rgba(115,133,159,.5)}.video-js .vjs-slider.disabled{cursor:default}.video-js .vjs-slider:focus{text-shadow:0 0 1em #fff;box-shadow:0 0 1em #fff}.video-js .vjs-mute-control{cursor:pointer;flex:none}.video-js .vjs-volume-control{cursor:pointer;margin-right:1em;display:flex}.video-js .vjs-volume-control.vjs-volume-horizontal{width:5em}.video-js .vjs-volume-panel .vjs-volume-control{visibility:visible;opacity:0;width:1px;height:1px;margin-left:-1px}.video-js .vjs-volume-panel{transition:width 1s}.video-js .vjs-volume-panel .vjs-volume-control.vjs-slider-active,.video-js .vjs-volume-panel .vjs-volume-control:active,.video-js .vjs-volume-panel.vjs-hover .vjs-mute-control~.vjs-volume-control,.video-js .vjs-volume-panel.vjs-hover .vjs-volume-control,.video-js .vjs-volume-panel:active .vjs-volume-control,.video-js .vjs-volume-panel:focus .vjs-volume-control{visibility:visible;opacity:1;position:relative;transition:visibility .1s,opacity .1s,height .1s,width .1s,left 0s,top 0s}.video-js .vjs-volume-panel .vjs-volume-control.vjs-slider-active.vjs-volume-horizontal,.video-js .vjs-volume-panel .vjs-volume-control:active.vjs-volume-horizontal,.video-js .vjs-volume-panel.vjs-hover .vjs-mute-control~.vjs-volume-control.vjs-volume-horizontal,.video-js .vjs-volume-panel.vjs-hover .vjs-volume-control.vjs-volume-horizontal,.video-js .vjs-volume-panel:active .vjs-volume-control.vjs-volume-horizontal,.video-js .vjs-volume-panel:focus .vjs-volume-control.vjs-volume-horizontal{width:5em;height:3em;margin-right:0}.video-js .vjs-volume-panel .vjs-volume-control.vjs-slider-active.vjs-volume-vertical,.video-js .vjs-volume-panel .vjs-volume-control:active.vjs-volume-vertical,.video-js .vjs-volume-panel.vjs-hover .vjs-mute-control~.vjs-volume-control.vjs-volume-vertical,.video-js .vjs-volume-panel.vjs-hover .vjs-volume-control.vjs-volume-vertical,.video-js .vjs-volume-panel:active .vjs-volume-control.vjs-volume-vertical,.video-js .vjs-volume-panel:focus .vjs-volume-control.vjs-volume-vertical{left:-3.5em;transition:left 0s}.video-js .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-hover,.video-js .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active,.video-js .vjs-volume-panel.vjs-volume-panel-horizontal:active{width:10em;transition:width .1s}.video-js .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-mute-toggle-only{width:4em}.video-js .vjs-volume-panel .vjs-volume-control.vjs-volume-vertical{height:8em;width:3em;left:-3000em;transition:visibility 1s,opacity 1s,height 1s 1s,width 1s 1s,left 1s 1s,top 1s 1s}.video-js .vjs-volume-panel .vjs-volume-control.vjs-volume-horizontal{transition:visibility 1s,opacity 1s,height 1s 1s,width 1s,left 1s 1s,top 1s 1s}.video-js.vjs-no-flex .vjs-volume-panel 
.vjs-volume-control.vjs-volume-horizontal{width:5em;height:3em;visibility:visible;opacity:1;position:relative;transition:none}.video-js.vjs-no-flex .vjs-volume-control.vjs-volume-vertical,.video-js.vjs-no-flex .vjs-volume-panel .vjs-volume-control.vjs-volume-vertical{position:absolute;bottom:3em;left:.5em}.video-js .vjs-volume-panel{display:flex}.video-js .vjs-volume-bar{margin:1.35em .45em}.vjs-volume-bar.vjs-slider-horizontal{width:5em;height:.3em}.vjs-volume-bar.vjs-slider-vertical{width:.3em;height:5em;margin:1.35em auto}.video-js .vjs-volume-level{position:absolute;bottom:0;left:0;background-color:#fff}.video-js .vjs-volume-level:before{position:absolute;font-size:.9em;z-index:1}.vjs-slider-vertical .vjs-volume-level{width:.3em}.vjs-slider-vertical .vjs-volume-level:before{top:-.5em;left:-.3em;z-index:1}.vjs-slider-horizontal .vjs-volume-level{height:.3em}.vjs-slider-horizontal .vjs-volume-level:before{top:-.3em;right:-.5em}.video-js .vjs-volume-panel.vjs-volume-panel-vertical{width:4em}.vjs-volume-bar.vjs-slider-vertical .vjs-volume-level{height:100%}.vjs-volume-bar.vjs-slider-horizontal .vjs-volume-level{width:100%}.video-js .vjs-volume-vertical{width:3em;height:8em;bottom:8em;background-color:#2b333f;background-color:rgba(43,51,63,.7)}.video-js .vjs-volume-horizontal .vjs-menu{left:-2em}.video-js .vjs-volume-tooltip{background-color:#fff;background-color:rgba(255,255,255,.8);border-radius:.3em;color:#000;float:right;font-family:Arial,Helvetica,sans-serif;font-size:1em;padding:6px 8px 8px 8px;pointer-events:none;position:absolute;top:-3.4em;visibility:hidden;z-index:1}.video-js .vjs-volume-control:hover .vjs-progress-holder:focus .vjs-volume-tooltip,.video-js .vjs-volume-control:hover .vjs-volume-tooltip{display:block;font-size:1em;visibility:visible}.video-js .vjs-volume-vertical:hover .vjs-progress-holder:focus .vjs-volume-tooltip,.video-js .vjs-volume-vertical:hover .vjs-volume-tooltip{left:1em;top:-12px}.video-js .vjs-volume-control.disabled:hover .vjs-volume-tooltip{font-size:1em}.video-js .vjs-volume-control .vjs-mouse-display{display:none;position:absolute;width:100%;height:1px;background-color:#000;z-index:1}.video-js .vjs-volume-horizontal .vjs-mouse-display{width:1px;height:100%}.vjs-no-flex .vjs-volume-control .vjs-mouse-display{z-index:0}.video-js .vjs-volume-control:hover .vjs-mouse-display{display:block}.video-js.vjs-user-inactive .vjs-volume-control .vjs-mouse-display{visibility:hidden;opacity:0;transition:visibility 1s,opacity 1s}.video-js.vjs-user-inactive.vjs-no-flex .vjs-volume-control .vjs-mouse-display{display:none}.vjs-mouse-display .vjs-volume-tooltip{color:#fff;background-color:#000;background-color:rgba(0,0,0,.8)}.vjs-poster{display:inline-block;vertical-align:middle;background-repeat:no-repeat;background-position:50% 50%;background-size:contain;background-color:#000;cursor:pointer;margin:0;padding:0;position:absolute;top:0;right:0;bottom:0;left:0;height:100%}.vjs-has-started .vjs-poster,.vjs-using-native-controls .vjs-poster{display:none}.vjs-audio.vjs-has-started .vjs-poster,.vjs-has-started.vjs-audio-poster-mode .vjs-poster{display:block}.video-js .vjs-live-control{display:flex;align-items:flex-start;flex:auto;font-size:1em;line-height:3em}.vjs-no-flex .vjs-live-control{display:table-cell;width:auto;text-align:left}.video-js.vjs-liveui .vjs-live-control,.video-js:not(.vjs-live) .vjs-live-control{display:none}.video-js 
.vjs-seek-to-live-control{align-items:center;cursor:pointer;flex:none;display:inline-flex;height:100%;padding-left:.5em;padding-right:.5em;font-size:1em;line-height:3em;width:auto;min-width:4em}.vjs-no-flex .vjs-seek-to-live-control{display:table-cell;width:auto;text-align:left}.video-js.vjs-live:not(.vjs-liveui) .vjs-seek-to-live-control,.video-js:not(.vjs-live) .vjs-seek-to-live-control{display:none}.vjs-seek-to-live-control.vjs-control.vjs-at-live-edge{cursor:auto}.vjs-seek-to-live-control .vjs-icon-placeholder{margin-right:.5em;color:#888}.vjs-seek-to-live-control.vjs-control.vjs-at-live-edge .vjs-icon-placeholder{color:red}.video-js .vjs-time-control{flex:none;font-size:1em;line-height:3em;min-width:2em;width:auto;padding-left:1em;padding-right:1em}.vjs-live .vjs-time-control{display:none}.video-js .vjs-current-time,.vjs-no-flex .vjs-current-time{display:none}.video-js .vjs-duration,.vjs-no-flex .vjs-duration{display:none}.vjs-time-divider{display:none;line-height:3em}.vjs-live .vjs-time-divider{display:none}.video-js .vjs-play-control{cursor:pointer}.video-js .vjs-play-control .vjs-icon-placeholder{flex:none}.vjs-text-track-display{position:absolute;bottom:3em;left:0;right:0;top:0;pointer-events:none}.video-js.vjs-controls-disabled .vjs-text-track-display,.video-js.vjs-user-inactive.vjs-playing .vjs-text-track-display{bottom:1em}.video-js .vjs-text-track{font-size:1.4em;text-align:center;margin-bottom:.1em}.vjs-subtitles{color:#fff}.vjs-captions{color:#fc6}.vjs-tt-cue{display:block}video::-webkit-media-text-track-display{transform:translateY(-3em)}.video-js.vjs-controls-disabled video::-webkit-media-text-track-display,.video-js.vjs-user-inactive.vjs-playing video::-webkit-media-text-track-display{transform:translateY(-1.5em)}.video-js .vjs-picture-in-picture-control{cursor:pointer;flex:none}.video-js.vjs-audio-only-mode .vjs-picture-in-picture-control{display:none}.video-js .vjs-fullscreen-control{cursor:pointer;flex:none}.video-js.vjs-audio-only-mode .vjs-fullscreen-control{display:none}.vjs-playback-rate .vjs-playback-rate-value,.vjs-playback-rate>.vjs-menu-button{position:absolute;top:0;left:0;width:100%;height:100%}.vjs-playback-rate .vjs-playback-rate-value{pointer-events:none;font-size:1.5em;line-height:2;text-align:center}.vjs-playback-rate .vjs-menu{width:4em;left:0}.vjs-error .vjs-error-display .vjs-modal-dialog-content{font-size:1.4em;text-align:center}.vjs-error .vjs-error-display:before{color:#fff;content:"X";font-family:Arial,Helvetica,sans-serif;font-size:4em;left:0;line-height:1;margin-top:-.5em;position:absolute;text-shadow:.05em .05em .1em #000;text-align:center;top:50%;vertical-align:middle;width:100%}.vjs-loading-spinner{display:none;position:absolute;top:50%;left:50%;margin:-25px 0 0 -25px;opacity:.85;text-align:left;border:6px solid rgba(43,51,63,.7);box-sizing:border-box;background-clip:padding-box;width:50px;height:50px;border-radius:25px;visibility:hidden}.vjs-seeking .vjs-loading-spinner,.vjs-waiting .vjs-loading-spinner{display:block;-webkit-animation:vjs-spinner-show 0s linear .3s forwards;animation:vjs-spinner-show 0s linear .3s forwards}.vjs-loading-spinner:after,.vjs-loading-spinner:before{content:"";position:absolute;margin:-6px;box-sizing:inherit;width:inherit;height:inherit;border-radius:inherit;opacity:1;border:inherit;border-color:transparent;border-top-color:#fff}.vjs-seeking .vjs-loading-spinner:after,.vjs-seeking .vjs-loading-spinner:before,.vjs-waiting .vjs-loading-spinner:after,.vjs-waiting 
.vjs-loading-spinner:before{-webkit-animation:vjs-spinner-spin 1.1s cubic-bezier(.6,.2,0,.8) infinite,vjs-spinner-fade 1.1s linear infinite;animation:vjs-spinner-spin 1.1s cubic-bezier(.6,.2,0,.8) infinite,vjs-spinner-fade 1.1s linear infinite}.vjs-seeking .vjs-loading-spinner:before,.vjs-waiting .vjs-loading-spinner:before{border-top-color:#fff}.vjs-seeking .vjs-loading-spinner:after,.vjs-waiting .vjs-loading-spinner:after{border-top-color:#fff;-webkit-animation-delay:.44s;animation-delay:.44s}@keyframes vjs-spinner-show{to{visibility:visible}}@-webkit-keyframes vjs-spinner-show{to{visibility:visible}}@keyframes vjs-spinner-spin{100%{transform:rotate(360deg)}}@-webkit-keyframes vjs-spinner-spin{100%{-webkit-transform:rotate(360deg)}}@keyframes vjs-spinner-fade{0%{border-top-color:#73859f}20%{border-top-color:#73859f}35%{border-top-color:#fff}60%{border-top-color:#73859f}100%{border-top-color:#73859f}}@-webkit-keyframes vjs-spinner-fade{0%{border-top-color:#73859f}20%{border-top-color:#73859f}35%{border-top-color:#fff}60%{border-top-color:#73859f}100%{border-top-color:#73859f}}.video-js.vjs-audio-only-mode .vjs-captions-button{display:none}.vjs-chapters-button .vjs-menu ul{width:24em}.video-js.vjs-audio-only-mode .vjs-descriptions-button{display:none}.video-js .vjs-subs-caps-button+.vjs-menu .vjs-captions-menu-item .vjs-menu-item-text .vjs-icon-placeholder{vertical-align:middle;display:inline-block;margin-bottom:-.1em}.video-js .vjs-subs-caps-button+.vjs-menu .vjs-captions-menu-item .vjs-menu-item-text .vjs-icon-placeholder:before{font-family:VideoJS;content:"";font-size:1.5em;line-height:inherit}.video-js.vjs-audio-only-mode .vjs-subs-caps-button{display:none}.video-js .vjs-audio-button+.vjs-menu .vjs-main-desc-menu-item .vjs-menu-item-text .vjs-icon-placeholder{vertical-align:middle;display:inline-block;margin-bottom:-.1em}.video-js .vjs-audio-button+.vjs-menu .vjs-main-desc-menu-item .vjs-menu-item-text .vjs-icon-placeholder:before{font-family:VideoJS;content:" ";font-size:1.5em;line-height:inherit}.video-js.vjs-layout-small .vjs-current-time,.video-js.vjs-layout-small .vjs-duration,.video-js.vjs-layout-small .vjs-playback-rate,.video-js.vjs-layout-small .vjs-remaining-time,.video-js.vjs-layout-small .vjs-time-divider,.video-js.vjs-layout-small .vjs-volume-control,.video-js.vjs-layout-tiny .vjs-current-time,.video-js.vjs-layout-tiny .vjs-duration,.video-js.vjs-layout-tiny .vjs-playback-rate,.video-js.vjs-layout-tiny .vjs-remaining-time,.video-js.vjs-layout-tiny .vjs-time-divider,.video-js.vjs-layout-tiny .vjs-volume-control,.video-js.vjs-layout-x-small .vjs-current-time,.video-js.vjs-layout-x-small .vjs-duration,.video-js.vjs-layout-x-small .vjs-playback-rate,.video-js.vjs-layout-x-small .vjs-remaining-time,.video-js.vjs-layout-x-small .vjs-time-divider,.video-js.vjs-layout-x-small .vjs-volume-control{display:none}.video-js.vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-hover,.video-js.vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active,.video-js.vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal:active,.video-js.vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal:hover,.video-js.vjs-layout-tiny .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-hover,.video-js.vjs-layout-tiny .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active,.video-js.vjs-layout-tiny .vjs-volume-panel.vjs-volume-panel-horizontal:active,.video-js.vjs-layout-tiny 
.vjs-volume-panel.vjs-volume-panel-horizontal:hover,.video-js.vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-hover,.video-js.vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active,.video-js.vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal:active,.video-js.vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal:hover{width:auto;width:initial}.video-js.vjs-layout-tiny .vjs-progress-control,.video-js.vjs-layout-x-small .vjs-progress-control{display:none}.video-js.vjs-layout-x-small .vjs-custom-control-spacer{flex:auto;display:block}.video-js.vjs-layout-x-small.vjs-no-flex .vjs-custom-control-spacer{width:auto}.vjs-modal-dialog.vjs-text-track-settings{background-color:#2b333f;background-color:rgba(43,51,63,.75);color:#fff;height:70%}.vjs-text-track-settings .vjs-modal-dialog-content{display:table}.vjs-text-track-settings .vjs-track-settings-colors,.vjs-text-track-settings .vjs-track-settings-controls,.vjs-text-track-settings .vjs-track-settings-font{display:table-cell}.vjs-text-track-settings .vjs-track-settings-controls{text-align:right;vertical-align:bottom}@supports (display:grid){.vjs-text-track-settings .vjs-modal-dialog-content{display:grid;grid-template-columns:1fr 1fr;grid-template-rows:1fr;padding:20px 24px 0 24px}.vjs-track-settings-controls .vjs-default-button{margin-bottom:20px}.vjs-text-track-settings .vjs-track-settings-controls{grid-column:1/-1}.vjs-layout-small .vjs-text-track-settings .vjs-modal-dialog-content,.vjs-layout-tiny .vjs-text-track-settings .vjs-modal-dialog-content,.vjs-layout-x-small .vjs-text-track-settings .vjs-modal-dialog-content{grid-template-columns:1fr}}.vjs-track-setting>select{margin-right:1em;margin-bottom:.5em}.vjs-text-track-settings fieldset{margin:5px;padding:3px;border:none}.vjs-text-track-settings fieldset span{display:inline-block}.vjs-text-track-settings fieldset span>select{max-width:7.3em}.vjs-text-track-settings legend{color:#fff;margin:0 0 5px 0}.vjs-text-track-settings .vjs-label{position:absolute;clip:rect(1px 1px 1px 1px);clip:rect(1px,1px,1px,1px);display:block;margin:0 0 5px 0;padding:0;border:0;height:1px;width:1px;overflow:hidden}.vjs-track-settings-controls button:active,.vjs-track-settings-controls button:focus{outline-style:solid;outline-width:medium;background-image:linear-gradient(0deg,#fff 88%,#73859f 100%)}.vjs-track-settings-controls button:hover{color:rgba(43,51,63,.75)}.vjs-track-settings-controls button{background-color:#fff;background-image:linear-gradient(-180deg,#fff 88%,#73859f 100%);color:#2b333f;cursor:pointer;border-radius:2px}.vjs-track-settings-controls .vjs-default-button{margin-right:1em}@media print{.video-js>:not(.vjs-tech):not(.vjs-poster){visibility:hidden}}.vjs-resize-manager{position:absolute;top:0;left:0;width:100%;height:100%;border:none;z-index:-1000}.js-focus-visible .video-js :focus:not(.focus-visible){outline:0}.video-js :focus:not(:focus-visible){outline:0}
\ No newline at end of file
diff --git a/frontend/src/static/lib/video-js/7.20.2/video.cjs.js b/frontend/src/static/lib/video-js/7.20.2/video.cjs.js
new file mode 100644
index 0000000..daaf9cc
--- /dev/null
+++ b/frontend/src/static/lib/video-js/7.20.2/video.cjs.js
@@ -0,0 +1,55216 @@
+/**
+ * @license
+ * Video.js 7.20.2
+ * Copyright Brightcove, Inc.
+ * Available under Apache License Version 2.0
+ *
+ *
+ * Includes vtt.js
+ * Available under Apache License Version 2.0
+ *
+ */
+
+'use strict';
+
+var window$1 = require('global/window');
+var document = require('global/document');
+var _extends = require('@babel/runtime/helpers/extends');
+var keycode = require('keycode');
+var _assertThisInitialized = require('@babel/runtime/helpers/assertThisInitialized');
+var _inheritsLoose = require('@babel/runtime/helpers/inheritsLoose');
+var safeParseTuple = require('safe-json-parse/tuple');
+var XHR = require('@videojs/xhr');
+var vtt = require('videojs-vtt.js');
+var _construct = require('@babel/runtime/helpers/construct');
+var _inherits = require('@babel/runtime/helpers/inherits');
+var _resolveUrl = require('@videojs/vhs-utils/cjs/resolve-url.js');
+var m3u8Parser = require('m3u8-parser');
+var codecs_js = require('@videojs/vhs-utils/cjs/codecs.js');
+var mediaTypes_js = require('@videojs/vhs-utils/cjs/media-types.js');
+var byteHelpers = require('@videojs/vhs-utils/cjs/byte-helpers');
+var mpdParser = require('mpd-parser');
+var parseSidx = require('mux.js/lib/tools/parse-sidx');
+var id3Helpers = require('@videojs/vhs-utils/cjs/id3-helpers');
+var containers = require('@videojs/vhs-utils/cjs/containers');
+var clock = require('mux.js/lib/utils/clock');
+
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
+
+var window__default = /*#__PURE__*/_interopDefaultLegacy(window$1);
+var document__default = /*#__PURE__*/_interopDefaultLegacy(document);
+var _extends__default = /*#__PURE__*/_interopDefaultLegacy(_extends);
+var keycode__default = /*#__PURE__*/_interopDefaultLegacy(keycode);
+var _assertThisInitialized__default = /*#__PURE__*/_interopDefaultLegacy(_assertThisInitialized);
+var _inheritsLoose__default = /*#__PURE__*/_interopDefaultLegacy(_inheritsLoose);
+var safeParseTuple__default = /*#__PURE__*/_interopDefaultLegacy(safeParseTuple);
+var XHR__default = /*#__PURE__*/_interopDefaultLegacy(XHR);
+var vtt__default = /*#__PURE__*/_interopDefaultLegacy(vtt);
+var _construct__default = /*#__PURE__*/_interopDefaultLegacy(_construct);
+var _inherits__default = /*#__PURE__*/_interopDefaultLegacy(_inherits);
+var _resolveUrl__default = /*#__PURE__*/_interopDefaultLegacy(_resolveUrl);
+var parseSidx__default = /*#__PURE__*/_interopDefaultLegacy(parseSidx);
+
+var version$5 = "7.20.2";
+
+/**
+ * An Object that contains lifecycle hooks as keys which point to an array
+ * of functions that are run when a lifecycle is triggered
+ *
+ * @private
+ */
+var hooks_ = {};
+/**
+ * Get a list of hooks for a specific lifecycle
+ *
+ * @param {string} type
+ * the lifecycle to get hooks from
+ *
+ * @param {Function|Function[]} [fn]
+ * Optionally add a hook (or hooks) to the lifecycle that you are getting.
+ *
+ * @return {Array}
+ * an array of hooks, or an empty array if there are none.
+ */
+
+var hooks = function hooks(type, fn) {
+ hooks_[type] = hooks_[type] || [];
+
+ if (fn) {
+ hooks_[type] = hooks_[type].concat(fn);
+ }
+
+ return hooks_[type];
+};
+/**
+ * Add a function hook to a specific videojs lifecycle.
+ *
+ * @param {string} type
+ * the lifecycle to hook the function to.
+ *
+ * @param {Function|Function[]} fn
+ * The function or array of functions to attach.
+ */
+
+
+var hook = function hook(type, fn) {
+ hooks(type, fn);
+};
+/**
+ * Remove a hook from a specific videojs lifecycle.
+ *
+ * @param {string} type
+ * the lifecycle that the function hooked to
+ *
+ * @param {Function} fn
+ * The hooked function to remove
+ *
+ * @return {boolean}
+ * Whether or not the hook was removed.
+ */
+
+
+var removeHook = function removeHook(type, fn) {
+ var index = hooks(type).indexOf(fn);
+
+ if (index <= -1) {
+ return false;
+ }
+
+ hooks_[type] = hooks_[type].slice();
+ hooks_[type].splice(index, 1);
+ return true;
+};
+/**
+ * Add a function hook that will only run once to a specific videojs lifecycle.
+ *
+ * @param {string} type
+ * the lifecycle to hook the function to.
+ *
+ * @param {Function|Function[]} fn
+ * The function or array of functions to attach.
+ */
+
+
+var hookOnce = function hookOnce(type, fn) {
+ hooks(type, [].concat(fn).map(function (original) {
+ var wrapper = function wrapper() {
+ removeHook(type, wrapper);
+ return original.apply(void 0, arguments);
+ };
+
+ return wrapper;
+ }));
+};
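+// Editor's note: an illustrative sketch of how the lifecycle hook helpers above
+// are used (they back the public videojs.hook / hookOnce / removeHook API; the
+// 'setup' lifecycle name is one of the documented lifecycles):
+//
+//   var onSetup = function () { /* ... */ };
+//   hook('setup', onSetup);        // run on every 'setup' lifecycle
+//   hookOnce('setup', onSetup);    // run only the next time 'setup' fires
+//   removeHook('setup', onSetup);  // returns true if the hook was removed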
+
+/**
+ * @file fullscreen-api.js
+ * @module fullscreen-api
+ * @private
+ */
+/**
+ * Store the browser-specific methods for the fullscreen API.
+ *
+ * @type {Object}
+ * @see [Specification]{@link https://fullscreen.spec.whatwg.org}
+ * @see [Map Approach From Screenfull.js]{@link https://github.com/sindresorhus/screenfull.js}
+ */
+
+var FullscreenApi = {
+ prefixed: true
+}; // browser API methods
+
+var apiMap = [['requestFullscreen', 'exitFullscreen', 'fullscreenElement', 'fullscreenEnabled', 'fullscreenchange', 'fullscreenerror', 'fullscreen'], // WebKit
+['webkitRequestFullscreen', 'webkitExitFullscreen', 'webkitFullscreenElement', 'webkitFullscreenEnabled', 'webkitfullscreenchange', 'webkitfullscreenerror', '-webkit-full-screen'], // Mozilla
+['mozRequestFullScreen', 'mozCancelFullScreen', 'mozFullScreenElement', 'mozFullScreenEnabled', 'mozfullscreenchange', 'mozfullscreenerror', '-moz-full-screen'], // Microsoft
+['msRequestFullscreen', 'msExitFullscreen', 'msFullscreenElement', 'msFullscreenEnabled', 'MSFullscreenChange', 'MSFullscreenError', '-ms-fullscreen']];
+var specApi = apiMap[0];
+var browserApi; // determine the supported set of functions
+
+for (var i = 0; i < apiMap.length; i++) {
+ // check for exitFullscreen function
+ if (apiMap[i][1] in document__default['default']) {
+ browserApi = apiMap[i];
+ break;
+ }
+} // map the browser API names to the spec API names
+
+
+if (browserApi) {
+ for (var _i = 0; _i < browserApi.length; _i++) {
+ FullscreenApi[specApi[_i]] = browserApi[_i];
+ }
+
+ FullscreenApi.prefixed = browserApi[0] !== specApi[0];
+}
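+// Editor's note (illustrative, not part of the upstream source): after the loop
+// above, callers can use the unprefixed spec names and reach the matching
+// browser-specific member, e.g.
+//   el[FullscreenApi.requestFullscreen]();      // e.g. webkitRequestFullscreen on WebKit
+//   document[FullscreenApi.fullscreenElement];  // the element currently in fullscreen, if any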
+
+/**
+ * @file create-logger.js
+ * @module create-logger
+ */
+
+var history = [];
+/**
+ * Log messages to the console and history based on the type of message
+ *
+ * @private
+ * @param {string} type
+ * The name of the console method to use.
+ *
+ * @param {Array} args
+ * The arguments to be passed to the matching console method.
+ */
+
+var LogByTypeFactory = function LogByTypeFactory(name, log) {
+ return function (type, level, args) {
+ var lvl = log.levels[level];
+ var lvlRegExp = new RegExp("^(" + lvl + ")$");
+
+ if (type !== 'log') {
+ // Add the type to the front of the message when it's not "log".
+ args.unshift(type.toUpperCase() + ':');
+ } // Add console prefix after adding to history.
+
+
+ args.unshift(name + ':'); // Add a clone of the args at this point to history.
+
+ if (history) {
+ history.push([].concat(args)); // only store 1000 history entries
+
+ var splice = history.length - 1000;
+ history.splice(0, splice > 0 ? splice : 0);
+ } // If there's no console then don't try to output messages, but they will
+ // still be stored in history.
+
+
+ if (!window__default['default'].console) {
+ return;
+ } // Was setting these once outside of this function, but containing them
+ // in the function makes it easier to test cases where console doesn't exist
+ // when the module is executed.
+
+
+ var fn = window__default['default'].console[type];
+
+ if (!fn && type === 'debug') {
+ // Certain browsers don't have support for console.debug. For those, we
+ // should default to the closest comparable log.
+ fn = window__default['default'].console.info || window__default['default'].console.log;
+ } // Bail out if there's no console or if this type is not allowed by the
+ // current logging level.
+
+
+ if (!fn || !lvl || !lvlRegExp.test(type)) {
+ return;
+ }
+
+ fn[Array.isArray(args) ? 'apply' : 'call'](window__default['default'].console, args);
+ };
+};
+
+function createLogger$1(name) {
+ // This is the private tracking variable for logging level.
+ var level = 'info'; // the curried logByType bound to the specific log and history
+
+ var logByType;
+ /**
+ * Logs plain debug messages. Similar to `console.log`.
+ *
+ * Due to [limitations](https://github.com/jsdoc3/jsdoc/issues/955#issuecomment-313829149)
+ * of our JSDoc template, we cannot properly document this as both a function
+ * and a namespace, so its function signature is documented here.
+ *
+ * #### Arguments
+ * ##### *args
+ * Mixed[]
+ *
+ * Any combination of values that could be passed to `console.log()`.
+ *
+ * #### Return Value
+ *
+ * `undefined`
+ *
+ * @namespace
+ * @param {Mixed[]} args
+ * One or more messages or objects that should be logged.
+ */
+
+ var log = function log() {
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ logByType('log', level, args);
+ }; // This is the logByType helper that the logging methods below use
+
+
+ logByType = LogByTypeFactory(name, log);
+ /**
+ * Create a new sublogger which chains the old name to the new name.
+ *
+ * For example, doing `videojs.log.createLogger('player')` and then using that logger will log the following:
+ * ```js
+ * mylogger('foo');
+ * // > VIDEOJS: player: foo
+ * ```
+ *
+ * @param {string} name
+ * The name to give the new sublogger.
+ * @return {Object}
+ */
+
+ log.createLogger = function (subname) {
+ return createLogger$1(name + ': ' + subname);
+ };
+ /**
+ * Enumeration of available logging levels, where the keys are the level names
+ * and the values are `|`-separated strings containing logging methods allowed
+ * in that logging level. These strings are used to create a regular expression
+ * matching the function name being called.
+ *
+ * Levels provided by Video.js are:
+ *
+ * - `off`: Matches no calls. Any value that can be cast to `false` will have
+ * this effect. The most restrictive.
+ * - `all`: Matches only Video.js-provided functions (`debug`, `log`,
+ * `log.warn`, and `log.error`).
+ * - `debug`: Matches `log.debug`, `log`, `log.warn`, and `log.error` calls.
+ * - `info` (default): Matches `log`, `log.warn`, and `log.error` calls.
+ * - `warn`: Matches `log.warn` and `log.error` calls.
+ * - `error`: Matches only `log.error` calls.
+ *
+ * @type {Object}
+ */
+
+
+ log.levels = {
+ all: 'debug|log|warn|error',
+ off: '',
+ debug: 'debug|log|warn|error',
+ info: 'log|warn|error',
+ warn: 'warn|error',
+ error: 'error',
+ DEFAULT: level
+ };
+ /**
+ * Get or set the current logging level.
+ *
+ * If a string matching a key from {@link module:log.levels} is provided, acts
+ * as a setter.
+ *
+ * @param {string} [lvl]
+ * Pass a valid level to set a new logging level.
+ *
+ * @return {string}
+ * The current logging level.
+ */
+
+ log.level = function (lvl) {
+ if (typeof lvl === 'string') {
+ if (!log.levels.hasOwnProperty(lvl)) {
+ throw new Error("\"" + lvl + "\" is not a valid log level");
+ }
+
+ level = lvl;
+ }
+
+ return level;
+ };
+ /**
+ * Returns an array containing everything that has been logged to the history.
+ *
+ * This array is a shallow clone of the internal history record. However, its
+ * contents are _not_ cloned; so, mutating objects inside this array will
+ * mutate them in history.
+ *
+ * @return {Array}
+ */
+
+
+ log.history = function () {
+ return history ? [].concat(history) : [];
+ };
+ /**
+ * Allows you to filter the history by the given logger name
+ *
+ * @param {string} fname
+ * The name to filter by
+ *
+ * @return {Array}
+ * The filtered list to return
+ */
+
+
+ log.history.filter = function (fname) {
+ return (history || []).filter(function (historyItem) {
+ // if the first item in each historyItem includes `fname`, then it's a match
+ return new RegExp(".*" + fname + ".*").test(historyItem[0]);
+ });
+ };
+ /**
+ * Clears the internal history tracking, but does not prevent further history
+ * tracking.
+ */
+
+
+ log.history.clear = function () {
+ if (history) {
+ history.length = 0;
+ }
+ };
+ /**
+ * Disable history tracking if it is currently enabled.
+ */
+
+
+ log.history.disable = function () {
+ if (history !== null) {
+ history.length = 0;
+ history = null;
+ }
+ };
+ /**
+ * Enable history tracking if it is currently disabled.
+ */
+
+
+ log.history.enable = function () {
+ if (history === null) {
+ history = [];
+ }
+ };
+ /**
+ * Logs error messages. Similar to `console.error`.
+ *
+ * @param {Mixed[]} args
+ * One or more messages or objects that should be logged as an error
+ */
+
+
+ log.error = function () {
+ for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
+ args[_key2] = arguments[_key2];
+ }
+
+ return logByType('error', level, args);
+ };
+ /**
+ * Logs warning messages. Similar to `console.warn`.
+ *
+ * @param {Mixed[]} args
+ * One or more messages or objects that should be logged as a warning.
+ */
+
+
+ log.warn = function () {
+ for (var _len3 = arguments.length, args = new Array(_len3), _key3 = 0; _key3 < _len3; _key3++) {
+ args[_key3] = arguments[_key3];
+ }
+
+ return logByType('warn', level, args);
+ };
+ /**
+ * Logs debug messages. Similar to `console.debug`, but may also act as a comparable
+ * log if `console.debug` is not available
+ *
+ * @param {Mixed[]} args
+ * One or more messages or objects that should be logged as debug.
+ */
+
+
+ log.debug = function () {
+ for (var _len4 = arguments.length, args = new Array(_len4), _key4 = 0; _key4 < _len4; _key4++) {
+ args[_key4] = arguments[_key4];
+ }
+
+ return logByType('debug', level, args);
+ };
+
+ return log;
+}
+
+/**
+ * @file log.js
+ * @module log
+ */
+var log$1 = createLogger$1('VIDEOJS');
+var createLogger = log$1.createLogger;
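+// Editor's note: illustrative usage of the logger created above (exposed publicly
+// as videojs.log); the output prefixes follow LogByTypeFactory:
+//   log$1('hello');                 // > VIDEOJS: hello
+//   log$1.level('debug');           // allow debug|log|warn|error output
+//   var playerLog = createLogger('player');
+//   playerLog.warn('stalled');      // > VIDEOJS: player: WARN: stalled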
+
+/**
+ * @file obj.js
+ * @module obj
+ */
+
+/**
+ * @callback obj:EachCallback
+ *
+ * @param {Mixed} value
+ * The current value of the key that is being iterated over.
+ *
+ * @param {string} key
+ * The current key of the object that is being iterated over.
+ */
+
+/**
+ * @callback obj:ReduceCallback
+ *
+ * @param {Mixed} accum
+ * The value that is accumulating over the reduce loop.
+ *
+ * @param {Mixed} value
+ * The current value of the key that is being iterated over.
+ *
+ * @param {string} key
+ * The current key of the object that is being iterated over.
+ *
+ * @return {Mixed}
+ * The new accumulated value.
+ */
+var toString = Object.prototype.toString;
+/**
+ * Get the keys of an Object
+ *
+ * @param {Object} object
+ * The Object to get the keys from
+ *
+ * @return {string[]}
+ * An array of the keys from the object. Returns an empty array if the
+ * object passed in was invalid or had no keys.
+ *
+ * @private
+ */
+
+var keys = function keys(object) {
+ return isObject(object) ? Object.keys(object) : [];
+};
+/**
+ * Array-like iteration for objects.
+ *
+ * @param {Object} object
+ * The object to iterate over
+ *
+ * @param {obj:EachCallback} fn
+ * The callback function which is called for each key in the object.
+ */
+
+
+function each(object, fn) {
+ keys(object).forEach(function (key) {
+ return fn(object[key], key);
+ });
+}
+/**
+ * Array-like reduce for objects.
+ *
+ * @param {Object} object
+ * The Object that you want to reduce.
+ *
+ * @param {Function} fn
+ * A callback function which is called for each key in the object. It
+ * receives the accumulated value and the per-iteration value and key
+ * as arguments.
+ *
+ * @param {Mixed} [initial = 0]
+ * Starting value
+ *
+ * @return {Mixed}
+ * The final accumulated value.
+ */
+
+function reduce(object, fn, initial) {
+ if (initial === void 0) {
+ initial = 0;
+ }
+
+ return keys(object).reduce(function (accum, key) {
+ return fn(accum, object[key], key);
+ }, initial);
+}
+/**
+ * Object.assign-style object shallow merge/extend.
+ *
+ * @param {Object} target
+ * @param {Object} ...sources
+ * @return {Object}
+ */
+
+function assign(target) {
+ for (var _len = arguments.length, sources = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
+ sources[_key - 1] = arguments[_key];
+ }
+
+ if (Object.assign) {
+ return _extends__default['default'].apply(void 0, [target].concat(sources));
+ }
+
+ sources.forEach(function (source) {
+ if (!source) {
+ return;
+ }
+
+ each(source, function (value, key) {
+ target[key] = value;
+ });
+ });
+ return target;
+}
+/**
+ * Returns whether a value is an object of any kind - including DOM nodes,
+ * arrays, regular expressions, etc. Not functions, though.
+ *
+ * This avoids the gotcha where using `typeof` on a `null` value
+ * results in `'object'`.
+ *
+ * @param {Object} value
+ * @return {boolean}
+ */
+
+function isObject(value) {
+ return !!value && typeof value === 'object';
+}
+/**
+ * Returns whether an object appears to be a "plain" object - that is, a
+ * direct instance of `Object`.
+ *
+ * @param {Object} value
+ * @return {boolean}
+ */
+
+function isPlain(value) {
+ return isObject(value) && toString.call(value) === '[object Object]' && value.constructor === Object;
+}
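+// Editor's note: a few illustrative calls to the object helpers above (values
+// chosen only for demonstration):
+//   assign({}, { autoplay: false }, { autoplay: true });            // => { autoplay: true }
+//   reduce({ a: 1, b: 2 }, function (sum, v) { return sum + v; });  // => 3
+//   isPlain(new Date());  // => false - only direct Object instances are "plain"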
+
+/**
+ * @file computed-style.js
+ * @module computed-style
+ */
+/**
+ * A safe getComputedStyle.
+ *
+ * This is needed because in Firefox, if the player is loaded in an iframe with
+ * `display:none`, then `getComputedStyle` returns `null`, so, we do a
+ * null-check to make sure that the player doesn't break in these cases.
+ *
+ * @function
+ * @param {Element} el
+ * The element you want the computed style of
+ *
+ * @param {string} prop
+ * The property name you want
+ *
+ * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
+ */
+
+function computedStyle(el, prop) {
+ if (!el || !prop) {
+ return '';
+ }
+
+ if (typeof window__default['default'].getComputedStyle === 'function') {
+ var computedStyleValue;
+
+ try {
+ computedStyleValue = window__default['default'].getComputedStyle(el);
+ } catch (e) {
+ return '';
+ }
+
+ return computedStyleValue ? computedStyleValue.getPropertyValue(prop) || computedStyleValue[prop] : '';
+ }
+
+ return '';
+}
+
+/**
+ * @file browser.js
+ * @module browser
+ */
+var USER_AGENT = window__default['default'].navigator && window__default['default'].navigator.userAgent || '';
+var webkitVersionMap = /AppleWebKit\/([\d.]+)/i.exec(USER_AGENT);
+var appleWebkitVersion = webkitVersionMap ? parseFloat(webkitVersionMap.pop()) : null;
+/**
+ * Whether or not this device is an iPod.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_IPOD = /iPod/i.test(USER_AGENT);
+/**
+ * The detected iOS version - or `null`.
+ *
+ * @static
+ * @const
+ * @type {string|null}
+ */
+
+var IOS_VERSION = function () {
+ var match = USER_AGENT.match(/OS (\d+)_/i);
+
+ if (match && match[1]) {
+ return match[1];
+ }
+
+ return null;
+}();
+/**
+ * Whether or not this is an Android device.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_ANDROID = /Android/i.test(USER_AGENT);
+/**
+ * The detected Android version - or `null`.
+ *
+ * @static
+ * @const
+ * @type {number|string|null}
+ */
+
+var ANDROID_VERSION = function () {
+ // This matches Android Major.Minor.Patch versions
+ // ANDROID_VERSION is Major.Minor as a Number, if Minor isn't available, then only Major is returned
+ var match = USER_AGENT.match(/Android (\d+)(?:\.(\d+))?(?:\.(\d+))*/i);
+
+ if (!match) {
+ return null;
+ }
+
+ var major = match[1] && parseFloat(match[1]);
+ var minor = match[2] && parseFloat(match[2]);
+
+ if (major && minor) {
+ return parseFloat(match[1] + '.' + match[2]);
+ } else if (major) {
+ return major;
+ }
+
+ return null;
+}();
+/**
+ * Whether or not this is a native Android browser.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_NATIVE_ANDROID = IS_ANDROID && ANDROID_VERSION < 5 && appleWebkitVersion < 537;
+/**
+ * Whether or not this is Mozilla Firefox.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_FIREFOX = /Firefox/i.test(USER_AGENT);
+/**
+ * Whether or not this is Microsoft Edge.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_EDGE = /Edg/i.test(USER_AGENT);
+/**
+ * Whether or not this is Google Chrome.
+ *
+ * This will also be `true` for Chrome on iOS, which will have different support
+ * as it is actually Safari under the hood.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_CHROME = !IS_EDGE && (/Chrome/i.test(USER_AGENT) || /CriOS/i.test(USER_AGENT));
+/**
+ * The detected Google Chrome version - or `null`.
+ *
+ * @static
+ * @const
+ * @type {number|null}
+ */
+
+var CHROME_VERSION = function () {
+ var match = USER_AGENT.match(/(Chrome|CriOS)\/(\d+)/);
+
+ if (match && match[2]) {
+ return parseFloat(match[2]);
+ }
+
+ return null;
+}();
+/**
+ * The detected Internet Explorer version - or `null`.
+ *
+ * @static
+ * @const
+ * @type {number|null}
+ */
+
+var IE_VERSION = function () {
+ var result = /MSIE\s(\d+)\.\d/.exec(USER_AGENT);
+ var version = result && parseFloat(result[1]);
+
+ if (!version && /Trident\/7.0/i.test(USER_AGENT) && /rv:11.0/.test(USER_AGENT)) {
+ // IE 11 has a different user agent string than other IE versions
+ version = 11.0;
+ }
+
+ return version;
+}();
+/**
+ * Whether or not this is desktop Safari.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_SAFARI = /Safari/i.test(USER_AGENT) && !IS_CHROME && !IS_ANDROID && !IS_EDGE;
+/**
+ * Whether or not this is a Windows machine.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_WINDOWS = /Windows/i.test(USER_AGENT);
+/**
+ * Whether or not this device is touch-enabled.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var TOUCH_ENABLED = Boolean(isReal() && ('ontouchstart' in window__default['default'] || window__default['default'].navigator.maxTouchPoints || window__default['default'].DocumentTouch && window__default['default'].document instanceof window__default['default'].DocumentTouch));
+/**
+ * Whether or not this device is an iPad.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_IPAD = /iPad/i.test(USER_AGENT) || IS_SAFARI && TOUCH_ENABLED && !/iPhone/i.test(USER_AGENT);
+/**
+ * Whether or not this device is an iPhone.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+// The Facebook app's UIWebView identifies as both an iPhone and iPad, so
+// to identify iPhones, we need to exclude iPads.
+// http://artsy.github.io/blog/2012/10/18/the-perils-of-ios-user-agent-sniffing/
+
+var IS_IPHONE = /iPhone/i.test(USER_AGENT) && !IS_IPAD;
+/**
+ * Whether or not this is an iOS device.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_IOS = IS_IPHONE || IS_IPAD || IS_IPOD;
+/**
+ * Whether or not this is any flavor of Safari - including iOS.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_ANY_SAFARI = (IS_SAFARI || IS_IOS) && !IS_CHROME;
+
+var browser = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ IS_IPOD: IS_IPOD,
+ IOS_VERSION: IOS_VERSION,
+ IS_ANDROID: IS_ANDROID,
+ ANDROID_VERSION: ANDROID_VERSION,
+ IS_NATIVE_ANDROID: IS_NATIVE_ANDROID,
+ IS_FIREFOX: IS_FIREFOX,
+ IS_EDGE: IS_EDGE,
+ IS_CHROME: IS_CHROME,
+ CHROME_VERSION: CHROME_VERSION,
+ IE_VERSION: IE_VERSION,
+ IS_SAFARI: IS_SAFARI,
+ IS_WINDOWS: IS_WINDOWS,
+ TOUCH_ENABLED: TOUCH_ENABLED,
+ IS_IPAD: IS_IPAD,
+ IS_IPHONE: IS_IPHONE,
+ IS_IOS: IS_IOS,
+ IS_ANY_SAFARI: IS_ANY_SAFARI
+});
+
+/**
+ * @file dom.js
+ * @module dom
+ */
+/**
+ * Detect if a value is a string with any non-whitespace characters.
+ *
+ * @private
+ * @param {string} str
+ * The string to check
+ *
+ * @return {boolean}
+ * Will be `true` if the string is non-blank, `false` otherwise.
+ *
+ */
+
+function isNonBlankString(str) {
+ // we use str.trim as it will trim any whitespace characters
+ // from the front or back of non-whitespace characters. aka
+ // Any string that contains non-whitespace characters will
+ // still contain them after `trim` but whitespace only strings
+ // will have a length of 0, failing this check.
+ return typeof str === 'string' && Boolean(str.trim());
+}
+/**
+ * Throws an error if the passed string has whitespace. This is used by
+ * class methods to be relatively consistent with the classList API.
+ *
+ * @private
+ * @param {string} str
+ * The string to check for whitespace.
+ *
+ * @throws {Error}
+ * Throws an error if there is whitespace in the string.
+ */
+
+
+function throwIfWhitespace(str) {
+ // str.indexOf instead of regex because str.indexOf is faster performance wise.
+ if (str.indexOf(' ') >= 0) {
+ throw new Error('class has illegal whitespace characters');
+ }
+}
+/**
+ * Produce a regular expression for matching a className within an elements className.
+ *
+ * @private
+ * @param {string} className
+ * The className to generate the RegExp for.
+ *
+ * @return {RegExp}
+ * The RegExp that will check for a specific `className` in an elements
+ * className.
+ */
+
+
+function classRegExp(className) {
+ return new RegExp('(^|\\s)' + className + '($|\\s)');
+}
+/**
+ * Whether the current DOM interface appears to be real (i.e. not simulated).
+ *
+ * @return {boolean}
+ * Will be `true` if the DOM appears to be real, `false` otherwise.
+ */
+
+
+function isReal() {
+ // Both document and window will never be undefined thanks to `global`.
+ return document__default['default'] === window__default['default'].document;
+}
+/**
+ * Determines, via duck typing, whether or not a value is a DOM element.
+ *
+ * @param {Mixed} value
+ * The value to check.
+ *
+ * @return {boolean}
+ * Will be `true` if the value is a DOM element, `false` otherwise.
+ */
+
+function isEl(value) {
+ return isObject(value) && value.nodeType === 1;
+}
+/**
+ * Determines if the current DOM is embedded in an iframe.
+ *
+ * @return {boolean}
+ * Will be `true` if the DOM is embedded in an iframe, `false`
+ * otherwise.
+ */
+
+function isInFrame() {
+ // We need a try/catch here because Safari will throw errors when attempting
+ // to get either `parent` or `self`
+ try {
+ return window__default['default'].parent !== window__default['default'].self;
+ } catch (x) {
+ return true;
+ }
+}
+/**
+ * Creates functions to query the DOM using a given method.
+ *
+ * @private
+ * @param {string} method
+ * The method to create the query with.
+ *
+ * @return {Function}
+ * The query method
+ */
+
+function createQuerier(method) {
+ return function (selector, context) {
+ if (!isNonBlankString(selector)) {
+ return document__default['default'][method](null);
+ }
+
+ if (isNonBlankString(context)) {
+ context = document__default['default'].querySelector(context);
+ }
+
+ var ctx = isEl(context) ? context : document__default['default'];
+ return ctx[method] && ctx[method](selector);
+ };
+}
+/**
+ * Creates an element and applies properties, attributes, and inserts content.
+ *
+ * @param {string} [tagName='div']
+ * Name of tag to be created.
+ *
+ * @param {Object} [properties={}]
+ * Element properties to be applied.
+ *
+ * @param {Object} [attributes={}]
+ * Element attributes to be applied.
+ *
+ * @param {module:dom~ContentDescriptor} content
+ * A content descriptor object.
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+function createEl(tagName, properties, attributes, content) {
+ if (tagName === void 0) {
+ tagName = 'div';
+ }
+
+ if (properties === void 0) {
+ properties = {};
+ }
+
+ if (attributes === void 0) {
+ attributes = {};
+ }
+
+ var el = document__default['default'].createElement(tagName);
+ Object.getOwnPropertyNames(properties).forEach(function (propName) {
+ var val = properties[propName]; // See #2176
+ // We originally were accepting both properties and attributes in the
+ // same object, but that doesn't work so well.
+
+ if (propName.indexOf('aria-') !== -1 || propName === 'role' || propName === 'type') {
+ log$1.warn('Setting attributes in the second argument of createEl()\n' + 'has been deprecated. Use the third argument instead.\n' + ("createEl(type, properties, attributes). Attempting to set " + propName + " to " + val + "."));
+ el.setAttribute(propName, val); // Handle textContent since it's not supported everywhere and we have a
+ // method for it.
+ } else if (propName === 'textContent') {
+ textContent(el, val);
+ } else if (el[propName] !== val || propName === 'tabIndex') {
+ el[propName] = val;
+ }
+ });
+ Object.getOwnPropertyNames(attributes).forEach(function (attrName) {
+ el.setAttribute(attrName, attributes[attrName]);
+ });
+
+ if (content) {
+ appendContent(el, content);
+ }
+
+ return el;
+}
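+// Editor's note: illustrative use of createEl (exposed publicly as
+// videojs.dom.createEl); properties go in the second argument, attributes in the
+// third (the class and data attribute below are chosen only for demonstration):
+//   var btn = createEl('button',
+//     { className: 'vjs-big-play-button', textContent: 'Play' },
+//     { 'data-id': '42' });
+//   // => <button class="vjs-big-play-button" data-id="42">Play</button>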
+/**
+ * Injects text into an element, replacing any existing contents entirely.
+ *
+ * @param {Element} el
+ * The element to add text content into
+ *
+ * @param {string} text
+ * The text content to add.
+ *
+ * @return {Element}
+ * The element with added text content.
+ */
+
+function textContent(el, text) {
+ if (typeof el.textContent === 'undefined') {
+ el.innerText = text;
+ } else {
+ el.textContent = text;
+ }
+
+ return el;
+}
+/**
+ * Insert an element as the first child node of another
+ *
+ * @param {Element} child
+ * Element to insert
+ *
+ * @param {Element} parent
+ * Element to insert child into
+ */
+
+function prependTo(child, parent) {
+ if (parent.firstChild) {
+ parent.insertBefore(child, parent.firstChild);
+ } else {
+ parent.appendChild(child);
+ }
+}
+/**
+ * Check if an element has a class name.
+ *
+ * @param {Element} element
+ * Element to check
+ *
+ * @param {string} classToCheck
+ * Class name to check for
+ *
+ * @return {boolean}
+ * Will be `true` if the element has a class, `false` otherwise.
+ *
+ * @throws {Error}
+ * Throws an error if `classToCheck` has white space.
+ */
+
+function hasClass(element, classToCheck) {
+ throwIfWhitespace(classToCheck);
+
+ if (element.classList) {
+ return element.classList.contains(classToCheck);
+ }
+
+ return classRegExp(classToCheck).test(element.className);
+}
+/**
+ * Add a class name to an element.
+ *
+ * @param {Element} element
+ * Element to add class name to.
+ *
+ * @param {string} classToAdd
+ * Class name to add.
+ *
+ * @return {Element}
+ * The DOM element with the added class name.
+ */
+
+function addClass(element, classToAdd) {
+ if (element.classList) {
+ element.classList.add(classToAdd); // Don't need to `throwIfWhitespace` here because `hasClass` will do it
+ // in the case of classList not being supported.
+ } else if (!hasClass(element, classToAdd)) {
+ element.className = (element.className + ' ' + classToAdd).trim();
+ }
+
+ return element;
+}
+/**
+ * Remove a class name from an element.
+ *
+ * @param {Element} element
+ * Element to remove a class name from.
+ *
+ * @param {string} classToRemove
+ * Class name to remove
+ *
+ * @return {Element}
+ * The DOM element with class name removed.
+ */
+
+function removeClass(element, classToRemove) {
+ // Protect in case the player gets disposed
+ if (!element) {
+ log$1.warn("removeClass was called with an element that doesn't exist");
+ return null;
+ }
+
+ if (element.classList) {
+ element.classList.remove(classToRemove);
+ } else {
+ throwIfWhitespace(classToRemove);
+ element.className = element.className.split(/\s+/).filter(function (c) {
+ return c !== classToRemove;
+ }).join(' ');
+ }
+
+ return element;
+}
+/**
+ * The callback definition for toggleClass.
+ *
+ * @callback module:dom~PredicateCallback
+ * @param {Element} element
+ * The DOM element of the Component.
+ *
+ * @param {string} classToToggle
+ * The `className` that wants to be toggled
+ *
+ * @return {boolean|undefined}
+ * If `true` is returned, the `classToToggle` will be added to the
+ * `element`. If `false`, the `classToToggle` will be removed from
+ * the `element`. If `undefined`, the callback will be ignored.
+ */
+
+/**
+ * Adds or removes a class name to/from an element depending on an optional
+ * condition or the presence/absence of the class name.
+ *
+ * @param {Element} element
+ * The element to toggle a class name on.
+ *
+ * @param {string} classToToggle
+ * The class that should be toggled.
+ *
+ * @param {boolean|module:dom~PredicateCallback} [predicate]
+ * See the return value for {@link module:dom~PredicateCallback}
+ *
+ * @return {Element}
+ * The element with a class that has been toggled.
+ */
+
+function toggleClass(element, classToToggle, predicate) {
+ // This CANNOT use `classList` internally because IE11 does not support the
+ // second parameter to the `classList.toggle()` method! Which is fine because
+ // `classList` will be used by the add/remove functions.
+ var has = hasClass(element, classToToggle);
+
+ if (typeof predicate === 'function') {
+ predicate = predicate(element, classToToggle);
+ }
+
+ if (typeof predicate !== 'boolean') {
+ predicate = !has;
+ } // If the necessary class operation matches the current state of the
+ // element, no action is required.
+
+
+ if (predicate === has) {
+ return;
+ }
+
+ if (predicate) {
+ addClass(element, classToToggle);
+ } else {
+ removeClass(element, classToToggle);
+ }
+
+ return element;
+}
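+// Editor's note (illustrative): toggleClass can flip based on the current state,
+// or be forced with a boolean / predicate third argument:
+//   toggleClass(el, 'vjs-hidden');         // add if missing, remove if present
+//   toggleClass(el, 'vjs-hidden', true);   // force-add (no-op if already present)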
+/**
+ * Apply attributes to an HTML element.
+ *
+ * @param {Element} el
+ * Element to add attributes to.
+ *
+ * @param {Object} [attributes]
+ * Attributes to be applied.
+ */
+
+function setAttributes(el, attributes) {
+ Object.getOwnPropertyNames(attributes).forEach(function (attrName) {
+ var attrValue = attributes[attrName];
+
+ if (attrValue === null || typeof attrValue === 'undefined' || attrValue === false) {
+ el.removeAttribute(attrName);
+ } else {
+ el.setAttribute(attrName, attrValue === true ? '' : attrValue);
+ }
+ });
+}
+/**
+ * Get an element's attribute values, as defined on the HTML tag.
+ *
+ * Attributes are not the same as properties. They're defined on the tag
+ * or with setAttribute.
+ *
+ * @param {Element} tag
+ * Element from which to get tag attributes.
+ *
+ * @return {Object}
+ * All attributes of the element. Boolean attributes will be `true` or
+ * `false`, others will be strings.
+ */
+
+function getAttributes(tag) {
+ var obj = {}; // known boolean attributes
+ // we can check for matching boolean properties, but not all browsers
+ // and not all tags know about these attributes, so, we still want to check them manually
+
+ var knownBooleans = ',' + 'autoplay,controls,playsinline,loop,muted,default,defaultMuted' + ',';
+
+ if (tag && tag.attributes && tag.attributes.length > 0) {
+ var attrs = tag.attributes;
+
+ for (var i = attrs.length - 1; i >= 0; i--) {
+ var attrName = attrs[i].name;
+ var attrVal = attrs[i].value; // check for known booleans
+ // the matching element property will return a value for typeof
+
+ if (typeof tag[attrName] === 'boolean' || knownBooleans.indexOf(',' + attrName + ',') !== -1) {
+ // the value of an included boolean attribute is typically an empty
+ // string ('') which would equal false if we just check for a false value.
+ // we also don't want to support bad code like autoplay='false'
+ attrVal = attrVal !== null ? true : false;
+ }
+
+ obj[attrName] = attrVal;
+ }
+ }
+
+ return obj;
+}
+/**
+ * Get the value of an element's attribute.
+ *
+ * @param {Element} el
+ * A DOM element.
+ *
+ * @param {string} attribute
+ * Attribute to get the value of.
+ *
+ * @return {string}
+ * The value of the attribute.
+ */
+
+function getAttribute(el, attribute) {
+ return el.getAttribute(attribute);
+}
+/**
+ * Set the value of an element's attribute.
+ *
+ * @param {Element} el
+ * A DOM element.
+ *
+ * @param {string} attribute
+ * Attribute to set.
+ *
+ * @param {string} value
+ * Value to set the attribute to.
+ */
+
+function setAttribute(el, attribute, value) {
+ el.setAttribute(attribute, value);
+}
+/**
+ * Remove an element's attribute.
+ *
+ * @param {Element} el
+ * A DOM element.
+ *
+ * @param {string} attribute
+ * Attribute to remove.
+ */
+
+function removeAttribute(el, attribute) {
+ el.removeAttribute(attribute);
+}
+/**
+ * Attempt to block the ability to select text.
+ */
+
+function blockTextSelection() {
+ document__default['default'].body.focus();
+
+ document__default['default'].onselectstart = function () {
+ return false;
+ };
+}
+/**
+ * Turn off text selection blocking.
+ */
+
+function unblockTextSelection() {
+ document__default['default'].onselectstart = function () {
+ return true;
+ };
+}
+/**
+ * Identical to the native `getBoundingClientRect` function, but ensures that
+ * the method is supported at all (it is in all browsers we claim to support)
+ * and that the element is in the DOM before continuing.
+ *
+ * This wrapper function also shims properties which are not provided by some
+ * older browsers (namely, IE8).
+ *
+ * Additionally, some browsers do not support adding properties to a
+ * `ClientRect`/`DOMRect` object; so, we shallow-copy it with the standard
+ * properties (except `x` and `y` which are not widely supported). This helps
+ * avoid implementations where keys are non-enumerable.
+ *
+ * @param {Element} el
+ * Element whose `ClientRect` we want to calculate.
+ *
+ * @return {Object|undefined}
+ * Always returns a plain object - or `undefined` if it cannot.
+ */
+
+function getBoundingClientRect(el) {
+ if (el && el.getBoundingClientRect && el.parentNode) {
+ var rect = el.getBoundingClientRect();
+ var result = {};
+ ['bottom', 'height', 'left', 'right', 'top', 'width'].forEach(function (k) {
+ if (rect[k] !== undefined) {
+ result[k] = rect[k];
+ }
+ });
+
+ if (!result.height) {
+ result.height = parseFloat(computedStyle(el, 'height'));
+ }
+
+ if (!result.width) {
+ result.width = parseFloat(computedStyle(el, 'width'));
+ }
+
+ return result;
+ }
+}
+/**
+ * Represents the position of a DOM element on the page.
+ *
+ * @typedef {Object} module:dom~Position
+ *
+ * @property {number} left
+ * Pixels to the left.
+ *
+ * @property {number} top
+ * Pixels from the top.
+ */
+
+/**
+ * Get the position of an element in the DOM.
+ *
+ * Uses `getBoundingClientRect` technique from John Resig.
+ *
+ * @see http://ejohn.org/blog/getboundingclientrect-is-awesome/
+ *
+ * @param {Element} el
+ * Element from which to get offset.
+ *
+ * @return {module:dom~Position}
+ * The position of the element that was passed in.
+ */
+
+function findPosition(el) {
+ if (!el || el && !el.offsetParent) {
+ return {
+ left: 0,
+ top: 0,
+ width: 0,
+ height: 0
+ };
+ }
+
+ var width = el.offsetWidth;
+ var height = el.offsetHeight;
+ var left = 0;
+ var top = 0;
+
+ while (el.offsetParent && el !== document__default['default'][FullscreenApi.fullscreenElement]) {
+ left += el.offsetLeft;
+ top += el.offsetTop;
+ el = el.offsetParent;
+ }
+
+ return {
+ left: left,
+ top: top,
+ width: width,
+ height: height
+ };
+}
+/**
+ * Represents x and y coordinates for a DOM element or mouse pointer.
+ *
+ * @typedef {Object} module:dom~Coordinates
+ *
+ * @property {number} x
+ * x coordinate in pixels
+ *
+ * @property {number} y
+ * y coordinate in pixels
+ */
+
+/**
+ * Get the pointer position within an element.
+ *
+ * The base of the coordinates is the bottom left of the element.
+ *
+ * @param {Element} el
+ * Element on which to get the pointer position on.
+ *
+ * @param {EventTarget~Event} event
+ * Event object.
+ *
+ * @return {module:dom~Coordinates}
+ * A coordinates object corresponding to the mouse position.
+ *
+ */
+
+function getPointerPosition(el, event) {
+ var translated = {
+ x: 0,
+ y: 0
+ };
+
+ if (IS_IOS) {
+ var item = el;
+
+ while (item && item.nodeName.toLowerCase() !== 'html') {
+ var transform = computedStyle(item, 'transform');
+
+ if (/^matrix/.test(transform)) {
+ var values = transform.slice(7, -1).split(/,\s/).map(Number);
+ translated.x += values[4];
+ translated.y += values[5];
+ } else if (/^matrix3d/.test(transform)) {
+ var _values = transform.slice(9, -1).split(/,\s/).map(Number);
+
+ translated.x += _values[12];
+ translated.y += _values[13];
+ }
+
+ item = item.parentNode;
+ }
+ }
+
+ var position = {};
+ var boxTarget = findPosition(event.target);
+ var box = findPosition(el);
+ var boxW = box.width;
+ var boxH = box.height;
+ var offsetY = event.offsetY - (box.top - boxTarget.top);
+ var offsetX = event.offsetX - (box.left - boxTarget.left);
+
+ if (event.changedTouches) {
+ offsetX = event.changedTouches[0].pageX - box.left;
+ offsetY = event.changedTouches[0].pageY + box.top;
+
+ if (IS_IOS) {
+ offsetX -= translated.x;
+ offsetY -= translated.y;
+ }
+ }
+
+ position.y = 1 - Math.max(0, Math.min(1, offsetY / boxH));
+ position.x = Math.max(0, Math.min(1, offsetX / boxW));
+ return position;
+}
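+// Editor's note: the returned coordinates are normalized to the element's box -
+// x runs 0..1 left-to-right and y runs 0..1 bottom-to-top (the bottom left is the
+// origin), e.g. a click in the horizontal middle of a seek bar gives x ~= 0.5.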
+/**
+ * Determines, via duck typing, whether or not a value is a text node.
+ *
+ * @param {Mixed} value
+ * Check if this value is a text node.
+ *
+ * @return {boolean}
+ * Will be `true` if the value is a text node, `false` otherwise.
+ */
+
+function isTextNode(value) {
+ return isObject(value) && value.nodeType === 3;
+}
+/**
+ * Empties the contents of an element.
+ *
+ * @param {Element} el
+ * The element to empty children from
+ *
+ * @return {Element}
+ * The element with no children
+ */
+
+function emptyEl(el) {
+ while (el.firstChild) {
+ el.removeChild(el.firstChild);
+ }
+
+ return el;
+}
+/**
+ * This is a mixed value that describes content to be injected into the DOM
+ * via some method. It can be of the following types:
+ *
+ * Type | Description
+ * -----------|-------------
+ * `string` | The value will be normalized into a text node.
+ * `Element` | The value will be accepted as-is.
+ * `TextNode` | The value will be accepted as-is.
+ * `Array` | A one-dimensional array of strings, elements, text nodes, or functions. These functions should return a string, element, or text node (any other return value, like an array, will be ignored).
+ * `Function` | A function, which is expected to return a string, element, text node, or array - any of the other possible values described above. This means that a content descriptor could be a function that returns an array of functions, but those second-level functions must return strings, elements, or text nodes.
+ *
+ * @typedef {string|Element|TextNode|Array|Function} module:dom~ContentDescriptor
+ */
+
+/**
+ * Normalizes content for eventual insertion into the DOM.
+ *
+ * This allows a wide range of content definition methods, but helps protect
+ * from falling into the trap of simply writing to `innerHTML`, which could
+ * be an XSS concern.
+ *
+ * The content for an element can be passed in multiple types and
+ * combinations, whose behavior is as follows:
+ *
+ * @param {module:dom~ContentDescriptor} content
+ * A content descriptor value.
+ *
+ * @return {Array}
+ * All of the content that was passed in, normalized to an array of
+ * elements or text nodes.
+ */
+
+function normalizeContent(content) {
+ // First, invoke content if it is a function. If it produces an array,
+ // that needs to happen before normalization.
+ if (typeof content === 'function') {
+ content = content();
+ } // Next up, normalize to an array, so one or many items can be normalized,
+ // filtered, and returned.
+
+
+ return (Array.isArray(content) ? content : [content]).map(function (value) {
+ // First, invoke value if it is a function to produce a new value,
+ // which will be subsequently normalized to a Node of some kind.
+ if (typeof value === 'function') {
+ value = value();
+ }
+
+ if (isEl(value) || isTextNode(value)) {
+ return value;
+ }
+
+ if (typeof value === 'string' && /\S/.test(value)) {
+ return document__default['default'].createTextNode(value);
+ }
+ }).filter(function (value) {
+ return value;
+ });
+}
+/**
+ * Normalizes and appends content to an element.
+ *
+ * @param {Element} el
+ * Element to append normalized content to.
+ *
+ * @param {module:dom~ContentDescriptor} content
+ * A content descriptor value.
+ *
+ * @return {Element}
+ * The element with appended normalized content.
+ */
+
+function appendContent(el, content) {
+ normalizeContent(content).forEach(function (node) {
+ return el.appendChild(node);
+ });
+ return el;
+}
+/**
+ * Normalizes and inserts content into an element; this is identical to
+ * `appendContent()`, except it empties the element first.
+ *
+ * @param {Element} el
+ * Element to insert normalized content into.
+ *
+ * @param {module:dom~ContentDescriptor} content
+ * A content descriptor value.
+ *
+ * @return {Element}
+ * The element with inserted normalized content.
+ */
+
+function insertContent(el, content) {
+ return appendContent(emptyEl(el), content);
+}
+/**
+ * Check if an event was a single left click.
+ *
+ * @param {EventTarget~Event} event
+ * Event object.
+ *
+ * @return {boolean}
+ * Will be `true` if a single left click, `false` otherwise.
+ */
+
+function isSingleLeftClick(event) {
+ // Note: if you create something draggable, be sure to
+ // call it on both `mousedown` and `mousemove` event,
+ // otherwise `mousedown` should be enough for a button
+ if (event.button === undefined && event.buttons === undefined) {
+ // Why do we need `buttons`?
+ // Because a middle-mouse click sometimes reports:
+ // e.button === 0 and e.buttons === 4
+ // Furthermore, we want to prevent combination clicks, e.g.
+ // HOLDING middle mouse then left clicking, which would be
+ // e.button === 0, e.buttons === 5
+ // so checking `button` alone is not going to work.
+ // So what does this block do?
+ // It handles Chrome's `simulate mobile devices` mode,
+ // which we want to support as well.
+ return true;
+ }
+
+ if (event.button === 0 && event.buttons === undefined) {
+ // Touch screens: on some specific devices (Safari on iOS, BlackBerry, ...),
+ // `buttons` is not populated at all
+ return true;
+ } // `mouseup` event on a single left click has
+ // `button` and `buttons` equal to 0
+
+
+ if (event.type === 'mouseup' && event.button === 0 && event.buttons === 0) {
+ return true;
+ }
+
+ if (event.button !== 0 || event.buttons !== 1) {
+ // This is the reason we have the if/else blocks above:
+ // any special case we can catch and let slide is handled there,
+ // so by the time we get here this is definitely
+ // not a single left click
+ return false;
+ }
+
+ return true;
+}
+/**
+ * Finds a single DOM element matching `selector` within the optional
+ * `context` of another DOM element (defaulting to `document`).
+ *
+ * @param {string} selector
+ * A valid CSS selector, which will be passed to `querySelector`.
+ *
+ * @param {Element|String} [context=document]
+ * A DOM element within which to query. Can also be a selector
+ * string in which case the first matching element will be used
+ * as context. If missing (or no element matches selector), falls
+ * back to `document`.
+ *
+ * @return {Element|null}
+ * The element that was found or null.
+ */
+
+var $ = createQuerier('querySelector');
+/**
+ * Finds all DOM elements matching `selector` within the optional
+ * `context` of another DOM element (defaulting to `document`).
+ *
+ * @param {string} selector
+ * A valid CSS selector, which will be passed to `querySelectorAll`.
+ *
+ * @param {Element|String} [context=document]
+ * A DOM element within which to query. Can also be a selector
+ * string in which case the first matching element will be used
+ * as context. If missing (or no element matches selector), falls
+ * back to `document`.
+ *
+ * @return {NodeList}
+ * A list of the elements that were found. Will be empty if none
+ * were found.
+ *
+ */
+
+var $$ = createQuerier('querySelectorAll');
+
+var Dom = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ isReal: isReal,
+ isEl: isEl,
+ isInFrame: isInFrame,
+ createEl: createEl,
+ textContent: textContent,
+ prependTo: prependTo,
+ hasClass: hasClass,
+ addClass: addClass,
+ removeClass: removeClass,
+ toggleClass: toggleClass,
+ setAttributes: setAttributes,
+ getAttributes: getAttributes,
+ getAttribute: getAttribute,
+ setAttribute: setAttribute,
+ removeAttribute: removeAttribute,
+ blockTextSelection: blockTextSelection,
+ unblockTextSelection: unblockTextSelection,
+ getBoundingClientRect: getBoundingClientRect,
+ findPosition: findPosition,
+ getPointerPosition: getPointerPosition,
+ isTextNode: isTextNode,
+ emptyEl: emptyEl,
+ normalizeContent: normalizeContent,
+ appendContent: appendContent,
+ insertContent: insertContent,
+ isSingleLeftClick: isSingleLeftClick,
+ $: $,
+ $$: $$
+});
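+// Editor's note: illustrative use of the two queriers defined above (exposed
+// publicly as videojs.dom.$ and videojs.dom.$$; the '#player-wrapper' selector is
+// only an example):
+//   $('.video-js');                  // first matching element, or null
+//   $$('video', '#player-wrapper');  // NodeList of matches scoped to a context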
+
+/**
+ * @file setup.js - Functions for setting up a player without
+ * user interaction based on the data-setup `attribute` of the video tag.
+ *
+ * @module setup
+ */
+var _windowLoaded = false;
+var videojs$1;
+/**
+ * Set up any tags that have a data-setup `attribute` when the player is started.
+ */
+
+var autoSetup = function autoSetup() {
+ if (videojs$1.options.autoSetup === false) {
+ return;
+ }
+
+ var vids = Array.prototype.slice.call(document__default['default'].getElementsByTagName('video'));
+ var audios = Array.prototype.slice.call(document__default['default'].getElementsByTagName('audio'));
+ var divs = Array.prototype.slice.call(document__default['default'].getElementsByTagName('video-js'));
+ var mediaEls = vids.concat(audios, divs); // Check if any media elements exist
+
+ if (mediaEls && mediaEls.length > 0) {
+ for (var i = 0, e = mediaEls.length; i < e; i++) {
+ var mediaEl = mediaEls[i]; // Check if element exists, has getAttribute func.
+
+ if (mediaEl && mediaEl.getAttribute) {
+ // Make sure this player hasn't already been set up.
+ if (mediaEl.player === undefined) {
+ var options = mediaEl.getAttribute('data-setup'); // Check if data-setup attr exists.
+ // We only auto-setup if they've added the data-setup attr.
+
+ if (options !== null) {
+ // Create new video.js instance.
+ videojs$1(mediaEl);
+ }
+ } // If getAttribute isn't defined, we need to wait for the DOM.
+
+ } else {
+ autoSetupTimeout(1);
+ break;
+ }
+ } // No videos were found, so keep looping unless page is finished loading.
+
+ } else if (!_windowLoaded) {
+ autoSetupTimeout(1);
+ }
+};
+/**
+ * Wait until the page is loaded before running autoSetup. This will be called in
+ * autoSetup if the window has not finished loading yet.
+ *
+ * @param {number} wait
+ * How long to wait in ms
+ *
+ * @param {module:videojs} [vjs]
+ * The videojs library function
+ */
+
+
+function autoSetupTimeout(wait, vjs) {
+ // Protect against breakage in non-browser environments
+ if (!isReal()) {
+ return;
+ }
+
+ if (vjs) {
+ videojs$1 = vjs;
+ }
+
+ window__default['default'].setTimeout(autoSetup, wait);
+}
+/**
+ * Used to set the internal tracking of window loaded state to true.
+ *
+ * @private
+ */
+
+
+function setWindowLoaded() {
+ _windowLoaded = true;
+ window__default['default'].removeEventListener('load', setWindowLoaded);
+}
+
+if (isReal()) {
+ if (document__default['default'].readyState === 'complete') {
+ setWindowLoaded();
+ } else {
+ /**
+ * Listen for the load event on window, and set _windowLoaded to true.
+ *
+ * We use a standard event listener here to avoid incrementing the GUID
+ * before any players are created.
+ *
+ * @listens load
+ */
+ window__default['default'].addEventListener('load', setWindowLoaded);
+ }
+}
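+// Editor's note: autoSetup above only initializes media elements that carry a
+// data-setup attribute, e.g. (illustrative markup, file name chosen for demonstration):
+//   <video class="video-js" data-setup='{"controls": true}'>
+//     <source src="movie.mp4" type="video/mp4">
+//   </video>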
+
+/**
+ * @file stylesheet.js
+ * @module stylesheet
+ */
+/**
+ * Create a DOM style element given a className for it.
+ *
+ * @param {string} className
+ * The className to add to the created style element.
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+var createStyleElement = function createStyleElement(className) {
+ var style = document__default['default'].createElement('style');
+ style.className = className;
+ return style;
+};
+/**
+ * Add text to a DOM element.
+ *
+ * @param {Element} el
+ * The Element to add text content to.
+ *
+ * @param {string} content
+ * The text to add to the element.
+ */
+
+var setTextContent = function setTextContent(el, content) {
+ if (el.styleSheet) {
+ el.styleSheet.cssText = content;
+ } else {
+ el.textContent = content;
+ }
+};
+
+/**
+ * @file guid.js
+ * @module guid
+ */
+// Default value for GUIDs. This allows us to reset the GUID counter in tests.
+//
+// The initial GUID is 3 because some users have come to rely on the first
+// default player ID ending up as `vjs_video_3`.
+//
+// See: https://github.com/videojs/video.js/pull/6216
+var _initialGuid = 3;
+/**
+ * Unique ID for an element or function
+ *
+ * @type {Number}
+ */
+
+var _guid = _initialGuid;
+/**
+ * Get a unique auto-incrementing ID by number that has not been returned before.
+ *
+ * @return {number}
+ * A new unique ID.
+ */
+
+function newGUID() {
+ return _guid++;
+}
+
+/**
+ * @file dom-data.js
+ * @module dom-data
+ */
+var FakeWeakMap;
+
+if (!window__default['default'].WeakMap) {
+ FakeWeakMap = /*#__PURE__*/function () {
+ function FakeWeakMap() {
+ this.vdata = 'vdata' + Math.floor(window__default['default'].performance && window__default['default'].performance.now() || Date.now());
+ this.data = {};
+ }
+
+ var _proto = FakeWeakMap.prototype;
+
+ _proto.set = function set(key, value) {
+ var access = key[this.vdata] || newGUID();
+
+ if (!key[this.vdata]) {
+ key[this.vdata] = access;
+ }
+
+ this.data[access] = value;
+ return this;
+ };
+
+ _proto.get = function get(key) {
+ var access = key[this.vdata]; // we have data, return it
+
+ if (access) {
+ return this.data[access];
+ } // we don't have data, return nothing.
+ // return undefined explicitly as that's the contract for this method
+
+
+ log$1('We have no data for this element', key);
+ return undefined;
+ };
+
+ _proto.has = function has(key) {
+ var access = key[this.vdata];
+ return access in this.data;
+ };
+
+ _proto["delete"] = function _delete(key) {
+ var access = key[this.vdata];
+
+ if (access) {
+ delete this.data[access];
+ delete key[this.vdata];
+ }
+ };
+
+ return FakeWeakMap;
+ }();
+}
+/**
+ * Element Data Store.
+ *
+ * Allows for binding data to an element without putting it directly on the
+ * element. Ex. Event listeners are stored here.
+ * (also from jsninja.com, slightly modified and updated for closure compiler)
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+var DomData = window__default['default'].WeakMap ? new WeakMap() : new FakeWeakMap();
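+// A short sketch of how DomData is used throughout this file: DOM nodes (or
+// plain objects) are the keys, and the per-element caches built by the event
+// helpers below are the values.
+//
+//   var el = document__default['default'].createElement('div');
+//   if (!DomData.has(el)) {
+//     DomData.set(el, {});
+//   }
+//   DomData.get(el).handlers = {};
+//   DomData["delete"](el);
+//
+// When WeakMap is missing, FakeWeakMap stamps the key with a GUID under its
+// `vdata`-prefixed property so lookups stay constant time.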
+
+/**
+ * @file events.js. An Event System (John Resig - Secrets of a JS Ninja http://jsninja.com/)
+ * (Original book version wasn't completely usable, so fixed some things and made Closure Compiler compatible)
+ * This should work very similarly to jQuery's events; however, it's based off the book version, which isn't as
+ * robust as jQuery's, so there are probably some differences.
+ *
+ * @file events.js
+ * @module events
+ */
+/**
+ * Clean up the listener cache and dispatchers
+ *
+ * @param {Element|Object} elem
+ * Element to clean up
+ *
+ * @param {string} type
+ * Type of event to clean up
+ */
+
+function _cleanUpEvents(elem, type) {
+ if (!DomData.has(elem)) {
+ return;
+ }
+
+ var data = DomData.get(elem); // Remove the events of a particular type if there are none left
+
+ if (data.handlers[type].length === 0) {
+ delete data.handlers[type]; // data.handlers[type] = null;
+ // Setting to null was causing an error with data.handlers
+ // Remove the meta-handler from the element
+
+ if (elem.removeEventListener) {
+ elem.removeEventListener(type, data.dispatcher, false);
+ } else if (elem.detachEvent) {
+ elem.detachEvent('on' + type, data.dispatcher);
+ }
+ } // Remove the events object if there are no types left
+
+
+ if (Object.getOwnPropertyNames(data.handlers).length <= 0) {
+ delete data.handlers;
+ delete data.dispatcher;
+ delete data.disabled;
+ } // Finally remove the element data if there is no data left
+
+
+ if (Object.getOwnPropertyNames(data).length === 0) {
+ DomData["delete"](elem);
+ }
+}
+/**
+ * Loops through an array of event types and calls the requested method for each type.
+ *
+ * @param {Function} fn
+ * The event method we want to use.
+ *
+ * @param {Element|Object} elem
+ * Element or object to bind listeners to
+ *
+ * @param {string} type
+ * Type of event to bind to.
+ *
+ * @param {EventTarget~EventListener} callback
+ * Event listener.
+ */
+
+
+function _handleMultipleEvents(fn, elem, types, callback) {
+ types.forEach(function (type) {
+ // Call the event method for each one of the types
+ fn(elem, type, callback);
+ });
+}
+/**
+ * Fix a native event to have standard property values
+ *
+ * @param {Object} event
+ * Event object to fix.
+ *
+ * @return {Object}
+ * Fixed event object.
+ */
+
+
+function fixEvent(event) {
+ if (event.fixed_) {
+ return event;
+ }
+
+ function returnTrue() {
+ return true;
+ }
+
+ function returnFalse() {
+ return false;
+ } // Test if fixing up is needed
+  // We used to check `!event.stopPropagation` instead of `isPropagationStopped`,
+  // but native events return true for `stopPropagation` while lacking other
+  // expected methods like `isPropagationStopped`. Seems to be a problem with
+  // the JavaScript Ninja code, so we're just overriding all events now.
+
+
+ if (!event || !event.isPropagationStopped || !event.isImmediatePropagationStopped) {
+ var old = event || window__default['default'].event;
+    event = {}; // Clone the old object so that we can modify the values
+ // IE8 Doesn't like when you mess with native event properties
+ // Firefox returns false for event.hasOwnProperty('type') and other props
+ // which makes copying more difficult.
+ // TODO: Probably best to create a whitelist of event props
+
+ for (var key in old) {
+ // Safari 6.0.3 warns you if you try to copy deprecated layerX/Y
+ // Chrome warns you if you try to copy deprecated keyboardEvent.keyLocation
+ // and webkitMovementX/Y
+ // Lighthouse complains if Event.path is copied
+ if (key !== 'layerX' && key !== 'layerY' && key !== 'keyLocation' && key !== 'webkitMovementX' && key !== 'webkitMovementY' && key !== 'path') {
+ // Chrome 32+ warns if you try to copy deprecated returnValue, but
+ // we still want to if preventDefault isn't supported (IE8).
+ if (!(key === 'returnValue' && old.preventDefault)) {
+ event[key] = old[key];
+ }
+ }
+ } // The event occurred on this element
+
+
+ if (!event.target) {
+ event.target = event.srcElement || document__default['default'];
+ } // Handle which other element the event is related to
+
+
+ if (!event.relatedTarget) {
+ event.relatedTarget = event.fromElement === event.target ? event.toElement : event.fromElement;
+ } // Stop the default browser action
+
+
+ event.preventDefault = function () {
+ if (old.preventDefault) {
+ old.preventDefault();
+ }
+
+ event.returnValue = false;
+ old.returnValue = false;
+ event.defaultPrevented = true;
+ };
+
+ event.defaultPrevented = false; // Stop the event from bubbling
+
+ event.stopPropagation = function () {
+ if (old.stopPropagation) {
+ old.stopPropagation();
+ }
+
+ event.cancelBubble = true;
+ old.cancelBubble = true;
+ event.isPropagationStopped = returnTrue;
+ };
+
+ event.isPropagationStopped = returnFalse; // Stop the event from bubbling and executing other handlers
+
+ event.stopImmediatePropagation = function () {
+ if (old.stopImmediatePropagation) {
+ old.stopImmediatePropagation();
+ }
+
+ event.isImmediatePropagationStopped = returnTrue;
+ event.stopPropagation();
+ };
+
+ event.isImmediatePropagationStopped = returnFalse; // Handle mouse position
+
+ if (event.clientX !== null && event.clientX !== undefined) {
+ var doc = document__default['default'].documentElement;
+ var body = document__default['default'].body;
+ event.pageX = event.clientX + (doc && doc.scrollLeft || body && body.scrollLeft || 0) - (doc && doc.clientLeft || body && body.clientLeft || 0);
+ event.pageY = event.clientY + (doc && doc.scrollTop || body && body.scrollTop || 0) - (doc && doc.clientTop || body && body.clientTop || 0);
+ } // Handle key presses
+
+
+ event.which = event.charCode || event.keyCode; // Fix button for mouse clicks:
+ // 0 == left; 1 == middle; 2 == right
+
+ if (event.button !== null && event.button !== undefined) {
+ // The following is disabled because it does not pass videojs-standard
+ // and... yikes.
+
+ /* eslint-disable */
+ event.button = event.button & 1 ? 0 : event.button & 4 ? 1 : event.button & 2 ? 2 : 0;
+ /* eslint-enable */
+ }
+ }
+
+ event.fixed_ = true; // Returns fixed-up instance
+
+ return event;
+}
+/**
+ * Whether passive event listeners are supported
+ */
+
+var _supportsPassive;
+
+var supportsPassive = function supportsPassive() {
+ if (typeof _supportsPassive !== 'boolean') {
+ _supportsPassive = false;
+
+ try {
+ var opts = Object.defineProperty({}, 'passive', {
+ get: function get() {
+ _supportsPassive = true;
+ }
+ });
+ window__default['default'].addEventListener('test', null, opts);
+ window__default['default'].removeEventListener('test', null, opts);
+ } catch (e) {// disregard
+ }
+ }
+
+ return _supportsPassive;
+};
+/**
+ * Touch events Chrome expects to be passive
+ */
+
+
+var passiveEvents = ['touchstart', 'touchmove'];
+/**
+ * Add an event listener to an element.
+ * It stores the handler function in a separate cache object,
+ * adds a generic dispatcher to the element for that event type,
+ * and stamps the handler with a unique id (guid).
+ *
+ * @param {Element|Object} elem
+ * Element or object to bind listeners to
+ *
+ * @param {string|string[]} type
+ * Type of event to bind to.
+ *
+ * @param {EventTarget~EventListener} fn
+ * Event listener.
+ */
+
+function on(elem, type, fn) {
+ if (Array.isArray(type)) {
+ return _handleMultipleEvents(on, elem, type, fn);
+ }
+
+ if (!DomData.has(elem)) {
+ DomData.set(elem, {});
+ }
+
+ var data = DomData.get(elem); // We need a place to store all our handler data
+
+ if (!data.handlers) {
+ data.handlers = {};
+ }
+
+ if (!data.handlers[type]) {
+ data.handlers[type] = [];
+ }
+
+ if (!fn.guid) {
+ fn.guid = newGUID();
+ }
+
+ data.handlers[type].push(fn);
+
+ if (!data.dispatcher) {
+ data.disabled = false;
+
+ data.dispatcher = function (event, hash) {
+ if (data.disabled) {
+ return;
+ }
+
+ event = fixEvent(event);
+ var handlers = data.handlers[event.type];
+
+ if (handlers) {
+ // Copy handlers so if handlers are added/removed during the process it doesn't throw everything off.
+ var handlersCopy = handlers.slice(0);
+
+ for (var m = 0, n = handlersCopy.length; m < n; m++) {
+ if (event.isImmediatePropagationStopped()) {
+ break;
+ } else {
+ try {
+ handlersCopy[m].call(elem, event, hash);
+ } catch (e) {
+ log$1.error(e);
+ }
+ }
+ }
+ }
+ };
+ }
+
+ if (data.handlers[type].length === 1) {
+ if (elem.addEventListener) {
+ var options = false;
+
+ if (supportsPassive() && passiveEvents.indexOf(type) > -1) {
+ options = {
+ passive: true
+ };
+ }
+
+ elem.addEventListener(type, data.dispatcher, options);
+ } else if (elem.attachEvent) {
+ elem.attachEvent('on' + type, data.dispatcher);
+ }
+ }
+}
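+// Usage sketch for `on` (module-internal; components and players call it
+// through their own event methods):
+//
+//   var el = document__default['default'].getElementById('my-button');
+//   var handler = function (event) {
+//     // `event` has already been normalized by fixEvent()
+//   };
+//   on(el, 'click', handler);
+//   on(el, ['touchstart', 'touchmove'], handler); // registered as passive where supported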
+/**
+ * Removes event listeners from an element
+ *
+ * @param {Element|Object} elem
+ * Object to remove listeners from.
+ *
+ * @param {string|string[]} [type]
+ *        Type of listener to remove. Omit to remove all events from the element.
+ *
+ * @param {EventTarget~EventListener} [fn]
+ *        Specific listener to remove. Omit to remove all listeners for the event
+ *        type.
+ */
+
+function off(elem, type, fn) {
+ // Don't want to add a cache object through getElData if not needed
+ if (!DomData.has(elem)) {
+ return;
+ }
+
+ var data = DomData.get(elem); // If no events exist, nothing to unbind
+
+ if (!data.handlers) {
+ return;
+ }
+
+ if (Array.isArray(type)) {
+ return _handleMultipleEvents(off, elem, type, fn);
+ } // Utility function
+
+
+ var removeType = function removeType(el, t) {
+ data.handlers[t] = [];
+
+ _cleanUpEvents(el, t);
+ }; // Are we removing all bound events?
+
+
+ if (type === undefined) {
+ for (var t in data.handlers) {
+ if (Object.prototype.hasOwnProperty.call(data.handlers || {}, t)) {
+ removeType(elem, t);
+ }
+ }
+
+ return;
+ }
+
+ var handlers = data.handlers[type]; // If no handlers exist, nothing to unbind
+
+ if (!handlers) {
+ return;
+ } // If no listener was provided, remove all listeners for type
+
+
+ if (!fn) {
+ removeType(elem, type);
+ return;
+ } // We're only removing a single handler
+
+
+ if (fn.guid) {
+ for (var n = 0; n < handlers.length; n++) {
+ if (handlers[n].guid === fn.guid) {
+ handlers.splice(n--, 1);
+ }
+ }
+ }
+
+ _cleanUpEvents(elem, type);
+}
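+// Usage sketch for `off`, mirroring the `on` example above:
+//
+//   off(el, 'click', handler); // remove one specific listener
+//   off(el, 'click');          // remove every 'click' listener
+//   off(el);                   // remove everything bound through `on`
+//
+// Removal matches on the `guid` that `on` stamped onto the handler, so the
+// same function reference (or one sharing its guid) has to be passed back in.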
+/**
+ * Trigger an event for an element
+ *
+ * @param {Element|Object} elem
+ * Element to trigger an event on
+ *
+ * @param {EventTarget~Event|string} event
+ * A string (the type) or an event object with a type attribute
+ *
+ * @param {Object} [hash]
+ * data hash to pass along with the event
+ *
+ * @return {boolean|undefined}
+ * Returns the opposite of `defaultPrevented` if default was
+ * prevented. Otherwise, returns `undefined`
+ */
+
+function trigger(elem, event, hash) {
+ // Fetches element data and a reference to the parent (for bubbling).
+ // Don't want to add a data object to cache for every parent,
+ // so checking hasElData first.
+ var elemData = DomData.has(elem) ? DomData.get(elem) : {};
+ var parent = elem.parentNode || elem.ownerDocument; // type = event.type || event,
+ // handler;
+ // If an event name was passed as a string, creates an event out of it
+
+ if (typeof event === 'string') {
+ event = {
+ type: event,
+ target: elem
+ };
+ } else if (!event.target) {
+ event.target = elem;
+ } // Normalizes the event properties.
+
+
+ event = fixEvent(event); // If the passed element has a dispatcher, executes the established handlers.
+
+ if (elemData.dispatcher) {
+ elemData.dispatcher.call(elem, event, hash);
+ } // Unless explicitly stopped or the event does not bubble (e.g. media events)
+ // recursively calls this function to bubble the event up the DOM.
+
+
+ if (parent && !event.isPropagationStopped() && event.bubbles === true) {
+ trigger.call(null, parent, event, hash); // If at the top of the DOM, triggers the default action unless disabled.
+ } else if (!parent && !event.defaultPrevented && event.target && event.target[event.type]) {
+ if (!DomData.has(event.target)) {
+ DomData.set(event.target, {});
+ }
+
+ var targetData = DomData.get(event.target); // Checks if the target has a default action for this event.
+
+ if (event.target[event.type]) {
+ // Temporarily disables event dispatching on the target as we have already executed the handler.
+ targetData.disabled = true; // Executes the default action.
+
+ if (typeof event.target[event.type] === 'function') {
+ event.target[event.type]();
+ } // Re-enables event dispatching.
+
+
+ targetData.disabled = false;
+ }
+ } // Inform the triggerer if the default was prevented by returning false
+
+
+ return !event.defaultPrevented;
+}
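+// Usage sketch for `trigger`; the event can be a type string or an object
+// with a `type` key, and the optional hash reaches listeners as their second
+// argument:
+//
+//   trigger(el, 'tap');
+//   trigger(el, { type: 'tap', bubbles: true }, { touches: 1 });
+//
+// Only events with `bubbles === true` are re-triggered up the parentNode /
+// ownerDocument chain.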
+/**
+ * Trigger a listener only once for an event.
+ *
+ * @param {Element|Object} elem
+ * Element or object to bind to.
+ *
+ * @param {string|string[]} type
+ * Name/type of event
+ *
+ * @param {Event~EventListener} fn
+ * Event listener function
+ */
+
+function one(elem, type, fn) {
+ if (Array.isArray(type)) {
+ return _handleMultipleEvents(one, elem, type, fn);
+ }
+
+ var func = function func() {
+ off(elem, type, func);
+ fn.apply(this, arguments);
+  }; // copy the guid to the new function so it can be removed using the original function's ID
+
+
+ func.guid = fn.guid = fn.guid || newGUID();
+ on(elem, type, func);
+}
+/**
+ * Trigger a listener only once and then turn it off for all
+ * configured events
+ *
+ * @param {Element|Object} elem
+ * Element or object to bind to.
+ *
+ * @param {string|string[]} type
+ * Name/type of event
+ *
+ * @param {Event~EventListener} fn
+ * Event listener function
+ */
+
+function any(elem, type, fn) {
+ var func = function func() {
+ off(elem, type, func);
+ fn.apply(this, arguments);
+  }; // copy the guid to the new function so it can be removed using the original function's ID
+
+
+ func.guid = fn.guid = fn.guid || newGUID(); // multiple ons, but one off for everything
+
+ on(elem, type, func);
+}
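+// A sketch contrasting `one` and `any` when given multiple types:
+//
+//   one(el, ['play', 'pause'], handler); // fires once per type (up to twice)
+//   any(el, ['play', 'pause'], handler); // fires once, for whichever happens first
+//
+// Both wrappers reuse the original listener's guid, so
+// `off(el, ['play', 'pause'], handler)` still removes them.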
+
+var Events = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ fixEvent: fixEvent,
+ on: on,
+ off: off,
+ trigger: trigger,
+ one: one,
+ any: any
+});
+
+/**
+ * @file fn.js
+ * @module fn
+ */
+var UPDATE_REFRESH_INTERVAL = 30;
+/**
+ * Bind (a.k.a proxy or context). A simple method for changing the context of
+ * a function.
+ *
+ * It also stores a unique id on the function so it can be easily removed from
+ * events.
+ *
+ * @function
+ * @param {Mixed} context
+ * The object to bind as scope.
+ *
+ * @param {Function} fn
+ * The function to be bound to a scope.
+ *
+ * @param {number} [uid]
+ * An optional unique ID for the function to be set
+ *
+ * @return {Function}
+ * The new function that will be bound into the context given
+ */
+
+var bind = function bind(context, fn, uid) {
+ // Make sure the function has a unique ID
+ if (!fn.guid) {
+ fn.guid = newGUID();
+ } // Create the new function that changes the context
+
+
+ var bound = fn.bind(context); // Allow for the ability to individualize this function
+ // Needed in the case where multiple objects might share the same prototype
+  // If both items add an event listener with the same function and you then try to remove just one,
+  // it will remove both because they both have the same guid.
+  // When using this, you need to use the bind method when you remove the listener as well.
+ // currently used in text tracks
+
+ bound.guid = uid ? uid + '_' + fn.guid : fn.guid;
+ return bound;
+};
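+// Usage sketch for `bind`; unlike a bare Function#bind, the bound function
+// shares a guid with the original, so either reference can be used to unbind:
+//
+//   var menu = { handleClick: function () { /* `this` is `menu` */ } };
+//   on(el, 'click', bind(menu, menu.handleClick));
+//   off(el, 'click', menu.handleClick); // same guid, so this removes it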
+/**
+ * Wraps the given function, `fn`, with a new function that only invokes `fn`
+ * at most once per every `wait` milliseconds.
+ *
+ * @function
+ * @param {Function} fn
+ * The function to be throttled.
+ *
+ * @param {number} wait
+ * The number of milliseconds by which to throttle.
+ *
+ * @return {Function}
+ */
+
+var throttle = function throttle(fn, wait) {
+ var last = window__default['default'].performance.now();
+
+ var throttled = function throttled() {
+ var now = window__default['default'].performance.now();
+
+ if (now - last >= wait) {
+ fn.apply(void 0, arguments);
+ last = now;
+ }
+ };
+
+ return throttled;
+};
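+// Usage sketch for `throttle`, e.g. rate-limiting a pointer handler to the
+// UPDATE_REFRESH_INTERVAL defined above:
+//
+//   var onMove = throttle(function () {
+//     // runs at most once every 30ms
+//   }, UPDATE_REFRESH_INTERVAL);
+//   on(document__default['default'], 'mousemove', onMove);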
+/**
+ * Creates a debounced function that delays invoking `func` until after `wait`
+ * milliseconds have elapsed since the last time the debounced function was
+ * invoked.
+ *
+ * Inspired by lodash and underscore implementations.
+ *
+ * @function
+ * @param {Function} func
+ * The function to wrap with debounce behavior.
+ *
+ * @param {number} wait
+ * The number of milliseconds to wait after the last invocation.
+ *
+ * @param {boolean} [immediate]
+ * Whether or not to invoke the function immediately upon creation.
+ *
+ * @param {Object} [context=window]
+ * The "context" in which the debounced function should debounce. For
+ * example, if this function should be tied to a Video.js player,
+ * the player can be passed here. Alternatively, defaults to the
+ * global `window` object.
+ *
+ * @return {Function}
+ * A debounced function.
+ */
+
+var debounce = function debounce(func, wait, immediate, context) {
+ if (context === void 0) {
+ context = window__default['default'];
+ }
+
+ var timeout;
+
+ var cancel = function cancel() {
+ context.clearTimeout(timeout);
+ timeout = null;
+ };
+ /* eslint-disable consistent-this */
+
+
+ var debounced = function debounced() {
+ var self = this;
+ var args = arguments;
+
+ var _later = function later() {
+ timeout = null;
+ _later = null;
+
+ if (!immediate) {
+ func.apply(self, args);
+ }
+ };
+
+ if (!timeout && immediate) {
+ func.apply(self, args);
+ }
+
+ context.clearTimeout(timeout);
+ timeout = context.setTimeout(_later, wait);
+ };
+ /* eslint-enable consistent-this */
+
+
+ debounced.cancel = cancel;
+ return debounced;
+};
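+// Usage sketch for `debounce`, e.g. reacting only after a burst of resize
+// events has settled:
+//
+//   var onResizeSettled = debounce(function () {
+//     // runs 150ms after the last resize event
+//   }, 150);
+//   on(window__default['default'], 'resize', onResizeSettled);
+//   // onResizeSettled.cancel() drops any invocation that is still pending.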
+
+/**
+ * @file src/js/event-target.js
+ */
+/**
+ * `EventTarget` is a class that can have the same API as the DOM `EventTarget`. It
+ * adds shorthand functions that wrap around lengthy functions. For example:
+ * the `on` function is a wrapper around `addEventListener`.
+ *
+ * @see [EventTarget Spec]{@link https://www.w3.org/TR/DOM-Level-2-Events/events.html#Events-EventTarget}
+ * @class EventTarget
+ */
+
+var EventTarget$2 = function EventTarget() {};
+/**
+ * A Custom DOM event.
+ *
+ * @typedef {Object} EventTarget~Event
+ * @see [Properties]{@link https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent}
+ */
+
+/**
+ * All event listeners should follow the following format.
+ *
+ * @callback EventTarget~EventListener
+ * @this {EventTarget}
+ *
+ * @param {EventTarget~Event} event
+ * the event that triggered this function
+ *
+ * @param {Object} [hash]
+ * hash of data sent during the event
+ */
+
+/**
+ * An object containing event names as keys and booleans as values.
+ *
+ * > NOTE: If an event name is set to a true value here {@link EventTarget#trigger}
+ * will have extra functionality. See that function for more information.
+ *
+ * @property EventTarget.prototype.allowedEvents_
+ * @private
+ */
+
+
+EventTarget$2.prototype.allowedEvents_ = {};
+/**
+ * Adds an `event listener` to an instance of an `EventTarget`. An `event listener` is a
+ * function that will get called when an event with a certain name gets triggered.
+ *
+ * @param {string|string[]} type
+ * An event name or an array of event names.
+ *
+ * @param {EventTarget~EventListener} fn
+ * The function to call with `EventTarget`s
+ */
+
+EventTarget$2.prototype.on = function (type, fn) {
+ // Remove the addEventListener alias before calling Events.on
+ // so we don't get into an infinite type loop
+ var ael = this.addEventListener;
+
+ this.addEventListener = function () {};
+
+ on(this, type, fn);
+ this.addEventListener = ael;
+};
+/**
+ * An alias of {@link EventTarget#on}. Allows `EventTarget` to mimic
+ * the standard DOM API.
+ *
+ * @function
+ * @see {@link EventTarget#on}
+ */
+
+
+EventTarget$2.prototype.addEventListener = EventTarget$2.prototype.on;
+/**
+ * Removes an `event listener` for a specific event from an instance of `EventTarget`.
+ * This makes it so that the `event listener` will no longer get called when the
+ * named event happens.
+ *
+ * @param {string|string[]} type
+ * An event name or an array of event names.
+ *
+ * @param {EventTarget~EventListener} fn
+ * The function to remove.
+ */
+
+EventTarget$2.prototype.off = function (type, fn) {
+ off(this, type, fn);
+};
+/**
+ * An alias of {@link EventTarget#off}. Allows `EventTarget` to mimic
+ * the standard DOM API.
+ *
+ * @function
+ * @see {@link EventTarget#off}
+ */
+
+
+EventTarget$2.prototype.removeEventListener = EventTarget$2.prototype.off;
+/**
+ * This function will add an `event listener` that gets triggered only once. After the
+ * first trigger it will get removed. This is like adding an `event listener`
+ * with {@link EventTarget#on} that calls {@link EventTarget#off} on itself.
+ *
+ * @param {string|string[]} type
+ * An event name or an array of event names.
+ *
+ * @param {EventTarget~EventListener} fn
+ * The function to be called once for each event name.
+ */
+
+EventTarget$2.prototype.one = function (type, fn) {
+ // Remove the addEventListener aliasing Events.on
+ // so we don't get into an infinite type loop
+ var ael = this.addEventListener;
+
+ this.addEventListener = function () {};
+
+ one(this, type, fn);
+ this.addEventListener = ael;
+};
+
+EventTarget$2.prototype.any = function (type, fn) {
+ // Remove the addEventListener aliasing Events.on
+ // so we don't get into an infinite type loop
+ var ael = this.addEventListener;
+
+ this.addEventListener = function () {};
+
+ any(this, type, fn);
+ this.addEventListener = ael;
+};
+/**
+ * This function causes an event to happen. This will then cause any `event listeners`
+ * that are waiting for that event, to get called. If there are no `event listeners`
+ * for an event then nothing will happen.
+ *
+ * If the name of the `Event` that is being triggered is in `EventTarget.allowedEvents_`,
+ * trigger will also call the `on` + `uppercaseEventName` function.
+ *
+ * Example:
+ * 'click' is in `EventTarget.allowedEvents_`, so, trigger will attempt to call
+ * `onClick` if it exists.
+ *
+ * @param {string|EventTarget~Event|Object} event
+ * The name of the event, an `Event`, or an object with a key of type set to
+ * an event name.
+ */
+
+
+EventTarget$2.prototype.trigger = function (event) {
+ var type = event.type || event; // deprecation
+ // In a future version we should default target to `this`
+ // similar to how we default the target to `elem` in
+ // `Events.trigger`. Right now the default `target` will be
+ // `document` due to the `Event.fixEvent` call.
+
+ if (typeof event === 'string') {
+ event = {
+ type: type
+ };
+ }
+
+ event = fixEvent(event);
+
+ if (this.allowedEvents_[type] && this['on' + type]) {
+ this['on' + type](event);
+ }
+
+ trigger(this, event);
+};
+/**
+ * An alias of {@link EventTarget#trigger}. Allows `EventTarget` to mimic
+ * the standard DOM API.
+ *
+ * @function
+ * @see {@link EventTarget#trigger}
+ */
+
+
+EventTarget$2.prototype.dispatchEvent = EventTarget$2.prototype.trigger;
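+// A sketch of the allowedEvents_ hook described above (the event names here
+// are just illustrative):
+//
+//   var bus = new EventTarget$2();
+//   bus.allowedEvents_ = { ended: true };
+//   bus.onended = function (event) { /* runs before the 'ended' listeners */ };
+//   bus.on('ended', function (event) { /* event.target defaults to document */ });
+//   bus.trigger('ended');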
+var EVENT_MAP;
+
+EventTarget$2.prototype.queueTrigger = function (event) {
+ var _this = this;
+
+ // only set up EVENT_MAP if it'll be used
+ if (!EVENT_MAP) {
+ EVENT_MAP = new Map();
+ }
+
+ var type = event.type || event;
+ var map = EVENT_MAP.get(this);
+
+ if (!map) {
+ map = new Map();
+ EVENT_MAP.set(this, map);
+ }
+
+ var oldTimeout = map.get(type);
+ map["delete"](type);
+ window__default['default'].clearTimeout(oldTimeout);
+ var timeout = window__default['default'].setTimeout(function () {
+ // if we cleared out all timeouts for the current target, delete its map
+ if (map.size === 0) {
+ map = null;
+ EVENT_MAP["delete"](_this);
+ }
+
+ _this.trigger(event);
+ }, 0);
+ map.set(type, timeout);
+};
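+// Continuing the sketch above: queueTrigger coalesces same-type triggers made
+// within one tick, since each call clears the previous 0ms timeout.
+//
+//   bus.queueTrigger('progress');
+//   bus.queueTrigger('progress'); // earlier timeout is cleared
+//   // a single 'progress' event fires on the next timer tick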
+
+/**
+ * @file mixins/evented.js
+ * @module evented
+ */
+
+var objName = function objName(obj) {
+ if (typeof obj.name === 'function') {
+ return obj.name();
+ }
+
+ if (typeof obj.name === 'string') {
+ return obj.name;
+ }
+
+ if (obj.name_) {
+ return obj.name_;
+ }
+
+ if (obj.constructor && obj.constructor.name) {
+ return obj.constructor.name;
+ }
+
+ return typeof obj;
+};
+/**
+ * Returns whether or not an object has had the evented mixin applied.
+ *
+ * @param {Object} object
+ * An object to test.
+ *
+ * @return {boolean}
+ * Whether or not the object appears to be evented.
+ */
+
+
+var isEvented = function isEvented(object) {
+ return object instanceof EventTarget$2 || !!object.eventBusEl_ && ['on', 'one', 'off', 'trigger'].every(function (k) {
+ return typeof object[k] === 'function';
+ });
+};
+/**
+ * Adds a callback to run after the evented mixin has been applied.
+ *
+ * @param {Object} target
+ *        The object to add the callback to.
+ * @param {Function} callback
+ * The callback to run.
+ */
+
+
+var addEventedCallback = function addEventedCallback(target, callback) {
+ if (isEvented(target)) {
+ callback();
+ } else {
+ if (!target.eventedCallbacks) {
+ target.eventedCallbacks = [];
+ }
+
+ target.eventedCallbacks.push(callback);
+ }
+};
+/**
+ * Whether a value is a valid event type - non-empty string or array.
+ *
+ * @private
+ * @param {string|Array} type
+ * The type value to test.
+ *
+ * @return {boolean}
+ * Whether or not the type is a valid event type.
+ */
+
+
+var isValidEventType = function isValidEventType(type) {
+ return (// The regex here verifies that the `type` contains at least one non-
+ // whitespace character.
+ typeof type === 'string' && /\S/.test(type) || Array.isArray(type) && !!type.length
+ );
+};
+/**
+ * Validates a value to determine if it is a valid event target. Throws if not.
+ *
+ * @private
+ * @throws {Error}
+ * If the target does not appear to be a valid event target.
+ *
+ * @param {Object} target
+ * The object to test.
+ *
+ * @param {Object} obj
+ * The evented object we are validating for
+ *
+ * @param {string} fnName
+ * The name of the evented mixin function that called this.
+ */
+
+
+var validateTarget = function validateTarget(target, obj, fnName) {
+ if (!target || !target.nodeName && !isEvented(target)) {
+ throw new Error("Invalid target for " + objName(obj) + "#" + fnName + "; must be a DOM node or evented object.");
+ }
+};
+/**
+ * Validates a value to determine if it is a valid event target. Throws if not.
+ *
+ * @private
+ * @throws {Error}
+ * If the type does not appear to be a valid event type.
+ *
+ * @param {string|Array} type
+ * The type to test.
+ *
+ * @param {Object} obj
+ *        The evented object we are validating for
+ *
+ * @param {string} fnName
+ * The name of the evented mixin function that called this.
+ */
+
+
+var validateEventType = function validateEventType(type, obj, fnName) {
+ if (!isValidEventType(type)) {
+ throw new Error("Invalid event type for " + objName(obj) + "#" + fnName + "; must be a non-empty string or array.");
+ }
+};
+/**
+ * Validates a value to determine if it is a valid listener. Throws if not.
+ *
+ * @private
+ * @throws {Error}
+ * If the listener is not a function.
+ *
+ * @param {Function} listener
+ * The listener to test.
+ *
+ * @param {Object} obj
+ * The evented object we are validating for
+ *
+ * @param {string} fnName
+ * The name of the evented mixin function that called this.
+ */
+
+
+var validateListener = function validateListener(listener, obj, fnName) {
+ if (typeof listener !== 'function') {
+ throw new Error("Invalid listener for " + objName(obj) + "#" + fnName + "; must be a function.");
+ }
+};
+/**
+ * Takes an array of arguments given to `on()` or `one()`, validates them, and
+ * normalizes them into an object.
+ *
+ * @private
+ * @param {Object} self
+ * The evented object on which `on()` or `one()` was called. This
+ * object will be bound as the `this` value for the listener.
+ *
+ * @param {Array} args
+ * An array of arguments passed to `on()` or `one()`.
+ *
+ * @param {string} fnName
+ * The name of the evented mixin function that called this.
+ *
+ * @return {Object}
+ * An object containing useful values for `on()` or `one()` calls.
+ */
+
+
+var normalizeListenArgs = function normalizeListenArgs(self, args, fnName) {
+ // If the number of arguments is less than 3, the target is always the
+ // evented object itself.
+ var isTargetingSelf = args.length < 3 || args[0] === self || args[0] === self.eventBusEl_;
+ var target;
+ var type;
+ var listener;
+
+ if (isTargetingSelf) {
+ target = self.eventBusEl_; // Deal with cases where we got 3 arguments, but we are still listening to
+ // the evented object itself.
+
+ if (args.length >= 3) {
+ args.shift();
+ }
+
+ type = args[0];
+ listener = args[1];
+ } else {
+ target = args[0];
+ type = args[1];
+ listener = args[2];
+ }
+
+ validateTarget(target, self, fnName);
+ validateEventType(type, self, fnName);
+ validateListener(listener, self, fnName);
+ listener = bind(self, listener);
+ return {
+ isTargetingSelf: isTargetingSelf,
+ target: target,
+ type: type,
+ listener: listener
+ };
+};
+/**
+ * Adds the listener to the event type(s) on the target, normalizing for
+ * the type of target.
+ *
+ * @private
+ * @param {Element|Object} target
+ * A DOM node or evented object.
+ *
+ * @param {string} method
+ * The event binding method to use ("on" or "one").
+ *
+ * @param {string|Array} type
+ * One or more event type(s).
+ *
+ * @param {Function} listener
+ * A listener function.
+ */
+
+
+var listen = function listen(target, method, type, listener) {
+ validateTarget(target, target, method);
+
+ if (target.nodeName) {
+ Events[method](target, type, listener);
+ } else {
+ target[method](type, listener);
+ }
+};
+/**
+ * Contains methods that provide event capabilities to an object which is passed
+ * to {@link module:evented|evented}.
+ *
+ * @mixin EventedMixin
+ */
+
+
+var EventedMixin = {
+ /**
+ * Add a listener to an event (or events) on this object or another evented
+ * object.
+ *
+ * @param {string|Array|Element|Object} targetOrType
+ * If this is a string or array, it represents the event type(s)
+ * that will trigger the listener.
+ *
+ * Another evented object can be passed here instead, which will
+ * cause the listener to listen for events on _that_ object.
+ *
+ * In either case, the listener's `this` value will be bound to
+ * this object.
+ *
+ * @param {string|Array|Function} typeOrListener
+ * If the first argument was a string or array, this should be the
+ * listener function. Otherwise, this is a string or array of event
+ * type(s).
+ *
+ * @param {Function} [listener]
+ * If the first argument was another evented object, this will be
+ * the listener function.
+ */
+ on: function on() {
+ var _this = this;
+
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ var _normalizeListenArgs = normalizeListenArgs(this, args, 'on'),
+ isTargetingSelf = _normalizeListenArgs.isTargetingSelf,
+ target = _normalizeListenArgs.target,
+ type = _normalizeListenArgs.type,
+ listener = _normalizeListenArgs.listener;
+
+ listen(target, 'on', type, listener); // If this object is listening to another evented object.
+
+ if (!isTargetingSelf) {
+ // If this object is disposed, remove the listener.
+ var removeListenerOnDispose = function removeListenerOnDispose() {
+ return _this.off(target, type, listener);
+      }; // Use the same function ID as the listener so we can remove it later
+      // using the ID of the original listener.
+
+
+ removeListenerOnDispose.guid = listener.guid; // Add a listener to the target's dispose event as well. This ensures
+ // that if the target is disposed BEFORE this object, we remove the
+ // removal listener that was just added. Otherwise, we create a memory leak.
+
+ var removeRemoverOnTargetDispose = function removeRemoverOnTargetDispose() {
+ return _this.off('dispose', removeListenerOnDispose);
+ }; // Use the same function ID as the listener so we can remove it later
+      // using the ID of the original listener.
+
+
+ removeRemoverOnTargetDispose.guid = listener.guid;
+ listen(this, 'on', 'dispose', removeListenerOnDispose);
+ listen(target, 'on', 'dispose', removeRemoverOnTargetDispose);
+ }
+ },
+
+ /**
+ * Add a listener to an event (or events) on this object or another evented
+ * object. The listener will be called once per event and then removed.
+ *
+ * @param {string|Array|Element|Object} targetOrType
+ * If this is a string or array, it represents the event type(s)
+ * that will trigger the listener.
+ *
+ * Another evented object can be passed here instead, which will
+ * cause the listener to listen for events on _that_ object.
+ *
+ * In either case, the listener's `this` value will be bound to
+ * this object.
+ *
+ * @param {string|Array|Function} typeOrListener
+ * If the first argument was a string or array, this should be the
+ * listener function. Otherwise, this is a string or array of event
+ * type(s).
+ *
+ * @param {Function} [listener]
+ * If the first argument was another evented object, this will be
+ * the listener function.
+ */
+ one: function one() {
+ var _this2 = this;
+
+ for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
+ args[_key2] = arguments[_key2];
+ }
+
+ var _normalizeListenArgs2 = normalizeListenArgs(this, args, 'one'),
+ isTargetingSelf = _normalizeListenArgs2.isTargetingSelf,
+ target = _normalizeListenArgs2.target,
+ type = _normalizeListenArgs2.type,
+ listener = _normalizeListenArgs2.listener; // Targeting this evented object.
+
+
+ if (isTargetingSelf) {
+ listen(target, 'one', type, listener); // Targeting another evented object.
+ } else {
+ // TODO: This wrapper is incorrect! It should only
+ // remove the wrapper for the event type that called it.
+      // Instead, all listeners are removed on the first trigger!
+ // see https://github.com/videojs/video.js/issues/5962
+ var wrapper = function wrapper() {
+ _this2.off(target, type, wrapper);
+
+ for (var _len3 = arguments.length, largs = new Array(_len3), _key3 = 0; _key3 < _len3; _key3++) {
+ largs[_key3] = arguments[_key3];
+ }
+
+ listener.apply(null, largs);
+ }; // Use the same function ID as the listener so we can remove it later
+      // using the ID of the original listener.
+
+
+ wrapper.guid = listener.guid;
+ listen(target, 'one', type, wrapper);
+ }
+ },
+
+ /**
+ * Add a listener to an event (or events) on this object or another evented
+   * object. The listener will be called only once, for the first event that is
+   * triggered, and then removed.
+ *
+ * @param {string|Array|Element|Object} targetOrType
+ * If this is a string or array, it represents the event type(s)
+ * that will trigger the listener.
+ *
+ * Another evented object can be passed here instead, which will
+ * cause the listener to listen for events on _that_ object.
+ *
+ * In either case, the listener's `this` value will be bound to
+ * this object.
+ *
+ * @param {string|Array|Function} typeOrListener
+ * If the first argument was a string or array, this should be the
+ * listener function. Otherwise, this is a string or array of event
+ * type(s).
+ *
+ * @param {Function} [listener]
+ * If the first argument was another evented object, this will be
+ * the listener function.
+ */
+ any: function any() {
+ var _this3 = this;
+
+ for (var _len4 = arguments.length, args = new Array(_len4), _key4 = 0; _key4 < _len4; _key4++) {
+ args[_key4] = arguments[_key4];
+ }
+
+ var _normalizeListenArgs3 = normalizeListenArgs(this, args, 'any'),
+ isTargetingSelf = _normalizeListenArgs3.isTargetingSelf,
+ target = _normalizeListenArgs3.target,
+ type = _normalizeListenArgs3.type,
+ listener = _normalizeListenArgs3.listener; // Targeting this evented object.
+
+
+ if (isTargetingSelf) {
+ listen(target, 'any', type, listener); // Targeting another evented object.
+ } else {
+ var wrapper = function wrapper() {
+ _this3.off(target, type, wrapper);
+
+ for (var _len5 = arguments.length, largs = new Array(_len5), _key5 = 0; _key5 < _len5; _key5++) {
+ largs[_key5] = arguments[_key5];
+ }
+
+ listener.apply(null, largs);
+ }; // Use the same function ID as the listener so we can remove it later
+      // using the ID of the original listener.
+
+
+ wrapper.guid = listener.guid;
+ listen(target, 'any', type, wrapper);
+ }
+ },
+
+ /**
+ * Removes listener(s) from event(s) on an evented object.
+ *
+ * @param {string|Array|Element|Object} [targetOrType]
+ * If this is a string or array, it represents the event type(s).
+ *
+ * Another evented object can be passed here instead, in which case
+ * ALL 3 arguments are _required_.
+ *
+ * @param {string|Array|Function} [typeOrListener]
+ * If the first argument was a string or array, this may be the
+ * listener function. Otherwise, this is a string or array of event
+ * type(s).
+ *
+ * @param {Function} [listener]
+ * If the first argument was another evented object, this will be
+ * the listener function; otherwise, _all_ listeners bound to the
+ * event type(s) will be removed.
+ */
+ off: function off$1(targetOrType, typeOrListener, listener) {
+ // Targeting this evented object.
+ if (!targetOrType || isValidEventType(targetOrType)) {
+ off(this.eventBusEl_, targetOrType, typeOrListener); // Targeting another evented object.
+ } else {
+ var target = targetOrType;
+ var type = typeOrListener; // Fail fast and in a meaningful way!
+
+ validateTarget(target, this, 'off');
+ validateEventType(type, this, 'off');
+ validateListener(listener, this, 'off'); // Ensure there's at least a guid, even if the function hasn't been used
+
+ listener = bind(this, listener); // Remove the dispose listener on this evented object, which was given
+ // the same guid as the event listener in on().
+
+ this.off('dispose', listener);
+
+ if (target.nodeName) {
+ off(target, type, listener);
+ off(target, 'dispose', listener);
+ } else if (isEvented(target)) {
+ target.off(type, listener);
+ target.off('dispose', listener);
+ }
+ }
+ },
+
+ /**
+ * Fire an event on this evented object, causing its listeners to be called.
+ *
+ * @param {string|Object} event
+ * An event type or an object with a type property.
+ *
+ * @param {Object} [hash]
+ * An additional object to pass along to listeners.
+ *
+ * @return {boolean}
+ * Whether or not the default behavior was prevented.
+ */
+ trigger: function trigger$1(event, hash) {
+ validateTarget(this.eventBusEl_, this, 'trigger');
+ var type = event && typeof event !== 'string' ? event.type : event;
+
+ if (!isValidEventType(type)) {
+ var error = "Invalid event type for " + objName(this) + "#trigger; " + 'must be a non-empty string or object with a type key that has a non-empty value.';
+
+ if (event) {
+ (this.log || log$1).error(error);
+ } else {
+ throw new Error(error);
+ }
+ }
+
+ return trigger(this.eventBusEl_, event, hash);
+ }
+};
+/**
+ * Applies {@link module:evented~EventedMixin|EventedMixin} to a target object.
+ *
+ * @param {Object} target
+ * The object to which to add event methods.
+ *
+ * @param {Object} [options={}]
+ * Options for customizing the mixin behavior.
+ *
+ * @param {string} [options.eventBusKey]
+ *           By default, adds an `eventBusEl_` DOM element to the target object,
+ * which is used as an event bus. If the target object already has a
+ * DOM element that should be used, pass its key here.
+ *
+ * @return {Object}
+ * The target object.
+ */
+
+function evented(target, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ var _options = options,
+ eventBusKey = _options.eventBusKey; // Set or create the eventBusEl_.
+
+ if (eventBusKey) {
+ if (!target[eventBusKey].nodeName) {
+ throw new Error("The eventBusKey \"" + eventBusKey + "\" does not refer to an element.");
+ }
+
+ target.eventBusEl_ = target[eventBusKey];
+ } else {
+ target.eventBusEl_ = createEl('span', {
+ className: 'vjs-event-bus'
+ });
+ }
+
+ assign(target, EventedMixin);
+
+ if (target.eventedCallbacks) {
+ target.eventedCallbacks.forEach(function (callback) {
+ callback();
+ });
+ } // When any evented object is disposed, it removes all its listeners.
+
+
+ target.on('dispose', function () {
+ target.off();
+ [target, target.el_, target.eventBusEl_].forEach(function (val) {
+ if (val && DomData.has(val)) {
+ DomData["delete"](val);
+ }
+ });
+ window__default['default'].setTimeout(function () {
+ target.eventBusEl_ = null;
+ }, 0);
+ });
+ return target;
+}
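+// Usage sketch for evented(): a plain object gains on/one/any/off/trigger and
+// a detached <span> as its event bus:
+//
+//   var model = evented({});
+//   model.on('loaded', function (event) { /* ... */ });
+//   model.trigger('loaded');
+//   model.trigger('dispose'); // removes every listener and releases the bus
+//
+// Passing { eventBusKey: 'el_' } reuses an existing DOM element on the target
+// as the bus instead, which is how Component wires itself up below.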
+
+/**
+ * @file mixins/stateful.js
+ * @module stateful
+ */
+/**
+ * Contains methods that provide statefulness to an object which is passed
+ * to {@link module:stateful}.
+ *
+ * @mixin StatefulMixin
+ */
+
+var StatefulMixin = {
+ /**
+ * A hash containing arbitrary keys and values representing the state of
+ * the object.
+ *
+ * @type {Object}
+ */
+ state: {},
+
+ /**
+ * Set the state of an object by mutating its
+ * {@link module:stateful~StatefulMixin.state|state} object in place.
+ *
+ * @fires module:stateful~StatefulMixin#statechanged
+ * @param {Object|Function} stateUpdates
+ * A new set of properties to shallow-merge into the plugin state.
+ * Can be a plain object or a function returning a plain object.
+ *
+ * @return {Object|undefined}
+ * An object containing changes that occurred. If no changes
+ * occurred, returns `undefined`.
+ */
+ setState: function setState(stateUpdates) {
+ var _this = this;
+
+ // Support providing the `stateUpdates` state as a function.
+ if (typeof stateUpdates === 'function') {
+ stateUpdates = stateUpdates();
+ }
+
+ var changes;
+ each(stateUpdates, function (value, key) {
+ // Record the change if the value is different from what's in the
+ // current state.
+ if (_this.state[key] !== value) {
+ changes = changes || {};
+ changes[key] = {
+ from: _this.state[key],
+ to: value
+ };
+ }
+
+ _this.state[key] = value;
+ }); // Only trigger "statechange" if there were changes AND we have a trigger
+ // function. This allows us to not require that the target object be an
+ // evented object.
+
+ if (changes && isEvented(this)) {
+ /**
+ * An event triggered on an object that is both
+ * {@link module:stateful|stateful} and {@link module:evented|evented}
+ * indicating that its state has changed.
+ *
+ * @event module:stateful~StatefulMixin#statechanged
+ * @type {Object}
+ * @property {Object} changes
+ * A hash containing the properties that were changed and
+ * the values they were changed `from` and `to`.
+ */
+ this.trigger({
+ changes: changes,
+ type: 'statechanged'
+ });
+ }
+
+ return changes;
+ }
+};
+/**
+ * Applies {@link module:stateful~StatefulMixin|StatefulMixin} to a target
+ * object.
+ *
+ * If the target object is {@link module:evented|evented} and has a
+ * `handleStateChanged` method, that method will be automatically bound to the
+ * `statechanged` event on itself.
+ *
+ * @param {Object} target
+ * The object to be made stateful.
+ *
+ * @param {Object} [defaultState]
+ * A default set of properties to populate the newly-stateful object's
+ * `state` property.
+ *
+ * @return {Object}
+ * Returns the `target`.
+ */
+
+function stateful(target, defaultState) {
+ assign(target, StatefulMixin); // This happens after the mixing-in because we need to replace the `state`
+ // added in that step.
+
+ target.state = assign({}, target.state, defaultState); // Auto-bind the `handleStateChanged` method of the target object if it exists.
+
+ if (typeof target.handleStateChanged === 'function' && isEvented(target)) {
+ target.on('statechanged', target.handleStateChanged);
+ }
+
+ return target;
+}
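+// Usage sketch combining stateful() with evented(), mirroring how Component
+// applies both mixins:
+//
+//   var model = evented({});
+//   model.handleStateChanged = function (event) { /* event.changes.count.from / .to */ };
+//   stateful(model, { count: 0 });
+//   model.setState({ count: 1 }); // triggers 'statechanged' with the diff
+//   model.setState({ count: 1 }); // no change, so it returns undefined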
+
+/**
+ * @file string-cases.js
+ * @module to-lower-case
+ */
+
+/**
+ * Lowercase the first letter of a string.
+ *
+ * @param {string} string
+ * String to be lowercased
+ *
+ * @return {string}
+ * The string with a lowercased first letter
+ */
+var toLowerCase = function toLowerCase(string) {
+ if (typeof string !== 'string') {
+ return string;
+ }
+
+ return string.replace(/./, function (w) {
+ return w.toLowerCase();
+ });
+};
+/**
+ * Uppercase the first letter of a string.
+ *
+ * @param {string} string
+ * String to be uppercased
+ *
+ * @return {string}
+ * The string with an uppercased first letter
+ */
+
+var toTitleCase$1 = function toTitleCase(string) {
+ if (typeof string !== 'string') {
+ return string;
+ }
+
+ return string.replace(/./, function (w) {
+ return w.toUpperCase();
+ });
+};
+/**
+ * Compares the TitleCase versions of the two strings for equality.
+ *
+ * @param {string} str1
+ * The first string to compare
+ *
+ * @param {string} str2
+ * The second string to compare
+ *
+ * @return {boolean}
+ * Whether the TitleCase versions of the strings are equal
+ */
+
+var titleCaseEquals = function titleCaseEquals(str1, str2) {
+ return toTitleCase$1(str1) === toTitleCase$1(str2);
+};
+
+/**
+ * @file merge-options.js
+ * @module merge-options
+ */
+/**
+ * Merge two objects recursively.
+ *
+ * Performs a deep merge like
+ * {@link https://lodash.com/docs/4.17.10#merge|lodash.merge}, but only merges
+ * plain objects (not arrays, elements, or anything else).
+ *
+ * Non-plain object values will be copied directly from the right-most
+ * argument.
+ *
+ * @static
+ * @param {Object[]} sources
+ * One or more objects to merge into a new object.
+ *
+ * @return {Object}
+ * A new object that is the merged result of all sources.
+ */
+
+function mergeOptions$3() {
+ var result = {};
+
+ for (var _len = arguments.length, sources = new Array(_len), _key = 0; _key < _len; _key++) {
+ sources[_key] = arguments[_key];
+ }
+
+ sources.forEach(function (source) {
+ if (!source) {
+ return;
+ }
+
+ each(source, function (value, key) {
+ if (!isPlain(value)) {
+ result[key] = value;
+ return;
+ }
+
+ if (!isPlain(result[key])) {
+ result[key] = {};
+ }
+
+ result[key] = mergeOptions$3(result[key], value);
+ });
+ });
+ return result;
+}
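+// A sketch of the merge behavior: plain objects merge recursively, while
+// arrays and other values are copied from the right-most source:
+//
+//   mergeOptions$3(
+//     { controlBar: { volumePanel: { inline: true } }, techOrder: ['html5'] },
+//     { controlBar: { volumePanel: { inline: false } } }
+//   );
+//   // => { controlBar: { volumePanel: { inline: false } }, techOrder: ['html5'] }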
+
+var MapSham = /*#__PURE__*/function () {
+ function MapSham() {
+ this.map_ = {};
+ }
+
+ var _proto = MapSham.prototype;
+
+ _proto.has = function has(key) {
+ return key in this.map_;
+ };
+
+ _proto["delete"] = function _delete(key) {
+ var has = this.has(key);
+ delete this.map_[key];
+ return has;
+ };
+
+ _proto.set = function set(key, value) {
+ this.map_[key] = value;
+ return this;
+ };
+
+ _proto.forEach = function forEach(callback, thisArg) {
+ for (var key in this.map_) {
+ callback.call(thisArg, this.map_[key], key, this);
+ }
+ };
+
+ return MapSham;
+}();
+
+var Map$1 = window__default['default'].Map ? window__default['default'].Map : MapSham;
+
+var SetSham = /*#__PURE__*/function () {
+ function SetSham() {
+ this.set_ = {};
+ }
+
+ var _proto = SetSham.prototype;
+
+ _proto.has = function has(key) {
+ return key in this.set_;
+ };
+
+ _proto["delete"] = function _delete(key) {
+ var has = this.has(key);
+ delete this.set_[key];
+ return has;
+ };
+
+ _proto.add = function add(key) {
+ this.set_[key] = 1;
+ return this;
+ };
+
+ _proto.forEach = function forEach(callback, thisArg) {
+ for (var key in this.set_) {
+ callback.call(thisArg, key, key, this);
+ }
+ };
+
+ return SetSham;
+}();
+
+var Set$1 = window__default['default'].Set ? window__default['default'].Set : SetSham;
+
+/**
+ * Player Component - Base class for all UI objects
+ *
+ * @file component.js
+ */
+/**
+ * Base class for all UI Components.
+ * Components are UI objects which represent both a javascript object and an element
+ * in the DOM. They can be children of other components, and can have
+ * children themselves.
+ *
+ * Components can also use methods from {@link EventTarget}
+ */
+
+var Component$1 = /*#__PURE__*/function () {
+ /**
+ * A callback that is called when a component is ready. Does not have any
+   * parameters and any callback value will be ignored.
+ *
+ * @callback Component~ReadyCallback
+ * @this Component
+ */
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of component options.
+ *
+ * @param {Object[]} [options.children]
+   *        An array of children objects to initialize this component with. Children objects have
+ * a name property that will be used if more than one component of the same type needs to be
+ * added.
+ *
+ * @param {string} [options.className]
+   *        A class or space-separated list of classes to add to the component
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * Function that gets called when the `Component` is ready.
+ */
+ function Component(player, options, ready) {
+ var _this = this;
+
+ // The component might be the player itself and we can't pass `this` to super
+ if (!player && this.play) {
+ this.player_ = player = this; // eslint-disable-line
+ } else {
+ this.player_ = player;
+ }
+
+ this.isDisposed_ = false; // Hold the reference to the parent component via `addChild` method
+
+ this.parentComponent_ = null; // Make a copy of prototype.options_ to protect against overriding defaults
+
+ this.options_ = mergeOptions$3({}, this.options_); // Updated options with supplied options
+
+ options = this.options_ = mergeOptions$3(this.options_, options); // Get ID from options or options element if one is supplied
+
+ this.id_ = options.id || options.el && options.el.id; // If there was no ID from the options, generate one
+
+ if (!this.id_) {
+ // Don't require the player ID function in the case of mock players
+ var id = player && player.id && player.id() || 'no_player';
+ this.id_ = id + "_component_" + newGUID();
+ }
+
+ this.name_ = options.name || null; // Create element if one wasn't provided in options
+
+ if (options.el) {
+ this.el_ = options.el;
+ } else if (options.createEl !== false) {
+ this.el_ = this.createEl();
+ }
+
+ if (options.className && this.el_) {
+ options.className.split(' ').forEach(function (c) {
+ return _this.addClass(c);
+ });
+    } // if evented is anything except false, we want to mix in evented
+
+
+ if (options.evented !== false) {
+ // Make this an evented object and use `el_`, if available, as its event bus
+ evented(this, {
+ eventBusKey: this.el_ ? 'el_' : null
+ });
+ this.handleLanguagechange = this.handleLanguagechange.bind(this);
+ this.on(this.player_, 'languagechange', this.handleLanguagechange);
+ }
+
+ stateful(this, this.constructor.defaultState);
+ this.children_ = [];
+ this.childIndex_ = {};
+ this.childNameIndex_ = {};
+ this.setTimeoutIds_ = new Set$1();
+ this.setIntervalIds_ = new Set$1();
+ this.rafIds_ = new Set$1();
+ this.namedRafs_ = new Map$1();
+ this.clearingTimersOnDispose_ = false; // Add any child components in options
+
+ if (options.initChildren !== false) {
+ this.initChildren();
+ } // Don't want to trigger ready here or it will go before init is actually
+ // finished for all children that run this constructor
+
+
+ this.ready(ready);
+
+ if (options.reportTouchActivity !== false) {
+ this.enableTouchActivity();
+ }
+ }
+ /**
+ * Dispose of the `Component` and all child components.
+ *
+ * @fires Component#dispose
+ *
+ * @param {Object} options
+   * @param {Element} options.restoreEl Element with which to replace the player element
+ */
+
+
+ var _proto = Component.prototype;
+
+ _proto.dispose = function dispose(options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ // Bail out if the component has already been disposed.
+ if (this.isDisposed_) {
+ return;
+ }
+
+ if (this.readyQueue_) {
+ this.readyQueue_.length = 0;
+ }
+ /**
+ * Triggered when a `Component` is disposed.
+ *
+ * @event Component#dispose
+ * @type {EventTarget~Event}
+ *
+ * @property {boolean} [bubbles=false]
+ * set to false so that the dispose event does not
+ * bubble up
+ */
+
+
+ this.trigger({
+ type: 'dispose',
+ bubbles: false
+ });
+ this.isDisposed_ = true; // Dispose all children.
+
+ if (this.children_) {
+ for (var i = this.children_.length - 1; i >= 0; i--) {
+ if (this.children_[i].dispose) {
+ this.children_[i].dispose();
+ }
+ }
+ } // Delete child references
+
+
+ this.children_ = null;
+ this.childIndex_ = null;
+ this.childNameIndex_ = null;
+ this.parentComponent_ = null;
+
+ if (this.el_) {
+ // Remove element from DOM
+ if (this.el_.parentNode) {
+ if (options.restoreEl) {
+ this.el_.parentNode.replaceChild(options.restoreEl, this.el_);
+ } else {
+ this.el_.parentNode.removeChild(this.el_);
+ }
+ }
+
+ this.el_ = null;
+ } // remove reference to the player after disposing of the element
+
+
+ this.player_ = null;
+ }
+ /**
+ * Determine whether or not this component has been disposed.
+ *
+ * @return {boolean}
+ * If the component has been disposed, will be `true`. Otherwise, `false`.
+ */
+ ;
+
+ _proto.isDisposed = function isDisposed() {
+ return Boolean(this.isDisposed_);
+ }
+ /**
+ * Return the {@link Player} that the `Component` has attached to.
+ *
+ * @return {Player}
+ * The player that this `Component` has attached to.
+ */
+ ;
+
+ _proto.player = function player() {
+ return this.player_;
+ }
+ /**
+ * Deep merge of options objects with new options.
+ * > Note: When both `obj` and `options` contain properties whose values are objects.
+ * The two properties get merged using {@link module:mergeOptions}
+ *
+ * @param {Object} obj
+ * The object that contains new options.
+ *
+ * @return {Object}
+ * A new object of `this.options_` and `obj` merged together.
+ */
+ ;
+
+ _proto.options = function options(obj) {
+ if (!obj) {
+ return this.options_;
+ }
+
+ this.options_ = mergeOptions$3(this.options_, obj);
+ return this.options_;
+ }
+ /**
+ * Get the `Component`s DOM element
+ *
+ * @return {Element}
+ * The DOM element for this `Component`.
+ */
+ ;
+
+ _proto.el = function el() {
+ return this.el_;
+ }
+ /**
+ * Create the `Component`s DOM element.
+ *
+ * @param {string} [tagName]
+ * Element's DOM node type. e.g. 'div'
+ *
+ * @param {Object} [properties]
+ * An object of properties that should be set.
+ *
+ * @param {Object} [attributes]
+ * An object of attributes that should be set.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+ ;
+
+ _proto.createEl = function createEl$1(tagName, properties, attributes) {
+ return createEl(tagName, properties, attributes);
+ }
+ /**
+ * Localize a string given the string in english.
+ *
+ * If tokens are provided, it'll try and run a simple token replacement on the provided string.
+ * The tokens it looks for look like `{1}` with the index being 1-indexed into the tokens array.
+ *
+   * If a `defaultValue` is provided, it'll use that over `string`
+   * if a value isn't found in the provided language files.
+ * This is useful if you want to have a descriptive key for token replacement
+ * but have a succinct localized string and not require `en.json` to be included.
+ *
+ * Currently, it is used for the progress bar timing.
+ * ```js
+ * {
+ * "progress bar timing: currentTime={1} duration={2}": "{1} of {2}"
+ * }
+ * ```
+ * It is then used like so:
+ * ```js
+   * this.localize('progress bar timing: currentTime={1} duration={2}',
+ * [this.player_.currentTime(), this.player_.duration()],
+ * '{1} of {2}');
+ * ```
+ *
+ * Which outputs something like: `01:23 of 24:56`.
+ *
+ *
+ * @param {string} string
+ * The string to localize and the key to lookup in the language files.
+ * @param {string[]} [tokens]
+ * If the current item has token replacements, provide the tokens here.
+ * @param {string} [defaultValue]
+ * Defaults to `string`. Can be a default value to use for token replacement
+ * if the lookup key is needed to be separate.
+ *
+ * @return {string}
+ * The localized string or if no localization exists the english string.
+ */
+ ;
+
+ _proto.localize = function localize(string, tokens, defaultValue) {
+ if (defaultValue === void 0) {
+ defaultValue = string;
+ }
+
+ var code = this.player_.language && this.player_.language();
+ var languages = this.player_.languages && this.player_.languages();
+ var language = languages && languages[code];
+ var primaryCode = code && code.split('-')[0];
+ var primaryLang = languages && languages[primaryCode];
+ var localizedString = defaultValue;
+
+ if (language && language[string]) {
+ localizedString = language[string];
+ } else if (primaryLang && primaryLang[string]) {
+ localizedString = primaryLang[string];
+ }
+
+ if (tokens) {
+ localizedString = localizedString.replace(/\{(\d+)\}/g, function (match, index) {
+ var value = tokens[index - 1];
+ var ret = value;
+
+ if (typeof value === 'undefined') {
+ ret = match;
+ }
+
+ return ret;
+ });
+ }
+
+ return localizedString;
+ }
+ /**
+   * Handles language change for the player in components. Should be overridden by sub-components.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.handleLanguagechange = function handleLanguagechange() {}
+ /**
+ * Return the `Component`s DOM element. This is where children get inserted.
+   * This will usually be the same as the element returned in {@link Component#el}.
+ *
+ * @return {Element}
+ * The content element for this `Component`.
+ */
+ ;
+
+ _proto.contentEl = function contentEl() {
+ return this.contentEl_ || this.el_;
+ }
+ /**
+ * Get this `Component`s ID
+ *
+ * @return {string}
+ * The id of this `Component`
+ */
+ ;
+
+ _proto.id = function id() {
+ return this.id_;
+ }
+ /**
+ * Get the `Component`s name. The name gets used to reference the `Component`
+ * and is set during registration.
+ *
+ * @return {string}
+ * The name of this `Component`.
+ */
+ ;
+
+ _proto.name = function name() {
+ return this.name_;
+ }
+ /**
+ * Get an array of all child components
+ *
+ * @return {Array}
+ * The children
+ */
+ ;
+
+ _proto.children = function children() {
+ return this.children_;
+ }
+ /**
+ * Returns the child `Component` with the given `id`.
+ *
+ * @param {string} id
+ * The id of the child `Component` to get.
+ *
+ * @return {Component|undefined}
+ * The child `Component` with the given `id` or undefined.
+ */
+ ;
+
+ _proto.getChildById = function getChildById(id) {
+ return this.childIndex_[id];
+ }
+ /**
+ * Returns the child `Component` with the given `name`.
+ *
+ * @param {string} name
+ * The name of the child `Component` to get.
+ *
+ * @return {Component|undefined}
+ * The child `Component` with the given `name` or undefined.
+ */
+ ;
+
+ _proto.getChild = function getChild(name) {
+ if (!name) {
+ return;
+ }
+
+ return this.childNameIndex_[name];
+ }
+ /**
+ * Returns the descendant `Component` following the given
+ * descendant `names`. For instance ['foo', 'bar', 'baz'] would
+ * try to get 'foo' on the current component, 'bar' on the 'foo'
+ * component and 'baz' on the 'bar' component and return undefined
+ * if any of those don't exist.
+ *
+ * @param {...string[]|...string} names
+ * The name of the child `Component` to get.
+ *
+ * @return {Component|undefined}
+ * The descendant `Component` following the given descendant
+ * `names` or undefined.
+ */
+ ;
+
+ _proto.getDescendant = function getDescendant() {
+ for (var _len = arguments.length, names = new Array(_len), _key = 0; _key < _len; _key++) {
+ names[_key] = arguments[_key];
+ }
+
+ // flatten array argument into the main array
+ names = names.reduce(function (acc, n) {
+ return acc.concat(n);
+ }, []);
+ var currentChild = this;
+
+ for (var i = 0; i < names.length; i++) {
+ currentChild = currentChild.getChild(names[i]);
+
+ if (!currentChild || !currentChild.getChild) {
+ return;
+ }
+ }
+
+ return currentChild;
+ }
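+ /**
+ * Usage sketch (illustrative only, not part of the bundled video.js source).
+ * It assumes a `player` created via `videojs()` whose control bar is enabled.
+ * ```js
+ * // walks ControlBar -> PlayToggle, or returns undefined if either is missing
+ * var playToggle = player.getDescendant('ControlBar', 'PlayToggle');
+ * ```
+ */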
+ /**
+ * Add a child `Component` inside the current `Component`.
+ *
+ *
+ * @param {string|Component} child
+ * The name or instance of a child to add.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of options that will get passed to children of
+ * the child.
+ *
+ * @param {number} [index=this.children_.length]
+ * The index to attempt to add a child into.
+ *
+ * @return {Component}
+ * The `Component` that gets added as a child. When using a string the
+ * `Component` will get created by this process.
+ */
+ ;
+
+ _proto.addChild = function addChild(child, options, index) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ if (index === void 0) {
+ index = this.children_.length;
+ }
+
+ var component;
+ var componentName; // If child is a string, create component with options
+
+ if (typeof child === 'string') {
+ componentName = toTitleCase$1(child);
+ var componentClassName = options.componentClass || componentName; // Set name through options
+
+ options.name = componentName; // Create a new object & element for this controls set
+ // If there's no .player_, this is a player
+
+ var ComponentClass = Component.getComponent(componentClassName);
+
+ if (!ComponentClass) {
+ throw new Error("Component " + componentClassName + " does not exist");
+ } // data stored directly on the videojs object may be
+ // misidentified as a component to retain
+ // backwards-compatibility with 4.x. check to make sure the
+ // component class can be instantiated.
+
+
+ if (typeof ComponentClass !== 'function') {
+ return null;
+ }
+
+ component = new ComponentClass(this.player_ || this, options); // child is a component instance
+ } else {
+ component = child;
+ }
+
+ if (component.parentComponent_) {
+ component.parentComponent_.removeChild(component);
+ }
+
+ this.children_.splice(index, 0, component);
+ component.parentComponent_ = this;
+
+ if (typeof component.id === 'function') {
+ this.childIndex_[component.id()] = component;
+ } // If a name wasn't used to create the component, check if we can use the
+ // name function of the component
+
+
+ componentName = componentName || component.name && toTitleCase$1(component.name());
+
+ if (componentName) {
+ this.childNameIndex_[componentName] = component;
+ this.childNameIndex_[toLowerCase(componentName)] = component;
+ } // Add the UI object's element to the container div (box)
+ // Having an element is not required
+
+
+ if (typeof component.el === 'function' && component.el()) {
+ // If inserting before a component, insert before that component's element
+ var refNode = null;
+
+ if (this.children_[index + 1]) {
+ // Most children are components, but the video tech is an HTML element
+ if (this.children_[index + 1].el_) {
+ refNode = this.children_[index + 1].el_;
+ } else if (isEl(this.children_[index + 1])) {
+ refNode = this.children_[index + 1];
+ }
+ }
+
+ this.contentEl().insertBefore(component.el(), refNode);
+ } // Return so it can be stored on the parent object if desired.
+
+
+ return component;
+ }
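+ /**
+ * Usage sketch (illustrative only, not part of the bundled video.js source).
+ * Assumes a `player` created via `videojs()`; 'Button' is a registered component name.
+ * ```js
+ * var controlBar = player.getChild('ControlBar');
+ * // create a Button by name and insert it at index 0 of the control bar
+ * var myButton = controlBar.addChild('Button', {}, 0);
+ * myButton.addClass('my-custom-button');
+ * ```
+ */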
+ /**
+ * Remove a child `Component` from this `Component`s list of children. Also removes
+ * the child `Component`s element from this `Component`s element.
+ *
+ * @param {Component} component
+ * The child `Component` to remove.
+ */
+ ;
+
+ _proto.removeChild = function removeChild(component) {
+ if (typeof component === 'string') {
+ component = this.getChild(component);
+ }
+
+ if (!component || !this.children_) {
+ return;
+ }
+
+ var childFound = false;
+
+ for (var i = this.children_.length - 1; i >= 0; i--) {
+ if (this.children_[i] === component) {
+ childFound = true;
+ this.children_.splice(i, 1);
+ break;
+ }
+ }
+
+ if (!childFound) {
+ return;
+ }
+
+ component.parentComponent_ = null;
+ this.childIndex_[component.id()] = null;
+ this.childNameIndex_[toTitleCase$1(component.name())] = null;
+ this.childNameIndex_[toLowerCase(component.name())] = null;
+ var compEl = component.el();
+
+ if (compEl && compEl.parentNode === this.contentEl()) {
+ this.contentEl().removeChild(component.el());
+ }
+ }
+ /**
+ * Add and initialize default child `Component`s based upon options.
+ */
+ ;
+
+ _proto.initChildren = function initChildren() {
+ var _this2 = this;
+
+ var children = this.options_.children;
+
+ if (children) {
+ // `this` is `parent`
+ var parentOptions = this.options_;
+
+ var handleAdd = function handleAdd(child) {
+ var name = child.name;
+ var opts = child.opts; // Allow options for children to be set at the parent options
+ // e.g. videojs(id, { controlBar: false });
+ // instead of videojs(id, { children: { controlBar: false } });
+
+ if (parentOptions[name] !== undefined) {
+ opts = parentOptions[name];
+ } // Allow for disabling default components
+ // e.g. options['children']['posterImage'] = false
+
+
+ if (opts === false) {
+ return;
+ } // Allow options to be passed as a simple boolean if no configuration
+ // is necessary.
+
+
+ if (opts === true) {
+ opts = {};
+ } // We also want to pass the original player options
+ // to each component as well so they don't need to
+ // reach back into the player for options later.
+
+
+ opts.playerOptions = _this2.options_.playerOptions; // Create and add the child component.
+ // Add a direct reference to the child by name on the parent instance.
+ // If two of the same component are used, different names should be supplied
+ // for each
+
+ var newChild = _this2.addChild(name, opts);
+
+ if (newChild) {
+ _this2[name] = newChild;
+ }
+ }; // Allow for an array of children details to be passed in the options
+
+
+ var workingChildren;
+ var Tech = Component.getComponent('Tech');
+
+ if (Array.isArray(children)) {
+ workingChildren = children;
+ } else {
+ workingChildren = Object.keys(children);
+ }
+
+ workingChildren // children that are in this.options_ but also in workingChildren would
+ // give us extra children we do not want. So, we want to filter them out.
+ .concat(Object.keys(this.options_).filter(function (child) {
+ return !workingChildren.some(function (wchild) {
+ if (typeof wchild === 'string') {
+ return child === wchild;
+ }
+
+ return child === wchild.name;
+ });
+ })).map(function (child) {
+ var name;
+ var opts;
+
+ if (typeof child === 'string') {
+ name = child;
+ opts = children[name] || _this2.options_[name] || {};
+ } else {
+ name = child.name;
+ opts = child;
+ }
+
+ return {
+ name: name,
+ opts: opts
+ };
+ }).filter(function (child) {
+ // we have to make sure that child.name isn't in the techOrder since
+ // techs are registered as Components but aren't compatible
+ // See https://github.com/videojs/video.js/issues/2772
+ var c = Component.getComponent(child.opts.componentClass || toTitleCase$1(child.name));
+ return c && !Tech.isTech(c);
+ }).forEach(handleAdd);
+ }
+ }
+ /**
+ * Builds the default DOM class name. Should be overridden by sub-components.
+ *
+ * @return {string}
+ * The DOM class name for this object.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ // Child classes can include a function that does:
+ // return 'CLASS NAME' + this._super();
+ return '';
+ }
+ /**
+ * Bind a listener to the component's ready state.
+ * Different from event listeners in that if the ready event has already happened
+ * it will trigger the function immediately.
+ *
+ * @return {Component}
+ * Returns itself; method can be chained.
+ */
+ ;
+
+ _proto.ready = function ready(fn, sync) {
+ if (sync === void 0) {
+ sync = false;
+ }
+
+ if (!fn) {
+ return;
+ }
+
+ if (!this.isReady_) {
+ this.readyQueue_ = this.readyQueue_ || [];
+ this.readyQueue_.push(fn);
+ return;
+ }
+
+ if (sync) {
+ fn.call(this);
+ } else {
+ // Call the function asynchronously by default for consistency
+ this.setTimeout(fn, 1);
+ }
+ }
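+ /**
+ * Usage sketch (illustrative only, not part of the bundled video.js source),
+ * assuming a `player` created via `videojs()`.
+ * ```js
+ * player.ready(function() {
+ *   // runs asynchronously once the player is ready, or is queued until then;
+ *   // `this` is bound to the component
+ *   this.addClass('my-ready-class');
+ * });
+ * ```
+ */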
+ /**
+ * Trigger all the ready listeners for this `Component`.
+ *
+ * @fires Component#ready
+ */
+ ;
+
+ _proto.triggerReady = function triggerReady() {
+ this.isReady_ = true; // Ensure ready is triggered asynchronously
+
+ this.setTimeout(function () {
+ var readyQueue = this.readyQueue_; // Reset Ready Queue
+
+ this.readyQueue_ = [];
+
+ if (readyQueue && readyQueue.length > 0) {
+ readyQueue.forEach(function (fn) {
+ fn.call(this);
+ }, this);
+ } // Allow for using event listeners also
+
+ /**
+ * Triggered when a `Component` is ready.
+ *
+ * @event Component#ready
+ * @type {EventTarget~Event}
+ */
+
+
+ this.trigger('ready');
+ }, 1);
+ }
+ /**
+ * Find a single DOM element matching a `selector`. This can be within the `Component`s
+ * `contentEl()` or another custom context.
+ *
+ * @param {string} selector
+ * A valid CSS selector, which will be passed to `querySelector`.
+ *
+ * @param {Element|string} [context=this.contentEl()]
+ * A DOM element within which to query. Can also be a selector string in
+ * which case the first matching element will get used as context. If
+ * missing `this.contentEl()` gets used. If `this.contentEl()` returns
+ * nothing it falls back to `document`.
+ *
+ * @return {Element|null}
+ * the dom element that was found, or null
+ *
+ * @see [Information on CSS Selectors](https://developer.mozilla.org/en-US/docs/Web/Guide/CSS/Getting_Started/Selectors)
+ */
+ ;
+
+ _proto.$ = function $$1(selector, context) {
+ return $(selector, context || this.contentEl());
+ }
+ /**
+ * Finds all DOM element matching a `selector`. This can be within the `Component`s
+ * `contentEl()` or another custom context.
+ *
+ * @param {string} selector
+ * A valid CSS selector, which will be passed to `querySelectorAll`.
+ *
+ * @param {Element|string} [context=this.contentEl()]
+ * A DOM element within which to query. Can also be a selector string in
+ * which case the first matching element will get used as context. If
+ * missing `this.contentEl()` gets used. If `this.contentEl()` returns
+ * nothing it falls back to `document`.
+ *
+ * @return {NodeList}
+ * a list of dom elements that were found
+ *
+ * @see [Information on CSS Selectors](https://developer.mozilla.org/en-US/docs/Web/Guide/CSS/Getting_Started/Selectors)
+ */
+ ;
+
+ _proto.$$ = function $$$1(selector, context) {
+ return $$(selector, context || this.contentEl());
+ }
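+ /**
+ * Usage sketch (illustrative only, not part of the bundled video.js source),
+ * assuming a `player` created via `videojs()`.
+ * ```js
+ * var poster = player.$('.vjs-poster'); // first match inside contentEl()
+ * var buttons = player.$$('button');    // NodeList of all matches
+ * ```
+ */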
+ /**
+ * Check if a component's element has a CSS class name.
+ *
+ * @param {string} classToCheck
+ * CSS class name to check.
+ *
+ * @return {boolean}
+ * - True if the `Component` has the class.
+ * - False if the `Component` does not have the class.
+ */
+ ;
+
+ _proto.hasClass = function hasClass$1(classToCheck) {
+ return hasClass(this.el_, classToCheck);
+ }
+ /**
+ * Add a CSS class name to the `Component`s element.
+ *
+ * @param {string} classToAdd
+ * CSS class name to add
+ */
+ ;
+
+ _proto.addClass = function addClass$1(classToAdd) {
+ addClass(this.el_, classToAdd);
+ }
+ /**
+ * Remove a CSS class name from the `Component`s element.
+ *
+ * @param {string} classToRemove
+ * CSS class name to remove
+ */
+ ;
+
+ _proto.removeClass = function removeClass$1(classToRemove) {
+ removeClass(this.el_, classToRemove);
+ }
+ /**
+ * Add or remove a CSS class name from the component's element.
+ * - `classToToggle` gets added when {@link Component#hasClass} would return false.
+ * - `classToToggle` gets removed when {@link Component#hasClass} would return true.
+ *
+ * @param {string} classToToggle
+ * The class to add or remove based on {@link Component#hasClass}
+ *
+ * @param {boolean|Dom~predicate} [predicate]
+ * An {@link Dom~predicate} function or a boolean
+ */
+ ;
+
+ _proto.toggleClass = function toggleClass$1(classToToggle, predicate) {
+ toggleClass(this.el_, classToToggle, predicate);
+ }
+ /**
+ * Show the `Component`s element if it is hidden by removing the
+ * 'vjs-hidden' class name from it.
+ */
+ ;
+
+ _proto.show = function show() {
+ this.removeClass('vjs-hidden');
+ }
+ /**
+ * Hide the `Component`s element if it is currently showing by adding the
+ * 'vjs-hidden' class name to it.
+ */
+ ;
+
+ _proto.hide = function hide() {
+ this.addClass('vjs-hidden');
+ }
+ /**
+ * Lock a `Component`s element in its visible state by adding the 'vjs-lock-showing'
+ * class name to it. Used during fadeIn/fadeOut.
+ *
+ * @private
+ */
+ ;
+
+ _proto.lockShowing = function lockShowing() {
+ this.addClass('vjs-lock-showing');
+ }
+ /**
+ * Unlock a `Component`s element from its visible state by removing the 'vjs-lock-showing'
+ * class name from it. Used during fadeIn/fadeOut.
+ *
+ * @private
+ */
+ ;
+
+ _proto.unlockShowing = function unlockShowing() {
+ this.removeClass('vjs-lock-showing');
+ }
+ /**
+ * Get the value of an attribute on the `Component`s element.
+ *
+ * @param {string} attribute
+ * Name of the attribute to get the value from.
+ *
+ * @return {string|null}
+ * - The value of the attribute that was asked for.
+ * - Can be an empty string on some browsers if the attribute does not exist
+ * or has no value
+ * - Most browsers will return null if the attribute does not exist or has
+ * no value.
+ *
+ * @see [DOM API]{@link https://developer.mozilla.org/en-US/docs/Web/API/Element/getAttribute}
+ */
+ ;
+
+ _proto.getAttribute = function getAttribute$1(attribute) {
+ return getAttribute(this.el_, attribute);
+ }
+ /**
+ * Set the value of an attribute on the `Component`'s element
+ *
+ * @param {string} attribute
+ * Name of the attribute to set.
+ *
+ * @param {string} value
+ * Value to set the attribute to.
+ *
+ * @see [DOM API]{@link https://developer.mozilla.org/en-US/docs/Web/API/Element/setAttribute}
+ */
+ ;
+
+ _proto.setAttribute = function setAttribute$1(attribute, value) {
+ setAttribute(this.el_, attribute, value);
+ }
+ /**
+ * Remove an attribute from the `Component`s element.
+ *
+ * @param {string} attribute
+ * Name of the attribute to remove.
+ *
+ * @see [DOM API]{@link https://developer.mozilla.org/en-US/docs/Web/API/Element/removeAttribute}
+ */
+ ;
+
+ _proto.removeAttribute = function removeAttribute$1(attribute) {
+ removeAttribute(this.el_, attribute);
+ }
+ /**
+ * Get or set the width of the component based upon the CSS styles.
+ * See {@link Component#dimension} for more detailed information.
+ *
+ * @param {number|string} [num]
+ * The width that you want to set postfixed with '%', 'px' or nothing.
+ *
+ * @param {boolean} [skipListeners]
+ * Skip the componentresize event trigger
+ *
+ * @return {number|string}
+ * The width when getting, zero if there is no width. Can be a string
+ * postfixed with '%' or 'px'.
+ */
+ ;
+
+ _proto.width = function width(num, skipListeners) {
+ return this.dimension('width', num, skipListeners);
+ }
+ /**
+ * Get or set the height of the component based upon the CSS styles.
+ * See {@link Component#dimension} for more detailed information.
+ *
+ * @param {number|string} [num]
+ * The height that you want to set postfixed with '%', 'px' or nothing.
+ *
+ * @param {boolean} [skipListeners]
+ * Skip the componentresize event trigger
+ *
+ * @return {number|string}
+ * The height when getting, zero if there is no height. Can be a string
+ * postfixed with '%' or 'px'.
+ */
+ ;
+
+ _proto.height = function height(num, skipListeners) {
+ return this.dimension('height', num, skipListeners);
+ }
+ /**
+ * Set both the width and height of the `Component` element at the same time.
+ *
+ * @param {number|string} width
+ * Width to set the `Component`s element to.
+ *
+ * @param {number|string} height
+ * Height to set the `Component`s element to.
+ */
+ ;
+
+ _proto.dimensions = function dimensions(width, height) {
+ // Skip componentresize listeners on width for optimization
+ this.width(width, true);
+ this.height(height);
+ }
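+ /**
+ * Usage sketch (illustrative only, not part of the bundled video.js source);
+ * assumes a `player` created via `videojs()` with a control bar.
+ * ```js
+ * var controlBar = player.getChild('ControlBar');
+ * controlBar.width('100%'); // set width as a CSS percentage
+ * controlBar.height(40);    // set height in pixels
+ * controlBar.width();       // get the current width back
+ * ```
+ */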
+ /**
+ * Get or set width or height of the `Component` element. This is the shared code
+ * for the {@link Component#width} and {@link Component#height}.
+ *
+ * Things to know:
+ * - If the width or height is a number this will return the number postfixed with 'px'.
+ * - If the width/height is a percent this will return the percent postfixed with '%'
+ * - Hidden elements have a width of 0 with `window.getComputedStyle`. This function
+ * defaults to the `Component`s `style.width` and falls back to `window.getComputedStyle`.
+ * See [this]{@link http://www.foliotek.com/devblog/getting-the-width-of-a-hidden-element-with-jquery-using-width/}
+ * for more information
+ * - If you want the computed style of the component, use {@link Component#currentWidth}
+ * and {@link Component#currentHeight}.
+ *
+ * @fires Component#componentresize
+ *
+ * @param {string} widthOrHeight
+ * 'width' or 'height'
+ *
+ * @param {number} [num]
+ * New dimension
+ *
+ * @param {boolean} [skipListeners]
+ * Skip componentresize event trigger
+ *
+ * @return {number}
+ * The dimension when getting or 0 if unset
+ */
+ ;
+
+ _proto.dimension = function dimension(widthOrHeight, num, skipListeners) {
+ if (num !== undefined) {
+ // Set to zero if null or literally NaN (NaN !== NaN)
+ if (num === null || num !== num) {
+ num = 0;
+ } // Check if using css width/height (% or px) and adjust
+
+
+ if (('' + num).indexOf('%') !== -1 || ('' + num).indexOf('px') !== -1) {
+ this.el_.style[widthOrHeight] = num;
+ } else if (num === 'auto') {
+ this.el_.style[widthOrHeight] = '';
+ } else {
+ this.el_.style[widthOrHeight] = num + 'px';
+ } // skipListeners allows us to avoid triggering the resize event when setting both width and height
+
+
+ if (!skipListeners) {
+ /**
+ * Triggered when a component is resized.
+ *
+ * @event Component#componentresize
+ * @type {EventTarget~Event}
+ */
+ this.trigger('componentresize');
+ }
+
+ return;
+ } // Not setting a value, so getting it
+ // Make sure element exists
+
+
+ if (!this.el_) {
+ return 0;
+ } // Get dimension value from style
+
+
+ var val = this.el_.style[widthOrHeight];
+ var pxIndex = val.indexOf('px');
+
+ if (pxIndex !== -1) {
+ // Return the pixel value with no 'px'
+ return parseInt(val.slice(0, pxIndex), 10);
+ } // No px so using % or no style was set, so falling back to offsetWidth/height
+ // If component has display:none, offset will return 0
+ // TODO: handle display:none and no dimension style using px
+
+
+ return parseInt(this.el_['offset' + toTitleCase$1(widthOrHeight)], 10);
+ }
+ /**
+ * Get the computed width or the height of the component's element.
+ *
+ * Uses `window.getComputedStyle`.
+ *
+ * @param {string} widthOrHeight
+ * A string containing 'width' or 'height'. Whichever one you want to get.
+ *
+ * @return {number}
+ * The dimension that gets asked for or 0 if nothing was set
+ * for that dimension.
+ */
+ ;
+
+ _proto.currentDimension = function currentDimension(widthOrHeight) {
+ var computedWidthOrHeight = 0;
+
+ if (widthOrHeight !== 'width' && widthOrHeight !== 'height') {
+ throw new Error('currentDimension only accepts width or height value');
+ }
+
+ computedWidthOrHeight = computedStyle(this.el_, widthOrHeight); // remove 'px' from variable and parse as integer
+
+ computedWidthOrHeight = parseFloat(computedWidthOrHeight); // if the computed value is still 0, it's possible that the browser is lying
+ // and we want to check the offset values.
+ // This code also runs wherever getComputedStyle doesn't exist.
+
+ if (computedWidthOrHeight === 0 || isNaN(computedWidthOrHeight)) {
+ var rule = "offset" + toTitleCase$1(widthOrHeight);
+ computedWidthOrHeight = this.el_[rule];
+ }
+
+ return computedWidthOrHeight;
+ }
+ /**
+ * An object that contains width and height values of the `Component`s
+ * computed style. Uses `window.getComputedStyle`.
+ *
+ * @typedef {Object} Component~DimensionObject
+ *
+ * @property {number} width
+ * The width of the `Component`s computed style.
+ *
+ * @property {number} height
+ * The height of the `Component`s computed style.
+ */
+
+ /**
+ * Get an object that contains computed width and height values of the
+ * component's element.
+ *
+ * Uses `window.getComputedStyle`.
+ *
+ * @return {Component~DimensionObject}
+ * The computed dimensions of the component's element.
+ */
+ ;
+
+ _proto.currentDimensions = function currentDimensions() {
+ return {
+ width: this.currentDimension('width'),
+ height: this.currentDimension('height')
+ };
+ }
+ /**
+ * Get the computed width of the component's element.
+ *
+ * Uses `window.getComputedStyle`.
+ *
+ * @return {number}
+ * The computed width of the component's element.
+ */
+ ;
+
+ _proto.currentWidth = function currentWidth() {
+ return this.currentDimension('width');
+ }
+ /**
+ * Get the computed height of the component's element.
+ *
+ * Uses `window.getComputedStyle`.
+ *
+ * @return {number}
+ * The computed height of the component's element.
+ */
+ ;
+
+ _proto.currentHeight = function currentHeight() {
+ return this.currentDimension('height');
+ }
+ /**
+ * Set the focus to this component
+ */
+ ;
+
+ _proto.focus = function focus() {
+ this.el_.focus();
+ }
+ /**
+ * Remove the focus from this component
+ */
+ ;
+
+ _proto.blur = function blur() {
+ this.el_.blur();
+ }
+ /**
+ * When this Component receives a `keydown` event which it does not process,
+ * it passes the event to the Player for handling.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ if (this.player_) {
+ // We only stop propagation here because we want unhandled events to fall
+ // back to the browser. Exclude Tab for focus trapping.
+ if (!keycode__default['default'].isEventKey(event, 'Tab')) {
+ event.stopPropagation();
+ }
+
+ this.player_.handleKeyDown(event);
+ }
+ }
+ /**
+ * Many components used to have a `handleKeyPress` method, which was poorly
+ * named because it listened to a `keydown` event. This method name now
+ * delegates to `handleKeyDown`. This means anyone calling `handleKeyPress`
+ * will not see their method calls stop working.
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to be called.
+ */
+ ;
+
+ _proto.handleKeyPress = function handleKeyPress(event) {
+ this.handleKeyDown(event);
+ }
+ /**
+ * Emit 'tap' events when touch event support gets detected. This gets used to
+ * support toggling the controls through a tap on the video. These events are only
+ * enabled on demand, because enabling them on every sub-component would add extra overhead.
+ *
+ * @private
+ * @fires Component#tap
+ * @listens Component#touchstart
+ * @listens Component#touchmove
+ * @listens Component#touchleave
+ * @listens Component#touchcancel
+ * @listens Component#touchend
+ */
+ ;
+
+ _proto.emitTapEvents = function emitTapEvents() {
+ // Track the start time so we can determine how long the touch lasted
+ var touchStart = 0;
+ var firstTouch = null; // Maximum movement allowed during a touch event to still be considered a tap
+ // Other popular libs use anywhere from 2 (hammer.js) to 15,
+ // so 10 seems like a nice, round number.
+
+ var tapMovementThreshold = 10; // The maximum length a touch can be while still being considered a tap
+
+ var touchTimeThreshold = 200;
+ var couldBeTap;
+ this.on('touchstart', function (event) {
+ // If more than one finger, don't consider treating this as a click
+ if (event.touches.length === 1) {
+ // Copy pageX/pageY from the object
+ firstTouch = {
+ pageX: event.touches[0].pageX,
+ pageY: event.touches[0].pageY
+ }; // Record start time so we can detect a tap vs. "touch and hold"
+
+ touchStart = window__default['default'].performance.now(); // Reset couldBeTap tracking
+
+ couldBeTap = true;
+ }
+ });
+ this.on('touchmove', function (event) {
+ // If more than one finger, don't consider treating this as a click
+ if (event.touches.length > 1) {
+ couldBeTap = false;
+ } else if (firstTouch) {
+ // Some devices will throw touchmoves for all but the slightest of taps.
+ // So, if we moved only a small distance, this could still be a tap
+ var xdiff = event.touches[0].pageX - firstTouch.pageX;
+ var ydiff = event.touches[0].pageY - firstTouch.pageY;
+ var touchDistance = Math.sqrt(xdiff * xdiff + ydiff * ydiff);
+
+ if (touchDistance > tapMovementThreshold) {
+ couldBeTap = false;
+ }
+ }
+ });
+
+ var noTap = function noTap() {
+ couldBeTap = false;
+ }; // TODO: Listen to the original target. http://youtu.be/DujfpXOKUp8?t=13m8s
+
+
+ this.on('touchleave', noTap);
+ this.on('touchcancel', noTap); // When the touch ends, measure how long it took and trigger the appropriate
+ // event
+
+ this.on('touchend', function (event) {
+ firstTouch = null; // Proceed only if the touchmove/leave/cancel event didn't happen
+
+ if (couldBeTap === true) {
+ // Measure how long the touch lasted
+ var touchTime = window__default['default'].performance.now() - touchStart; // Make sure the touch was less than the threshold to be considered a tap
+
+ if (touchTime < touchTimeThreshold) {
+ // Don't let browser turn this into a click
+ event.preventDefault();
+ /**
+ * Triggered when a `Component` is tapped.
+ *
+ * @event Component#tap
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('tap'); // It may be good to copy the touchend event object and change the
+ // type to tap, if the other event properties aren't exact after
+ // Events.fixEvent runs (e.g. event.target)
+ }
+ }
+ });
+ }
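+ /**
+ * Usage sketch (illustrative only, not part of the bundled video.js source);
+ * assumes it runs inside a Component subclass constructor.
+ * ```js
+ * this.emitTapEvents();
+ * this.on('tap', function() {
+ *   // treat a quick tap like a click and mark the user as active
+ *   this.player_.userActive(true);
+ * });
+ * ```
+ */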
+ /**
+ * This function reports user activity whenever touch events happen. This can get
+ * turned off by any sub-components that want touch events to act another way.
+ *
+ * Report user touch activity when touch events occur. User activity gets used to
+ * determine when controls should show/hide. It is simple when it comes to mouse
+ * events, because any mouse event should show the controls. So we capture mouse
+ * events that bubble up to the player and report activity when that happens.
+ * With touch events it isn't as easy, because `touchstart` and `touchend` toggle the
+ * player controls. So touch events can't help us at the player level either.
+ *
+ * User activity gets checked asynchronously. So what could happen is a tap event
+ * on the video turns the controls off. Then the `touchend` event bubbles up to
+ * the player. Which, if it reported user activity, would turn the controls right
+ * back on. We also don't want to completely block touch events from bubbling up.
+ * Furthermore, a `touchmove` event and anything other than a tap should not turn
+ * controls back on.
+ *
+ * @listens Component#touchstart
+ * @listens Component#touchmove
+ * @listens Component#touchend
+ * @listens Component#touchcancel
+ */
+ ;
+
+ _proto.enableTouchActivity = function enableTouchActivity() {
+ // Don't continue if the root player doesn't support reporting user activity
+ if (!this.player() || !this.player().reportUserActivity) {
+ return;
+ } // listener for reporting that the user is active
+
+
+ var report = bind(this.player(), this.player().reportUserActivity);
+ var touchHolding;
+ this.on('touchstart', function () {
+ report(); // For as long as they are touching the device or have their mouse down,
+ // we consider them active even if they're not moving their finger or mouse.
+ // So we want to continue to update that they are active
+
+ this.clearInterval(touchHolding); // report at the same interval as activityCheck
+
+ touchHolding = this.setInterval(report, 250);
+ });
+
+ var touchEnd = function touchEnd(event) {
+ report(); // stop the interval that maintains activity if the touch is holding
+
+ this.clearInterval(touchHolding);
+ };
+
+ this.on('touchmove', report);
+ this.on('touchend', touchEnd);
+ this.on('touchcancel', touchEnd);
+ }
+ /**
+ * A callback that has no parameters and is bound into `Component`s context.
+ *
+ * @callback Component~GenericCallback
+ * @this Component
+ */
+
+ /**
+ * Creates a function that runs after an `x` millisecond timeout. This function is a
+ * wrapper around `window.setTimeout`. There are a few reasons to use this one
+ * instead though:
+ * 1. It gets cleared via {@link Component#clearTimeout} when
+ * {@link Component#dispose} gets called.
+ * 2. The function callback will get turned into a {@link Component~GenericCallback}
+ *
+ * > Note: You can't use `window.clearTimeout` on the id returned by this function. This
+ * will cause its dispose listener not to get cleaned up! Please use
+ * {@link Component#clearTimeout} or {@link Component#dispose} instead.
+ *
+ * @param {Component~GenericCallback} fn
+ * The function that will be run after `timeout`.
+ *
+ * @param {number} timeout
+ * Timeout in milliseconds to delay before executing the specified function.
+ *
+ * @return {number}
+ * Returns a timeout ID that gets used to identify the timeout. It can also
+ * get used in {@link Component#clearTimeout} to clear the timeout that
+ * was set.
+ *
+ * @listens Component#dispose
+ * @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/setTimeout}
+ */
+ ;
+
+ _proto.setTimeout = function setTimeout(fn, timeout) {
+ var _this3 = this;
+
+ // declare as variables so they are properly available in timeout function
+ // eslint-disable-next-line
+ var timeoutId;
+ fn = bind(this, fn);
+ this.clearTimersOnDispose_();
+ timeoutId = window__default['default'].setTimeout(function () {
+ if (_this3.setTimeoutIds_.has(timeoutId)) {
+ _this3.setTimeoutIds_["delete"](timeoutId);
+ }
+
+ fn();
+ }, timeout);
+ this.setTimeoutIds_.add(timeoutId);
+ return timeoutId;
+ }
+ /**
+ * Clears a timeout that gets created via `window.setTimeout` or
+ * {@link Component#setTimeout}. If you set a timeout via {@link Component#setTimeout}
+ * use this function instead of `window.clearTimeout`. If you don't, your dispose
+ * listener will not get cleaned up until {@link Component#dispose}!
+ *
+ * @param {number} timeoutId
+ * The id of the timeout to clear. The return value of
+ * {@link Component#setTimeout} or `window.setTimeout`.
+ *
+ * @return {number}
+ * Returns the timeout id that was cleared.
+ *
+ * @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/clearTimeout}
+ */
+ ;
+
+ _proto.clearTimeout = function clearTimeout(timeoutId) {
+ if (this.setTimeoutIds_.has(timeoutId)) {
+ this.setTimeoutIds_["delete"](timeoutId);
+ window__default['default'].clearTimeout(timeoutId);
+ }
+
+ return timeoutId;
+ }
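+ /**
+ * Usage sketch (illustrative only, not part of the bundled video.js source);
+ * assumes it runs inside a Component method.
+ * ```js
+ * var timeoutId = this.setTimeout(function() {
+ *   this.addClass('vjs-delayed');
+ * }, 500);
+ * // later, cancel it with the component helper, not window.clearTimeout
+ * this.clearTimeout(timeoutId);
+ * ```
+ */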
+ /**
+ * Creates a function that gets run every `x` milliseconds. This function is a wrapper
+ * around `window.setInterval`. There are a few reasons to use this one instead though.
+ * 1. It gets cleared via {@link Component#clearInterval} when
+ * {@link Component#dispose} gets called.
+ * 2. The function callback will be a {@link Component~GenericCallback}
+ *
+ * @param {Component~GenericCallback} fn
+ * The function to run every `x` seconds.
+ *
+ * @param {number} interval
+ * Execute the specified function every `x` milliseconds.
+ *
+ * @return {number}
+ * Returns an id that can be used to identify the interval. It can also be used in
+ * {@link Component#clearInterval} to clear the interval.
+ *
+ * @listens Component#dispose
+ * @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/setInterval}
+ */
+ ;
+
+ _proto.setInterval = function setInterval(fn, interval) {
+ fn = bind(this, fn);
+ this.clearTimersOnDispose_();
+ var intervalId = window__default['default'].setInterval(fn, interval);
+ this.setIntervalIds_.add(intervalId);
+ return intervalId;
+ }
+ /**
+ * Clears an interval that gets created via `window.setInterval` or
+ * {@link Component#setInterval}. If you set an interval via {@link Component#setInterval}
+ * use this function instead of `window.clearInterval`. If you don't, your dispose
+ * listener will not get cleaned up until {@link Component#dispose}!
+ *
+ * @param {number} intervalId
+ * The id of the interval to clear. The return value of
+ * {@link Component#setInterval} or `window.setInterval`.
+ *
+ * @return {number}
+ * Returns the interval id that was cleared.
+ *
+ * @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/clearInterval}
+ */
+ ;
+
+ _proto.clearInterval = function clearInterval(intervalId) {
+ if (this.setIntervalIds_.has(intervalId)) {
+ this.setIntervalIds_["delete"](intervalId);
+ window__default['default'].clearInterval(intervalId);
+ }
+
+ return intervalId;
+ }
+ /**
+ * Queues up a callback to be passed to requestAnimationFrame (rAF), but
+ * with a few extra bonuses:
+ *
+ * - Supports browsers that do not support rAF by falling back to
+ * {@link Component#setTimeout}.
+ *
+ * - The callback is turned into a {@link Component~GenericCallback} (i.e.
+ * bound to the component).
+ *
+ * - Automatic cancellation of the rAF callback is handled if the component
+ * is disposed before it is called.
+ *
+ * @param {Component~GenericCallback} fn
+ * A function that will be bound to this component and executed just
+ * before the browser's next repaint.
+ *
+ * @return {number}
+ * Returns an rAF ID that gets used to identify the timeout. It can
+ * also be used in {@link Component#cancelAnimationFrame} to cancel
+ * the animation frame callback.
+ *
+ * @listens Component#dispose
+ * @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/window/requestAnimationFrame}
+ */
+ ;
+
+ _proto.requestAnimationFrame = function requestAnimationFrame(fn) {
+ var _this4 = this;
+
+ // Fall back to using a timer.
+ if (!this.supportsRaf_) {
+ return this.setTimeout(fn, 1000 / 60);
+ }
+
+ this.clearTimersOnDispose_(); // declare as variables so they are properly available in rAF function
+ // eslint-disable-next-line
+
+ var id;
+ fn = bind(this, fn);
+ id = window__default['default'].requestAnimationFrame(function () {
+ if (_this4.rafIds_.has(id)) {
+ _this4.rafIds_["delete"](id);
+ }
+
+ fn();
+ });
+ this.rafIds_.add(id);
+ return id;
+ }
+ /**
+ * Request an animation frame, but only one named animation
+ * frame will be queued. Another will never be added until
+ * the previous one finishes.
+ *
+ * @param {string} name
+ * The name to give this requestAnimationFrame
+ *
+ * @param {Component~GenericCallback} fn
+ * A function that will be bound to this component and executed just
+ * before the browser's next repaint.
+ */
+ ;
+
+ _proto.requestNamedAnimationFrame = function requestNamedAnimationFrame(name, fn) {
+ var _this5 = this;
+
+ if (this.namedRafs_.has(name)) {
+ return;
+ }
+
+ this.clearTimersOnDispose_();
+ fn = bind(this, fn);
+ var id = this.requestAnimationFrame(function () {
+ fn();
+
+ if (_this5.namedRafs_.has(name)) {
+ _this5.namedRafs_["delete"](name);
+ }
+ });
+ this.namedRafs_.set(name, id);
+ return name;
+ }
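+ /**
+ * Usage sketch (illustrative only, not part of the bundled video.js source);
+ * the 'MyBar#update' name is an arbitrary example key.
+ * ```js
+ * // repeated calls with the same name are coalesced into a single frame
+ * this.requestNamedAnimationFrame('MyBar#update', function() {
+ *   this.el().style.width = '50%';
+ * });
+ * ```
+ */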
+ /**
+ * Cancels a current named animation frame if it exists.
+ *
+ * @param {string} name
+ * The name of the requestAnimationFrame to cancel.
+ */
+ ;
+
+ _proto.cancelNamedAnimationFrame = function cancelNamedAnimationFrame(name) {
+ if (!this.namedRafs_.has(name)) {
+ return;
+ }
+
+ this.cancelAnimationFrame(this.namedRafs_.get(name));
+ this.namedRafs_["delete"](name);
+ }
+ /**
+ * Cancels a queued callback passed to {@link Component#requestAnimationFrame}
+ * (rAF).
+ *
+ * If you queue an rAF callback via {@link Component#requestAnimationFrame},
+ * use this function instead of `window.cancelAnimationFrame`. If you don't,
+ * your dispose listener will not get cleaned up until {@link Component#dispose}!
+ *
+ * @param {number} id
+ * The rAF ID to clear. The return value of {@link Component#requestAnimationFrame}.
+ *
+ * @return {number}
+ * Returns the rAF ID that was cleared.
+ *
+ * @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/window/cancelAnimationFrame}
+ */
+ ;
+
+ _proto.cancelAnimationFrame = function cancelAnimationFrame(id) {
+ // Fall back to using a timer.
+ if (!this.supportsRaf_) {
+ return this.clearTimeout(id);
+ }
+
+ if (this.rafIds_.has(id)) {
+ this.rafIds_["delete"](id);
+ window__default['default'].cancelAnimationFrame(id);
+ }
+
+ return id;
+ }
+ /**
+ * A function to setup `requestAnimationFrame`, `setTimeout`,
+ * and `setInterval`, clearing on dispose.
+ *
+ * > Previously each timer added and removed dispose listeners on its own.
+ * For better performance it was decided to batch them all, and use `Set`s
+ * to track outstanding timer ids.
+ *
+ * @private
+ */
+ ;
+
+ _proto.clearTimersOnDispose_ = function clearTimersOnDispose_() {
+ var _this6 = this;
+
+ if (this.clearingTimersOnDispose_) {
+ return;
+ }
+
+ this.clearingTimersOnDispose_ = true;
+ this.one('dispose', function () {
+ [['namedRafs_', 'cancelNamedAnimationFrame'], ['rafIds_', 'cancelAnimationFrame'], ['setTimeoutIds_', 'clearTimeout'], ['setIntervalIds_', 'clearInterval']].forEach(function (_ref) {
+ var idName = _ref[0],
+ cancelName = _ref[1];
+
+ // for a `Set`, the key will actually be the value again
+ // (i.e. forEach((val, val) => ...)), but for maps we want to use
+ // the key.
+ _this6[idName].forEach(function (val, key) {
+ return _this6[cancelName](key);
+ });
+ });
+ _this6.clearingTimersOnDispose_ = false;
+ });
+ }
+ /**
+ * Register a `Component` with `videojs` given the name and the component.
+ *
+ * > NOTE: {@link Tech}s should not be registered as a `Component`. {@link Tech}s
+ * should be registered using {@link Tech.registerTech} or
+ * {@link videojs:videojs.registerTech}.
+ *
+ * > NOTE: This function can also be seen on videojs as
+ * {@link videojs:videojs.registerComponent}.
+ *
+ * @param {string} name
+ * The name of the `Component` to register.
+ *
+ * @param {Component} ComponentToRegister
+ * The `Component` class to register.
+ *
+ * @return {Component}
+ * The `Component` that was registered.
+ */
+ ;
+
+ Component.registerComponent = function registerComponent(name, ComponentToRegister) {
+ if (typeof name !== 'string' || !name) {
+ throw new Error("Illegal component name, \"" + name + "\"; must be a non-empty string.");
+ }
+
+ var Tech = Component.getComponent('Tech'); // We need to make sure this check is only done if Tech has been registered.
+
+ var isTech = Tech && Tech.isTech(ComponentToRegister);
+ var isComp = Component === ComponentToRegister || Component.prototype.isPrototypeOf(ComponentToRegister.prototype);
+
+ if (isTech || !isComp) {
+ var reason;
+
+ if (isTech) {
+ reason = 'techs must be registered using Tech.registerTech()';
+ } else {
+ reason = 'must be a Component subclass';
+ }
+
+ throw new Error("Illegal component, \"" + name + "\"; " + reason + ".");
+ }
+
+ name = toTitleCase$1(name);
+
+ if (!Component.components_) {
+ Component.components_ = {};
+ }
+
+ var Player = Component.getComponent('Player');
+
+ if (name === 'Player' && Player && Player.players) {
+ var players = Player.players;
+ var playerNames = Object.keys(players); // If we have players that were disposed, then their name will still be
+ // in Players.players. So, we must loop through and verify that the value
+ // for each item is not null. This allows registration of the Player component
+ // after all players have been disposed or before any were created.
+
+ if (players && playerNames.length > 0 && playerNames.map(function (pname) {
+ return players[pname];
+ }).every(Boolean)) {
+ throw new Error('Can not register Player component after player has been created.');
+ }
+ }
+
+ Component.components_[name] = ComponentToRegister;
+ Component.components_[toLowerCase(name)] = ComponentToRegister;
+ return ComponentToRegister;
+ }
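+ /**
+ * Usage sketch (illustrative only, not part of the bundled video.js source),
+ * using the public `videojs.getComponent`, `videojs.extend` and
+ * `videojs.registerComponent` helpers; the 'TitleBar' component is an assumption.
+ * ```js
+ * var Component = videojs.getComponent('Component');
+ * var TitleBar = videojs.extend(Component, {
+ *   createEl: function() {
+ *     return Component.prototype.createEl.call(this, 'div', {
+ *       className: 'vjs-title-bar'
+ *     });
+ *   }
+ * });
+ * videojs.registerComponent('TitleBar', TitleBar);
+ * player.addChild('TitleBar');
+ * ```
+ */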
+ /**
+ * Get a `Component` based on the name it was registered with.
+ *
+ * @param {string} name
+ * The Name of the component to get.
+ *
+ * @return {Component}
+ * The `Component` that got registered under the given name.
+ */
+ ;
+
+ Component.getComponent = function getComponent(name) {
+ if (!name || !Component.components_) {
+ return;
+ }
+
+ return Component.components_[name];
+ };
+
+ return Component;
+}();
+/**
+ * Whether or not this component supports `requestAnimationFrame`.
+ *
+ * This is exposed primarily for testing purposes.
+ *
+ * @private
+ * @type {Boolean}
+ */
+
+
+Component$1.prototype.supportsRaf_ = typeof window__default['default'].requestAnimationFrame === 'function' && typeof window__default['default'].cancelAnimationFrame === 'function';
+Component$1.registerComponent('Component', Component$1);
+
+/**
+ * @file time-ranges.js
+ * @module time-ranges
+ */
+/**
+ * Returns the time for the specified index at the start or end
+ * of a TimeRange object.
+ *
+ * @typedef {Function} TimeRangeIndex
+ *
+ * @param {number} [index=0]
+ * The range number to return the time for.
+ *
+ * @return {number}
+ * The time offset at the specified index.
+ *
+ * @deprecated The index argument must be provided.
+ * In the future, leaving it out will throw an error.
+ */
+
+/**
+ * An object that contains ranges of time.
+ *
+ * @typedef {Object} TimeRange
+ *
+ * @property {number} length
+ * The number of time ranges represented by this object.
+ *
+ * @property {module:time-ranges~TimeRangeIndex} start
+ * Returns the time offset at which a specified time range begins.
+ *
+ * @property {module:time-ranges~TimeRangeIndex} end
+ * Returns the time offset at which a specified time range ends.
+ *
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/TimeRanges
+ */
+
+/**
+ * Check if any of the time ranges are over the maximum index.
+ *
+ * @private
+ * @param {string} fnName
+ * The function name to use for logging
+ *
+ * @param {number} index
+ * The index to check
+ *
+ * @param {number} maxIndex
+ * The maximum possible index
+ *
+ * @throws {Error} if the timeRanges provided are over the maxIndex
+ */
+
+function rangeCheck(fnName, index, maxIndex) {
+ if (typeof index !== 'number' || index < 0 || index > maxIndex) {
+ throw new Error("Failed to execute '" + fnName + "' on 'TimeRanges': The index provided (" + index + ") is non-numeric or out of bounds (0-" + maxIndex + ").");
+ }
+}
+/**
+ * Get the time for the specified index at the start or end
+ * of a TimeRange object.
+ *
+ * @private
+ * @param {string} fnName
+ * The function name to use for logging
+ *
+ * @param {number} valueIndex
+ * The index into each range used to get the time: 0 for the
+ * range's start, 1 for its end.
+ *
+ * @param {Array} ranges
+ * An array of time ranges
+ *
+ * @param {number} [rangeIndex=0]
+ * The index to start the search at
+ *
+ * @return {number}
+ * The time offset at the specified index.
+ *
+ * @deprecated rangeIndex must be set to a value, in the future this will throw an error.
+ * @throws {Error} if rangeIndex is more than the length of ranges
+ */
+
+
+function getRange(fnName, valueIndex, ranges, rangeIndex) {
+ rangeCheck(fnName, rangeIndex, ranges.length - 1);
+ return ranges[rangeIndex][valueIndex];
+}
+/**
+ * Create a time range object given ranges of time.
+ *
+ * @private
+ * @param {Array} [ranges]
+ * An array of time ranges.
+ */
+
+
+function createTimeRangesObj(ranges) {
+ var timeRangesObj;
+
+ if (ranges === undefined || ranges.length === 0) {
+ timeRangesObj = {
+ length: 0,
+ start: function start() {
+ throw new Error('This TimeRanges object is empty');
+ },
+ end: function end() {
+ throw new Error('This TimeRanges object is empty');
+ }
+ };
+ } else {
+ timeRangesObj = {
+ length: ranges.length,
+ start: getRange.bind(null, 'start', 0, ranges),
+ end: getRange.bind(null, 'end', 1, ranges)
+ };
+ }
+
+ if (window__default['default'].Symbol && window__default['default'].Symbol.iterator) {
+ timeRangesObj[window__default['default'].Symbol.iterator] = function () {
+ return (ranges || []).values();
+ };
+ }
+
+ return timeRangesObj;
+}
+/**
+ * Create a `TimeRange` object which mimics an
+ * {@link https://developer.mozilla.org/en-US/docs/Web/API/TimeRanges|HTML5 TimeRanges instance}.
+ *
+ * @param {number|Array[]} start
+ * The start of a single range (a number) or an array of ranges (an
+ * array of arrays of two numbers each).
+ *
+ * @param {number} end
+ * The end of a single range. Cannot be used with the array form of
+ * the `start` argument.
+ */
+
+
+function createTimeRanges(start, end) {
+ if (Array.isArray(start)) {
+ return createTimeRangesObj(start);
+ } else if (start === undefined || end === undefined) {
+ return createTimeRangesObj();
+ }
+
+ return createTimeRangesObj([[start, end]]);
+}
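+/**
+ * Usage sketch (illustrative only, not part of the bundled video.js source).
+ * ```js
+ * var ranges = createTimeRanges([[0, 10], [15, 20]]);
+ * ranges.length;   // 2
+ * ranges.start(1); // 15
+ * ranges.end(1);   // 20
+ * var single = createTimeRanges(0, 30); // one range from 0s to 30s
+ * ```
+ */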
+
+/**
+ * @file buffer.js
+ * @module buffer
+ */
+/**
+ * Compute the percentage of the media that has been buffered.
+ *
+ * @param {TimeRange} buffered
+ * The current `TimeRange` object representing buffered time ranges
+ *
+ * @param {number} duration
+ * Total duration of the media
+ *
+ * @return {number}
+ * Percent buffered of the total duration in decimal form.
+ */
+
+function bufferedPercent(buffered, duration) {
+ var bufferedDuration = 0;
+ var start;
+ var end;
+
+ if (!duration) {
+ return 0;
+ }
+
+ if (!buffered || !buffered.length) {
+ buffered = createTimeRanges(0, 0);
+ }
+
+ for (var i = 0; i < buffered.length; i++) {
+ start = buffered.start(i);
+ end = buffered.end(i); // buffered end can be bigger than duration by a very small fraction
+
+ if (end > duration) {
+ end = duration;
+ }
+
+ bufferedDuration += end - start;
+ }
+
+ return bufferedDuration / duration;
+}
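+/**
+ * Usage sketch (illustrative only, not part of the bundled video.js source).
+ * ```js
+ * // 30 seconds buffered out of a 120 second duration
+ * bufferedPercent(createTimeRanges(0, 30), 120); // 0.25
+ * ```
+ */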
+
+/**
+ * @file media-error.js
+ */
+/**
+ * A Custom `MediaError` class which mimics the standard HTML5 `MediaError` class.
+ *
+ * @param {number|string|Object|MediaError} value
+ * This can be of multiple types:
+ * - number: should be a standard error code
+ * - string: an error message (the code will be 0)
+ * - Object: arbitrary properties
+ * - `MediaError` (native): used to populate a video.js `MediaError` object
+ * - `MediaError` (video.js): will return itself if it's already a
+ * video.js `MediaError` object.
+ *
+ * @see [MediaError Spec]{@link https://dev.w3.org/html5/spec-author-view/video.html#mediaerror}
+ * @see [Encrypted MediaError Spec]{@link https://www.w3.org/TR/2013/WD-encrypted-media-20130510/#error-codes}
+ *
+ * @class MediaError
+ */
+
+function MediaError(value) {
+ // Allow redundant calls to this constructor to avoid having `instanceof`
+ // checks peppered around the code.
+ if (value instanceof MediaError) {
+ return value;
+ }
+
+ if (typeof value === 'number') {
+ this.code = value;
+ } else if (typeof value === 'string') {
+ // default code is zero, so this is a custom error
+ this.message = value;
+ } else if (isObject(value)) {
+ // We assign the `code` property manually because native `MediaError` objects
+ // do not expose it as an own/enumerable property of the object.
+ if (typeof value.code === 'number') {
+ this.code = value.code;
+ }
+
+ assign(this, value);
+ }
+
+ if (!this.message) {
+ this.message = MediaError.defaultMessages[this.code] || '';
+ }
+}
+/**
+ * The error code that refers to one of the defined `MediaError` types
+ *
+ * @type {Number}
+ */
+
+
+MediaError.prototype.code = 0;
+/**
+ * An optional message to show with the error. Message is not part of the HTML5
+ * video spec but allows for more informative custom errors.
+ *
+ * @type {String}
+ */
+
+MediaError.prototype.message = '';
+/**
+ * An optional status code that can be set by plugins to allow even more detail about
+ * the error. For example a plugin might provide a specific HTTP status code and an
+ * error message for that code. Then when the plugin gets that error this class will
+ * know how to display an error message for it. This allows a custom message to show
+ * up on the `Player` error overlay.
+ *
+ * @type {Array}
+ */
+
+MediaError.prototype.status = null;
+/**
+ * Errors indexed by the W3C standard. The order **CANNOT CHANGE**! See the
+ * specification listed under {@link MediaError} for more information.
+ *
+ * @enum {array}
+ * @readonly
+ * @property {string} 0 - MEDIA_ERR_CUSTOM
+ * @property {string} 1 - MEDIA_ERR_ABORTED
+ * @property {string} 2 - MEDIA_ERR_NETWORK
+ * @property {string} 3 - MEDIA_ERR_DECODE
+ * @property {string} 4 - MEDIA_ERR_SRC_NOT_SUPPORTED
+ * @property {string} 5 - MEDIA_ERR_ENCRYPTED
+ */
+
+MediaError.errorTypes = ['MEDIA_ERR_CUSTOM', 'MEDIA_ERR_ABORTED', 'MEDIA_ERR_NETWORK', 'MEDIA_ERR_DECODE', 'MEDIA_ERR_SRC_NOT_SUPPORTED', 'MEDIA_ERR_ENCRYPTED'];
+/**
+ * The default `MediaError` messages based on the {@link MediaError.errorTypes}.
+ *
+ * @type {Array}
+ * @constant
+ */
+
+MediaError.defaultMessages = {
+ 1: 'You aborted the media playback',
+ 2: 'A network error caused the media download to fail part-way.',
+ 3: 'The media playback was aborted due to a corruption problem or because the media used features your browser did not support.',
+ 4: 'The media could not be loaded, either because the server or network failed or because the format is not supported.',
+ 5: 'The media is encrypted and we do not have the keys to decrypt it.'
+}; // Add types as properties on MediaError
+// e.g. MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED = 4;
+
+for (var errNum = 0; errNum < MediaError.errorTypes.length; errNum++) {
+ MediaError[MediaError.errorTypes[errNum]] = errNum; // values should be accessible on both the class and instance
+
+ MediaError.prototype[MediaError.errorTypes[errNum]] = errNum;
+} // jsdocs for instance/static members added above
+
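+/**
+ * Usage sketch within this module (illustrative only, not part of the
+ * bundled video.js source).
+ * ```js
+ * var err = new MediaError(MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED);
+ * err.code;    // 4
+ * err.message; // the default message for code 4
+ * ```
+ */
+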
+/**
+ * Returns whether an object is `Promise`-like (i.e. has a `then` method).
+ *
+ * @param {Object} value
+ * An object that may or may not be `Promise`-like.
+ *
+ * @return {boolean}
+ * Whether or not the object is `Promise`-like.
+ */
+function isPromise(value) {
+ return value !== undefined && value !== null && typeof value.then === 'function';
+}
+/**
+ * Silence a Promise-like object.
+ *
+ * This is useful for avoiding non-harmful, but potentially confusing "uncaught
+ * play promise" rejection error messages.
+ *
+ * @param {Object} value
+ * An object that may or may not be `Promise`-like.
+ */
+
+function silencePromise(value) {
+ if (isPromise(value)) {
+ value.then(null, function (e) {});
+ }
+}
+
+/**
+ * @file text-track-list-converter.js Utilities for capturing text track state and
+ * re-creating tracks based on a capture.
+ *
+ * @module text-track-list-converter
+ */
+
+/**
+ * Examine a single {@link TextTrack} and return a JSON-compatible javascript object that
+ * represents the {@link TextTrack}'s state.
+ *
+ * @param {TextTrack} track
+ * The text track to query.
+ *
+ * @return {Object}
+ * A serializable javascript representation of the TextTrack.
+ * @private
+ */
+var trackToJson_ = function trackToJson_(track) {
+ var ret = ['kind', 'label', 'language', 'id', 'inBandMetadataTrackDispatchType', 'mode', 'src'].reduce(function (acc, prop, i) {
+ if (track[prop]) {
+ acc[prop] = track[prop];
+ }
+
+ return acc;
+ }, {
+ cues: track.cues && Array.prototype.map.call(track.cues, function (cue) {
+ return {
+ startTime: cue.startTime,
+ endTime: cue.endTime,
+ text: cue.text,
+ id: cue.id
+ };
+ })
+ });
+ return ret;
+};
+/**
+ * Examine a {@link Tech} and return a JSON-compatible javascript array that represents the
+ * state of all {@link TextTrack}s currently configured. The return array is compatible with
+ * {@link text-track-list-converter:jsonToTextTracks}.
+ *
+ * @param {Tech} tech
+ * The tech object to query
+ *
+ * @return {Array}
+ * A serializable javascript representation of the {@link Tech}s
+ * {@link TextTrackList}.
+ */
+
+
+var textTracksToJson = function textTracksToJson(tech) {
+ var trackEls = tech.$$('track');
+ var trackObjs = Array.prototype.map.call(trackEls, function (t) {
+ return t.track;
+ });
+ var tracks = Array.prototype.map.call(trackEls, function (trackEl) {
+ var json = trackToJson_(trackEl.track);
+
+ if (trackEl.src) {
+ json.src = trackEl.src;
+ }
+
+ return json;
+ });
+ return tracks.concat(Array.prototype.filter.call(tech.textTracks(), function (track) {
+ return trackObjs.indexOf(track) === -1;
+ }).map(trackToJson_));
+};
+/**
+ * Create a set of remote {@link TextTrack}s on a {@link Tech} based on an array of javascript
+ * object {@link TextTrack} representations.
+ *
+ * @param {Array} json
+ * An array of `TextTrack` representation objects, like those that would be
+ * produced by `textTracksToJson`.
+ *
+ * @param {Tech} tech
+ * The `Tech` to create the `TextTrack`s on.
+ */
+
+
+var jsonToTextTracks = function jsonToTextTracks(json, tech) {
+ json.forEach(function (track) {
+ var addedTrack = tech.addRemoteTextTrack(track).track;
+
+ if (!track.src && track.cues) {
+ track.cues.forEach(function (cue) {
+ return addedTrack.addCue(cue);
+ });
+ }
+ });
+ return tech.textTracks();
+};
+
+var textTrackConverter = {
+ textTracksToJson: textTracksToJson,
+ jsonToTextTracks: jsonToTextTracks,
+ trackToJson_: trackToJson_
+};
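+
+/**
+ * Usage sketch (illustrative only, not part of the bundled video.js source);
+ * `oldTech` and `newTech` are assumed Tech instances (e.g. across a source change).
+ * ```js
+ * var state = textTrackConverter.textTracksToJson(oldTech);
+ * textTrackConverter.jsonToTextTracks(state, newTech);
+ * ```
+ */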
+
+var MODAL_CLASS_NAME = 'vjs-modal-dialog';
+/**
+ * The `ModalDialog` displays over the video and its controls, which blocks
+ * interaction with the player until it is closed.
+ *
+ * Modal dialogs include a "Close" button and will close when that button
+ * is activated - or when ESC is pressed anywhere.
+ *
+ * @extends Component
+ */
+
+var ModalDialog = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](ModalDialog, _Component);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Mixed} [options.content=undefined]
+ * Provide customized content for this modal.
+ *
+ * @param {string} [options.description]
+ * A text description for the modal, primarily for accessibility.
+ *
+ * @param {boolean} [options.fillAlways=false]
+ * Normally, modals are automatically filled only the first time
+ * they open. This tells the modal to refresh its content
+ * every time it opens.
+ *
+ * @param {string} [options.label]
+ * A text label for the modal, primarily for accessibility.
+ *
+ * @param {boolean} [options.pauseOnOpen=true]
+ * If `true`, playback will be paused if playing when
+ * the modal opens, and resumed when it closes.
+ *
+ * @param {boolean} [options.temporary=true]
+ * If `true`, the modal can only be opened once; it will be
+ * disposed as soon as it's closed.
+ *
+ * @param {boolean} [options.uncloseable=false]
+ * If `true`, the user will not be able to close the modal
+ * through the UI in the normal ways. Programmatic closing is
+ * still possible.
+ */
+ function ModalDialog(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+
+ _this.handleKeyDown_ = function (e) {
+ return _this.handleKeyDown(e);
+ };
+
+ _this.close_ = function (e) {
+ return _this.close(e);
+ };
+
+ _this.opened_ = _this.hasBeenOpened_ = _this.hasBeenFilled_ = false;
+
+ _this.closeable(!_this.options_.uncloseable);
+
+ _this.content(_this.options_.content); // Make sure the contentEl is defined AFTER any children are initialized
+ // because we only want the contents of the modal in the contentEl
+ // (not the UI elements like the close button).
+
+
+ _this.contentEl_ = createEl('div', {
+ className: MODAL_CLASS_NAME + "-content"
+ }, {
+ role: 'document'
+ });
+ _this.descEl_ = createEl('p', {
+ className: MODAL_CLASS_NAME + "-description vjs-control-text",
+ id: _this.el().getAttribute('aria-describedby')
+ });
+ textContent(_this.descEl_, _this.description());
+
+ _this.el_.appendChild(_this.descEl_);
+
+ _this.el_.appendChild(_this.contentEl_);
+
+ return _this;
+ }
+ /**
+ * Create the `ModalDialog`'s DOM element
+ *
+ * @return {Element}
+ * The DOM element that gets created.
+ */
+
+
+ var _proto = ModalDialog.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: this.buildCSSClass(),
+ tabIndex: -1
+ }, {
+ 'aria-describedby': this.id() + "_description",
+ 'aria-hidden': 'true',
+ 'aria-label': this.label(),
+ 'role': 'dialog'
+ });
+ };
+
+ _proto.dispose = function dispose() {
+ this.contentEl_ = null;
+ this.descEl_ = null;
+ this.previouslyActiveEl_ = null;
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ ;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return MODAL_CLASS_NAME + " vjs-hidden " + _Component.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Returns the label string for this modal. Primarily used for accessibility.
+ *
+ * @return {string}
+   *         The localized or raw label of this modal.
+ */
+ ;
+
+ _proto.label = function label() {
+ return this.localize(this.options_.label || 'Modal Window');
+ }
+ /**
+ * Returns the description string for this modal. Primarily used for
+ * accessibility.
+ *
+ * @return {string}
+ * The localized or raw description of this modal.
+ */
+ ;
+
+ _proto.description = function description() {
+ var desc = this.options_.description || this.localize('This is a modal window.'); // Append a universal closeability message if the modal is closeable.
+
+ if (this.closeable()) {
+ desc += ' ' + this.localize('This modal can be closed by pressing the Escape key or activating the close button.');
+ }
+
+ return desc;
+ }
+ /**
+ * Opens the modal.
+ *
+ * @fires ModalDialog#beforemodalopen
+ * @fires ModalDialog#modalopen
+ */
+ ;
+
+ _proto.open = function open() {
+ if (!this.opened_) {
+ var player = this.player();
+ /**
+ * Fired just before a `ModalDialog` is opened.
+ *
+ * @event ModalDialog#beforemodalopen
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('beforemodalopen');
+ this.opened_ = true; // Fill content if the modal has never opened before and
+ // never been filled.
+
+ if (this.options_.fillAlways || !this.hasBeenOpened_ && !this.hasBeenFilled_) {
+ this.fill();
+ } // If the player was playing, pause it and take note of its previously
+ // playing state.
+
+
+ this.wasPlaying_ = !player.paused();
+
+ if (this.options_.pauseOnOpen && this.wasPlaying_) {
+ player.pause();
+ }
+
+ this.on('keydown', this.handleKeyDown_); // Hide controls and note if they were enabled.
+
+ this.hadControls_ = player.controls();
+ player.controls(false);
+ this.show();
+ this.conditionalFocus_();
+ this.el().setAttribute('aria-hidden', 'false');
+ /**
+ * Fired just after a `ModalDialog` is opened.
+ *
+ * @event ModalDialog#modalopen
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('modalopen');
+ this.hasBeenOpened_ = true;
+ }
+ }
+ /**
+ * If the `ModalDialog` is currently open or closed.
+ *
+ * @param {boolean} [value]
+ * If given, it will open (`true`) or close (`false`) the modal.
+ *
+ * @return {boolean}
+   *         The current open state of the `ModalDialog`.
+ */
+ ;
+
+ _proto.opened = function opened(value) {
+ if (typeof value === 'boolean') {
+ this[value ? 'open' : 'close']();
+ }
+
+ return this.opened_;
+ }
+ /**
+ * Closes the modal, does nothing if the `ModalDialog` is
+ * not open.
+ *
+ * @fires ModalDialog#beforemodalclose
+ * @fires ModalDialog#modalclose
+ */
+ ;
+
+ _proto.close = function close() {
+ if (!this.opened_) {
+ return;
+ }
+
+ var player = this.player();
+ /**
+ * Fired just before a `ModalDialog` is closed.
+ *
+ * @event ModalDialog#beforemodalclose
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('beforemodalclose');
+ this.opened_ = false;
+
+ if (this.wasPlaying_ && this.options_.pauseOnOpen) {
+ player.play();
+ }
+
+ this.off('keydown', this.handleKeyDown_);
+
+ if (this.hadControls_) {
+ player.controls(true);
+ }
+
+ this.hide();
+ this.el().setAttribute('aria-hidden', 'true');
+ /**
+ * Fired just after a `ModalDialog` is closed.
+ *
+ * @event ModalDialog#modalclose
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('modalclose');
+ this.conditionalBlur_();
+
+ if (this.options_.temporary) {
+ this.dispose();
+ }
+ }
+ /**
+ * Check to see if the `ModalDialog` is closeable via the UI.
+ *
+ * @param {boolean} [value]
+ * If given as a boolean, it will set the `closeable` option.
+ *
+ * @return {boolean}
+   *         Returns the final value of the `closeable` option.
+ */
+ ;
+
+ _proto.closeable = function closeable(value) {
+ if (typeof value === 'boolean') {
+ var closeable = this.closeable_ = !!value;
+ var close = this.getChild('closeButton'); // If this is being made closeable and has no close button, add one.
+
+ if (closeable && !close) {
+ // The close button should be a child of the modal - not its
+ // content element, so temporarily change the content element.
+ var temp = this.contentEl_;
+ this.contentEl_ = this.el_;
+ close = this.addChild('closeButton', {
+ controlText: 'Close Modal Dialog'
+ });
+ this.contentEl_ = temp;
+ this.on(close, 'close', this.close_);
+ } // If this is being made uncloseable and has a close button, remove it.
+
+
+ if (!closeable && close) {
+ this.off(close, 'close', this.close_);
+ this.removeChild(close);
+ close.dispose();
+ }
+ }
+
+ return this.closeable_;
+ }
+ /**
+ * Fill the modal's content element with the modal's "content" option.
+ * The content element will be emptied before this change takes place.
+ */
+ ;
+
+ _proto.fill = function fill() {
+ this.fillWith(this.content());
+ }
+ /**
+ * Fill the modal's content element with arbitrary content.
+ * The content element will be emptied before this change takes place.
+ *
+ * @fires ModalDialog#beforemodalfill
+ * @fires ModalDialog#modalfill
+ *
+ * @param {Mixed} [content]
+ * The same rules apply to this as apply to the `content` option.
+ */
+ ;
+
+ _proto.fillWith = function fillWith(content) {
+ var contentEl = this.contentEl();
+ var parentEl = contentEl.parentNode;
+ var nextSiblingEl = contentEl.nextSibling;
+ /**
+ * Fired just before a `ModalDialog` is filled with content.
+ *
+ * @event ModalDialog#beforemodalfill
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('beforemodalfill');
+ this.hasBeenFilled_ = true; // Detach the content element from the DOM before performing
+ // manipulation to avoid modifying the live DOM multiple times.
+
+ parentEl.removeChild(contentEl);
+ this.empty();
+ insertContent(contentEl, content);
+ /**
+ * Fired just after a `ModalDialog` is filled with content.
+ *
+ * @event ModalDialog#modalfill
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('modalfill'); // Re-inject the re-filled content element.
+
+ if (nextSiblingEl) {
+ parentEl.insertBefore(contentEl, nextSiblingEl);
+ } else {
+ parentEl.appendChild(contentEl);
+ } // make sure that the close button is last in the dialog DOM
+
+
+ var closeButton = this.getChild('closeButton');
+
+ if (closeButton) {
+ parentEl.appendChild(closeButton.el_);
+ }
+ }
+ /**
+ * Empties the content element. This happens anytime the modal is filled.
+ *
+ * @fires ModalDialog#beforemodalempty
+ * @fires ModalDialog#modalempty
+ */
+ ;
+
+ _proto.empty = function empty() {
+ /**
+ * Fired just before a `ModalDialog` is emptied.
+ *
+ * @event ModalDialog#beforemodalempty
+ * @type {EventTarget~Event}
+ */
+ this.trigger('beforemodalempty');
+ emptyEl(this.contentEl());
+ /**
+ * Fired just after a `ModalDialog` is emptied.
+ *
+ * @event ModalDialog#modalempty
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('modalempty');
+ }
+ /**
+ * Gets or sets the modal content, which gets normalized before being
+ * rendered into the DOM.
+ *
+ * This does not update the DOM or fill the modal, but it is called during
+ * that process.
+ *
+ * @param {Mixed} [value]
+ * If defined, sets the internal content value to be used on the
+ * next call(s) to `fill`. This value is normalized before being
+ * inserted. To "clear" the internal content value, pass `null`.
+ *
+ * @return {Mixed}
+ * The current content of the modal dialog
+ */
+ ;
+
+ _proto.content = function content(value) {
+ if (typeof value !== 'undefined') {
+ this.content_ = value;
+ }
+
+ return this.content_;
+ }
+ /**
+ * conditionally focus the modal dialog if focus was previously on the player.
+ *
+ * @private
+ */
+ ;
+
+ _proto.conditionalFocus_ = function conditionalFocus_() {
+ var activeEl = document__default['default'].activeElement;
+ var playerEl = this.player_.el_;
+ this.previouslyActiveEl_ = null;
+
+ if (playerEl.contains(activeEl) || playerEl === activeEl) {
+ this.previouslyActiveEl_ = activeEl;
+ this.focus();
+ }
+ }
+ /**
+ * conditionally blur the element and refocus the last focused element
+ *
+ * @private
+ */
+ ;
+
+ _proto.conditionalBlur_ = function conditionalBlur_() {
+ if (this.previouslyActiveEl_) {
+ this.previouslyActiveEl_.focus();
+ this.previouslyActiveEl_ = null;
+ }
+ }
+ /**
+ * Keydown handler. Attached when modal is focused.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Do not allow keydowns to reach out of the modal dialog.
+ event.stopPropagation();
+
+ if (keycode__default['default'].isEventKey(event, 'Escape') && this.closeable()) {
+ event.preventDefault();
+ this.close();
+ return;
+ } // exit early if it isn't a tab key
+
+
+ if (!keycode__default['default'].isEventKey(event, 'Tab')) {
+ return;
+ }
+
+ var focusableEls = this.focusableEls_();
+ var activeEl = this.el_.querySelector(':focus');
+ var focusIndex;
+
+ for (var i = 0; i < focusableEls.length; i++) {
+ if (activeEl === focusableEls[i]) {
+ focusIndex = i;
+ break;
+ }
+ }
+
+ if (document__default['default'].activeElement === this.el_) {
+ focusIndex = 0;
+ }
+
+ if (event.shiftKey && focusIndex === 0) {
+ focusableEls[focusableEls.length - 1].focus();
+ event.preventDefault();
+ } else if (!event.shiftKey && focusIndex === focusableEls.length - 1) {
+ focusableEls[0].focus();
+ event.preventDefault();
+ }
+ }
+ /**
+ * get all focusable elements
+ *
+ * @private
+ */
+ ;
+
+ _proto.focusableEls_ = function focusableEls_() {
+ var allChildren = this.el_.querySelectorAll('*');
+ return Array.prototype.filter.call(allChildren, function (child) {
+ return (child instanceof window__default['default'].HTMLAnchorElement || child instanceof window__default['default'].HTMLAreaElement) && child.hasAttribute('href') || (child instanceof window__default['default'].HTMLInputElement || child instanceof window__default['default'].HTMLSelectElement || child instanceof window__default['default'].HTMLTextAreaElement || child instanceof window__default['default'].HTMLButtonElement) && !child.hasAttribute('disabled') || child instanceof window__default['default'].HTMLIFrameElement || child instanceof window__default['default'].HTMLObjectElement || child instanceof window__default['default'].HTMLEmbedElement || child.hasAttribute('tabindex') && child.getAttribute('tabindex') !== -1 || child.hasAttribute('contenteditable');
+ });
+ };
+
+ return ModalDialog;
+}(Component$1);
+/**
+ * Default options for `ModalDialog`.
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+ModalDialog.prototype.options_ = {
+ pauseOnOpen: true,
+ temporary: true
+};
+Component$1.registerComponent('ModalDialog', ModalDialog);
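+/* Usage sketch (editorial addition): creating a reusable modal on an assumed,
+ * already initialized `player` instance. With `temporary: false` the dialog
+ * survives close() and can be reopened later.
+ *
+ *   var modal = new ModalDialog(player, {
+ *     content: 'Playback is paused while this dialog is open.',
+ *     temporary: false
+ *   });
+ *
+ *   player.addChild(modal);
+ *   modal.open();  // pauses playback, since pauseOnOpen defaults to true
+ *   modal.close(); // resumes playback if it was playing before open()
+ */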
+
+/**
+ * Common functionality between {@link TextTrackList}, {@link AudioTrackList}, and
+ * {@link VideoTrackList}
+ *
+ * @extends EventTarget
+ */
+
+var TrackList = /*#__PURE__*/function (_EventTarget) {
+ _inheritsLoose__default['default'](TrackList, _EventTarget);
+
+ /**
+ * Create an instance of this class
+ *
+ * @param {Track[]} tracks
+ * A list of tracks to initialize the list with.
+ *
+ * @abstract
+ */
+ function TrackList(tracks) {
+ var _this;
+
+ if (tracks === void 0) {
+ tracks = [];
+ }
+
+ _this = _EventTarget.call(this) || this;
+ _this.tracks_ = [];
+ /**
+ * @memberof TrackList
+ * @member {number} length
+     *         The current number of `Track`s in this `TrackList`.
+ * @instance
+ */
+
+ Object.defineProperty(_assertThisInitialized__default['default'](_this), 'length', {
+ get: function get() {
+ return this.tracks_.length;
+ }
+ });
+
+ for (var i = 0; i < tracks.length; i++) {
+ _this.addTrack(tracks[i]);
+ }
+
+ return _this;
+ }
+ /**
+ * Add a {@link Track} to the `TrackList`
+ *
+ * @param {Track} track
+ * The audio, video, or text track to add to the list.
+ *
+ * @fires TrackList#addtrack
+ */
+
+
+ var _proto = TrackList.prototype;
+
+ _proto.addTrack = function addTrack(track) {
+ var _this2 = this;
+
+ var index = this.tracks_.length;
+
+ if (!('' + index in this)) {
+ Object.defineProperty(this, index, {
+ get: function get() {
+ return this.tracks_[index];
+ }
+ });
+ } // Do not add duplicate tracks
+
+
+ if (this.tracks_.indexOf(track) === -1) {
+ this.tracks_.push(track);
+ /**
+ * Triggered when a track is added to a track list.
+ *
+ * @event TrackList#addtrack
+ * @type {EventTarget~Event}
+ * @property {Track} track
+ * A reference to track that was added.
+ */
+
+ this.trigger({
+ track: track,
+ type: 'addtrack',
+ target: this
+ });
+ }
+ /**
+ * Triggered when a track label is changed.
+ *
+     * @event TrackList#labelchange
+     * @type {EventTarget~Event}
+     * @property {Track} track
+     *           A reference to the track whose label was changed.
+ */
+
+
+ track.labelchange_ = function () {
+ _this2.trigger({
+ track: track,
+ type: 'labelchange',
+ target: _this2
+ });
+ };
+
+ if (isEvented(track)) {
+ track.addEventListener('labelchange', track.labelchange_);
+ }
+ }
+ /**
+ * Remove a {@link Track} from the `TrackList`
+ *
+ * @param {Track} rtrack
+ * The audio, video, or text track to remove from the list.
+ *
+ * @fires TrackList#removetrack
+ */
+ ;
+
+ _proto.removeTrack = function removeTrack(rtrack) {
+ var track;
+
+ for (var i = 0, l = this.length; i < l; i++) {
+ if (this[i] === rtrack) {
+ track = this[i];
+
+ if (track.off) {
+ track.off();
+ }
+
+ this.tracks_.splice(i, 1);
+ break;
+ }
+ }
+
+ if (!track) {
+ return;
+ }
+ /**
+ * Triggered when a track is removed from track list.
+ *
+ * @event TrackList#removetrack
+ * @type {EventTarget~Event}
+ * @property {Track} track
+ * A reference to track that was removed.
+ */
+
+
+ this.trigger({
+ track: track,
+ type: 'removetrack',
+ target: this
+ });
+ }
+ /**
+   * Get a Track from the TrackList by a track's id
+ *
+ * @param {string} id - the id of the track to get
+ * @method getTrackById
+ * @return {Track}
+ * @private
+ */
+ ;
+
+ _proto.getTrackById = function getTrackById(id) {
+ var result = null;
+
+ for (var i = 0, l = this.length; i < l; i++) {
+ var track = this[i];
+
+ if (track.id === id) {
+ result = track;
+ break;
+ }
+ }
+
+ return result;
+ };
+
+ return TrackList;
+}(EventTarget$2);
+/**
+ * Triggered when a different track is selected/enabled.
+ *
+ * @event TrackList#change
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Events that can be called with on + eventName. See {@link EventHandler}.
+ *
+ * @property {Object} TrackList#allowedEvents_
+ * @private
+ */
+
+
+TrackList.prototype.allowedEvents_ = {
+ change: 'change',
+ addtrack: 'addtrack',
+ removetrack: 'removetrack',
+ labelchange: 'labelchange'
+}; // emulate attribute EventHandler support to allow for feature detection
+
+for (var event in TrackList.prototype.allowedEvents_) {
+ TrackList.prototype['on' + event] = null;
+}
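+/* Usage sketch (editorial addition): every track list below inherits this
+ * event behaviour, so tracks coming and going can be observed on an assumed
+ * `player` instance like this:
+ *
+ *   var textTracks = player.textTracks();
+ *
+ *   textTracks.addEventListener('addtrack', function (e) {
+ *     console.log('added', e.track.label);
+ *   });
+ *   textTracks.addEventListener('removetrack', function (e) {
+ *     console.log('removed', e.track.label);
+ *   });
+ */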
+
+/**
+ * Anywhere we call this function we diverge from the spec
+ * as we only support one enabled audiotrack at a time
+ *
+ * @param {AudioTrackList} list
+ * list to work on
+ *
+ * @param {AudioTrack} track
+ * The track to skip
+ *
+ * @private
+ */
+
+var disableOthers$1 = function disableOthers(list, track) {
+ for (var i = 0; i < list.length; i++) {
+ if (!Object.keys(list[i]).length || track.id === list[i].id) {
+ continue;
+ } // another audio track is enabled, disable it
+
+
+ list[i].enabled = false;
+ }
+};
+/**
+ * The current list of {@link AudioTrack} for a media file.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#audiotracklist}
+ * @extends TrackList
+ */
+
+
+var AudioTrackList = /*#__PURE__*/function (_TrackList) {
+ _inheritsLoose__default['default'](AudioTrackList, _TrackList);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {AudioTrack[]} [tracks=[]]
+ * A list of `AudioTrack` to instantiate the list with.
+ */
+ function AudioTrackList(tracks) {
+ var _this;
+
+ if (tracks === void 0) {
+ tracks = [];
+ }
+
+ // make sure only 1 track is enabled
+ // sorted from last index to first index
+ for (var i = tracks.length - 1; i >= 0; i--) {
+ if (tracks[i].enabled) {
+ disableOthers$1(tracks, tracks[i]);
+ break;
+ }
+ }
+
+ _this = _TrackList.call(this, tracks) || this;
+ _this.changing_ = false;
+ return _this;
+ }
+ /**
+ * Add an {@link AudioTrack} to the `AudioTrackList`.
+ *
+ * @param {AudioTrack} track
+ * The AudioTrack to add to the list
+ *
+ * @fires TrackList#addtrack
+ */
+
+
+ var _proto = AudioTrackList.prototype;
+
+ _proto.addTrack = function addTrack(track) {
+ var _this2 = this;
+
+ if (track.enabled) {
+ disableOthers$1(this, track);
+ }
+
+ _TrackList.prototype.addTrack.call(this, track); // native tracks don't have this
+
+
+ if (!track.addEventListener) {
+ return;
+ }
+
+ track.enabledChange_ = function () {
+ // when we are disabling other tracks (since we don't support
+ // more than one track at a time) we will set changing_
+ // to true so that we don't trigger additional change events
+ if (_this2.changing_) {
+ return;
+ }
+
+ _this2.changing_ = true;
+ disableOthers$1(_this2, track);
+ _this2.changing_ = false;
+
+ _this2.trigger('change');
+ };
+ /**
+ * @listens AudioTrack#enabledchange
+ * @fires TrackList#change
+ */
+
+
+ track.addEventListener('enabledchange', track.enabledChange_);
+ };
+
+ _proto.removeTrack = function removeTrack(rtrack) {
+ _TrackList.prototype.removeTrack.call(this, rtrack);
+
+ if (rtrack.removeEventListener && rtrack.enabledChange_) {
+ rtrack.removeEventListener('enabledchange', rtrack.enabledChange_);
+ rtrack.enabledChange_ = null;
+ }
+ };
+
+ return AudioTrackList;
+}(TrackList);
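+/* Usage sketch (editorial addition): because the list keeps at most one track
+ * enabled, switching the audio language on an assumed `player` instance only
+ * requires enabling the wanted track; the list then fires a single 'change'.
+ *
+ *   var audioTracks = player.audioTracks();
+ *
+ *   for (var i = 0; i < audioTracks.length; i++) {
+ *     if (audioTracks[i].language === 'es') {
+ *       audioTracks[i].enabled = true; // every other track is disabled for us
+ *       break;
+ *     }
+ *   }
+ */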
+
+/**
+ * Un-select all other {@link VideoTrack}s that are selected.
+ *
+ * @param {VideoTrackList} list
+ * list to work on
+ *
+ * @param {VideoTrack} track
+ * The track to skip
+ *
+ * @private
+ */
+
+var disableOthers = function disableOthers(list, track) {
+ for (var i = 0; i < list.length; i++) {
+ if (!Object.keys(list[i]).length || track.id === list[i].id) {
+ continue;
+ } // another video track is enabled, disable it
+
+
+ list[i].selected = false;
+ }
+};
+/**
+ * The current list of {@link VideoTrack} for a video.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#videotracklist}
+ * @extends TrackList
+ */
+
+
+var VideoTrackList = /*#__PURE__*/function (_TrackList) {
+ _inheritsLoose__default['default'](VideoTrackList, _TrackList);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {VideoTrack[]} [tracks=[]]
+ * A list of `VideoTrack` to instantiate the list with.
+ */
+ function VideoTrackList(tracks) {
+ var _this;
+
+ if (tracks === void 0) {
+ tracks = [];
+ }
+
+ // make sure only 1 track is enabled
+ // sorted from last index to first index
+ for (var i = tracks.length - 1; i >= 0; i--) {
+ if (tracks[i].selected) {
+ disableOthers(tracks, tracks[i]);
+ break;
+ }
+ }
+
+ _this = _TrackList.call(this, tracks) || this;
+ _this.changing_ = false;
+ /**
+ * @member {number} VideoTrackList#selectedIndex
+     *         The current index of the selected {@link VideoTrack}.
+ */
+
+ Object.defineProperty(_assertThisInitialized__default['default'](_this), 'selectedIndex', {
+ get: function get() {
+ for (var _i = 0; _i < this.length; _i++) {
+ if (this[_i].selected) {
+ return _i;
+ }
+ }
+
+ return -1;
+ },
+ set: function set() {}
+ });
+ return _this;
+ }
+ /**
+ * Add a {@link VideoTrack} to the `VideoTrackList`.
+ *
+ * @param {VideoTrack} track
+ * The VideoTrack to add to the list
+ *
+ * @fires TrackList#addtrack
+ */
+
+
+ var _proto = VideoTrackList.prototype;
+
+ _proto.addTrack = function addTrack(track) {
+ var _this2 = this;
+
+ if (track.selected) {
+ disableOthers(this, track);
+ }
+
+ _TrackList.prototype.addTrack.call(this, track); // native tracks don't have this
+
+
+ if (!track.addEventListener) {
+ return;
+ }
+
+ track.selectedChange_ = function () {
+ if (_this2.changing_) {
+ return;
+ }
+
+ _this2.changing_ = true;
+ disableOthers(_this2, track);
+ _this2.changing_ = false;
+
+ _this2.trigger('change');
+ };
+ /**
+ * @listens VideoTrack#selectedchange
+ * @fires TrackList#change
+ */
+
+
+ track.addEventListener('selectedchange', track.selectedChange_);
+ };
+
+ _proto.removeTrack = function removeTrack(rtrack) {
+ _TrackList.prototype.removeTrack.call(this, rtrack);
+
+ if (rtrack.removeEventListener && rtrack.selectedChange_) {
+ rtrack.removeEventListener('selectedchange', rtrack.selectedChange_);
+ rtrack.selectedChange_ = null;
+ }
+ };
+
+ return VideoTrackList;
+}(TrackList);
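+/* Usage sketch (editorial addition): `selectedIndex` reports the position of
+ * the currently selected video track, or -1 when none is selected. `player`
+ * is an assumed, already initialized instance.
+ *
+ *   var videoTracks = player.videoTracks();
+ *
+ *   videoTracks.addEventListener('change', function () {
+ *     console.log('selected video track index:', videoTracks.selectedIndex);
+ *   });
+ */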
+
+/**
+ * The current list of {@link TextTrack} for a media file.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttracklist}
+ * @extends TrackList
+ */
+
+var TextTrackList = /*#__PURE__*/function (_TrackList) {
+ _inheritsLoose__default['default'](TextTrackList, _TrackList);
+
+ function TextTrackList() {
+ return _TrackList.apply(this, arguments) || this;
+ }
+
+ var _proto = TextTrackList.prototype;
+
+ /**
+ * Add a {@link TextTrack} to the `TextTrackList`
+ *
+ * @param {TextTrack} track
+ * The text track to add to the list.
+ *
+ * @fires TrackList#addtrack
+ */
+ _proto.addTrack = function addTrack(track) {
+ var _this = this;
+
+ _TrackList.prototype.addTrack.call(this, track);
+
+ if (!this.queueChange_) {
+ this.queueChange_ = function () {
+ return _this.queueTrigger('change');
+ };
+ }
+
+    if (!this.triggerSelectedlanguagechange_) {
+ this.triggerSelectedlanguagechange_ = function () {
+ return _this.trigger('selectedlanguagechange');
+ };
+ }
+ /**
+ * @listens TextTrack#modechange
+ * @fires TrackList#change
+ */
+
+
+ track.addEventListener('modechange', this.queueChange_);
+ var nonLanguageTextTrackKind = ['metadata', 'chapters'];
+
+ if (nonLanguageTextTrackKind.indexOf(track.kind) === -1) {
+ track.addEventListener('modechange', this.triggerSelectedlanguagechange_);
+ }
+ };
+
+ _proto.removeTrack = function removeTrack(rtrack) {
+ _TrackList.prototype.removeTrack.call(this, rtrack); // manually remove the event handlers we added
+
+
+ if (rtrack.removeEventListener) {
+ if (this.queueChange_) {
+ rtrack.removeEventListener('modechange', this.queueChange_);
+ }
+
+      if (this.triggerSelectedlanguagechange_) {
+ rtrack.removeEventListener('modechange', this.triggerSelectedlanguagechange_);
+ }
+ }
+ };
+
+ return TextTrackList;
+}(TrackList);
+
+/**
+ * @file html-track-element-list.js
+ */
+
+/**
+ * The current list of {@link HtmlTrackElement}s.
+ */
+var HtmlTrackElementList = /*#__PURE__*/function () {
+ /**
+ * Create an instance of this class.
+ *
+   * @param {HtmlTrackElement[]} [trackElements=[]]
+ * A list of `HtmlTrackElement` to instantiate the list with.
+ */
+ function HtmlTrackElementList(trackElements) {
+ if (trackElements === void 0) {
+ trackElements = [];
+ }
+
+ this.trackElements_ = [];
+ /**
+ * @memberof HtmlTrackElementList
+ * @member {number} length
+     *         The current number of `HtmlTrackElement`s in this list.
+ * @instance
+ */
+
+ Object.defineProperty(this, 'length', {
+ get: function get() {
+ return this.trackElements_.length;
+ }
+ });
+
+ for (var i = 0, length = trackElements.length; i < length; i++) {
+ this.addTrackElement_(trackElements[i]);
+ }
+ }
+ /**
+ * Add an {@link HtmlTrackElement} to the `HtmlTrackElementList`
+ *
+ * @param {HtmlTrackElement} trackElement
+ * The track element to add to the list.
+ *
+ * @private
+ */
+
+
+ var _proto = HtmlTrackElementList.prototype;
+
+ _proto.addTrackElement_ = function addTrackElement_(trackElement) {
+ var index = this.trackElements_.length;
+
+ if (!('' + index in this)) {
+ Object.defineProperty(this, index, {
+ get: function get() {
+ return this.trackElements_[index];
+ }
+ });
+ } // Do not add duplicate elements
+
+
+ if (this.trackElements_.indexOf(trackElement) === -1) {
+ this.trackElements_.push(trackElement);
+ }
+ }
+ /**
+ * Get an {@link HtmlTrackElement} from the `HtmlTrackElementList` given an
+ * {@link TextTrack}.
+ *
+ * @param {TextTrack} track
+ * The track associated with a track element.
+ *
+ * @return {HtmlTrackElement|undefined}
+ * The track element that was found or undefined.
+ *
+ * @private
+ */
+ ;
+
+ _proto.getTrackElementByTrack_ = function getTrackElementByTrack_(track) {
+ var trackElement_;
+
+ for (var i = 0, length = this.trackElements_.length; i < length; i++) {
+ if (track === this.trackElements_[i].track) {
+ trackElement_ = this.trackElements_[i];
+ break;
+ }
+ }
+
+ return trackElement_;
+ }
+ /**
+ * Remove a {@link HtmlTrackElement} from the `HtmlTrackElementList`
+ *
+ * @param {HtmlTrackElement} trackElement
+ * The track element to remove from the list.
+ *
+ * @private
+ */
+ ;
+
+ _proto.removeTrackElement_ = function removeTrackElement_(trackElement) {
+ for (var i = 0, length = this.trackElements_.length; i < length; i++) {
+ if (trackElement === this.trackElements_[i]) {
+ if (this.trackElements_[i].track && typeof this.trackElements_[i].track.off === 'function') {
+ this.trackElements_[i].track.off();
+ }
+
+ if (typeof this.trackElements_[i].off === 'function') {
+ this.trackElements_[i].off();
+ }
+
+ this.trackElements_.splice(i, 1);
+ break;
+ }
+ }
+ };
+
+ return HtmlTrackElementList;
+}();
+
+/**
+ * @file text-track-cue-list.js
+ */
+
+/**
+ * @typedef {Object} TextTrackCueList~TextTrackCue
+ *
+ * @property {string} id
+ * The unique id for this text track cue
+ *
+ * @property {number} startTime
+ * The start time for this text track cue
+ *
+ * @property {number} endTime
+ * The end time for this text track cue
+ *
+ * @property {boolean} pauseOnExit
+ * Pause when the end time is reached if true.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttrackcue}
+ */
+
+/**
+ * A List of TextTrackCues.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttrackcuelist}
+ */
+var TextTrackCueList = /*#__PURE__*/function () {
+ /**
+   * Create an instance of this class.
+ *
+ * @param {Array} cues
+ * A list of cues to be initialized with
+ */
+ function TextTrackCueList(cues) {
+ TextTrackCueList.prototype.setCues_.call(this, cues);
+ /**
+ * @memberof TextTrackCueList
+ * @member {number} length
+ * The current number of `TextTrackCue`s in the TextTrackCueList.
+ * @instance
+ */
+
+ Object.defineProperty(this, 'length', {
+ get: function get() {
+ return this.length_;
+ }
+ });
+ }
+ /**
+ * A setter for cues in this list. Creates getters
+   * and an index for the cues.
+ *
+ * @param {Array} cues
+ * An array of cues to set
+ *
+ * @private
+ */
+
+
+ var _proto = TextTrackCueList.prototype;
+
+ _proto.setCues_ = function setCues_(cues) {
+ var oldLength = this.length || 0;
+ var i = 0;
+ var l = cues.length;
+ this.cues_ = cues;
+ this.length_ = cues.length;
+
+ var defineProp = function defineProp(index) {
+ if (!('' + index in this)) {
+ Object.defineProperty(this, '' + index, {
+ get: function get() {
+ return this.cues_[index];
+ }
+ });
+ }
+ };
+
+ if (oldLength < l) {
+ i = oldLength;
+
+ for (; i < l; i++) {
+ defineProp.call(this, i);
+ }
+ }
+ }
+ /**
+ * Get a `TextTrackCue` that is currently in the `TextTrackCueList` by id.
+ *
+ * @param {string} id
+ * The id of the cue that should be searched for.
+ *
+ * @return {TextTrackCueList~TextTrackCue|null}
+ * A single cue or null if none was found.
+ */
+ ;
+
+ _proto.getCueById = function getCueById(id) {
+ var result = null;
+
+ for (var i = 0, l = this.length; i < l; i++) {
+ var cue = this[i];
+
+ if (cue.id === id) {
+ result = cue;
+ break;
+ }
+ }
+
+ return result;
+ };
+
+ return TextTrackCueList;
+}();
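+/* Usage sketch (editorial addition): cue lists are array-like and indexable.
+ * `track` below is an assumed TextTrack whose cues have already loaded; its
+ * `cues` property stays null until then.
+ *
+ *   var cues = track.cues;
+ *
+ *   if (cues && cues.length > 0) {
+ *     var first = cues[0];
+ *     var sameCue = cues.getCueById(first.id); // null if no cue has that id
+ *   }
+ */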
+
+/**
+ * @file track-kinds.js
+ */
+
+/**
+ * All possible `VideoTrackKind`s
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-videotrack-kind
+ * @typedef VideoTrack~Kind
+ * @enum
+ */
+var VideoTrackKind = {
+ alternative: 'alternative',
+ captions: 'captions',
+ main: 'main',
+ sign: 'sign',
+ subtitles: 'subtitles',
+ commentary: 'commentary'
+};
+/**
+ * All possible `AudioTrackKind`s
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-audiotrack-kind
+ * @typedef AudioTrack~Kind
+ * @enum
+ */
+
+var AudioTrackKind = {
+ 'alternative': 'alternative',
+ 'descriptions': 'descriptions',
+ 'main': 'main',
+ 'main-desc': 'main-desc',
+ 'translation': 'translation',
+ 'commentary': 'commentary'
+};
+/**
+ * All possible `TextTrackKind`s
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-texttrack-kind
+ * @typedef TextTrack~Kind
+ * @enum
+ */
+
+var TextTrackKind = {
+ subtitles: 'subtitles',
+ captions: 'captions',
+ descriptions: 'descriptions',
+ chapters: 'chapters',
+ metadata: 'metadata'
+};
+/**
+ * All possible `TextTrackMode`s
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#texttrackmode
+ * @typedef TextTrack~Mode
+ * @enum
+ */
+
+var TextTrackMode = {
+ disabled: 'disabled',
+ hidden: 'hidden',
+ showing: 'showing'
+};
+
+/**
+ * A Track class that contains all of the common functionality for {@link AudioTrack},
+ * {@link VideoTrack}, and {@link TextTrack}.
+ *
+ * > Note: This class should not be used directly
+ *
+ * @see {@link https://html.spec.whatwg.org/multipage/embedded-content.html}
+ * @extends EventTarget
+ * @abstract
+ */
+
+var Track = /*#__PURE__*/function (_EventTarget) {
+ _inheritsLoose__default['default'](Track, _EventTarget);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Object} [options={}]
+ * Object of option names and values
+ *
+ * @param {string} [options.kind='']
+ * A valid kind for the track type you are creating.
+ *
+ * @param {string} [options.id='vjs_track_' + Guid.newGUID()]
+   *        A unique id for this Track.
+ *
+ * @param {string} [options.label='']
+ * The menu label for this track.
+ *
+ * @param {string} [options.language='']
+ * A valid two character language code.
+ *
+ * @abstract
+ */
+ function Track(options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _EventTarget.call(this) || this;
+ var trackProps = {
+ id: options.id || 'vjs_track_' + newGUID(),
+ kind: options.kind || '',
+ language: options.language || ''
+ };
+ var label = options.label || '';
+ /**
+ * @memberof Track
+ * @member {string} id
+ * The id of this track. Cannot be changed after creation.
+ * @instance
+ *
+ * @readonly
+ */
+
+ /**
+ * @memberof Track
+ * @member {string} kind
+ * The kind of track that this is. Cannot be changed after creation.
+ * @instance
+ *
+ * @readonly
+ */
+
+ /**
+ * @memberof Track
+ * @member {string} language
+ * The two letter language code for this track. Cannot be changed after
+ * creation.
+ * @instance
+ *
+ * @readonly
+ */
+
+ var _loop = function _loop(key) {
+ Object.defineProperty(_assertThisInitialized__default['default'](_this), key, {
+ get: function get() {
+ return trackProps[key];
+ },
+ set: function set() {}
+ });
+ };
+
+ for (var key in trackProps) {
+ _loop(key);
+ }
+ /**
+ * @memberof Track
+ * @member {string} label
+ * The label of this track. Cannot be changed after creation.
+ * @instance
+ *
+ * @fires Track#labelchange
+ */
+
+
+ Object.defineProperty(_assertThisInitialized__default['default'](_this), 'label', {
+ get: function get() {
+ return label;
+ },
+ set: function set(newLabel) {
+ if (newLabel !== label) {
+ label = newLabel;
+ /**
+ * An event that fires when label changes on this track.
+ *
+ * > Note: This is not part of the spec!
+ *
+ * @event Track#labelchange
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('labelchange');
+ }
+ }
+ });
+ return _this;
+ }
+
+ return Track;
+}(EventTarget$2);
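+/* Usage sketch (editorial addition): Track is effectively abstract, but the
+ * property behaviour defined above is shared by every subclass below - id,
+ * kind and language are frozen at creation, while label stays writable and
+ * fires 'labelchange'.
+ *
+ *   var track = new Track({ kind: 'main', label: 'Primary', language: 'en' });
+ *
+ *   track.language = 'fr';  // silently ignored (read-only)
+ *   track.addEventListener('labelchange', function () {
+ *     console.log('label is now', track.label);
+ *   });
+ *   track.label = 'Renamed'; // allowed, triggers 'labelchange'
+ */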
+
+/**
+ * @file url.js
+ * @module url
+ */
+/**
+ * @typedef {Object} url:URLObject
+ *
+ * @property {string} protocol
+ * The protocol of the url that was parsed.
+ *
+ * @property {string} hostname
+ * The hostname of the url that was parsed.
+ *
+ * @property {string} port
+ * The port of the url that was parsed.
+ *
+ * @property {string} pathname
+ * The pathname of the url that was parsed.
+ *
+ * @property {string} search
+ * The search query of the url that was parsed.
+ *
+ * @property {string} hash
+ * The hash of the url that was parsed.
+ *
+ * @property {string} host
+ * The host of the url that was parsed.
+ */
+
+/**
+ * Resolve and parse the elements of a URL.
+ *
+ * @function
+ * @param {String} url
+ * The url to parse
+ *
+ * @return {url:URLObject}
+ * An object of url details
+ */
+
+var parseUrl = function parseUrl(url) {
+  // This entire method can be replaced with URL once we are able to drop IE11
+ var props = ['protocol', 'hostname', 'port', 'pathname', 'search', 'hash', 'host']; // add the url to an anchor and let the browser parse the URL
+
+ var a = document__default['default'].createElement('a');
+ a.href = url; // Copy the specific URL properties to a new object
+ // This is also needed for IE because the anchor loses its
+ // properties when it's removed from the dom
+
+ var details = {};
+
+ for (var i = 0; i < props.length; i++) {
+ details[props[i]] = a[props[i]];
+ } // IE adds the port to the host property unlike everyone else. If
+ // a port identifier is added for standard ports, strip it.
+
+
+ if (details.protocol === 'http:') {
+ details.host = details.host.replace(/:80$/, '');
+ }
+
+ if (details.protocol === 'https:') {
+ details.host = details.host.replace(/:443$/, '');
+ }
+
+ if (!details.protocol) {
+ details.protocol = window__default['default'].location.protocol;
+ }
+ /* istanbul ignore if */
+
+
+ if (!details.host) {
+ details.host = window__default['default'].location.host;
+ }
+
+ return details;
+};
+/**
+ * Get absolute version of relative URL. Used to tell Flash the correct URL.
+ *
+ * @function
+ * @param {string} url
+ * URL to make absolute
+ *
+ * @return {string}
+ * Absolute URL
+ *
+ * @see http://stackoverflow.com/questions/470832/getting-an-absolute-url-from-a-relative-one-ie6-issue
+ */
+
+var getAbsoluteURL = function getAbsoluteURL(url) {
+ // Check if absolute URL
+ if (!url.match(/^https?:\/\//)) {
+ // Convert to absolute URL. Flash hosted off-site needs an absolute URL.
+ // add the url to an anchor and let the browser parse the URL
+ var a = document__default['default'].createElement('a');
+ a.href = url;
+ url = a.href;
+ }
+
+ return url;
+};
+/**
+ * Returns the extension of the passed file name. It will return an empty string
+ * if passed an invalid path.
+ *
+ * @function
+ * @param {string} path
+ * The fileName path like '/path/to/file.mp4'
+ *
+ * @return {string}
+ * The extension in lower case or an empty string if no
+ * extension could be found.
+ */
+
+var getFileExtension = function getFileExtension(path) {
+ if (typeof path === 'string') {
+ var splitPathRe = /^(\/?)([\s\S]*?)((?:\.{1,2}|[^\/]+?)(\.([^\.\/\?]+)))(?:[\/]*|[\?].*)$/;
+ var pathParts = splitPathRe.exec(path);
+
+ if (pathParts) {
+ return pathParts.pop().toLowerCase();
+ }
+ }
+
+ return '';
+};
+/**
+ * Returns whether the url passed is a cross domain request or not.
+ *
+ * @function
+ * @param {string} url
+ * The url to check.
+ *
+ * @param {Object} [winLoc]
+ * the domain to check the url against, defaults to window.location
+ *
+ * @param {string} [winLoc.protocol]
+ * The window location protocol defaults to window.location.protocol
+ *
+ * @param {string} [winLoc.host]
+ * The window location host defaults to window.location.host
+ *
+ * @return {boolean}
+ * Whether it is a cross domain request or not.
+ */
+
+var isCrossOrigin = function isCrossOrigin(url, winLoc) {
+ if (winLoc === void 0) {
+ winLoc = window__default['default'].location;
+ }
+
+ var urlInfo = parseUrl(url); // IE8 protocol relative urls will return ':' for protocol
+
+ var srcProtocol = urlInfo.protocol === ':' ? winLoc.protocol : urlInfo.protocol; // Check if url is for another domain/origin
+ // IE8 doesn't know location.origin, so we won't rely on it here
+
+ var crossOrigin = srcProtocol + urlInfo.host !== winLoc.protocol + winLoc.host;
+ return crossOrigin;
+};
+
+var Url = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ parseUrl: parseUrl,
+ getAbsoluteURL: getAbsoluteURL,
+ getFileExtension: getFileExtension,
+ isCrossOrigin: isCrossOrigin
+});
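+/* Usage sketch (editorial addition): approximate behaviour of the URL helpers
+ * above, assuming the page itself is served from https://example.com.
+ *
+ *   parseUrl('https://cdn.example.net:443/v/clip.mp4').host; // 'cdn.example.net'
+ *   getAbsoluteURL('media/clip.mp4');   // resolved against the current page URL
+ *   getFileExtension('/path/to/Movie.MP4');              // 'mp4'
+ *   isCrossOrigin('https://cdn.example.net/v/clip.mp4'); // true in this case
+ */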
+
+/**
+ * Takes a webvtt file contents and parses it into cues
+ *
+ * @param {string} srcContent
+ * webVTT file contents
+ *
+ * @param {TextTrack} track
+ * TextTrack to add cues to. Cues come from the srcContent.
+ *
+ * @private
+ */
+
+var parseCues = function parseCues(srcContent, track) {
+ var parser = new window__default['default'].WebVTT.Parser(window__default['default'], window__default['default'].vttjs, window__default['default'].WebVTT.StringDecoder());
+ var errors = [];
+
+ parser.oncue = function (cue) {
+ track.addCue(cue);
+ };
+
+ parser.onparsingerror = function (error) {
+ errors.push(error);
+ };
+
+ parser.onflush = function () {
+ track.trigger({
+ type: 'loadeddata',
+ target: track
+ });
+ };
+
+ parser.parse(srcContent);
+
+ if (errors.length > 0) {
+ if (window__default['default'].console && window__default['default'].console.groupCollapsed) {
+ window__default['default'].console.groupCollapsed("Text Track parsing errors for " + track.src);
+ }
+
+ errors.forEach(function (error) {
+ return log$1.error(error);
+ });
+
+ if (window__default['default'].console && window__default['default'].console.groupEnd) {
+ window__default['default'].console.groupEnd();
+ }
+ }
+
+ parser.flush();
+};
+/**
+ * Load a `TextTrack` from a specified url.
+ *
+ * @param {string} src
+ * Url to load track from.
+ *
+ * @param {TextTrack} track
+ * Track to add cues to. Comes from the content at the end of `url`.
+ *
+ * @private
+ */
+
+
+var loadTrack = function loadTrack(src, track) {
+ var opts = {
+ uri: src
+ };
+ var crossOrigin = isCrossOrigin(src);
+
+ if (crossOrigin) {
+ opts.cors = crossOrigin;
+ }
+
+ var withCredentials = track.tech_.crossOrigin() === 'use-credentials';
+
+ if (withCredentials) {
+ opts.withCredentials = withCredentials;
+ }
+
+ XHR__default['default'](opts, bind(this, function (err, response, responseBody) {
+ if (err) {
+ return log$1.error(err, response);
+ }
+
+ track.loaded_ = true; // Make sure that vttjs has loaded, otherwise, wait till it finished loading
+ // NOTE: this is only used for the alt/video.novtt.js build
+
+ if (typeof window__default['default'].WebVTT !== 'function') {
+ if (track.tech_) {
+ // to prevent use before define eslint error, we define loadHandler
+ // as a let here
+ track.tech_.any(['vttjsloaded', 'vttjserror'], function (event) {
+ if (event.type === 'vttjserror') {
+ log$1.error("vttjs failed to load, stopping trying to process " + track.src);
+ return;
+ }
+
+ return parseCues(responseBody, track);
+ });
+ }
+ } else {
+ parseCues(responseBody, track);
+ }
+ }));
+};
+/**
+ * A representation of a single `TextTrack`.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttrack}
+ * @extends Track
+ */
+
+
+var TextTrack = /*#__PURE__*/function (_Track) {
+ _inheritsLoose__default['default'](TextTrack, _Track);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Object} options={}
+ * Object of option names and values
+ *
+ * @param {Tech} options.tech
+ * A reference to the tech that owns this TextTrack.
+ *
+ * @param {TextTrack~Kind} [options.kind='subtitles']
+ * A valid text track kind.
+ *
+ * @param {TextTrack~Mode} [options.mode='disabled']
+ * A valid text track mode.
+ *
+ * @param {string} [options.id='vjs_track_' + Guid.newGUID()]
+ * A unique id for this TextTrack.
+ *
+ * @param {string} [options.label='']
+ * The menu label for this track.
+ *
+ * @param {string} [options.language='']
+ * A valid two character language code.
+ *
+ * @param {string} [options.srclang='']
+ * A valid two character language code. An alternative, but deprioritized
+ * version of `options.language`
+ *
+ * @param {string} [options.src]
+ * A url to TextTrack cues.
+ *
+ * @param {boolean} [options.default]
+ * If this track should default to on or off.
+ */
+ function TextTrack(options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ if (!options.tech) {
+ throw new Error('A tech was not provided.');
+ }
+
+ var settings = mergeOptions$3(options, {
+ kind: TextTrackKind[options.kind] || 'subtitles',
+ language: options.language || options.srclang || ''
+ });
+ var mode = TextTrackMode[settings.mode] || 'disabled';
+ var default_ = settings["default"];
+
+ if (settings.kind === 'metadata' || settings.kind === 'chapters') {
+ mode = 'hidden';
+ }
+
+ _this = _Track.call(this, settings) || this;
+ _this.tech_ = settings.tech;
+ _this.cues_ = [];
+ _this.activeCues_ = [];
+ _this.preload_ = _this.tech_.preloadTextTracks !== false;
+ var cues = new TextTrackCueList(_this.cues_);
+ var activeCues = new TextTrackCueList(_this.activeCues_);
+ var changed = false;
+ _this.timeupdateHandler = bind(_assertThisInitialized__default['default'](_this), function () {
+ if (this.tech_.isDisposed()) {
+ return;
+ }
+
+ if (!this.tech_.isReady_) {
+ this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);
+ return;
+ } // Accessing this.activeCues for the side-effects of updating itself
+ // due to its nature as a getter function. Do not remove or cues will
+ // stop updating!
+ // Use the setter to prevent deletion from uglify (pure_getters rule)
+
+
+ this.activeCues = this.activeCues;
+
+ if (changed) {
+ this.trigger('cuechange');
+ changed = false;
+ }
+
+ this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);
+ });
+
+ var disposeHandler = function disposeHandler() {
+ _this.stopTracking();
+ };
+
+ _this.tech_.one('dispose', disposeHandler);
+
+ if (mode !== 'disabled') {
+ _this.startTracking();
+ }
+
+ Object.defineProperties(_assertThisInitialized__default['default'](_this), {
+ /**
+ * @memberof TextTrack
+ * @member {boolean} default
+ * If this track was set to be on or off by default. Cannot be changed after
+ * creation.
+ * @instance
+ *
+ * @readonly
+ */
+ "default": {
+ get: function get() {
+ return default_;
+ },
+ set: function set() {}
+ },
+
+ /**
+ * @memberof TextTrack
+ * @member {string} mode
+ * Set the mode of this TextTrack to a valid {@link TextTrack~Mode}. Will
+ * not be set if setting to an invalid mode.
+ * @instance
+ *
+ * @fires TextTrack#modechange
+ */
+ mode: {
+ get: function get() {
+ return mode;
+ },
+ set: function set(newMode) {
+ if (!TextTrackMode[newMode]) {
+ return;
+ }
+
+ if (mode === newMode) {
+ return;
+ }
+
+ mode = newMode;
+
+ if (!this.preload_ && mode !== 'disabled' && this.cues.length === 0) {
+ // On-demand load.
+ loadTrack(this.src, this);
+ }
+
+ this.stopTracking();
+
+ if (mode !== 'disabled') {
+ this.startTracking();
+ }
+ /**
+ * An event that fires when mode changes on this track. This allows
+ * the TextTrackList that holds this track to act accordingly.
+ *
+ * > Note: This is not part of the spec!
+ *
+ * @event TextTrack#modechange
+ * @type {EventTarget~Event}
+ */
+
+
+ this.trigger('modechange');
+ }
+ },
+
+ /**
+ * @memberof TextTrack
+ * @member {TextTrackCueList} cues
+ * The text track cue list for this TextTrack.
+ * @instance
+ */
+ cues: {
+ get: function get() {
+ if (!this.loaded_) {
+ return null;
+ }
+
+ return cues;
+ },
+ set: function set() {}
+ },
+
+ /**
+ * @memberof TextTrack
+ * @member {TextTrackCueList} activeCues
+       * The list of text track cues that are currently active for this TextTrack.
+ * @instance
+ */
+ activeCues: {
+ get: function get() {
+ if (!this.loaded_) {
+ return null;
+ } // nothing to do
+
+
+ if (this.cues.length === 0) {
+ return activeCues;
+ }
+
+ var ct = this.tech_.currentTime();
+ var active = [];
+
+ for (var i = 0, l = this.cues.length; i < l; i++) {
+ var cue = this.cues[i];
+
+ if (cue.startTime <= ct && cue.endTime >= ct) {
+ active.push(cue);
+ } else if (cue.startTime === cue.endTime && cue.startTime <= ct && cue.startTime + 0.5 >= ct) {
+ active.push(cue);
+ }
+ }
+
+ changed = false;
+
+ if (active.length !== this.activeCues_.length) {
+ changed = true;
+ } else {
+ for (var _i = 0; _i < active.length; _i++) {
+ if (this.activeCues_.indexOf(active[_i]) === -1) {
+ changed = true;
+ }
+ }
+ }
+
+ this.activeCues_ = active;
+ activeCues.setCues_(this.activeCues_);
+ return activeCues;
+ },
+ // /!\ Keep this setter empty (see the timeupdate handler above)
+ set: function set() {}
+ }
+ });
+
+ if (settings.src) {
+ _this.src = settings.src;
+
+ if (!_this.preload_) {
+ // Tracks will load on-demand.
+ // Act like we're loaded for other purposes.
+ _this.loaded_ = true;
+ }
+
+ if (_this.preload_ || settings.kind !== 'subtitles' && settings.kind !== 'captions') {
+ loadTrack(_this.src, _assertThisInitialized__default['default'](_this));
+ }
+ } else {
+ _this.loaded_ = true;
+ }
+
+ return _this;
+ }
+
+ var _proto = TextTrack.prototype;
+
+ _proto.startTracking = function startTracking() {
+ this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);
+ };
+
+ _proto.stopTracking = function stopTracking() {
+ if (this.rvf_) {
+ this.tech_.cancelVideoFrameCallback(this.rvf_);
+ this.rvf_ = undefined;
+ }
+ }
+ /**
+ * Add a cue to the internal list of cues.
+ *
+ * @param {TextTrack~Cue} cue
+ * The cue to add to our internal list
+ */
+ ;
+
+ _proto.addCue = function addCue(originalCue) {
+ var cue = originalCue;
+
+ if (window__default['default'].vttjs && !(originalCue instanceof window__default['default'].vttjs.VTTCue)) {
+ cue = new window__default['default'].vttjs.VTTCue(originalCue.startTime, originalCue.endTime, originalCue.text);
+
+ for (var prop in originalCue) {
+ if (!(prop in cue)) {
+ cue[prop] = originalCue[prop];
+ }
+ } // make sure that `id` is copied over
+
+
+ cue.id = originalCue.id;
+ cue.originalCue_ = originalCue;
+ }
+
+ var tracks = this.tech_.textTracks();
+
+ for (var i = 0; i < tracks.length; i++) {
+ if (tracks[i] !== this) {
+ tracks[i].removeCue(cue);
+ }
+ }
+
+ this.cues_.push(cue);
+ this.cues.setCues_(this.cues_);
+ }
+ /**
+ * Remove a cue from our internal list
+ *
+ * @param {TextTrack~Cue} removeCue
+ * The cue to remove from our internal list
+ */
+ ;
+
+ _proto.removeCue = function removeCue(_removeCue) {
+ var i = this.cues_.length;
+
+ while (i--) {
+ var cue = this.cues_[i];
+
+ if (cue === _removeCue || cue.originalCue_ && cue.originalCue_ === _removeCue) {
+ this.cues_.splice(i, 1);
+ this.cues.setCues_(this.cues_);
+ break;
+ }
+ }
+ };
+
+ return TextTrack;
+}(Track);
+/**
+ * cuechange - One or more cues in the track have become active or stopped being active.
+ */
+
+
+TextTrack.prototype.allowedEvents_ = {
+ cuechange: 'cuechange'
+};
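+/* Usage sketch (editorial addition): text tracks are usually created through
+ * the player rather than with `new TextTrack()`. `player` below is an assumed
+ * instance and 'captions.vtt' is a placeholder URL.
+ *
+ *   var trackEl = player.addRemoteTextTrack({
+ *     kind: 'captions',
+ *     label: 'English',
+ *     language: 'en',
+ *     src: 'captions.vtt'
+ *   }, false);
+ *
+ *   trackEl.track.mode = 'showing';
+ *   trackEl.track.addEventListener('cuechange', function () {
+ *     console.log(trackEl.track.activeCues.length, 'active cue(s)');
+ *   });
+ */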
+
+/**
+ * A representation of a single `AudioTrack`. If it is part of an {@link AudioTrackList}
+ * only one `AudioTrack` in the list will be enabled at a time.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#audiotrack}
+ * @extends Track
+ */
+
+var AudioTrack = /*#__PURE__*/function (_Track) {
+ _inheritsLoose__default['default'](AudioTrack, _Track);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Object} [options={}]
+ * Object of option names and values
+ *
+ * @param {AudioTrack~Kind} [options.kind='']
+ * A valid audio track kind
+ *
+ * @param {string} [options.id='vjs_track_' + Guid.newGUID()]
+ * A unique id for this AudioTrack.
+ *
+ * @param {string} [options.label='']
+ * The menu label for this track.
+ *
+ * @param {string} [options.language='']
+ * A valid two character language code.
+ *
+ * @param {boolean} [options.enabled]
+ * If this track is the one that is currently playing. If this track is part of
+ * an {@link AudioTrackList}, only one {@link AudioTrack} will be enabled.
+ */
+ function AudioTrack(options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ var settings = mergeOptions$3(options, {
+ kind: AudioTrackKind[options.kind] || ''
+ });
+ _this = _Track.call(this, settings) || this;
+ var enabled = false;
+ /**
+ * @memberof AudioTrack
+ * @member {boolean} enabled
+ * If this `AudioTrack` is enabled or not. When setting this will
+ * fire {@link AudioTrack#enabledchange} if the state of enabled is changed.
+ * @instance
+ *
+     * @fires AudioTrack#enabledchange
+ */
+
+ Object.defineProperty(_assertThisInitialized__default['default'](_this), 'enabled', {
+ get: function get() {
+ return enabled;
+ },
+ set: function set(newEnabled) {
+ // an invalid or unchanged value
+ if (typeof newEnabled !== 'boolean' || newEnabled === enabled) {
+ return;
+ }
+
+ enabled = newEnabled;
+ /**
+ * An event that fires when enabled changes on this track. This allows
+ * the AudioTrackList that holds this track to act accordingly.
+ *
+ * > Note: This is not part of the spec! Native tracks will do
+ * this internally without an event.
+ *
+ * @event AudioTrack#enabledchange
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('enabledchange');
+ }
+    }); // if the user sets this track to enabled then
+    // set enabled to that true value otherwise
+    // we keep it false
+
+ if (settings.enabled) {
+ _this.enabled = settings.enabled;
+ }
+
+ _this.loaded_ = true;
+ return _this;
+ }
+
+ return AudioTrack;
+}(Track);
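+/* Usage sketch (editorial addition): constructing an audio track directly and
+ * handing it to the player's AudioTrackList, on an assumed `player` instance.
+ *
+ *   var spanish = new AudioTrack({
+ *     kind: 'translation',
+ *     label: 'Spanish',
+ *     language: 'es',
+ *     enabled: false
+ *   });
+ *
+ *   player.audioTracks().addTrack(spanish);
+ */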
+
+/**
+ * A representation of a single `VideoTrack`.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#videotrack}
+ * @extends Track
+ */
+
+var VideoTrack = /*#__PURE__*/function (_Track) {
+ _inheritsLoose__default['default'](VideoTrack, _Track);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Object} [options={}]
+ * Object of option names and values
+ *
+ * @param {string} [options.kind='']
+ * A valid {@link VideoTrack~Kind}
+ *
+ * @param {string} [options.id='vjs_track_' + Guid.newGUID()]
+   *        A unique id for this VideoTrack.
+ *
+ * @param {string} [options.label='']
+ * The menu label for this track.
+ *
+ * @param {string} [options.language='']
+ * A valid two character language code.
+ *
+ * @param {boolean} [options.selected]
+ * If this track is the one that is currently playing.
+ */
+ function VideoTrack(options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ var settings = mergeOptions$3(options, {
+ kind: VideoTrackKind[options.kind] || ''
+ });
+ _this = _Track.call(this, settings) || this;
+ var selected = false;
+ /**
+ * @memberof VideoTrack
+ * @member {boolean} selected
+ * If this `VideoTrack` is selected or not. When setting this will
+ * fire {@link VideoTrack#selectedchange} if the state of selected changed.
+ * @instance
+ *
+ * @fires VideoTrack#selectedchange
+ */
+
+ Object.defineProperty(_assertThisInitialized__default['default'](_this), 'selected', {
+ get: function get() {
+ return selected;
+ },
+ set: function set(newSelected) {
+ // an invalid or unchanged value
+ if (typeof newSelected !== 'boolean' || newSelected === selected) {
+ return;
+ }
+
+ selected = newSelected;
+ /**
+ * An event that fires when selected changes on this track. This allows
+ * the VideoTrackList that holds this track to act accordingly.
+ *
+ * > Note: This is not part of the spec! Native tracks will do
+ * this internally without an event.
+ *
+ * @event VideoTrack#selectedchange
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('selectedchange');
+ }
+ }); // if the user sets this track to selected then
+ // set selected to that true value otherwise
+ // we keep it false
+
+ if (settings.selected) {
+ _this.selected = settings.selected;
+ }
+
+ return _this;
+ }
+
+ return VideoTrack;
+}(Track);
+
+/**
+ * @memberof HTMLTrackElement
+ * @typedef {HTMLTrackElement~ReadyState}
+ * @enum {number}
+ */
+
+var NONE = 0;
+var LOADING = 1;
+var LOADED = 2;
+var ERROR = 3;
+/**
+ * A single track represented in the DOM.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#htmltrackelement}
+ * @extends EventTarget
+ */
+
+var HTMLTrackElement = /*#__PURE__*/function (_EventTarget) {
+ _inheritsLoose__default['default'](HTMLTrackElement, _EventTarget);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Object} options={}
+ * Object of option names and values
+ *
+ * @param {Tech} options.tech
+ * A reference to the tech that owns this HTMLTrackElement.
+ *
+ * @param {TextTrack~Kind} [options.kind='subtitles']
+ * A valid text track kind.
+ *
+ * @param {TextTrack~Mode} [options.mode='disabled']
+ * A valid text track mode.
+ *
+ * @param {string} [options.id='vjs_track_' + Guid.newGUID()]
+ * A unique id for this TextTrack.
+ *
+ * @param {string} [options.label='']
+ * The menu label for this track.
+ *
+ * @param {string} [options.language='']
+ * A valid two character language code.
+ *
+ * @param {string} [options.srclang='']
+ * A valid two character language code. An alternative, but deprioritized
+ * version of `options.language`
+ *
+ * @param {string} [options.src]
+ * A url to TextTrack cues.
+ *
+ * @param {boolean} [options.default]
+ * If this track should default to on or off.
+ */
+ function HTMLTrackElement(options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _EventTarget.call(this) || this;
+ var readyState;
+ var track = new TextTrack(options);
+ _this.kind = track.kind;
+ _this.src = track.src;
+ _this.srclang = track.language;
+ _this.label = track.label;
+ _this["default"] = track["default"];
+ Object.defineProperties(_assertThisInitialized__default['default'](_this), {
+ /**
+ * @memberof HTMLTrackElement
+ * @member {HTMLTrackElement~ReadyState} readyState
+ * The current ready state of the track element.
+ * @instance
+ */
+ readyState: {
+ get: function get() {
+ return readyState;
+ }
+ },
+
+ /**
+ * @memberof HTMLTrackElement
+ * @member {TextTrack} track
+ * The underlying TextTrack object.
+ * @instance
+ *
+ */
+ track: {
+ get: function get() {
+ return track;
+ }
+ }
+ });
+ readyState = NONE;
+ /**
+ * @listens TextTrack#loadeddata
+ * @fires HTMLTrackElement#load
+ */
+
+ track.addEventListener('loadeddata', function () {
+ readyState = LOADED;
+
+ _this.trigger({
+ type: 'load',
+ target: _assertThisInitialized__default['default'](_this)
+ });
+ });
+ return _this;
+ }
+
+ return HTMLTrackElement;
+}(EventTarget$2);
+
+HTMLTrackElement.prototype.allowedEvents_ = {
+ load: 'load'
+};
+HTMLTrackElement.NONE = NONE;
+HTMLTrackElement.LOADING = LOADING;
+HTMLTrackElement.LOADED = LOADED;
+HTMLTrackElement.ERROR = ERROR;
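+/* Usage sketch (editorial addition): the element returned by
+ * player.addRemoteTextTrack() is one of these wrappers, so its loading state
+ * can be observed directly. `player` is an assumed instance and 'subs.vtt' a
+ * placeholder URL.
+ *
+ *   var trackEl = player.addRemoteTextTrack({ kind: 'subtitles', src: 'subs.vtt' }, false);
+ *
+ *   trackEl.addEventListener('load', function () {
+ *     // fires once the cue file has been fetched and parsed;
+ *     // trackEl.readyState === HTMLTrackElement.LOADED (2) at this point
+ *   });
+ */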
+
+/*
+ * This file contains all track properties that are used in
+ * player.js, tech.js, html5.js and possibly other techs in the future.
+ */
+
+var NORMAL = {
+ audio: {
+ ListClass: AudioTrackList,
+ TrackClass: AudioTrack,
+ capitalName: 'Audio'
+ },
+ video: {
+ ListClass: VideoTrackList,
+ TrackClass: VideoTrack,
+ capitalName: 'Video'
+ },
+ text: {
+ ListClass: TextTrackList,
+ TrackClass: TextTrack,
+ capitalName: 'Text'
+ }
+};
+Object.keys(NORMAL).forEach(function (type) {
+ NORMAL[type].getterName = type + "Tracks";
+ NORMAL[type].privateName = type + "Tracks_";
+});
+var REMOTE = {
+ remoteText: {
+ ListClass: TextTrackList,
+ TrackClass: TextTrack,
+ capitalName: 'RemoteText',
+ getterName: 'remoteTextTracks',
+ privateName: 'remoteTextTracks_'
+ },
+ remoteTextEl: {
+ ListClass: HtmlTrackElementList,
+ TrackClass: HTMLTrackElement,
+ capitalName: 'RemoteTextTrackEls',
+ getterName: 'remoteTextTrackEls',
+ privateName: 'remoteTextTrackEls_'
+ }
+};
+
+var ALL = _extends__default['default']({}, NORMAL, REMOTE);
+
+REMOTE.names = Object.keys(REMOTE);
+NORMAL.names = Object.keys(NORMAL);
+ALL.names = [].concat(REMOTE.names).concat(NORMAL.names);
+
+/**
+ * An Object containing a structure like: `{src: 'url', type: 'mimetype'}` or a string
+ * that just contains the src url alone.
+ * * `var SourceObject = {src: 'http://ex.com/video.mp4', type: 'video/mp4'};`
+ * `var SourceString = 'http://example.com/some-video.mp4';`
+ *
+ * @typedef {Object|string} Tech~SourceObject
+ *
+ * @property {string} src
+ * The url to the source
+ *
+ * @property {string} type
+ * The mime type of the source
+ */
+
+/**
+ * A function used by {@link Tech} to create a new {@link TextTrack}.
+ *
+ * @private
+ *
+ * @param {Tech} self
+ * An instance of the Tech class.
+ *
+ * @param {string} kind
+ * `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata)
+ *
+ * @param {string} [label]
+ * Label to identify the text track
+ *
+ * @param {string} [language]
+ * Two letter language abbreviation
+ *
+ * @param {Object} [options={}]
+ * An object with additional text track options
+ *
+ * @return {TextTrack}
+ * The text track that was created.
+ */
+
+function createTrackHelper(self, kind, label, language, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ var tracks = self.textTracks();
+ options.kind = kind;
+
+ if (label) {
+ options.label = label;
+ }
+
+ if (language) {
+ options.language = language;
+ }
+
+ options.tech = self;
+ var track = new ALL.text.TrackClass(options);
+ tracks.addTrack(track);
+ return track;
+}
+/**
+ * This is the base class for media playback technology controllers, such as
+ * {@link HTML5}
+ *
+ * @extends Component
+ */
+
+
+var Tech = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](Tech, _Component);
+
+ /**
+ * Create an instance of this Tech.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} ready
+ * Callback function to call when the `HTML5` Tech is ready.
+ */
+ function Tech(options, ready) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ if (ready === void 0) {
+ ready = function ready() {};
+ }
+
+ // we don't want the tech to report user activity automatically.
+ // This is done manually in addControlsListeners
+ options.reportTouchActivity = false;
+ _this = _Component.call(this, null, options, ready) || this;
+
+ _this.onDurationChange_ = function (e) {
+ return _this.onDurationChange(e);
+ };
+
+ _this.trackProgress_ = function (e) {
+ return _this.trackProgress(e);
+ };
+
+ _this.trackCurrentTime_ = function (e) {
+ return _this.trackCurrentTime(e);
+ };
+
+ _this.stopTrackingCurrentTime_ = function (e) {
+ return _this.stopTrackingCurrentTime(e);
+ };
+
+ _this.disposeSourceHandler_ = function (e) {
+ return _this.disposeSourceHandler(e);
+ };
+
+ _this.queuedHanders_ = new Set(); // keep track of whether the current source has played at all to
+ // implement a very limited played()
+
+ _this.hasStarted_ = false;
+
+ _this.on('playing', function () {
+ this.hasStarted_ = true;
+ });
+
+ _this.on('loadstart', function () {
+ this.hasStarted_ = false;
+ });
+
+ ALL.names.forEach(function (name) {
+ var props = ALL[name];
+
+ if (options && options[props.getterName]) {
+ _this[props.privateName] = options[props.getterName];
+ }
+ }); // Manually track progress in cases where the browser/tech doesn't report it.
+
+ if (!_this.featuresProgressEvents) {
+ _this.manualProgressOn();
+ } // Manually track timeupdates in cases where the browser/tech doesn't report it.
+
+
+ if (!_this.featuresTimeupdateEvents) {
+ _this.manualTimeUpdatesOn();
+ }
+
+ ['Text', 'Audio', 'Video'].forEach(function (track) {
+ if (options["native" + track + "Tracks"] === false) {
+ _this["featuresNative" + track + "Tracks"] = false;
+ }
+ });
+
+ if (options.nativeCaptions === false || options.nativeTextTracks === false) {
+ _this.featuresNativeTextTracks = false;
+ } else if (options.nativeCaptions === true || options.nativeTextTracks === true) {
+ _this.featuresNativeTextTracks = true;
+ }
+
+ if (!_this.featuresNativeTextTracks) {
+ _this.emulateTextTracks();
+ }
+
+ _this.preloadTextTracks = options.preloadTextTracks !== false;
+ _this.autoRemoteTextTracks_ = new ALL.text.ListClass();
+
+ _this.initTrackListeners(); // Turn on component tap events only if not using native controls
+
+
+ if (!options.nativeControlsForTouch) {
+ _this.emitTapEvents();
+ }
+
+ if (_this.constructor) {
+ _this.name_ = _this.constructor.name || 'Unknown Tech';
+ }
+
+ return _this;
+ }
+ /**
+ * A special function to trigger source set in a way that will allow player
+ * to re-trigger if the player or tech are not ready yet.
+ *
+ * @fires Tech#sourceset
+ * @param {string} src The source string at the time of the source changing.
+ */
+
+
+ var _proto = Tech.prototype;
+
+ _proto.triggerSourceset = function triggerSourceset(src) {
+ var _this2 = this;
+
+ if (!this.isReady_) {
+ // on initial ready we have to trigger source set
+ // 1ms after ready so that player can watch for it.
+ this.one('ready', function () {
+ return _this2.setTimeout(function () {
+ return _this2.triggerSourceset(src);
+ }, 1);
+ });
+ }
+ /**
+ * Fired when the source is set on the tech causing the media element
+ * to reload.
+ *
+ * @see {@link Player#event:sourceset}
+ * @event Tech#sourceset
+ * @type {EventTarget~Event}
+ */
+
+
+ this.trigger({
+ src: src,
+ type: 'sourceset'
+ });
+ }
+ /* Fallbacks for unsupported event types
+ ================================================================================ */
+
+ /**
+ * Polyfill the `progress` event for browsers that don't support it natively.
+ *
+ * @see {@link Tech#trackProgress}
+ */
+ ;
+
+ _proto.manualProgressOn = function manualProgressOn() {
+ this.on('durationchange', this.onDurationChange_);
+ this.manualProgress = true; // Trigger progress watching when a source begins loading
+
+ this.one('ready', this.trackProgress_);
+ }
+ /**
+ * Turn off the polyfill for `progress` events that was created in
+ * {@link Tech#manualProgressOn}
+ */
+ ;
+
+ _proto.manualProgressOff = function manualProgressOff() {
+ this.manualProgress = false;
+ this.stopTrackingProgress();
+ this.off('durationchange', this.onDurationChange_);
+ }
+ /**
+ * This is used to trigger a `progress` event when the buffered percent changes. It
+ * sets an interval function that will be called every 500 milliseconds to check if the
+ * buffer end percent has changed.
+ *
+ * > This function is called by {@link Tech#manualProgressOn}
+ *
+ * @param {EventTarget~Event} event
+ * The `ready` event that caused this to run.
+ *
+ * @listens Tech#ready
+ * @fires Tech#progress
+ */
+ ;
+
+ _proto.trackProgress = function trackProgress(event) {
+ this.stopTrackingProgress();
+ this.progressInterval = this.setInterval(bind(this, function () {
+ // Don't trigger unless buffered amount is greater than last time
+ var numBufferedPercent = this.bufferedPercent();
+
+ if (this.bufferedPercent_ !== numBufferedPercent) {
+ /**
+ * See {@link Player#progress}
+ *
+ * @event Tech#progress
+ * @type {EventTarget~Event}
+ */
+ this.trigger('progress');
+ }
+
+ this.bufferedPercent_ = numBufferedPercent;
+
+ if (numBufferedPercent === 1) {
+ this.stopTrackingProgress();
+ }
+ }), 500);
+ }
+ /**
+ * Update our internal duration on a `durationchange` event by calling
+ * {@link Tech#duration}.
+ *
+ * @param {EventTarget~Event} event
+ * The `durationchange` event that caused this to run.
+ *
+ * @listens Tech#durationchange
+ */
+ ;
+
+ _proto.onDurationChange = function onDurationChange(event) {
+ this.duration_ = this.duration();
+ }
+ /**
+ * Get and create a `TimeRange` object for buffering.
+ *
+ * @return {TimeRange}
+ * The time range object that was created.
+ */
+ ;
+
+ _proto.buffered = function buffered() {
+ return createTimeRanges(0, 0);
+ }
+ /**
+ * Get the percentage of the current video that is currently buffered.
+ *
+ * @return {number}
+ * A number from 0 to 1 that represents the decimal percentage of the
+ * video that is buffered.
+ *
+ */
+ ;
+
+ _proto.bufferedPercent = function bufferedPercent$1() {
+ return bufferedPercent(this.buffered(), this.duration_);
+ }
+ /**
+   * Stop manually tracking progress events by clearing the interval that was set in
+   * {@link Tech#trackProgress}.
+ */
+ ;
+
+ _proto.stopTrackingProgress = function stopTrackingProgress() {
+ this.clearInterval(this.progressInterval);
+ }
+ /**
+ * Polyfill the `timeupdate` event for browsers that don't support it.
+ *
+ * @see {@link Tech#trackCurrentTime}
+ */
+ ;
+
+ _proto.manualTimeUpdatesOn = function manualTimeUpdatesOn() {
+ this.manualTimeUpdates = true;
+ this.on('play', this.trackCurrentTime_);
+ this.on('pause', this.stopTrackingCurrentTime_);
+ }
+ /**
+ * Turn off the polyfill for `timeupdate` events that was created in
+ * {@link Tech#manualTimeUpdatesOn}
+ */
+ ;
+
+ _proto.manualTimeUpdatesOff = function manualTimeUpdatesOff() {
+ this.manualTimeUpdates = false;
+ this.stopTrackingCurrentTime();
+ this.off('play', this.trackCurrentTime_);
+ this.off('pause', this.stopTrackingCurrentTime_);
+ }
+ /**
+ * Sets up an interval function to track current time and trigger `timeupdate` every
+ * 250 milliseconds.
+ *
+ * @listens Tech#play
+ * @triggers Tech#timeupdate
+ */
+ ;
+
+ _proto.trackCurrentTime = function trackCurrentTime() {
+ if (this.currentTimeInterval) {
+ this.stopTrackingCurrentTime();
+ }
+
+ this.currentTimeInterval = this.setInterval(function () {
+ /**
+       * Triggered at an interval of 250ms to indicate that time is passing in the video.
+ *
+ * @event Tech#timeupdate
+ * @type {EventTarget~Event}
+ */
+ this.trigger({
+ type: 'timeupdate',
+ target: this,
+ manuallyTriggered: true
+ }); // 42 = 24 fps // 250 is what Webkit uses // FF uses 15
+ }, 250);
+ }
+ /**
+ * Stop the interval function created in {@link Tech#trackCurrentTime} so that the
+ * `timeupdate` event is no longer triggered.
+ *
+ * @listens {Tech#pause}
+ */
+ ;
+
+ _proto.stopTrackingCurrentTime = function stopTrackingCurrentTime() {
+ this.clearInterval(this.currentTimeInterval); // #1002 - if the video ends right before the next timeupdate would happen,
+ // the progress bar won't make it all the way to the end
+
+ this.trigger({
+ type: 'timeupdate',
+ target: this,
+ manuallyTriggered: true
+ });
+ }
+ /**
+ * Turn off all event polyfills, clear the `Tech`s {@link AudioTrackList},
+ * {@link VideoTrackList}, and {@link TextTrackList}, and dispose of this Tech.
+ *
+ * @fires Component#dispose
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ // clear out all tracks because we can't reuse them between techs
+ this.clearTracks(NORMAL.names); // Turn off any manual progress or timeupdate tracking
+
+ if (this.manualProgress) {
+ this.manualProgressOff();
+ }
+
+ if (this.manualTimeUpdates) {
+ this.manualTimeUpdatesOff();
+ }
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Clear out a single `TrackList` or an array of `TrackLists` given their names.
+ *
+ * > Note: Techs without source handlers should call this between sources for `video`
+ * & `audio` tracks. You don't want to use them between tracks!
+ *
+ * @param {string[]|string} types
+ * TrackList names to clear, valid names are `video`, `audio`, and
+ * `text`.
+ */
+ ;
+
+ _proto.clearTracks = function clearTracks(types) {
+ var _this3 = this;
+
+ types = [].concat(types); // clear out all tracks because we can't reuse them between techs
+
+ types.forEach(function (type) {
+ var list = _this3[type + "Tracks"]() || [];
+ var i = list.length;
+
+ while (i--) {
+ var track = list[i];
+
+ if (type === 'text') {
+ _this3.removeRemoteTextTrack(track);
+ }
+
+ list.removeTrack(track);
+ }
+ });
+ }
+ /**
+ * Remove any TextTracks added via addRemoteTextTrack that are
+ * flagged for automatic garbage collection
+ */
+ ;
+
+ _proto.cleanupAutoTextTracks = function cleanupAutoTextTracks() {
+ var list = this.autoRemoteTextTracks_ || [];
+ var i = list.length;
+
+ while (i--) {
+ var track = list[i];
+ this.removeRemoteTextTrack(track);
+ }
+ }
+ /**
+   * Reset the tech, which removes all sources and resets the internal readyState.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.reset = function reset() {}
+ /**
+ * Get the value of `crossOrigin` from the tech.
+ *
+ * @abstract
+ *
+ * @see {Html5#crossOrigin}
+ */
+ ;
+
+ _proto.crossOrigin = function crossOrigin() {}
+ /**
+ * Set the value of `crossOrigin` on the tech.
+ *
+ * @abstract
+ *
+ * @param {string} crossOrigin the crossOrigin value
+ * @see {Html5#setCrossOrigin}
+ */
+ ;
+
+ _proto.setCrossOrigin = function setCrossOrigin() {}
+ /**
+ * Get or set an error on the Tech.
+ *
+ * @param {MediaError} [err]
+ * Error to set on the Tech
+ *
+ * @return {MediaError|null}
+ * The current error object on the tech, or null if there isn't one.
+ */
+ ;
+
+ _proto.error = function error(err) {
+ if (err !== undefined) {
+ this.error_ = new MediaError(err);
+ this.trigger('error');
+ }
+
+ return this.error_;
+ }
+ /**
+ * Returns the `TimeRange`s that have been played through for the current source.
+ *
+ * > NOTE: This implementation is incomplete. It does not track the played `TimeRange`.
+ * It only checks whether the source has played at all or not.
+ *
+ * @return {TimeRange}
+ * - A single time range if this video has played
+ * - An empty set of ranges if not.
+ */
+ ;
+
+ _proto.played = function played() {
+ if (this.hasStarted_) {
+ return createTimeRanges(0, 0);
+ }
+
+ return createTimeRanges();
+ }
+ /**
+ * Start playback
+ *
+ * @abstract
+ *
+ * @see {Html5#play}
+ */
+ ;
+
+ _proto.play = function play() {}
+ /**
+ * Set whether we are scrubbing or not
+ *
+ * @abstract
+ *
+ * @see {Html5#setScrubbing}
+ */
+ ;
+
+ _proto.setScrubbing = function setScrubbing() {}
+ /**
+ * Get whether we are scrubbing or not
+ *
+ * @abstract
+ *
+ * @see {Html5#scrubbing}
+ */
+ ;
+
+ _proto.scrubbing = function scrubbing() {}
+ /**
+ * Causes a manual time update to occur if {@link Tech#manualTimeUpdatesOn} was
+ * previously called.
+ *
+ * @fires Tech#timeupdate
+ */
+ ;
+
+ _proto.setCurrentTime = function setCurrentTime() {
+ // improve the accuracy of manual timeupdates
+ if (this.manualTimeUpdates) {
+ /**
+ * A manual `timeupdate` event.
+ *
+ * @event Tech#timeupdate
+ * @type {EventTarget~Event}
+ */
+ this.trigger({
+ type: 'timeupdate',
+ target: this,
+ manuallyTriggered: true
+ });
+ }
+ }
+ /**
+   * Turn on listeners for {@link VideoTrackList}, {@link AudioTrackList}, and
+ * {@link TextTrackList} events.
+ *
+ * This adds {@link EventTarget~EventListeners} for `addtrack`, and `removetrack`.
+ *
+ * @fires Tech#audiotrackchange
+ * @fires Tech#videotrackchange
+ * @fires Tech#texttrackchange
+ */
+ ;
+
+ _proto.initTrackListeners = function initTrackListeners() {
+ var _this4 = this;
+
+ /**
+ * Triggered when tracks are added or removed on the Tech {@link AudioTrackList}
+ *
+ * @event Tech#audiotrackchange
+ * @type {EventTarget~Event}
+ */
+
+ /**
+ * Triggered when tracks are added or removed on the Tech {@link VideoTrackList}
+ *
+ * @event Tech#videotrackchange
+ * @type {EventTarget~Event}
+ */
+
+ /**
+ * Triggered when tracks are added or removed on the Tech {@link TextTrackList}
+ *
+ * @event Tech#texttrackchange
+ * @type {EventTarget~Event}
+ */
+ NORMAL.names.forEach(function (name) {
+ var props = NORMAL[name];
+
+ var trackListChanges = function trackListChanges() {
+ _this4.trigger(name + "trackchange");
+ };
+
+ var tracks = _this4[props.getterName]();
+
+ tracks.addEventListener('removetrack', trackListChanges);
+ tracks.addEventListener('addtrack', trackListChanges);
+
+ _this4.on('dispose', function () {
+ tracks.removeEventListener('removetrack', trackListChanges);
+ tracks.removeEventListener('addtrack', trackListChanges);
+ });
+ });
+ }
+ /**
+ * Emulate TextTracks using vtt.js if necessary
+ *
+ * @fires Tech#vttjsloaded
+ * @fires Tech#vttjserror
+ */
+ ;
+
+ _proto.addWebVttScript_ = function addWebVttScript_() {
+ var _this5 = this;
+
+ if (window__default['default'].WebVTT) {
+ return;
+    } // Initially, Tech.el_ is a child of a dummy div. Wait until the Component system
+    // signals that the Tech is ready, at which point Tech.el_ is part of the DOM,
+    // before inserting the WebVTT script
+
+
+ if (document__default['default'].body.contains(this.el())) {
+ // load via require if available and vtt.js script location was not passed in
+ // as an option. novtt builds will turn the above require call into an empty object
+ // which will cause this if check to always fail.
+ if (!this.options_['vtt.js'] && isPlain(vtt__default['default']) && Object.keys(vtt__default['default']).length > 0) {
+ this.trigger('vttjsloaded');
+ return;
+      } // load vtt.js via the script location option or the cdn if no location was
+      // passed in
+
+
+ var script = document__default['default'].createElement('script');
+ script.src = this.options_['vtt.js'] || 'https://vjs.zencdn.net/vttjs/0.14.1/vtt.min.js';
+
+ script.onload = function () {
+ /**
+ * Fired when vtt.js is loaded.
+ *
+ * @event Tech#vttjsloaded
+ * @type {EventTarget~Event}
+ */
+ _this5.trigger('vttjsloaded');
+ };
+
+ script.onerror = function () {
+ /**
+ * Fired when vtt.js was not loaded due to an error
+ *
+         * @event Tech#vttjserror
+ * @type {EventTarget~Event}
+ */
+ _this5.trigger('vttjserror');
+ };
+
+ this.on('dispose', function () {
+ script.onload = null;
+ script.onerror = null;
+      }); // set window.WebVTT to true before injecting the script so that
+      // we don't overwrite the injected window.WebVTT if it loads right away
+
+ window__default['default'].WebVTT = true;
+ this.el().parentNode.appendChild(script);
+ } else {
+ this.ready(this.addWebVttScript_);
+ }
+ }
+ /**
+ * Emulate texttracks
+ *
+ */
+ ;
+
+ _proto.emulateTextTracks = function emulateTextTracks() {
+ var _this6 = this;
+
+ var tracks = this.textTracks();
+ var remoteTracks = this.remoteTextTracks();
+
+ var handleAddTrack = function handleAddTrack(e) {
+ return tracks.addTrack(e.track);
+ };
+
+ var handleRemoveTrack = function handleRemoveTrack(e) {
+ return tracks.removeTrack(e.track);
+ };
+
+ remoteTracks.on('addtrack', handleAddTrack);
+ remoteTracks.on('removetrack', handleRemoveTrack);
+ this.addWebVttScript_();
+
+ var updateDisplay = function updateDisplay() {
+ return _this6.trigger('texttrackchange');
+ };
+
+ var textTracksChanges = function textTracksChanges() {
+ updateDisplay();
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i];
+ track.removeEventListener('cuechange', updateDisplay);
+
+ if (track.mode === 'showing') {
+ track.addEventListener('cuechange', updateDisplay);
+ }
+ }
+ };
+
+ textTracksChanges();
+ tracks.addEventListener('change', textTracksChanges);
+ tracks.addEventListener('addtrack', textTracksChanges);
+ tracks.addEventListener('removetrack', textTracksChanges);
+ this.on('dispose', function () {
+ remoteTracks.off('addtrack', handleAddTrack);
+ remoteTracks.off('removetrack', handleRemoveTrack);
+ tracks.removeEventListener('change', textTracksChanges);
+ tracks.removeEventListener('addtrack', textTracksChanges);
+ tracks.removeEventListener('removetrack', textTracksChanges);
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i];
+ track.removeEventListener('cuechange', updateDisplay);
+ }
+ });
+ }
+ /**
+ * Create and returns a remote {@link TextTrack} object.
+ *
+ * @param {string} kind
+ * `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata)
+ *
+ * @param {string} [label]
+ * Label to identify the text track
+ *
+ * @param {string} [language]
+ * Two letter language abbreviation
+ *
+ * @return {TextTrack}
+ * The TextTrack that gets created.
+ */
+ ;
+
+ _proto.addTextTrack = function addTextTrack(kind, label, language) {
+ if (!kind) {
+ throw new Error('TextTrack kind is required but was not provided');
+ }
+
+ return createTrackHelper(this, kind, label, language);
+ }
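+  // Usage sketch (illustrative only; `myTech` stands for any Tech instance, e.g. player.tech_):
+  //   var track = myTech.addTextTrack('captions', 'English', 'en');
+  //   track.mode = 'showing';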
+ /**
+ * Create an emulated TextTrack for use by addRemoteTextTrack
+ *
+ * This is intended to be overridden by classes that inherit from
+ * Tech in order to create native or custom TextTracks.
+ *
+ * @param {Object} options
+ * The object should contain the options to initialize the TextTrack with.
+ *
+ * @param {string} [options.kind]
+ * `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata).
+ *
+   * @param {string} [options.label]
+ * Label to identify the text track
+ *
+ * @param {string} [options.language]
+ * Two letter language abbreviation.
+ *
+ * @return {HTMLTrackElement}
+ * The track element that gets created.
+ */
+ ;
+
+ _proto.createRemoteTextTrack = function createRemoteTextTrack(options) {
+ var track = mergeOptions$3(options, {
+ tech: this
+ });
+ return new REMOTE.remoteTextEl.TrackClass(track);
+ }
+ /**
+ * Creates a remote text track object and returns an html track element.
+ *
+ * > Note: This can be an emulated {@link HTMLTrackElement} or a native one.
+ *
+ * @param {Object} options
+ * See {@link Tech#createRemoteTextTrack} for more detailed properties.
+ *
+ * @param {boolean} [manualCleanup=true]
+ * - When false: the TextTrack will be automatically removed from the video
+ * element whenever the source changes
+ * - When True: The TextTrack will have to be cleaned up manually
+ *
+ * @return {HTMLTrackElement}
+ * An Html Track Element.
+ *
+ * @deprecated The default functionality for this function will be equivalent
+ * to "manualCleanup=false" in the future. The manualCleanup parameter will
+ * also be removed.
+ */
+ ;
+
+ _proto.addRemoteTextTrack = function addRemoteTextTrack(options, manualCleanup) {
+ var _this7 = this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ var htmlTrackElement = this.createRemoteTextTrack(options);
+
+ if (manualCleanup !== true && manualCleanup !== false) {
+ // deprecation warning
+      log$1.warn('Calling addRemoteTextTrack without explicitly setting the "manualCleanup" parameter to `true` is deprecated and will default to `false` in a future version of video.js');
+ manualCleanup = true;
+ } // store HTMLTrackElement and TextTrack to remote list
+
+
+ this.remoteTextTrackEls().addTrackElement_(htmlTrackElement);
+ this.remoteTextTracks().addTrack(htmlTrackElement.track);
+
+ if (manualCleanup !== true) {
+      // once ready, track this TextTrack so it can be cleaned up automatically on source changes
+ this.ready(function () {
+ return _this7.autoRemoteTextTracks_.addTrack(htmlTrackElement.track);
+ });
+ }
+
+ return htmlTrackElement;
+ }
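+  // Usage sketch (illustrative; the URL is a placeholder and the player-level
+  // player.addRemoteTextTrack wrapper ultimately forwards here):
+  //   var trackEl = tech.addRemoteTextTrack({
+  //     kind: 'subtitles', src: 'subs-en.vtt', srclang: 'en', label: 'English'
+  //   }, false); // false = clean the track up automatically when the source changes
+  //   trackEl.addEventListener('load', function () { /* cues have loaded */ });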
+ /**
+ * Remove a remote text track from the remote `TextTrackList`.
+ *
+ * @param {TextTrack} track
+ * `TextTrack` to remove from the `TextTrackList`
+ */
+ ;
+
+ _proto.removeRemoteTextTrack = function removeRemoteTextTrack(track) {
+ var trackElement = this.remoteTextTrackEls().getTrackElementByTrack_(track); // remove HTMLTrackElement and TextTrack from remote list
+
+ this.remoteTextTrackEls().removeTrackElement_(trackElement);
+ this.remoteTextTracks().removeTrack(track);
+ this.autoRemoteTextTracks_.removeTrack(track);
+ }
+ /**
+ * Gets available media playback quality metrics as specified by the W3C's Media
+ * Playback Quality API.
+ *
+ * @see [Spec]{@link https://wicg.github.io/media-playback-quality}
+ *
+ * @return {Object}
+ * An object with supported media playback quality metrics
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.getVideoPlaybackQuality = function getVideoPlaybackQuality() {
+ return {};
+ }
+ /**
+ * Attempt to create a floating video window always on top of other windows
+ * so that users may continue consuming media while they interact with other
+ * content sites, or applications on their device.
+ *
+ * @see [Spec]{@link https://wicg.github.io/picture-in-picture}
+ *
+ * @return {Promise|undefined}
+ * A promise with a Picture-in-Picture window if the browser supports
+ * Promises (or one was passed in as an option). It returns undefined
+ * otherwise.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.requestPictureInPicture = function requestPictureInPicture() {
+ var PromiseClass = this.options_.Promise || window__default['default'].Promise;
+
+ if (PromiseClass) {
+ return PromiseClass.reject();
+ }
+ }
+ /**
+ * A method to check for the value of the 'disablePictureInPicture' property.
+ * Defaults to true, as it should be considered disabled if the tech does not support pip
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.disablePictureInPicture = function disablePictureInPicture() {
+ return true;
+ }
+ /**
+ * A method to set or unset the 'disablePictureInPicture' property.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.setDisablePictureInPicture = function setDisablePictureInPicture() {}
+ /**
+ * A fallback implementation of requestVideoFrameCallback using requestAnimationFrame
+ *
+ * @param {function} cb
+ * @return {number} request id
+ */
+ ;
+
+ _proto.requestVideoFrameCallback = function requestVideoFrameCallback(cb) {
+ var _this8 = this;
+
+ var id = newGUID();
+
+ if (this.paused()) {
+ this.queuedHanders_.add(id);
+ this.one('playing', function () {
+ if (_this8.queuedHanders_.has(id)) {
+ _this8.queuedHanders_["delete"](id);
+
+ cb();
+ }
+ });
+ } else {
+ this.requestNamedAnimationFrame(id, cb);
+ }
+
+ return id;
+ }
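+  // Usage sketch (fallback analogue of HTMLVideoElement.requestVideoFrameCallback; note that
+  // this fallback invokes the callback without the native API's frame metadata argument):
+  //   var id = tech.requestVideoFrameCallback(function () {
+  //     // runs on the next animation frame (or on the next 'playing' event if paused)
+  //   });
+  //   tech.cancelVideoFrameCallback(id); // cancel if it has not fired yet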
+ /**
+ * A fallback implementation of cancelVideoFrameCallback
+ *
+ * @param {number} id id of callback to be cancelled
+ */
+ ;
+
+ _proto.cancelVideoFrameCallback = function cancelVideoFrameCallback(id) {
+ if (this.queuedHanders_.has(id)) {
+ this.queuedHanders_["delete"](id);
+ } else {
+ this.cancelNamedAnimationFrame(id);
+ }
+ }
+ /**
+ * A method to set a poster from a `Tech`.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.setPoster = function setPoster() {}
+ /**
+ * A method to check for the presence of the 'playsinline' attribute.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.playsinline = function playsinline() {}
+ /**
+ * A method to set or unset the 'playsinline' attribute.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.setPlaysinline = function setPlaysinline() {}
+ /**
+ * Attempt to force override of native audio tracks.
+ *
+ * @param {boolean} override - If set to true native audio will be overridden,
+ * otherwise native audio will potentially be used.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.overrideNativeAudioTracks = function overrideNativeAudioTracks() {}
+ /**
+ * Attempt to force override of native video tracks.
+ *
+ * @param {boolean} override - If set to true native video will be overridden,
+ * otherwise native video will potentially be used.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.overrideNativeVideoTracks = function overrideNativeVideoTracks() {}
+ /*
+ * Check if the tech can support the given mime-type.
+ *
+ * The base tech does not support any type, but source handlers might
+ * overwrite this.
+ *
+ * @param {string} type
+ * The mimetype to check for support
+ *
+ * @return {string}
+ * 'probably', 'maybe', or empty string
+ *
+ * @see [Spec]{@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/canPlayType}
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.canPlayType = function canPlayType() {
+ return '';
+ }
+ /**
+ * Check if the type is supported by this tech.
+ *
+ * The base tech does not support any type, but source handlers might
+ * overwrite this.
+ *
+ * @param {string} type
+ * The media type to check
+ * @return {string} Returns the native video element's response
+ */
+ ;
+
+ Tech.canPlayType = function canPlayType() {
+ return '';
+ }
+ /**
+ * Check if the tech can support the given source
+ *
+ * @param {Object} srcObj
+ * The source object
+ * @param {Object} options
+ * The options passed to the tech
+ * @return {string} 'probably', 'maybe', or '' (empty string)
+ */
+ ;
+
+ Tech.canPlaySource = function canPlaySource(srcObj, options) {
+ return Tech.canPlayType(srcObj.type);
+ }
+ /*
+ * Return whether the argument is a Tech or not.
+   * Can be passed either a Class like `Html5` or an instance like `player.tech_`
+ *
+ * @param {Object} component
+ * The item to check
+ *
+ * @return {boolean}
+ * Whether it is a tech or not
+ * - True if it is a tech
+ * - False if it is not
+ */
+ ;
+
+ Tech.isTech = function isTech(component) {
+ return component.prototype instanceof Tech || component instanceof Tech || component === Tech;
+ }
+ /**
+ * Registers a `Tech` into a shared list for videojs.
+ *
+ * @param {string} name
+ * Name of the `Tech` to register.
+ *
+ * @param {Object} tech
+ * The `Tech` class to register.
+ */
+ ;
+
+ Tech.registerTech = function registerTech(name, tech) {
+ if (!Tech.techs_) {
+ Tech.techs_ = {};
+ }
+
+ if (!Tech.isTech(tech)) {
+ throw new Error("Tech " + name + " must be a Tech");
+ }
+
+ if (!Tech.canPlayType) {
+ throw new Error('Techs must have a static canPlayType method on them');
+ }
+
+ if (!Tech.canPlaySource) {
+ throw new Error('Techs must have a static canPlaySource method on them');
+ }
+
+ name = toTitleCase$1(name);
+ Tech.techs_[name] = tech;
+ Tech.techs_[toLowerCase(name)] = tech;
+
+ if (name !== 'Tech') {
+ // camel case the techName for use in techOrder
+ Tech.defaultTechOrder_.push(name);
+ }
+
+ return tech;
+ }
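+  // Registration sketch (MyTech is hypothetical; videojs.registerTech/videojs.getTech are the
+  // public aliases for these statics):
+  //   class MyTech extends videojs.getTech('Tech') {}
+  //   MyTech.isSupported = function () { return true; };
+  //   videojs.registerTech('MyTech', MyTech);
+  //   // then opt in per player: videojs('my-video', { techOrder: ['MyTech', 'html5'] })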
+ /**
+ * Get a `Tech` from the shared list by name.
+ *
+ * @param {string} name
+ * `camelCase` or `TitleCase` name of the Tech to get
+ *
+ * @return {Tech|undefined}
+ * The `Tech` or undefined if there was no tech with the name requested.
+ */
+ ;
+
+ Tech.getTech = function getTech(name) {
+ if (!name) {
+ return;
+ }
+
+ if (Tech.techs_ && Tech.techs_[name]) {
+ return Tech.techs_[name];
+ }
+
+ name = toTitleCase$1(name);
+
+ if (window__default['default'] && window__default['default'].videojs && window__default['default'].videojs[name]) {
+ log$1.warn("The " + name + " tech was added to the videojs object when it should be registered using videojs.registerTech(name, tech)");
+ return window__default['default'].videojs[name];
+ }
+ };
+
+ return Tech;
+}(Component$1);
+/**
+ * Get the {@link VideoTrackList}
+ *
+ * @returns {VideoTrackList}
+ * @method Tech.prototype.videoTracks
+ */
+
+/**
+ * Get the {@link AudioTrackList}
+ *
+ * @returns {AudioTrackList}
+ * @method Tech.prototype.audioTracks
+ */
+
+/**
+ * Get the {@link TextTrackList}
+ *
+ * @returns {TextTrackList}
+ * @method Tech.prototype.textTracks
+ */
+
+/**
+ * Get the remote element {@link TextTrackList}
+ *
+ * @returns {TextTrackList}
+ * @method Tech.prototype.remoteTextTracks
+ */
+
+/**
+ * Get the remote element {@link HtmlTrackElementList}
+ *
+ * @returns {HtmlTrackElementList}
+ * @method Tech.prototype.remoteTextTrackEls
+ */
+
+
+ALL.names.forEach(function (name) {
+ var props = ALL[name];
+
+ Tech.prototype[props.getterName] = function () {
+ this[props.privateName] = this[props.privateName] || new props.ListClass();
+ return this[props.privateName];
+ };
+});
+/**
+ * List of associated text tracks
+ *
+ * @type {TextTrackList}
+ * @private
+ * @property Tech#textTracks_
+ */
+
+/**
+ * List of associated audio tracks.
+ *
+ * @type {AudioTrackList}
+ * @private
+ * @property Tech#audioTracks_
+ */
+
+/**
+ * List of associated video tracks.
+ *
+ * @type {VideoTrackList}
+ * @private
+ * @property Tech#videoTracks_
+ */
+
+/**
+ * Boolean indicating whether the `Tech` supports volume control.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresVolumeControl = true;
+/**
+ * Boolean indicating whether the `Tech` supports muting volume.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresMuteControl = true;
+/**
+ * Boolean indicating whether the `Tech` supports fullscreen resize control.
+ * Resizing plugins using request fullscreen reloads the plugin
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresFullscreenResize = false;
+/**
+ * Boolean indicating whether the `Tech` supports changing the speed at which the video
+ * plays. Examples:
+ * - Set player to play 2x (twice) as fast
+ * - Set player to play 0.5x (half) as fast
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresPlaybackRate = false;
+/**
+ * Boolean indicating whether the `Tech` supports the `progress` event. This is currently
+ * not triggered by video-js-swf. This will be used to determine if
+ * {@link Tech#manualProgressOn} should be called.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresProgressEvents = false;
+/**
+ * Boolean indicating whether the `Tech` supports the `sourceset` event.
+ *
+ * A tech should set this to `true` and then use {@link Tech#triggerSourceset}
+ * to trigger a {@link Tech#event:sourceset} at the earliest time after getting
+ * a new source.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresSourceset = false;
+/**
+ * Boolean indicating whether the `Tech` supports the `timeupdate` event. This is currently
+ * not triggered by video-js-swf. This will be used to determine if
+ * {@link Tech#manualTimeUpdates} should be called.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresTimeupdateEvents = false;
+/**
+ * Boolean indicating whether the `Tech` supports the native `TextTrack`s.
+ * This will help us integrate with native `TextTrack`s if the browser supports them.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresNativeTextTracks = false;
+/**
+ * Boolean indicating whether the `Tech` supports `requestVideoFrameCallback`.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresVideoFrameCallback = false;
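+// Capability sketch (MyTech is hypothetical): a tech subclass advertises what it supports by
+// overriding these prototype flags, for example:
+//   MyTech.prototype.featuresPlaybackRate = true;
+//   MyTech.prototype.featuresSourceset = true; // and call this.triggerSourceset(src) on source changes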
+/**
+ * A functional mixin for techs that want to use the Source Handler pattern.
+ * Source handlers are scripts for handling specific formats.
+ * The source handler pattern is used for adaptive formats (HLS, DASH) that
+ * manually load video data and feed it into a Source Buffer (Media Source Extensions)
+ * Example: `Tech.withSourceHandlers(MyTech);`
+ *
+ * @param {Tech} _Tech
+ * The tech to add source handler functions to.
+ *
+ * @mixes Tech~SourceHandlerAdditions
+ */
+
+Tech.withSourceHandlers = function (_Tech) {
+ /**
+ * Register a source handler
+ *
+ * @param {Function} handler
+ * The source handler class
+ *
+ * @param {number} [index]
+ * Register it at the following index
+ */
+ _Tech.registerSourceHandler = function (handler, index) {
+ var handlers = _Tech.sourceHandlers;
+
+ if (!handlers) {
+ handlers = _Tech.sourceHandlers = [];
+ }
+
+ if (index === undefined) {
+ // add to the end of the list
+ index = handlers.length;
+ }
+
+ handlers.splice(index, 0, handler);
+ };
+ /**
+ * Check if the tech can support the given type. Also checks the
+ * Techs sourceHandlers.
+ *
+ * @param {string} type
+ * The mimetype to check.
+ *
+ * @return {string}
+ * 'probably', 'maybe', or '' (empty string)
+ */
+
+
+ _Tech.canPlayType = function (type) {
+ var handlers = _Tech.sourceHandlers || [];
+ var can;
+
+ for (var i = 0; i < handlers.length; i++) {
+ can = handlers[i].canPlayType(type);
+
+ if (can) {
+ return can;
+ }
+ }
+
+ return '';
+ };
+ /**
+ * Returns the first source handler that supports the source.
+ *
+ * TODO: Answer question: should 'probably' be prioritized over 'maybe'
+ *
+ * @param {Tech~SourceObject} source
+ * The source object
+ *
+ * @param {Object} options
+ * The options passed to the tech
+ *
+ * @return {SourceHandler|null}
+ * The first source handler that supports the source or null if
+ * no SourceHandler supports the source
+ */
+
+
+ _Tech.selectSourceHandler = function (source, options) {
+ var handlers = _Tech.sourceHandlers || [];
+ var can;
+
+ for (var i = 0; i < handlers.length; i++) {
+ can = handlers[i].canHandleSource(source, options);
+
+ if (can) {
+ return handlers[i];
+ }
+ }
+
+ return null;
+ };
+ /**
+ * Check if the tech can support the given source.
+ *
+ * @param {Tech~SourceObject} srcObj
+ * The source object
+ *
+ * @param {Object} options
+ * The options passed to the tech
+ *
+ * @return {string}
+ * 'probably', 'maybe', or '' (empty string)
+ */
+
+
+ _Tech.canPlaySource = function (srcObj, options) {
+ var sh = _Tech.selectSourceHandler(srcObj, options);
+
+ if (sh) {
+ return sh.canHandleSource(srcObj, options);
+ }
+
+ return '';
+ };
+ /**
+ * When using a source handler, prefer its implementation of
+ * any function normally provided by the tech.
+ */
+
+
+ var deferrable = ['seekable', 'seeking', 'duration'];
+ /**
+ * A wrapper around {@link Tech#seekable} that will call a `SourceHandler`s seekable
+ * function if it exists, with a fallback to the Techs seekable function.
+ *
+ * @method _Tech.seekable
+ */
+
+ /**
+ * A wrapper around {@link Tech#duration} that will call a `SourceHandler`s duration
+ * function if it exists, otherwise it will fallback to the techs duration function.
+ *
+ * @method _Tech.duration
+ */
+
+ deferrable.forEach(function (fnName) {
+ var originalFn = this[fnName];
+
+ if (typeof originalFn !== 'function') {
+ return;
+ }
+
+ this[fnName] = function () {
+ if (this.sourceHandler_ && this.sourceHandler_[fnName]) {
+ return this.sourceHandler_[fnName].apply(this.sourceHandler_, arguments);
+ }
+
+ return originalFn.apply(this, arguments);
+ };
+ }, _Tech.prototype);
+ /**
+ * Create a function for setting the source using a source object
+ * and source handlers.
+ * Should never be called unless a source handler was found.
+ *
+ * @param {Tech~SourceObject} source
+ * A source object with src and type keys
+ */
+
+ _Tech.prototype.setSource = function (source) {
+ var sh = _Tech.selectSourceHandler(source, this.options_);
+
+ if (!sh) {
+      // Fall back to a native source handler when unsupported sources are
+ // deliberately set
+ if (_Tech.nativeSourceHandler) {
+ sh = _Tech.nativeSourceHandler;
+ } else {
+ log$1.error('No source handler found for the current source.');
+ }
+ } // Dispose any existing source handler
+
+
+ this.disposeSourceHandler();
+ this.off('dispose', this.disposeSourceHandler_);
+
+ if (sh !== _Tech.nativeSourceHandler) {
+ this.currentSource_ = source;
+ }
+
+ this.sourceHandler_ = sh.handleSource(source, this, this.options_);
+ this.one('dispose', this.disposeSourceHandler_);
+ };
+ /**
+ * Clean up any existing SourceHandlers and listeners when the Tech is disposed.
+ *
+ * @listens Tech#dispose
+ */
+
+
+ _Tech.prototype.disposeSourceHandler = function () {
+ // if we have a source and get another one
+    // then we are loading something new,
+    // so clear all of our current tracks
+ if (this.currentSource_) {
+ this.clearTracks(['audio', 'video']);
+ this.currentSource_ = null;
+ } // always clean up auto-text tracks
+
+
+ this.cleanupAutoTextTracks();
+
+ if (this.sourceHandler_) {
+ if (this.sourceHandler_.dispose) {
+ this.sourceHandler_.dispose();
+ }
+
+ this.sourceHandler_ = null;
+ }
+ };
+}; // The base Tech class needs to be registered as a Component. It is the only
+// Tech that can be registered as a Component.
+
+
+Component$1.registerComponent('Tech', Tech);
+Tech.registerTech('Tech', Tech);
+/**
+ * A list of techs that should be added to techOrder on Players
+ *
+ * @private
+ */
+
+Tech.defaultTechOrder_ = [];
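+// Source handler sketch (MyTech and the mime type are hypothetical; Html5 applies this same mixin):
+//   Tech.withSourceHandlers(MyTech);
+//   MyTech.registerSourceHandler({
+//     canPlayType: function (type) { return type === 'video/x-my-format' ? 'probably' : ''; },
+//     canHandleSource: function (source, options) { return this.canPlayType(source.type); },
+//     handleSource: function (source, tech, options) {
+//       // ...start feeding media data to the tech...
+//       return { dispose: function () { /* tear down */ } };
+//     }
+//   }, 0); // index 0 = consult this handler before previously registered ones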
+
+/**
+ * @file middleware.js
+ * @module middleware
+ */
+var middlewares = {};
+var middlewareInstances = {};
+var TERMINATOR = {};
+/**
+ * A middleware object is a plain JavaScript object that has methods that
+ * match the {@link Tech} methods found in the lists of allowed
+ * {@link module:middleware.allowedGetters|getters},
+ * {@link module:middleware.allowedSetters|setters}, and
+ * {@link module:middleware.allowedMediators|mediators}.
+ *
+ * @typedef {Object} MiddlewareObject
+ */
+
+/**
+ * A middleware factory function that should return a
+ * {@link module:middleware~MiddlewareObject|MiddlewareObject}.
+ *
+ * This factory will be called for each player when needed, with the player
+ * passed in as an argument.
+ *
+ * @callback MiddlewareFactory
+ * @param {Player} player
+ * A Video.js player.
+ */
+
+/**
+ * Define a middleware that the player should use by way of a factory function
+ * that returns a middleware object.
+ *
+ * @param {string} type
+ * The MIME type to match or `"*"` for all MIME types.
+ *
+ * @param {MiddlewareFactory} middleware
+ * A middleware factory function that will be executed for
+ * matching types.
+ */
+
+function use(type, middleware) {
+ middlewares[type] = middlewares[type] || [];
+ middlewares[type].push(middleware);
+}
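+// Middleware sketch (illustrative; registered via the public videojs.use alias):
+//   videojs.use('*', function (player) {
+//     return {
+//       setSource: function (srcObj, next) { next(null, srcObj); }, // pass the source through unchanged
+//       currentTime: function (ct) { return ct; },        // allowed getter, runs tech -> player
+//       setCurrentTime: function (time) { return time; }  // allowed setter, runs player -> tech
+//     };
+//   });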
+/**
+ * Asynchronously sets a source using middleware by recursing through any
+ * matching middlewares and calling `setSource` on each, passing along the
+ * previous returned value each time.
+ *
+ * @param {Player} player
+ * A {@link Player} instance.
+ *
+ * @param {Tech~SourceObject} src
+ * A source object.
+ *
+ * @param {Function} next
+ * The next middleware to run.
+ */
+
+function setSource(player, src, next) {
+ player.setTimeout(function () {
+ return setSourceHelper(src, middlewares[src.type], next, player);
+ }, 1);
+}
+/**
+ * When the tech is set, passes the tech to each middleware's `setTech` method.
+ *
+ * @param {Object[]} middleware
+ * An array of middleware instances.
+ *
+ * @param {Tech} tech
+ * A Video.js tech.
+ */
+
+function setTech(middleware, tech) {
+ middleware.forEach(function (mw) {
+ return mw.setTech && mw.setTech(tech);
+ });
+}
+/**
+ * Calls a getter on the tech first, through each middleware
+ * from right to left to the player.
+ *
+ * @param {Object[]} middleware
+ * An array of middleware instances.
+ *
+ * @param {Tech} tech
+ * The current tech.
+ *
+ * @param {string} method
+ * A method name.
+ *
+ * @return {Mixed}
+ * The final value from the tech after middleware has intercepted it.
+ */
+
+function get(middleware, tech, method) {
+ return middleware.reduceRight(middlewareIterator(method), tech[method]());
+}
+/**
+ * Takes the argument given to the player and calls the setter method on each
+ * middleware from left to right to the tech.
+ *
+ * @param {Object[]} middleware
+ * An array of middleware instances.
+ *
+ * @param {Tech} tech
+ * The current tech.
+ *
+ * @param {string} method
+ * A method name.
+ *
+ * @param {Mixed} arg
+ * The value to set on the tech.
+ *
+ * @return {Mixed}
+ * The return value of the `method` of the `tech`.
+ */
+
+function set(middleware, tech, method, arg) {
+ return tech[method](middleware.reduce(middlewareIterator(method), arg));
+}
+/**
+ * Takes the argument given to the player and calls the `call` version of the
+ * method on each middleware from left to right.
+ *
+ * Then, call the passed in method on the tech and return the result unchanged
+ * back to the player, through middleware, this time from right to left.
+ *
+ * @param {Object[]} middleware
+ * An array of middleware instances.
+ *
+ * @param {Tech} tech
+ * The current tech.
+ *
+ * @param {string} method
+ * A method name.
+ *
+ * @param {Mixed} arg
+ * The value to set on the tech.
+ *
+ * @return {Mixed}
+ * The return value of the `method` of the `tech`, regardless of the
+ * return values of middlewares.
+ */
+
+function mediate(middleware, tech, method, arg) {
+ if (arg === void 0) {
+ arg = null;
+ }
+
+ var callMethod = 'call' + toTitleCase$1(method);
+ var middlewareValue = middleware.reduce(middlewareIterator(callMethod), arg);
+ var terminated = middlewareValue === TERMINATOR; // deprecated. The `null` return value should instead return TERMINATOR to
+ // prevent confusion if a techs method actually returns null.
+
+ var returnValue = terminated ? null : tech[method](middlewareValue);
+ executeRight(middleware, method, returnValue, terminated);
+ return returnValue;
+}
+/**
+ * Enumeration of allowed getters where the keys are method names.
+ *
+ * @type {Object}
+ */
+
+var allowedGetters = {
+ buffered: 1,
+ currentTime: 1,
+ duration: 1,
+ muted: 1,
+ played: 1,
+ paused: 1,
+ seekable: 1,
+ volume: 1,
+ ended: 1
+};
+/**
+ * Enumeration of allowed setters where the keys are method names.
+ *
+ * @type {Object}
+ */
+
+var allowedSetters = {
+ setCurrentTime: 1,
+ setMuted: 1,
+ setVolume: 1
+};
+/**
+ * Enumeration of allowed mediators where the keys are method names.
+ *
+ * @type {Object}
+ */
+
+var allowedMediators = {
+ play: 1,
+ pause: 1
+};
+
+function middlewareIterator(method) {
+ return function (value, mw) {
+ // if the previous middleware terminated, pass along the termination
+ if (value === TERMINATOR) {
+ return TERMINATOR;
+ }
+
+ if (mw[method]) {
+ return mw[method](value);
+ }
+
+ return value;
+ };
+}
+
+function executeRight(mws, method, value, terminated) {
+ for (var i = mws.length - 1; i >= 0; i--) {
+ var mw = mws[i];
+
+ if (mw[method]) {
+ mw[method](terminated, value);
+ }
+ }
+}
+/**
+ * Clear the middleware cache for a player.
+ *
+ * @param {Player} player
+ * A {@link Player} instance.
+ */
+
+
+function clearCacheForPlayer(player) {
+ middlewareInstances[player.id()] = null;
+}
+/**
+ * {
+ * [playerId]: [[mwFactory, mwInstance], ...]
+ * }
+ *
+ * @private
+ */
+
+function getOrCreateFactory(player, mwFactory) {
+ var mws = middlewareInstances[player.id()];
+ var mw = null;
+
+ if (mws === undefined || mws === null) {
+ mw = mwFactory(player);
+ middlewareInstances[player.id()] = [[mwFactory, mw]];
+ return mw;
+ }
+
+ for (var i = 0; i < mws.length; i++) {
+ var _mws$i = mws[i],
+ mwf = _mws$i[0],
+ mwi = _mws$i[1];
+
+ if (mwf !== mwFactory) {
+ continue;
+ }
+
+ mw = mwi;
+ }
+
+ if (mw === null) {
+ mw = mwFactory(player);
+ mws.push([mwFactory, mw]);
+ }
+
+ return mw;
+}
+
+function setSourceHelper(src, middleware, next, player, acc, lastRun) {
+ if (src === void 0) {
+ src = {};
+ }
+
+ if (middleware === void 0) {
+ middleware = [];
+ }
+
+ if (acc === void 0) {
+ acc = [];
+ }
+
+ if (lastRun === void 0) {
+ lastRun = false;
+ }
+
+ var _middleware = middleware,
+ mwFactory = _middleware[0],
+ mwrest = _middleware.slice(1); // if mwFactory is a string, then we're at a fork in the road
+
+
+ if (typeof mwFactory === 'string') {
+ setSourceHelper(src, middlewares[mwFactory], next, player, acc, lastRun); // if we have an mwFactory, call it with the player to get the mw,
+ // then call the mw's setSource method
+ } else if (mwFactory) {
+ var mw = getOrCreateFactory(player, mwFactory); // if setSource isn't present, implicitly select this middleware
+
+ if (!mw.setSource) {
+ acc.push(mw);
+ return setSourceHelper(src, mwrest, next, player, acc, lastRun);
+ }
+
+ mw.setSource(assign({}, src), function (err, _src) {
+ // something happened, try the next middleware on the current level
+ // make sure to use the old src
+ if (err) {
+ return setSourceHelper(src, mwrest, next, player, acc, lastRun);
+ } // we've succeeded, now we need to go deeper
+
+
+ acc.push(mw); // if it's the same type, continue down the current chain
+ // otherwise, we want to go down the new chain
+
+ setSourceHelper(_src, src.type === _src.type ? mwrest : middlewares[_src.type], next, player, acc, lastRun);
+ });
+ } else if (mwrest.length) {
+ setSourceHelper(src, mwrest, next, player, acc, lastRun);
+ } else if (lastRun) {
+ next(src, acc);
+ } else {
+ setSourceHelper(src, middlewares['*'], next, player, acc, true);
+ }
+}
+
+/**
+ * Mimetypes
+ *
+ * @see https://www.iana.org/assignments/media-types/media-types.xhtml
+ * @typedef Mimetypes~Kind
+ * @enum
+ */
+
+var MimetypesKind = {
+ opus: 'video/ogg',
+ ogv: 'video/ogg',
+ mp4: 'video/mp4',
+ mov: 'video/mp4',
+ m4v: 'video/mp4',
+ mkv: 'video/x-matroska',
+ m4a: 'audio/mp4',
+ mp3: 'audio/mpeg',
+ aac: 'audio/aac',
+ caf: 'audio/x-caf',
+ flac: 'audio/flac',
+ oga: 'audio/ogg',
+ wav: 'audio/wav',
+ m3u8: 'application/x-mpegURL',
+ mpd: 'application/dash+xml',
+ jpg: 'image/jpeg',
+ jpeg: 'image/jpeg',
+ gif: 'image/gif',
+ png: 'image/png',
+ svg: 'image/svg+xml',
+ webp: 'image/webp'
+};
+/**
+ * Get the mimetype of a given src url if possible
+ *
+ * @param {string} src
+ * The url to the src
+ *
+ * @return {string}
+ * return the mimetype if it was known or empty string otherwise
+ */
+
+var getMimetype = function getMimetype(src) {
+ if (src === void 0) {
+ src = '';
+ }
+
+ var ext = getFileExtension(src);
+ var mimetype = MimetypesKind[ext.toLowerCase()];
+ return mimetype || '';
+};
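+// For illustration (results follow directly from MimetypesKind above; URLs are placeholders):
+//   getMimetype('https://example.com/clip.mp4')    // => 'video/mp4'
+//   getMimetype('https://example.com/stream.m3u8') // => 'application/x-mpegURL'
+//   getMimetype('https://example.com/data.bin')    // => ''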
+/**
+ * Find the mime type of a given source string if possible. Uses the player
+ * source cache.
+ *
+ * @param {Player} player
+ * The player object
+ *
+ * @param {string} src
+ * The source string
+ *
+ * @return {string}
+ * The type that was found
+ */
+
+var findMimetype = function findMimetype(player, src) {
+ if (!src) {
+ return '';
+ } // 1. check for the type in the `source` cache
+
+
+ if (player.cache_.source.src === src && player.cache_.source.type) {
+ return player.cache_.source.type;
+ } // 2. see if we have this source in our `currentSources` cache
+
+
+ var matchingSources = player.cache_.sources.filter(function (s) {
+ return s.src === src;
+ });
+
+ if (matchingSources.length) {
+ return matchingSources[0].type;
+ } // 3. look for the src url in source elements and use the type there
+
+
+ var sources = player.$$('source');
+
+ for (var i = 0; i < sources.length; i++) {
+ var s = sources[i];
+
+ if (s.type && s.src && s.src === src) {
+ return s.type;
+ }
+ } // 4. finally fallback to our list of mime types based on src url extension
+
+
+ return getMimetype(src);
+};
+
+/**
+ * @module filter-source
+ */
+/**
+ * Filter out single bad source objects or multiple source objects in an
+ * array. Also flattens nested source object arrays into a 1 dimensional
+ * array of source objects.
+ *
+ * @param {Tech~SourceObject|Tech~SourceObject[]} src
+ * The src object to filter
+ *
+ * @return {Tech~SourceObject[]}
+ * An array of sourceobjects containing only valid sources
+ *
+ * @private
+ */
+
+var filterSource = function filterSource(src) {
+ // traverse array
+ if (Array.isArray(src)) {
+ var newsrc = [];
+ src.forEach(function (srcobj) {
+ srcobj = filterSource(srcobj);
+
+ if (Array.isArray(srcobj)) {
+ newsrc = newsrc.concat(srcobj);
+ } else if (isObject(srcobj)) {
+ newsrc.push(srcobj);
+ }
+ });
+ src = newsrc;
+ } else if (typeof src === 'string' && src.trim()) {
+ // convert string into object
+ src = [fixSource({
+ src: src
+ })];
+ } else if (isObject(src) && typeof src.src === 'string' && src.src && src.src.trim()) {
+ // src is already valid
+ src = [fixSource(src)];
+ } else {
+ // invalid source, turn it into an empty array
+ src = [];
+ }
+
+ return src;
+};
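+// For illustration (behavior follows from the branches above; URLs are placeholders):
+//   filterSource('https://example.com/a.mp4')
+//     // => [{ src: 'https://example.com/a.mp4', type: 'video/mp4' }]
+//   filterSource([null, '', { src: 'https://example.com/a.m3u8' }])
+//     // => [{ src: 'https://example.com/a.m3u8', type: 'application/x-mpegURL' }]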
+/**
+ * Checks src mimetype, adding it when possible
+ *
+ * @param {Tech~SourceObject} src
+ * The src object to check
+ * @return {Tech~SourceObject}
+ * src Object with known type
+ */
+
+
+function fixSource(src) {
+ if (!src.type) {
+ var mimetype = getMimetype(src.src);
+
+ if (mimetype) {
+ src.type = mimetype;
+ }
+ }
+
+ return src;
+}
+
+/**
+ * The `MediaLoader` is the `Component` that decides which playback technology to load
+ * when a player is initialized.
+ *
+ * @extends Component
+ */
+
+var MediaLoader = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](MediaLoader, _Component);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should attach to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * The function that is run when this component is ready.
+ */
+ function MediaLoader(player, options, ready) {
+ var _this;
+
+ // MediaLoader has no element
+ var options_ = mergeOptions$3({
+ createEl: false
+ }, options);
+ _this = _Component.call(this, player, options_, ready) || this; // If there are no sources when the player is initialized,
+ // load the first supported playback technology.
+
+ if (!options.playerOptions.sources || options.playerOptions.sources.length === 0) {
+ for (var i = 0, j = options.playerOptions.techOrder; i < j.length; i++) {
+ var techName = toTitleCase$1(j[i]);
+ var tech = Tech.getTech(techName); // Support old behavior of techs being registered as components.
+ // Remove once that deprecated behavior is removed.
+
+ if (!techName) {
+ tech = Component$1.getComponent(techName);
+ } // Check if the browser supports this technology
+
+
+ if (tech && tech.isSupported()) {
+ player.loadTech_(techName);
+ break;
+ }
+ }
+ } else {
+ // Loop through playback technologies (e.g. HTML5) and check for support.
+ // Then load the best source.
+ // A few assumptions here:
+ // All playback technologies respect preload false.
+ player.src(options.playerOptions.sources);
+ }
+
+ return _this;
+ }
+
+ return MediaLoader;
+}(Component$1);
+
+Component$1.registerComponent('MediaLoader', MediaLoader);
+
+/**
+ * Component which is clickable or keyboard actionable, but is not a
+ * native HTML button.
+ *
+ * @extends Component
+ */
+
+var ClickableComponent = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](ClickableComponent, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of component options.
+ *
+ * @param {function} [options.clickHandler]
+ * The function to call when the button is clicked / activated
+ *
+ * @param {string} [options.controlText]
+ * The text to set on the button
+ *
+ * @param {string} [options.className]
+   * A class or space-separated list of classes to add to the component
+ *
+ */
+ function ClickableComponent(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+
+ if (_this.options_.controlText) {
+ _this.controlText(_this.options_.controlText);
+ }
+
+ _this.handleMouseOver_ = function (e) {
+ return _this.handleMouseOver(e);
+ };
+
+ _this.handleMouseOut_ = function (e) {
+ return _this.handleMouseOut(e);
+ };
+
+ _this.handleClick_ = function (e) {
+ return _this.handleClick(e);
+ };
+
+ _this.handleKeyDown_ = function (e) {
+ return _this.handleKeyDown(e);
+ };
+
+ _this.emitTapEvents();
+
+ _this.enable();
+
+ return _this;
+ }
+ /**
+ * Create the `ClickableComponent`s DOM element.
+ *
+ * @param {string} [tag=div]
+ * The element's node type.
+ *
+ * @param {Object} [props={}]
+ * An object of properties that should be set on the element.
+ *
+ * @param {Object} [attributes={}]
+ * An object of attributes that should be set on the element.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+
+
+ var _proto = ClickableComponent.prototype;
+
+ _proto.createEl = function createEl$1(tag, props, attributes) {
+ if (tag === void 0) {
+ tag = 'div';
+ }
+
+ if (props === void 0) {
+ props = {};
+ }
+
+ if (attributes === void 0) {
+ attributes = {};
+ }
+
+ props = assign({
+ className: this.buildCSSClass(),
+ tabIndex: 0
+ }, props);
+
+ if (tag === 'button') {
+ log$1.error("Creating a ClickableComponent with an HTML element of " + tag + " is not supported; use a Button instead.");
+ } // Add ARIA attributes for clickable element which is not a native HTML button
+
+
+ attributes = assign({
+ role: 'button'
+ }, attributes);
+ this.tabIndex_ = props.tabIndex;
+ var el = createEl(tag, props, attributes);
+ el.appendChild(createEl('span', {
+ className: 'vjs-icon-placeholder'
+ }, {
+ 'aria-hidden': true
+ }));
+ this.createControlTextEl(el);
+ return el;
+ };
+
+ _proto.dispose = function dispose() {
+ // remove controlTextEl_ on dispose
+ this.controlTextEl_ = null;
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Create a control text element on this `ClickableComponent`
+ *
+ * @param {Element} [el]
+ * Parent element for the control text.
+ *
+ * @return {Element}
+ * The control text element that gets created.
+ */
+ ;
+
+ _proto.createControlTextEl = function createControlTextEl(el) {
+ this.controlTextEl_ = createEl('span', {
+ className: 'vjs-control-text'
+ }, {
+ // let the screen reader user know that the text of the element may change
+ 'aria-live': 'polite'
+ });
+
+ if (el) {
+ el.appendChild(this.controlTextEl_);
+ }
+
+ this.controlText(this.controlText_, el);
+ return this.controlTextEl_;
+ }
+ /**
+ * Get or set the localize text to use for the controls on the `ClickableComponent`.
+ *
+ * @param {string} [text]
+ * Control text for element.
+ *
+ * @param {Element} [el=this.el()]
+ * Element to set the title on.
+ *
+ * @return {string}
+ * - The control text when getting
+ */
+ ;
+
+ _proto.controlText = function controlText(text, el) {
+ if (el === void 0) {
+ el = this.el();
+ }
+
+ if (text === undefined) {
+ return this.controlText_ || 'Need Text';
+ }
+
+ var localizedText = this.localize(text);
+ this.controlText_ = text;
+ textContent(this.controlTextEl_, localizedText);
+
+ if (!this.nonIconControl && !this.player_.options_.noUITitleAttributes) {
+ // Set title attribute if only an icon is shown
+ el.setAttribute('title', localizedText);
+ }
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ ;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-control vjs-button " + _Component.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Enable this `ClickableComponent`
+ */
+ ;
+
+ _proto.enable = function enable() {
+ if (!this.enabled_) {
+ this.enabled_ = true;
+ this.removeClass('vjs-disabled');
+ this.el_.setAttribute('aria-disabled', 'false');
+
+ if (typeof this.tabIndex_ !== 'undefined') {
+ this.el_.setAttribute('tabIndex', this.tabIndex_);
+ }
+
+ this.on(['tap', 'click'], this.handleClick_);
+ this.on('keydown', this.handleKeyDown_);
+ }
+ }
+ /**
+ * Disable this `ClickableComponent`
+ */
+ ;
+
+ _proto.disable = function disable() {
+ this.enabled_ = false;
+ this.addClass('vjs-disabled');
+ this.el_.setAttribute('aria-disabled', 'true');
+
+ if (typeof this.tabIndex_ !== 'undefined') {
+ this.el_.removeAttribute('tabIndex');
+ }
+
+ this.off('mouseover', this.handleMouseOver_);
+ this.off('mouseout', this.handleMouseOut_);
+ this.off(['tap', 'click'], this.handleClick_);
+ this.off('keydown', this.handleKeyDown_);
+ }
+ /**
+ * Handles a language change on the player by re-localizing this
+ * `ClickableComponent`'s control text.
+ *
+ */
+ ;
+
+ _proto.handleLanguagechange = function handleLanguagechange() {
+ this.controlText(this.controlText_);
+ }
+ /**
+ * Event handler that is called when a `ClickableComponent` receives a
+ * `click` or `tap` event.
+ *
+ * @param {EventTarget~Event} event
+ * The `tap` or `click` event that caused this function to be called.
+ *
+ * @listens tap
+ * @listens click
+ * @abstract
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ if (this.options_.clickHandler) {
+ this.options_.clickHandler.call(this, arguments);
+ }
+ }
+ /**
+ * Event handler that is called when a `ClickableComponent` receives a
+ * `keydown` event.
+ *
+ * By default, if the key is Space or Enter, it will trigger a `click` event.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Support Space or Enter key operation to fire a click event. Also,
+ // prevent the event from propagating through the DOM and triggering
+ // Player hotkeys.
+ if (keycode__default['default'].isEventKey(event, 'Space') || keycode__default['default'].isEventKey(event, 'Enter')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.trigger('click');
+ } else {
+ // Pass keypress handling up for unsupported keys
+ _Component.prototype.handleKeyDown.call(this, event);
+ }
+ };
+
+ return ClickableComponent;
+}(Component$1);
+
+Component$1.registerComponent('ClickableComponent', ClickableComponent);
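+
+// Usage sketch (illustrative comment only, not part of the upstream bundle): any
+// registered component can be given click/tap/keyboard behaviour through the
+// `clickHandler` option, which `ClickableComponent#handleClick` above invokes.
+// The element id 'my-player' is only an assumption for the example.
+//
+//   var player = videojs('my-player');
+//   player.addChild('ClickableComponent', {
+//     controlText: 'Do something',
+//     clickHandler: function () {
+//       videojs.log('clickable component activated');
+//     }
+//   });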
+
+/**
+ * A `ClickableComponent` that handles showing the poster image for the player.
+ *
+ * @extends ClickableComponent
+ */
+
+var PosterImage = /*#__PURE__*/function (_ClickableComponent) {
+ _inheritsLoose__default['default'](PosterImage, _ClickableComponent);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should attach to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function PosterImage(player, options) {
+ var _this;
+
+ _this = _ClickableComponent.call(this, player, options) || this;
+
+ _this.update();
+
+ _this.update_ = function (e) {
+ return _this.update(e);
+ };
+
+ player.on('posterchange', _this.update_);
+ return _this;
+ }
+ /**
+ * Clean up and dispose of the `PosterImage`.
+ */
+
+
+ var _proto = PosterImage.prototype;
+
+ _proto.dispose = function dispose() {
+ this.player().off('posterchange', this.update_);
+
+ _ClickableComponent.prototype.dispose.call(this);
+ }
+ /**
+ * Create the `PosterImage`s DOM element.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+ ;
+
+ _proto.createEl = function createEl$1() {
+ var el = createEl('div', {
+ className: 'vjs-poster',
+ // Don't want poster to be tabbable.
+ tabIndex: -1
+ });
+ return el;
+ }
+ /**
+ * An {@link EventTarget~EventListener} for {@link Player#posterchange} events.
+ *
+ * @listens Player#posterchange
+ *
+ * @param {EventTarget~Event} [event]
+ * The `Player#posterchange` event that triggered this function.
+ */
+ ;
+
+ _proto.update = function update(event) {
+ var url = this.player().poster();
+ this.setSrc(url); // If there's no poster source we should display:none on this component
+ // so it's not still clickable or right-clickable
+
+ if (url) {
+ this.show();
+ } else {
+ this.hide();
+ }
+ }
+ /**
+ * Set the source of the `PosterImage` depending on the display method.
+ *
+ * @param {string} url
+ * The URL to the source for the `PosterImage`.
+ */
+ ;
+
+ _proto.setSrc = function setSrc(url) {
+ var backgroundImage = ''; // Any falsy value should stay as an empty string, otherwise
+ // this will throw an extra error
+
+ if (url) {
+ backgroundImage = "url(\"" + url + "\")";
+ }
+
+ this.el_.style.backgroundImage = backgroundImage;
+ }
+ /**
+ * An {@link EventTarget~EventListener} for clicks on the `PosterImage`. See
+ * {@link ClickableComponent#handleClick} for instances where this will be triggered.
+ *
+ * @listens tap
+ * @listens click
+ * @listens keydown
+ *
+ * @param {EventTarget~Event} event
+ * The `click`, `tap` or `keydown` event that caused this function to be called.
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ // We don't want a click to trigger playback when controls are disabled
+ if (!this.player_.controls()) {
+ return;
+ }
+
+ var sourceIsEncrypted = this.player_.usingPlugin('eme') && this.player_.eme.sessions && this.player_.eme.sessions.length > 0;
+
+ if (this.player_.tech(true) && // We've observed a bug in IE and Edge when playing back DRM content where
+ // calling .focus() on the video element causes the video to go black,
+ // so we avoid it in that specific case
+ !((IE_VERSION || IS_EDGE) && sourceIsEncrypted)) {
+ this.player_.tech(true).focus();
+ }
+
+ if (this.player_.paused()) {
+ silencePromise(this.player_.play());
+ } else {
+ this.player_.pause();
+ }
+ };
+
+ return PosterImage;
+}(ClickableComponent);
+
+Component$1.registerComponent('PosterImage', PosterImage);
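+
+// Usage sketch (illustrative comment only, not part of the upstream bundle):
+// setting a new poster on an existing player fires 'posterchange', which the
+// PosterImage component above reacts to via update(). The URL is a placeholder.
+//
+//   player.poster('https://example.com/poster.jpg');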
+
+var darkGray = '#222';
+var lightGray = '#ccc';
+var fontMap = {
+ monospace: 'monospace',
+ sansSerif: 'sans-serif',
+ serif: 'serif',
+ monospaceSansSerif: '"Andale Mono", "Lucida Console", monospace',
+ monospaceSerif: '"Courier New", monospace',
+ proportionalSansSerif: 'sans-serif',
+ proportionalSerif: 'serif',
+ casual: '"Comic Sans MS", Impact, fantasy',
+ script: '"Monotype Corsiva", cursive',
+ smallcaps: '"Andale Mono", "Lucida Console", monospace, sans-serif'
+};
+/**
+ * Construct an rgba color from a given hex color code.
+ *
+ * @param {string} color
+ * Hex color string, like '#f0e' or '#f604e2'.
+ *
+ * @param {number} opacity
+ * Value for opacity, 0.0 - 1.0.
+ *
+ * @return {string}
+ * The rgba color that was created, like 'rgba(255, 0, 0, 0.3)'.
+ */
+
+function constructColor(color, opacity) {
+ var hex;
+
+ if (color.length === 4) {
+ // color looks like "#f0e"
+ hex = color[1] + color[1] + color[2] + color[2] + color[3] + color[3];
+ } else if (color.length === 7) {
+ // color looks like "#f604e2"
+ hex = color.slice(1);
+ } else {
+ throw new Error('Invalid color code provided, ' + color + '; must be formatted as e.g. #f0e or #f604e2.');
+ }
+
+ return 'rgba(' + parseInt(hex.slice(0, 2), 16) + ',' + parseInt(hex.slice(2, 4), 16) + ',' + parseInt(hex.slice(4, 6), 16) + ',' + opacity + ')';
+}
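+// Worked example (comment only, not part of the upstream bundle): both the short
+// and long hex forms expand to the same rgba() syntax.
+//
+//   constructColor('#f0e', 0.5);   // -> 'rgba(255,0,238,0.5)'
+//   constructColor('#f604e2', 1);  // -> 'rgba(246,4,226,1)'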
+/**
+ * Try to update the style of a DOM element. Some style changes will throw an error,
+ * particularly in IE8. Those should be noops.
+ *
+ * @param {Element} el
+ * The DOM element to be styled.
+ *
+ * @param {string} style
+ * The CSS property on the element that should be styled.
+ *
+ * @param {string} rule
+ * The style rule that should be applied to the property.
+ *
+ * @private
+ */
+
+function tryUpdateStyle(el, style, rule) {
+ try {
+ el.style[style] = rule;
+ } catch (e) {
+ // Satisfies linter.
+ return;
+ }
+}
+/**
+ * The component for displaying text track cues.
+ *
+ * @extends Component
+ */
+
+
+var TextTrackDisplay = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](TextTrackDisplay, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * The function to call when `TextTrackDisplay` is ready.
+ */
+ function TextTrackDisplay(player, options, ready) {
+ var _this;
+
+ _this = _Component.call(this, player, options, ready) || this;
+
+ var updateDisplayHandler = function updateDisplayHandler(e) {
+ return _this.updateDisplay(e);
+ };
+
+ player.on('loadstart', function (e) {
+ return _this.toggleDisplay(e);
+ });
+ player.on('texttrackchange', updateDisplayHandler);
+ player.on('loadedmetadata', function (e) {
+ return _this.preselectTrack(e);
+ }); // This used to be called during player init, but was causing an error
+ // if a track should show by default and the display hadn't loaded yet.
+ // Should probably be moved to an external track loader when we support
+ // tracks that don't need a display.
+
+ player.ready(bind(_assertThisInitialized__default['default'](_this), function () {
+ if (player.tech_ && player.tech_.featuresNativeTextTracks) {
+ this.hide();
+ return;
+ }
+
+ player.on('fullscreenchange', updateDisplayHandler);
+ player.on('playerresize', updateDisplayHandler);
+ window__default['default'].addEventListener('orientationchange', updateDisplayHandler);
+ player.on('dispose', function () {
+ return window__default['default'].removeEventListener('orientationchange', updateDisplayHandler);
+ });
+ var tracks = this.options_.playerOptions.tracks || [];
+
+ for (var i = 0; i < tracks.length; i++) {
+ this.player_.addRemoteTextTrack(tracks[i], true);
+ }
+
+ this.preselectTrack();
+ }));
+ return _this;
+ }
+ /**
+ * Preselect a track following this precedence:
+ * - matches the previously selected {@link TextTrack}'s language and kind
+ * - matches the previously selected {@link TextTrack}'s language only
+ * - is the first default captions track
+ * - is the first default descriptions track
+ *
+ * @listens Player#loadstart
+ */
+
+
+ var _proto = TextTrackDisplay.prototype;
+
+ _proto.preselectTrack = function preselectTrack() {
+ var modes = {
+ captions: 1,
+ subtitles: 1
+ };
+ var trackList = this.player_.textTracks();
+ var userPref = this.player_.cache_.selectedLanguage;
+ var firstDesc;
+ var firstCaptions;
+ var preferredTrack;
+
+ for (var i = 0; i < trackList.length; i++) {
+ var track = trackList[i];
+
+ if (userPref && userPref.enabled && userPref.language && userPref.language === track.language && track.kind in modes) {
+ // Always choose the track that matches both language and kind
+ if (track.kind === userPref.kind) {
+ preferredTrack = track; // or choose the first track that matches language
+ } else if (!preferredTrack) {
+ preferredTrack = track;
+ } // clear everything if offTextTrackMenuItem was clicked
+
+ } else if (userPref && !userPref.enabled) {
+ preferredTrack = null;
+ firstDesc = null;
+ firstCaptions = null;
+ } else if (track["default"]) {
+ if (track.kind === 'descriptions' && !firstDesc) {
+ firstDesc = track;
+ } else if (track.kind in modes && !firstCaptions) {
+ firstCaptions = track;
+ }
+ }
+ } // The preferredTrack matches the user preference and takes
+ // precedence over all the other tracks.
+ // So, display the preferredTrack before the first default track
+ // and the subtitles/captions track before the descriptions track
+
+
+ if (preferredTrack) {
+ preferredTrack.mode = 'showing';
+ } else if (firstCaptions) {
+ firstCaptions.mode = 'showing';
+ } else if (firstDesc) {
+ firstDesc.mode = 'showing';
+ }
+ }
+ /**
+ * Toggle the display of {@link TextTrack}s between the two possible states:
+ * - 'shown'
+ * - 'hidden'
+ *
+ * @listens Player#loadstart
+ */
+ ;
+
+ _proto.toggleDisplay = function toggleDisplay() {
+ if (this.player_.tech_ && this.player_.tech_.featuresNativeTextTracks) {
+ this.hide();
+ } else {
+ this.show();
+ }
+ }
+ /**
+ * Create the {@link Component}'s DOM element.
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-text-track-display'
+ }, {
+ 'translate': 'yes',
+ 'aria-live': 'off',
+ 'aria-atomic': 'true'
+ });
+ }
+ /**
+ * Clear all displayed {@link TextTrack}s.
+ */
+ ;
+
+ _proto.clearDisplay = function clearDisplay() {
+ if (typeof window__default['default'].WebVTT === 'function') {
+ window__default['default'].WebVTT.processCues(window__default['default'], [], this.el_);
+ }
+ }
+ /**
+ * Update the displayed TextTrack when either a {@link Player#texttrackchange} or
+ * a {@link Player#fullscreenchange} is fired.
+ *
+ * @listens Player#texttrackchange
+ * @listens Player#fullscreenchange
+ */
+ ;
+
+ _proto.updateDisplay = function updateDisplay() {
+ var tracks = this.player_.textTracks();
+ var allowMultipleShowingTracks = this.options_.allowMultipleShowingTracks;
+ this.clearDisplay();
+
+ if (allowMultipleShowingTracks) {
+ var showingTracks = [];
+
+ for (var _i = 0; _i < tracks.length; ++_i) {
+ var track = tracks[_i];
+
+ if (track.mode !== 'showing') {
+ continue;
+ }
+
+ showingTracks.push(track);
+ }
+
+ this.updateForTrack(showingTracks);
+ return;
+ } // Track display prioritization model: if multiple tracks are 'showing',
+ // display the first 'subtitles' or 'captions' track which is 'showing',
+ // otherwise display the first 'descriptions' track which is 'showing'
+
+
+ var descriptionsTrack = null;
+ var captionsSubtitlesTrack = null;
+ var i = tracks.length;
+
+ while (i--) {
+ var _track = tracks[i];
+
+ if (_track.mode === 'showing') {
+ if (_track.kind === 'descriptions') {
+ descriptionsTrack = _track;
+ } else {
+ captionsSubtitlesTrack = _track;
+ }
+ }
+ }
+
+ if (captionsSubtitlesTrack) {
+ if (this.getAttribute('aria-live') !== 'off') {
+ this.setAttribute('aria-live', 'off');
+ }
+
+ this.updateForTrack(captionsSubtitlesTrack);
+ } else if (descriptionsTrack) {
+ if (this.getAttribute('aria-live') !== 'assertive') {
+ this.setAttribute('aria-live', 'assertive');
+ }
+
+ this.updateForTrack(descriptionsTrack);
+ }
+ }
+ /**
+ * Style {@link TextTrack} activeCues according to {@link TextTrackSettings}.
+ *
+ * @param {TextTrack} track
+ * Text track object containing active cues to style.
+ */
+ ;
+
+ _proto.updateDisplayState = function updateDisplayState(track) {
+ var overrides = this.player_.textTrackSettings.getValues();
+ var cues = track.activeCues;
+ var i = cues.length;
+
+ while (i--) {
+ var cue = cues[i];
+
+ if (!cue) {
+ continue;
+ }
+
+ var cueDiv = cue.displayState;
+
+ if (overrides.color) {
+ cueDiv.firstChild.style.color = overrides.color;
+ }
+
+ if (overrides.textOpacity) {
+ tryUpdateStyle(cueDiv.firstChild, 'color', constructColor(overrides.color || '#fff', overrides.textOpacity));
+ }
+
+ if (overrides.backgroundColor) {
+ cueDiv.firstChild.style.backgroundColor = overrides.backgroundColor;
+ }
+
+ if (overrides.backgroundOpacity) {
+ tryUpdateStyle(cueDiv.firstChild, 'backgroundColor', constructColor(overrides.backgroundColor || '#000', overrides.backgroundOpacity));
+ }
+
+ if (overrides.windowColor) {
+ if (overrides.windowOpacity) {
+ tryUpdateStyle(cueDiv, 'backgroundColor', constructColor(overrides.windowColor, overrides.windowOpacity));
+ } else {
+ cueDiv.style.backgroundColor = overrides.windowColor;
+ }
+ }
+
+ if (overrides.edgeStyle) {
+ if (overrides.edgeStyle === 'dropshadow') {
+ cueDiv.firstChild.style.textShadow = "2px 2px 3px " + darkGray + ", 2px 2px 4px " + darkGray + ", 2px 2px 5px " + darkGray;
+ } else if (overrides.edgeStyle === 'raised') {
+ cueDiv.firstChild.style.textShadow = "1px 1px " + darkGray + ", 2px 2px " + darkGray + ", 3px 3px " + darkGray;
+ } else if (overrides.edgeStyle === 'depressed') {
+ cueDiv.firstChild.style.textShadow = "1px 1px " + lightGray + ", 0 1px " + lightGray + ", -1px -1px " + darkGray + ", 0 -1px " + darkGray;
+ } else if (overrides.edgeStyle === 'uniform') {
+ cueDiv.firstChild.style.textShadow = "0 0 4px " + darkGray + ", 0 0 4px " + darkGray + ", 0 0 4px " + darkGray + ", 0 0 4px " + darkGray;
+ }
+ }
+
+ if (overrides.fontPercent && overrides.fontPercent !== 1) {
+ var fontSize = window__default['default'].parseFloat(cueDiv.style.fontSize);
+ cueDiv.style.fontSize = fontSize * overrides.fontPercent + 'px';
+ cueDiv.style.height = 'auto';
+ cueDiv.style.top = 'auto';
+ }
+
+ if (overrides.fontFamily && overrides.fontFamily !== 'default') {
+ if (overrides.fontFamily === 'small-caps') {
+ cueDiv.firstChild.style.fontVariant = 'small-caps';
+ } else {
+ cueDiv.firstChild.style.fontFamily = fontMap[overrides.fontFamily];
+ }
+ }
+ }
+ }
+ /**
+ * Render the active cues of the given {@link TextTrack}(s) inside this component's element.
+ *
+ * @param {TextTrack|TextTrack[]} tracks
+ * Text track object or array of text track objects whose active cues should be displayed.
+ */
+ ;
+
+ _proto.updateForTrack = function updateForTrack(tracks) {
+ if (!Array.isArray(tracks)) {
+ tracks = [tracks];
+ }
+
+ if (typeof window__default['default'].WebVTT !== 'function' || tracks.every(function (track) {
+ return !track.activeCues;
+ })) {
+ return;
+ }
+
+ var cues = []; // push all active track cues
+
+ for (var i = 0; i < tracks.length; ++i) {
+ var track = tracks[i];
+
+ for (var j = 0; j < track.activeCues.length; ++j) {
+ cues.push(track.activeCues[j]);
+ }
+ } // removes all cues before it processes new ones
+
+
+ window__default['default'].WebVTT.processCues(window__default['default'], cues, this.el_); // add unique class to each language text track & add settings styling if necessary
+
+ for (var _i2 = 0; _i2 < tracks.length; ++_i2) {
+ var _track2 = tracks[_i2];
+
+ for (var _j = 0; _j < _track2.activeCues.length; ++_j) {
+ var cueEl = _track2.activeCues[_j].displayState;
+ addClass(cueEl, 'vjs-text-track-cue');
+ addClass(cueEl, 'vjs-text-track-cue-' + (_track2.language ? _track2.language : _i2));
+
+ if (_track2.language) {
+ setAttribute(cueEl, 'lang', _track2.language);
+ }
+ }
+
+ if (this.player_.textTrackSettings) {
+ this.updateDisplayState(_track2);
+ }
+ }
+ };
+
+ return TextTrackDisplay;
+}(Component$1);
+
+Component$1.registerComponent('TextTrackDisplay', TextTrackDisplay);
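+
+// Usage sketch (illustrative comment only, not part of the upstream bundle):
+// switching a track's mode fires a 'texttrackchange' on the player, which drives
+// TextTrackDisplay#updateDisplay above. `player` is assumed to be an existing
+// video.js Player instance.
+//
+//   var tracks = player.textTracks();
+//   if (tracks.length) {
+//     tracks[0].mode = 'showing';
+//   }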
+
+/**
+ * A loading spinner for use during waiting/loading events.
+ *
+ * @extends Component
+ */
+
+var LoadingSpinner = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](LoadingSpinner, _Component);
+
+ function LoadingSpinner() {
+ return _Component.apply(this, arguments) || this;
+ }
+
+ var _proto = LoadingSpinner.prototype;
+
+ /**
+ * Create the `LoadingSpinner`s DOM element.
+ *
+ * @return {Element}
+ * The dom element that gets created.
+ */
+ _proto.createEl = function createEl$1() {
+ var isAudio = this.player_.isAudio();
+ var playerType = this.localize(isAudio ? 'Audio Player' : 'Video Player');
+ var controlText = createEl('span', {
+ className: 'vjs-control-text',
+ textContent: this.localize('{1} is loading.', [playerType])
+ });
+
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-loading-spinner',
+ dir: 'ltr'
+ });
+
+ el.appendChild(controlText);
+ return el;
+ };
+
+ return LoadingSpinner;
+}(Component$1);
+
+Component$1.registerComponent('LoadingSpinner', LoadingSpinner);
+
+/**
+ * Base class for all buttons.
+ *
+ * @extends ClickableComponent
+ */
+
+var Button = /*#__PURE__*/function (_ClickableComponent) {
+ _inheritsLoose__default['default'](Button, _ClickableComponent);
+
+ function Button() {
+ return _ClickableComponent.apply(this, arguments) || this;
+ }
+
+ var _proto = Button.prototype;
+
+ /**
+ * Create the `Button`s DOM element.
+ *
+ * @param {string} [tag="button"]
+ * The element's node type. This argument is IGNORED: no matter what
+ * is passed, it will always create a `button` element.
+ *
+ * @param {Object} [props={}]
+ * An object of properties that should be set on the element.
+ *
+ * @param {Object} [attributes={}]
+ * An object of attributes that should be set on the element.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+ _proto.createEl = function createEl$1(tag, props, attributes) {
+ if (props === void 0) {
+ props = {};
+ }
+
+ if (attributes === void 0) {
+ attributes = {};
+ }
+
+ tag = 'button';
+ props = assign({
+ className: this.buildCSSClass()
+ }, props); // Add attributes for button element
+
+ attributes = assign({
+ // Necessary since the default button type is "submit"
+ type: 'button'
+ }, attributes);
+
+ var el = createEl(tag, props, attributes);
+
+ el.appendChild(createEl('span', {
+ className: 'vjs-icon-placeholder'
+ }, {
+ 'aria-hidden': true
+ }));
+ this.createControlTextEl(el);
+ return el;
+ }
+ /**
+ * Add a child `Component` inside of this `Button`.
+ *
+ * @param {string|Component} child
+ * The name or instance of a child to add.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of options that will get passed to children of
+ * the child.
+ *
+ * @return {Component}
+ * The `Component` that gets added as a child. When using a string the
+ * `Component` will get created by this process.
+ *
+ * @deprecated since version 5
+ */
+ ;
+
+ _proto.addChild = function addChild(child, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ var className = this.constructor.name;
+ log$1.warn("Adding an actionable (user controllable) child to a Button (" + className + ") is not supported; use a ClickableComponent instead."); // Avoid the error message generated by ClickableComponent's addChild method
+
+ return Component$1.prototype.addChild.call(this, child, options);
+ }
+ /**
+ * Enable the `Button` element so that it can be activated or clicked. Use this with
+ * {@link Button#disable}.
+ */
+ ;
+
+ _proto.enable = function enable() {
+ _ClickableComponent.prototype.enable.call(this);
+
+ this.el_.removeAttribute('disabled');
+ }
+ /**
+ * Disable the `Button` element so that it cannot be activated or clicked. Use this with
+ * {@link Button#enable}.
+ */
+ ;
+
+ _proto.disable = function disable() {
+ _ClickableComponent.prototype.disable.call(this);
+
+ this.el_.setAttribute('disabled', 'disabled');
+ }
+ /**
+ * This gets called when a `Button` has focus and `keydown` is triggered via a key
+ * press.
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to get called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Ignore Space or Enter key operation, which is handled by the browser for
+ // a button - though not for its super class, ClickableComponent. Also,
+ // prevent the event from propagating through the DOM and triggering Player
+ // hotkeys. We do not preventDefault here because we _want_ the browser to
+ // handle it.
+ if (keycode__default['default'].isEventKey(event, 'Space') || keycode__default['default'].isEventKey(event, 'Enter')) {
+ event.stopPropagation();
+ return;
+ } // Pass keypress handling up for unsupported keys
+
+
+ _ClickableComponent.prototype.handleKeyDown.call(this, event);
+ };
+
+ return Button;
+}(ClickableComponent);
+
+Component$1.registerComponent('Button', Button);
+
+/**
+ * The initial play button that shows before the video has played. The hiding of the
+ * `BigPlayButton` is done via CSS and `Player` states.
+ *
+ * @extends Button
+ */
+
+var BigPlayButton = /*#__PURE__*/function (_Button) {
+ _inheritsLoose__default['default'](BigPlayButton, _Button);
+
+ function BigPlayButton(player, options) {
+ var _this;
+
+ _this = _Button.call(this, player, options) || this;
+ _this.mouseused_ = false;
+
+ _this.on('mousedown', function (e) {
+ return _this.handleMouseDown(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object. Always returns 'vjs-big-play-button'.
+ */
+
+
+ var _proto = BigPlayButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return 'vjs-big-play-button';
+ }
+ /**
+ * This gets called when a `BigPlayButton` is "clicked". See {@link ClickableComponent}
+ * for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ var playPromise = this.player_.play(); // exit early if clicked via the mouse
+
+ if (this.mouseused_ && event.clientX && event.clientY) {
+ var sourceIsEncrypted = this.player_.usingPlugin('eme') && this.player_.eme.sessions && this.player_.eme.sessions.length > 0;
+ silencePromise(playPromise);
+
+ if (this.player_.tech(true) && // We've observed a bug in IE and Edge when playing back DRM content where
+ // calling .focus() on the video element causes the video to go black,
+ // so we avoid it in that specific case
+ !((IE_VERSION || IS_EDGE) && sourceIsEncrypted)) {
+ this.player_.tech(true).focus();
+ }
+
+ return;
+ }
+
+ var cb = this.player_.getChild('controlBar');
+ var playToggle = cb && cb.getChild('playToggle');
+
+ if (!playToggle) {
+ this.player_.tech(true).focus();
+ return;
+ }
+
+ var playFocus = function playFocus() {
+ return playToggle.focus();
+ };
+
+ if (isPromise(playPromise)) {
+ playPromise.then(playFocus, function () {});
+ } else {
+ this.setTimeout(playFocus, 1);
+ }
+ };
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ this.mouseused_ = false;
+
+ _Button.prototype.handleKeyDown.call(this, event);
+ };
+
+ _proto.handleMouseDown = function handleMouseDown(event) {
+ this.mouseused_ = true;
+ };
+
+ return BigPlayButton;
+}(Button);
+/**
+ * The text that should display over the `BigPlayButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+BigPlayButton.prototype.controlText_ = 'Play Video';
+Component$1.registerComponent('BigPlayButton', BigPlayButton);
+
+/**
+ * The `CloseButton` is a `{@link Button}` that fires a `close` event when
+ * it gets clicked.
+ *
+ * @extends Button
+ */
+
+var CloseButton = /*#__PURE__*/function (_Button) {
+ _inheritsLoose__default['default'](CloseButton, _Button);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function CloseButton(player, options) {
+ var _this;
+
+ _this = _Button.call(this, player, options) || this;
+
+ _this.controlText(options && options.controlText || _this.localize('Close'));
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = CloseButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-close-button " + _Button.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * This gets called when a `CloseButton` gets clicked. See
+ * {@link ClickableComponent#handleClick} for more information on when
+ * this will be triggered
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ * @fires CloseButton#close
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ /**
+ * Triggered when a `CloseButton` is clicked.
+ *
+ * @event CloseButton#close
+ * @type {EventTarget~Event}
+ *
+ * @property {boolean} [bubbles=false]
+ * set to false so that the close event does not
+ * bubble up to parents if there is no listener
+ */
+ this.trigger({
+ type: 'close',
+ bubbles: false
+ });
+ }
+ /**
+ * Event handler that is called when a `CloseButton` receives a
+ * `keydown` event.
+ *
+ * By default, if the key is Esc, it will trigger a `click` event.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Esc button will trigger `click` event
+ if (keycode__default['default'].isEventKey(event, 'Esc')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.trigger('click');
+ } else {
+ // Pass keypress handling up for unsupported keys
+ _Button.prototype.handleKeyDown.call(this, event);
+ }
+ };
+
+ return CloseButton;
+}(Button);
+
+Component$1.registerComponent('CloseButton', CloseButton);
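+
+// Usage sketch (illustrative comment only, not part of the upstream bundle): the
+// 'close' event does not bubble, so listen on the button itself. Removing the
+// button afterwards is just one possible reaction; `player` is assumed to be an
+// existing video.js Player instance.
+//
+//   var closeButton = player.addChild('CloseButton', { controlText: 'Dismiss' });
+//   closeButton.on('close', function () {
+//     player.removeChild(closeButton);
+//   });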
+
+/**
+ * Button to toggle between play and pause.
+ *
+ * @extends Button
+ */
+
+var PlayToggle = /*#__PURE__*/function (_Button) {
+ _inheritsLoose__default['default'](PlayToggle, _Button);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ */
+ function PlayToggle(player, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _Button.call(this, player, options) || this; // show or hide replay icon
+
+ options.replay = options.replay === undefined || options.replay;
+
+ _this.on(player, 'play', function (e) {
+ return _this.handlePlay(e);
+ });
+
+ _this.on(player, 'pause', function (e) {
+ return _this.handlePause(e);
+ });
+
+ if (options.replay) {
+ _this.on(player, 'ended', function (e) {
+ return _this.handleEnded(e);
+ });
+ }
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = PlayToggle.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-play-control " + _Button.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * This gets called when a `PlayToggle` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ if (this.player_.paused()) {
+ silencePromise(this.player_.play());
+ } else {
+ this.player_.pause();
+ }
+ }
+ /**
+ * This gets called once after the video has ended and the user seeks so that
+ * we can change the replay button back to a play button.
+ *
+ * @param {EventTarget~Event} [event]
+ * The event that caused this function to run.
+ *
+ * @listens Player#seeked
+ */
+ ;
+
+ _proto.handleSeeked = function handleSeeked(event) {
+ this.removeClass('vjs-ended');
+
+ if (this.player_.paused()) {
+ this.handlePause(event);
+ } else {
+ this.handlePlay(event);
+ }
+ }
+ /**
+ * Add the vjs-playing class to the element so it can change appearance.
+ *
+ * @param {EventTarget~Event} [event]
+ * The event that caused this function to run.
+ *
+ * @listens Player#play
+ */
+ ;
+
+ _proto.handlePlay = function handlePlay(event) {
+ this.removeClass('vjs-ended');
+ this.removeClass('vjs-paused');
+ this.addClass('vjs-playing'); // change the button text to "Pause"
+
+ this.controlText('Pause');
+ }
+ /**
+ * Add the vjs-paused class to the element so it can change appearance.
+ *
+ * @param {EventTarget~Event} [event]
+ * The event that caused this function to run.
+ *
+ * @listens Player#pause
+ */
+ ;
+
+ _proto.handlePause = function handlePause(event) {
+ this.removeClass('vjs-playing');
+ this.addClass('vjs-paused'); // change the button text to "Play"
+
+ this.controlText('Play');
+ }
+ /**
+ * Add the vjs-ended class to the element so it can change appearance
+ *
+ * @param {EventTarget~Event} [event]
+ * The event that caused this function to run.
+ *
+ * @listens Player#ended
+ */
+ ;
+
+ _proto.handleEnded = function handleEnded(event) {
+ var _this2 = this;
+
+ this.removeClass('vjs-playing');
+ this.addClass('vjs-ended'); // change the button text to "Replay"
+
+ this.controlText('Replay'); // on the next seek remove the replay button
+
+ this.one(this.player_, 'seeked', function (e) {
+ return _this2.handleSeeked(e);
+ });
+ };
+
+ return PlayToggle;
+}(Button);
+/**
+ * The text that should display over the `PlayToggle`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+PlayToggle.prototype.controlText_ = 'Play';
+Component$1.registerComponent('PlayToggle', PlayToggle);
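+
+// Usage sketch (illustrative comment only, not part of the upstream bundle): the
+// `replay` option read in the constructor above can be disabled through control
+// bar options when creating a player. The element id 'my-player' is an assumption.
+//
+//   videojs('my-player', {
+//     controlBar: { playToggle: { replay: false } }
+//   });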
+
+/**
+ * @file format-time.js
+ * @module format-time
+ */
+
+/**
+ * Format seconds as a time string, H:MM:SS or M:SS. Supplying a guide (in
+ * seconds) will force a number of leading zeros to cover the length of the
+ * guide.
+ *
+ * @private
+ * @param {number} seconds
+ * Number of seconds to be turned into a string
+ *
+ * @param {number} guide
+ * Number (in seconds) to model the string after
+ *
+ * @return {string}
+ * Time formatted as H:MM:SS or M:SS
+ */
+var defaultImplementation = function defaultImplementation(seconds, guide) {
+ seconds = seconds < 0 ? 0 : seconds;
+ var s = Math.floor(seconds % 60);
+ var m = Math.floor(seconds / 60 % 60);
+ var h = Math.floor(seconds / 3600);
+ var gm = Math.floor(guide / 60 % 60);
+ var gh = Math.floor(guide / 3600); // handle invalid times
+
+ if (isNaN(seconds) || seconds === Infinity) {
+ // '-' is false for all relational operators (e.g. <, >=) so this setting
+ // will add the minimum number of fields specified by the guide
+ h = m = s = '-';
+ } // Check if we need to show hours
+
+
+ h = h > 0 || gh > 0 ? h + ':' : ''; // If hours are showing, we may need to add a leading zero.
+ // Always show at least one digit of minutes.
+
+ m = ((h || gm >= 10) && m < 10 ? '0' + m : m) + ':'; // Check if a leading zero is needed for seconds
+
+ s = s < 10 ? '0' + s : s;
+ return h + m + s;
+}; // Internal pointer to the current implementation.
+
+
+var implementation = defaultImplementation;
+/**
+ * Replaces the default formatTime implementation with a custom implementation.
+ *
+ * @param {Function} customImplementation
+ * A function which will be used in place of the default formatTime
+ * implementation. Will receive the current time in seconds and the
+ * guide (in seconds) as arguments.
+ */
+
+function setFormatTime(customImplementation) {
+ implementation = customImplementation;
+}
+/**
+ * Resets formatTime to the default implementation.
+ */
+
+function resetFormatTime() {
+ implementation = defaultImplementation;
+}
+/**
+ * Delegates to either the default time formatting function or a custom
+ * function supplied via `setFormatTime`.
+ *
+ * Formats seconds as a time string (H:MM:SS or M:SS). Supplying a
+ * guide (in seconds) will force a number of leading zeros to cover the
+ * length of the guide.
+ *
+ * @static
+ * @example formatTime(125, 600) === "02:05"
+ * @param {number} seconds
+ * Number of seconds to be turned into a string
+ *
+ * @param {number} guide
+ * Number (in seconds) to model the string after
+ *
+ * @return {string}
+ * Time formatted as H:MM:SS or M:SS
+ */
+
+function formatTime(seconds, guide) {
+ if (guide === void 0) {
+ guide = seconds;
+ }
+
+ return implementation(seconds, guide);
+}
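+
+// Worked examples (comments only, not part of the upstream bundle), including a
+// custom implementation swapped in via setFormatTime:
+//
+//   formatTime(125);        // -> '2:05'
+//   formatTime(125, 600);   // -> '02:05' (a 10-minute guide forces the leading zero)
+//   formatTime(3725);       // -> '1:02:05'
+//
+//   setFormatTime(function (seconds, guide) {
+//     return Math.round(seconds) + 's';
+//   });
+//   formatTime(90);         // -> '90s'
+//   resetFormatTime();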
+
+/**
+ * Displays time information about the video
+ *
+ * @extends Component
+ */
+
+var TimeDisplay = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](TimeDisplay, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function TimeDisplay(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+
+ _this.on(player, ['timeupdate', 'ended'], function (e) {
+ return _this.updateContent(e);
+ });
+
+ _this.updateTextNode_();
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = TimeDisplay.prototype;
+
+ _proto.createEl = function createEl$1() {
+ var className = this.buildCSSClass();
+
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: className + " vjs-time-control vjs-control"
+ });
+
+ var span = createEl('span', {
+ className: 'vjs-control-text',
+ textContent: this.localize(this.labelText_) + "\xA0"
+ }, {
+ role: 'presentation'
+ });
+ el.appendChild(span);
+ this.contentEl_ = createEl('span', {
+ className: className + "-display"
+ }, {
+ // tell screen readers not to automatically read the time as it changes
+ 'aria-live': 'off',
+ // span elements have no implicit role, but some screen readers (notably VoiceOver)
+ // treat them as a break between items in the DOM when using arrow keys
+ // (or left-to-right swipes on iOS) to read contents of a page. Using
+ // role='presentation' causes VoiceOver to NOT treat this span as a break.
+ 'role': 'presentation'
+ });
+ el.appendChild(this.contentEl_);
+ return el;
+ };
+
+ _proto.dispose = function dispose() {
+ this.contentEl_ = null;
+ this.textNode_ = null;
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Updates the time display text node with a new time
+ *
+ * @param {number} [time=0] the time to update to
+ *
+ * @private
+ */
+ ;
+
+ _proto.updateTextNode_ = function updateTextNode_(time) {
+ var _this2 = this;
+
+ if (time === void 0) {
+ time = 0;
+ }
+
+ time = formatTime(time);
+
+ if (this.formattedTime_ === time) {
+ return;
+ }
+
+ this.formattedTime_ = time;
+ this.requestNamedAnimationFrame('TimeDisplay#updateTextNode_', function () {
+ if (!_this2.contentEl_) {
+ return;
+ }
+
+ var oldNode = _this2.textNode_;
+
+ if (oldNode && _this2.contentEl_.firstChild !== oldNode) {
+ oldNode = null;
+ log$1.warn('TimeDisplay#updateTextnode_: Prevented replacement of text node element since it was no longer a child of this node. Appending a new node instead.');
+ }
+
+ _this2.textNode_ = document__default['default'].createTextNode(_this2.formattedTime_);
+
+ if (!_this2.textNode_) {
+ return;
+ }
+
+ if (oldNode) {
+ _this2.contentEl_.replaceChild(_this2.textNode_, oldNode);
+ } else {
+ _this2.contentEl_.appendChild(_this2.textNode_);
+ }
+ });
+ }
+ /**
+ * To be filled out in the child class, should update the displayed time
+ * in accordance with the fact that the current time has changed.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `timeupdate` event that caused this to run.
+ *
+ * @listens Player#timeupdate
+ */
+ ;
+
+ _proto.updateContent = function updateContent(event) {};
+
+ return TimeDisplay;
+}(Component$1);
+/**
+ * The text that is added to the `TimeDisplay` for screen reader users.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+TimeDisplay.prototype.labelText_ = 'Time';
+/**
+ * The text that should display over the `TimeDisplay`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ *
+ * @deprecated in v7; controlText_ is not used in non-active display Components
+ */
+
+TimeDisplay.prototype.controlText_ = 'Time';
+Component$1.registerComponent('TimeDisplay', TimeDisplay);
+
+/**
+ * Displays the current time
+ *
+ * @extends TimeDisplay
+ */
+
+var CurrentTimeDisplay = /*#__PURE__*/function (_TimeDisplay) {
+ _inheritsLoose__default['default'](CurrentTimeDisplay, _TimeDisplay);
+
+ function CurrentTimeDisplay() {
+ return _TimeDisplay.apply(this, arguments) || this;
+ }
+
+ var _proto = CurrentTimeDisplay.prototype;
+
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ _proto.buildCSSClass = function buildCSSClass() {
+ return 'vjs-current-time';
+ }
+ /**
+ * Update current time display
+ *
+ * @param {EventTarget~Event} [event]
+ * The `timeupdate` event that caused this function to run.
+ *
+ * @listens Player#timeupdate
+ */
+ ;
+
+ _proto.updateContent = function updateContent(event) {
+ // Allows for smooth scrubbing, when player can't keep up.
+ var time;
+
+ if (this.player_.ended()) {
+ time = this.player_.duration();
+ } else {
+ time = this.player_.scrubbing() ? this.player_.getCache().currentTime : this.player_.currentTime();
+ }
+
+ this.updateTextNode_(time);
+ };
+
+ return CurrentTimeDisplay;
+}(TimeDisplay);
+/**
+ * The text that is added to the `CurrentTimeDisplay` for screen reader users.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+CurrentTimeDisplay.prototype.labelText_ = 'Current Time';
+/**
+ * The text that should display over the `CurrentTimeDisplay`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ *
+ * @deprecated in v7; controlText_ is not used in non-active display Components
+ */
+
+CurrentTimeDisplay.prototype.controlText_ = 'Current Time';
+Component$1.registerComponent('CurrentTimeDisplay', CurrentTimeDisplay);
+
+/**
+ * Displays the duration
+ *
+ * @extends TimeDisplay
+ */
+
+var DurationDisplay = /*#__PURE__*/function (_TimeDisplay) {
+ _inheritsLoose__default['default'](DurationDisplay, _TimeDisplay);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function DurationDisplay(player, options) {
+ var _this;
+
+ _this = _TimeDisplay.call(this, player, options) || this;
+
+ var updateContent = function updateContent(e) {
+ return _this.updateContent(e);
+ }; // We do not want or need to throttle duration changes, as the display
+ // should always reflect the new duration as soon as it changes.
+
+
+ _this.on(player, 'durationchange', updateContent); // Listen to loadstart because the player duration is reset when a new media element is loaded,
+ // but the durationchange on the user agent will not fire.
+ // @see [Spec]{@link https://www.w3.org/TR/2011/WD-html5-20110113/video.html#media-element-load-algorithm}
+
+
+ _this.on(player, 'loadstart', updateContent); // Also listen for timeupdate (in the parent) and loadedmetadata because removing those
+ // listeners could have broken dependent applications/libraries. These
+ // can likely be removed for 7.0.
+
+
+ _this.on(player, 'loadedmetadata', updateContent);
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = DurationDisplay.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return 'vjs-duration';
+ }
+ /**
+ * Update duration time display.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `durationchange`, `timeupdate`, or `loadedmetadata` event that caused
+ * this function to be called.
+ *
+ * @listens Player#durationchange
+ * @listens Player#timeupdate
+ * @listens Player#loadedmetadata
+ */
+ ;
+
+ _proto.updateContent = function updateContent(event) {
+ var duration = this.player_.duration();
+ this.updateTextNode_(duration);
+ };
+
+ return DurationDisplay;
+}(TimeDisplay);
+/**
+ * The text that is added to the `DurationDisplay` for screen reader users.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+DurationDisplay.prototype.labelText_ = 'Duration';
+/**
+ * The text that should display over the `DurationDisplay`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ *
+ * @deprecated in v7; controlText_ is not used in non-active display Components
+ */
+
+DurationDisplay.prototype.controlText_ = 'Duration';
+Component$1.registerComponent('DurationDisplay', DurationDisplay);
+
+/**
+ * The separator between the current time and duration.
+ * Can be hidden if it's not needed in the design.
+ *
+ * @extends Component
+ */
+
+var TimeDivider = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](TimeDivider, _Component);
+
+ function TimeDivider() {
+ return _Component.apply(this, arguments) || this;
+ }
+
+ var _proto = TimeDivider.prototype;
+
+ /**
+ * Create the component's DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ _proto.createEl = function createEl() {
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-time-control vjs-time-divider'
+ }, {
+ // this element and its contents can be hidden from assistive techs since
+ // it is made extraneous by the announcement of the control text
+ // for the current time and duration displays
+ 'aria-hidden': true
+ });
+
+ var div = _Component.prototype.createEl.call(this, 'div');
+
+ var span = _Component.prototype.createEl.call(this, 'span', {
+ textContent: '/'
+ });
+
+ div.appendChild(span);
+ el.appendChild(div);
+ return el;
+ };
+
+ return TimeDivider;
+}(Component$1);
+
+Component$1.registerComponent('TimeDivider', TimeDivider);
+
+/**
+ * Displays the time left in the video
+ *
+ * @extends TimeDisplay
+ */
+
+var RemainingTimeDisplay = /*#__PURE__*/function (_TimeDisplay) {
+ _inheritsLoose__default['default'](RemainingTimeDisplay, _TimeDisplay);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function RemainingTimeDisplay(player, options) {
+ var _this;
+
+ _this = _TimeDisplay.call(this, player, options) || this;
+
+ _this.on(player, 'durationchange', function (e) {
+ return _this.updateContent(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = RemainingTimeDisplay.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return 'vjs-remaining-time';
+ }
+ /**
+ * Create the `Component`'s DOM element with the "minus" character prepended to the time
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ ;
+
+ _proto.createEl = function createEl$1() {
+ var el = _TimeDisplay.prototype.createEl.call(this);
+
+ if (this.options_.displayNegative !== false) {
+ el.insertBefore(createEl('span', {}, {
+ 'aria-hidden': true
+ }, '-'), this.contentEl_);
+ }
+
+ return el;
+ }
+ /**
+ * Update remaining time display.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `timeupdate` or `durationchange` event that caused this to run.
+ *
+ * @listens Player#timeupdate
+ * @listens Player#durationchange
+ */
+ ;
+
+ _proto.updateContent = function updateContent(event) {
+ if (typeof this.player_.duration() !== 'number') {
+ return;
+ }
+
+ var time; // @deprecated We should only use remainingTimeDisplay
+ // as of video.js 7
+
+ if (this.player_.ended()) {
+ time = 0;
+ } else if (this.player_.remainingTimeDisplay) {
+ time = this.player_.remainingTimeDisplay();
+ } else {
+ time = this.player_.remainingTime();
+ }
+
+ this.updateTextNode_(time);
+ };
+
+ return RemainingTimeDisplay;
+}(TimeDisplay);
+/**
+ * The text that is added to the `RemainingTimeDisplay` for screen reader users.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+RemainingTimeDisplay.prototype.labelText_ = 'Remaining Time';
+/**
+ * The text that should display over the `RemainingTimeDisplay`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ *
+ * @deprecated in v7; controlText_ is not used in non-active display Components
+ */
+
+RemainingTimeDisplay.prototype.controlText_ = 'Remaining Time';
+Component$1.registerComponent('RemainingTimeDisplay', RemainingTimeDisplay);
+
+/**
+ * Displays the live indicator when duration is Infinity.
+ *
+ * @extends Component
+ */
+
+var LiveDisplay = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](LiveDisplay, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function LiveDisplay(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+
+ _this.updateShowing();
+
+ _this.on(_this.player(), 'durationchange', function (e) {
+ return _this.updateShowing(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = LiveDisplay.prototype;
+
+ _proto.createEl = function createEl$1() {
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-live-control vjs-control'
+ });
+
+ this.contentEl_ = createEl('div', {
+ className: 'vjs-live-display'
+ }, {
+ 'aria-live': 'off'
+ });
+ this.contentEl_.appendChild(createEl('span', {
+ className: 'vjs-control-text',
+ textContent: this.localize('Stream Type') + "\xA0"
+ }));
+ this.contentEl_.appendChild(document__default['default'].createTextNode(this.localize('LIVE')));
+ el.appendChild(this.contentEl_);
+ return el;
+ };
+
+ _proto.dispose = function dispose() {
+ this.contentEl_ = null;
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Check the duration to see if the LiveDisplay should be showing or not. Then show/hide
+ * it accordingly
+ *
+ * @param {EventTarget~Event} [event]
+ * The {@link Player#durationchange} event that caused this function to run.
+ *
+ * @listens Player#durationchange
+ */
+ ;
+
+ _proto.updateShowing = function updateShowing(event) {
+ if (this.player().duration() === Infinity) {
+ this.show();
+ } else {
+ this.hide();
+ }
+ };
+
+ return LiveDisplay;
+}(Component$1);
+
+Component$1.registerComponent('LiveDisplay', LiveDisplay);
+
+/**
+ * A button that, when clicked, seeks the player to the live edge.
+ *
+ * @extends Button
+ */
+
+var SeekToLive = /*#__PURE__*/function (_Button) {
+ _inheritsLoose__default['default'](SeekToLive, _Button);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function SeekToLive(player, options) {
+ var _this;
+
+ _this = _Button.call(this, player, options) || this;
+
+ _this.updateLiveEdgeStatus();
+
+ if (_this.player_.liveTracker) {
+ _this.updateLiveEdgeStatusHandler_ = function (e) {
+ return _this.updateLiveEdgeStatus(e);
+ };
+
+ _this.on(_this.player_.liveTracker, 'liveedgechange', _this.updateLiveEdgeStatusHandler_);
+ }
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = SeekToLive.prototype;
+
+ _proto.createEl = function createEl$1() {
+ var el = _Button.prototype.createEl.call(this, 'button', {
+ className: 'vjs-seek-to-live-control vjs-control'
+ });
+
+ this.textEl_ = createEl('span', {
+ className: 'vjs-seek-to-live-text',
+ textContent: this.localize('LIVE')
+ }, {
+ 'aria-hidden': 'true'
+ });
+ el.appendChild(this.textEl_);
+ return el;
+ }
+ /**
+ * Update the state of this button if we are at the live edge
+ * or not
+ */
+ ;
+
+ _proto.updateLiveEdgeStatus = function updateLiveEdgeStatus() {
+ // default to live edge
+ if (!this.player_.liveTracker || this.player_.liveTracker.atLiveEdge()) {
+ this.setAttribute('aria-disabled', true);
+ this.addClass('vjs-at-live-edge');
+ this.controlText('Seek to live, currently playing live');
+ } else {
+ this.setAttribute('aria-disabled', false);
+ this.removeClass('vjs-at-live-edge');
+ this.controlText('Seek to live, currently behind live');
+ }
+ }
+ /**
+ * On click, bring us as near to the live point as possible. This requires
+ * waiting for the next `live-seekable-change` event, which will happen
+ * roughly once every segment length in seconds.
+ */
+ ;
+
+ _proto.handleClick = function handleClick() {
+ this.player_.liveTracker.seekToLiveEdge();
+ }
+ /**
+ * Dispose of the element and stop tracking
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ if (this.player_.liveTracker) {
+ this.off(this.player_.liveTracker, 'liveedgechange', this.updateLiveEdgeStatusHandler_);
+ }
+
+ this.textEl_ = null;
+
+ _Button.prototype.dispose.call(this);
+ };
+
+ return SeekToLive;
+}(Button);
+
+SeekToLive.prototype.controlText_ = 'Seek to live, currently playing live';
+Component$1.registerComponent('SeekToLive', SeekToLive);
+
+/**
+ * Keep a number between a min and a max value
+ *
+ * @param {number} number
+ * The number to clamp
+ *
+ * @param {number} min
+ * The minimum value
+ * @param {number} max
+ * The maximum value
+ *
+ * @return {number}
+ * the clamped number
+ */
+var clamp = function clamp(number, min, max) {
+ number = Number(number);
+ return Math.min(max, Math.max(min, isNaN(number) ? min : number));
+};
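+
+// Worked examples (comments only, not part of the upstream bundle):
+//
+//   clamp(150, 0, 100);    // -> 100
+//   clamp(-5, 0, 100);     // -> 0
+//   clamp('abc', 0, 100);  // -> 0 (non-numeric input falls back to the minimum)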
+
+/**
+ * The base functionality for a slider. Can be vertical or horizontal.
+ * For instance the volume bar or the seek bar on a video is a slider.
+ *
+ * @extends Component
+ */
+
+var Slider = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](Slider, _Component);
+
+ /**
+ * Create an instance of this class
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function Slider(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+
+ _this.handleMouseDown_ = function (e) {
+ return _this.handleMouseDown(e);
+ };
+
+ _this.handleMouseUp_ = function (e) {
+ return _this.handleMouseUp(e);
+ };
+
+ _this.handleKeyDown_ = function (e) {
+ return _this.handleKeyDown(e);
+ };
+
+ _this.handleClick_ = function (e) {
+ return _this.handleClick(e);
+ };
+
+ _this.handleMouseMove_ = function (e) {
+ return _this.handleMouseMove(e);
+ };
+
+ _this.update_ = function (e) {
+ return _this.update(e);
+ }; // Set property names to bar to match what the child Slider class is looking for
+
+
+ _this.bar = _this.getChild(_this.options_.barName); // Set a horizontal or vertical class on the slider depending on the slider type
+
+ _this.vertical(!!_this.options_.vertical);
+
+ _this.enable();
+
+ return _this;
+ }
+ /**
+ * Whether controls are currently enabled for this slider or not.
+ *
+ * @return {boolean}
+ * true if controls are enabled, false otherwise
+ */
+
+
+ var _proto = Slider.prototype;
+
+ _proto.enabled = function enabled() {
+ return this.enabled_;
+ }
+ /**
+ * Enable controls for this slider if they are disabled
+ */
+ ;
+
+ _proto.enable = function enable() {
+ if (this.enabled()) {
+ return;
+ }
+
+ this.on('mousedown', this.handleMouseDown_);
+ this.on('touchstart', this.handleMouseDown_);
+ this.on('keydown', this.handleKeyDown_);
+ this.on('click', this.handleClick_); // TODO: deprecated, controlsvisible does not seem to be fired
+
+ this.on(this.player_, 'controlsvisible', this.update);
+
+ if (this.playerEvent) {
+ this.on(this.player_, this.playerEvent, this.update);
+ }
+
+ this.removeClass('disabled');
+ this.setAttribute('tabindex', 0);
+ this.enabled_ = true;
+ }
+ /**
+ * Disable controls for this slider if they are enabled
+ */
+ ;
+
+ _proto.disable = function disable() {
+ if (!this.enabled()) {
+ return;
+ }
+
+ var doc = this.bar.el_.ownerDocument;
+ this.off('mousedown', this.handleMouseDown_);
+ this.off('touchstart', this.handleMouseDown_);
+ this.off('keydown', this.handleKeyDown_);
+ this.off('click', this.handleClick_);
+ this.off(this.player_, 'controlsvisible', this.update_);
+ this.off(doc, 'mousemove', this.handleMouseMove_);
+ this.off(doc, 'mouseup', this.handleMouseUp_);
+ this.off(doc, 'touchmove', this.handleMouseMove_);
+ this.off(doc, 'touchend', this.handleMouseUp_);
+ this.removeAttribute('tabindex');
+ this.addClass('disabled');
+
+ if (this.playerEvent) {
+ this.off(this.player_, this.playerEvent, this.update);
+ }
+
+ this.enabled_ = false;
+ }
+ /**
+ * Create the `Slider`s DOM element.
+ *
+ * @param {string} type
+ * Type of element to create.
+ *
+ * @param {Object} [props={}]
+ * List of properties in Object form.
+ *
+ * @param {Object} [attributes={}]
+ * list of attributes in Object form.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+ ;
+
+ _proto.createEl = function createEl(type, props, attributes) {
+ if (props === void 0) {
+ props = {};
+ }
+
+ if (attributes === void 0) {
+ attributes = {};
+ }
+
+ // Add the slider element class to all sub classes
+ props.className = props.className + ' vjs-slider';
+ props = assign({
+ tabIndex: 0
+ }, props);
+ attributes = assign({
+ 'role': 'slider',
+ 'aria-valuenow': 0,
+ 'aria-valuemin': 0,
+ 'aria-valuemax': 100,
+ 'tabIndex': 0
+ }, attributes);
+ return _Component.prototype.createEl.call(this, type, props, attributes);
+ }
+ /**
+ * Handle `mousedown` or `touchstart` events on the `Slider`.
+ *
+ * @param {EventTarget~Event} event
+ * `mousedown` or `touchstart` event that triggered this function
+ *
+ * @listens mousedown
+ * @listens touchstart
+ * @fires Slider#slideractive
+ */
+ ;
+
+ _proto.handleMouseDown = function handleMouseDown(event) {
+ var doc = this.bar.el_.ownerDocument;
+
+ if (event.type === 'mousedown') {
+ event.preventDefault();
+ } // Do not call preventDefault() on touchstart in Chrome
+ // to avoid console warnings. Use a 'touch-action: none' style
+    // instead to prevent unintended scrolling.
+ // https://developers.google.com/web/updates/2017/01/scrolling-intervention
+
+
+ if (event.type === 'touchstart' && !IS_CHROME) {
+ event.preventDefault();
+ }
+
+ blockTextSelection();
+ this.addClass('vjs-sliding');
+ /**
+ * Triggered when the slider is in an active state
+ *
+ * @event Slider#slideractive
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('slideractive');
+ this.on(doc, 'mousemove', this.handleMouseMove_);
+ this.on(doc, 'mouseup', this.handleMouseUp_);
+ this.on(doc, 'touchmove', this.handleMouseMove_);
+ this.on(doc, 'touchend', this.handleMouseUp_);
+ this.handleMouseMove(event, true);
+ }
+ /**
+ * Handle the `mousemove`, `touchmove`, and `mousedown` events on this `Slider`.
+   * The `mousemove` and `touchmove` events will only trigger this function during
+ * `mousedown` and `touchstart`. This is due to {@link Slider#handleMouseDown} and
+ * {@link Slider#handleMouseUp}.
+ *
+ * @param {EventTarget~Event} event
+ * `mousedown`, `mousemove`, `touchstart`, or `touchmove` event that triggered
+ * this function
+   * @param {boolean} mouseDown A flag that should be set to true when `handleMouseMove` is called directly from `handleMouseDown`, so that work meant only for the regular mouse move handler can be skipped. Defaults to false.
+ *
+ * @listens mousemove
+ * @listens touchmove
+ */
+ ;
+
+ _proto.handleMouseMove = function handleMouseMove(event) {}
+ /**
+ * Handle `mouseup` or `touchend` events on the `Slider`.
+ *
+ * @param {EventTarget~Event} event
+ * `mouseup` or `touchend` event that triggered this function.
+ *
+ * @listens touchend
+ * @listens mouseup
+ * @fires Slider#sliderinactive
+ */
+ ;
+
+ _proto.handleMouseUp = function handleMouseUp() {
+ var doc = this.bar.el_.ownerDocument;
+ unblockTextSelection();
+ this.removeClass('vjs-sliding');
+ /**
+ * Triggered when the slider is no longer in an active state.
+ *
+ * @event Slider#sliderinactive
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('sliderinactive');
+ this.off(doc, 'mousemove', this.handleMouseMove_);
+ this.off(doc, 'mouseup', this.handleMouseUp_);
+ this.off(doc, 'touchmove', this.handleMouseMove_);
+ this.off(doc, 'touchend', this.handleMouseUp_);
+ this.update();
+ }
+ /**
+ * Update the progress bar of the `Slider`.
+ *
+ * @return {number}
+ * The percentage of progress the progress bar represents as a
+ * number from 0 to 1.
+ */
+ ;
+
+ _proto.update = function update() {
+ var _this2 = this;
+
+    // In VolumeBar init we have a setTimeout for update that pushes an update
+    // to the end of the execution stack. If the player is destroyed before that
+    // runs, calling update would cause an error.
+ // If there's no bar...
+ if (!this.el_ || !this.bar) {
+ return;
+ } // clamp progress between 0 and 1
+ // and only round to four decimal places, as we round to two below
+
+
+ var progress = this.getProgress();
+
+ if (progress === this.progress_) {
+ return progress;
+ }
+
+ this.progress_ = progress;
+ this.requestNamedAnimationFrame('Slider#update', function () {
+ // Set the new bar width or height
+ var sizeKey = _this2.vertical() ? 'height' : 'width'; // Convert to a percentage for css value
+
+ _this2.bar.el().style[sizeKey] = (progress * 100).toFixed(2) + '%';
+ });
+ return progress;
+ }
+ /**
+   * Get the percentage of the bar that should be filled,
+   * clamped between 0 and 1 and rounded to four decimal places.
+ *
+ * @return {number}
+ * percentage filled that the slider is
+ */
+ ;
+
+ _proto.getProgress = function getProgress() {
+ return Number(clamp(this.getPercent(), 0, 1).toFixed(4));
+ }
+ /**
+ * Calculate distance for slider
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to run.
+ *
+ * @return {number}
+ * The current position of the Slider.
+   *         - position.y for vertical `Slider`s
+   *         - position.x for horizontal `Slider`s
+ */
+ ;
+
+ _proto.calculateDistance = function calculateDistance(event) {
+ var position = getPointerPosition(this.el_, event);
+
+ if (this.vertical()) {
+ return position.y;
+ }
+
+ return position.x;
+ }
+ /**
+   * Handle a `keydown` event on the `Slider`. Watches for left, right, up, and down
+ * arrow keys. This function will only be called when the slider has focus. See
+ * {@link Slider#handleFocus} and {@link Slider#handleBlur}.
+ *
+ * @param {EventTarget~Event} event
+ * the `keydown` event that caused this function to run.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Left and Down Arrows
+ if (keycode__default['default'].isEventKey(event, 'Left') || keycode__default['default'].isEventKey(event, 'Down')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.stepBack(); // Up and Right Arrows
+ } else if (keycode__default['default'].isEventKey(event, 'Right') || keycode__default['default'].isEventKey(event, 'Up')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.stepForward();
+ } else {
+ // Pass keydown handling up for unsupported keys
+ _Component.prototype.handleKeyDown.call(this, event);
+ }
+ }
+ /**
+ * Listener for click events on slider, used to prevent clicks
+ * from bubbling up to parent elements like button menus.
+ *
+ * @param {Object} event
+ * Event that caused this object to run
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ event.stopPropagation();
+ event.preventDefault();
+ }
+ /**
+   * Get/set whether the slider is vertical or horizontal
+   *
+   * @param {boolean} [bool]
+   *        - true if slider is vertical,
+   *        - false if horizontal
+   *
+   * @return {boolean}
+   *         - true if the slider is vertical, when getting
+   *         - false if the slider is horizontal, when getting
+ */
+ ;
+
+ _proto.vertical = function vertical(bool) {
+ if (bool === undefined) {
+ return this.vertical_ || false;
+ }
+
+ this.vertical_ = !!bool;
+
+ if (this.vertical_) {
+ this.addClass('vjs-slider-vertical');
+ } else {
+ this.addClass('vjs-slider-horizontal');
+ }
+ };
+
+ return Slider;
+}(Component$1);
+
+Component$1.registerComponent('Slider', Slider);
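+
+// Illustrative note, not part of the upstream video.js source: Slider is an
+// abstract base. Concrete bars such as SeekBar and VolumeBar below supply a
+// child named by options_.barName, implement getPercent() (returning 0-1)
+// and handleMouseMove(), and may set playerEvent so the bar re-renders on a
+// player event. A minimal sketch of that contract, using hypothetical names
+// and mirroring how SeekBar wires its options:
+//
+//   class ExampleBar extends Slider {
+//     getPercent() {
+//       return this.player_.currentTime() / this.player_.duration();
+//     }
+//     handleMouseMove(event) {
+//       this.player_.currentTime(this.calculateDistance(event) * this.player_.duration());
+//     }
+//   }
+//   ExampleBar.prototype.options_ = { children: ['playProgressBar'], barName: 'playProgressBar' };
+//   ExampleBar.prototype.playerEvent = 'timeupdate';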
+
+var percentify = function percentify(time, end) {
+ return clamp(time / end * 100, 0, 100).toFixed(2) + '%';
+};
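+
+// For example (illustrative): percentify(30, 120) clamps 30 / 120 * 100 to the
+// 0-100 range and yields '25.00%', while percentify(150, 120) is capped at
+// '100.00%'.
+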
+/**
+ * Shows loading progress
+ *
+ * @extends Component
+ */
+
+
+var LoadProgressBar = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](LoadProgressBar, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function LoadProgressBar(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.partEls_ = [];
+
+ _this.on(player, 'progress', function (e) {
+ return _this.update(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = LoadProgressBar.prototype;
+
+ _proto.createEl = function createEl$1() {
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-load-progress'
+ });
+
+ var wrapper = createEl('span', {
+ className: 'vjs-control-text'
+ });
+ var loadedText = createEl('span', {
+ textContent: this.localize('Loaded')
+ });
+ var separator = document__default['default'].createTextNode(': ');
+ this.percentageEl_ = createEl('span', {
+ className: 'vjs-control-text-loaded-percentage',
+ textContent: '0%'
+ });
+ el.appendChild(wrapper);
+ wrapper.appendChild(loadedText);
+ wrapper.appendChild(separator);
+ wrapper.appendChild(this.percentageEl_);
+ return el;
+ };
+
+ _proto.dispose = function dispose() {
+ this.partEls_ = null;
+ this.percentageEl_ = null;
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Update progress bar
+ *
+ * @param {EventTarget~Event} [event]
+ * The `progress` event that caused this function to run.
+ *
+ * @listens Player#progress
+ */
+ ;
+
+ _proto.update = function update(event) {
+ var _this2 = this;
+
+ this.requestNamedAnimationFrame('LoadProgressBar#update', function () {
+ var liveTracker = _this2.player_.liveTracker;
+
+ var buffered = _this2.player_.buffered();
+
+ var duration = liveTracker && liveTracker.isLive() ? liveTracker.seekableEnd() : _this2.player_.duration();
+
+ var bufferedEnd = _this2.player_.bufferedEnd();
+
+ var children = _this2.partEls_;
+ var percent = percentify(bufferedEnd, duration);
+
+ if (_this2.percent_ !== percent) {
+ // update the width of the progress bar
+ _this2.el_.style.width = percent; // update the control-text
+
+ textContent(_this2.percentageEl_, percent);
+ _this2.percent_ = percent;
+ } // add child elements to represent the individual buffered time ranges
+
+
+ for (var i = 0; i < buffered.length; i++) {
+ var start = buffered.start(i);
+ var end = buffered.end(i);
+ var part = children[i];
+
+ if (!part) {
+ part = _this2.el_.appendChild(createEl());
+ children[i] = part;
+ } // only update if changed
+
+
+ if (part.dataset.start === start && part.dataset.end === end) {
+ continue;
+ }
+
+ part.dataset.start = start;
+ part.dataset.end = end; // set the percent based on the width of the progress bar (bufferedEnd)
+
+ part.style.left = percentify(start, bufferedEnd);
+ part.style.width = percentify(end - start, bufferedEnd);
+ } // remove unused buffered range elements
+
+
+ for (var _i = children.length; _i > buffered.length; _i--) {
+ _this2.el_.removeChild(children[_i - 1]);
+ }
+
+ children.length = buffered.length;
+ });
+ };
+
+ return LoadProgressBar;
+}(Component$1);
+
+Component$1.registerComponent('LoadProgressBar', LoadProgressBar);
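+
+// Illustrative note, not part of the upstream source: update() sizes the outer
+// element as bufferedEnd / duration and positions one child per buffered
+// TimeRange relative to bufferedEnd. For example, with duration = 200s,
+// bufferedEnd = 100s and a single buffered range [20s, 100s], the bar is
+// 50.00% wide and the child gets left: 20.00% and width: 80.00%.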
+
+/**
+ * Time tooltips display a time above the progress bar.
+ *
+ * @extends Component
+ */
+
+var TimeTooltip = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](TimeTooltip, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The {@link Player} that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function TimeTooltip(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.update = throttle(bind(_assertThisInitialized__default['default'](_this), _this.update), UPDATE_REFRESH_INTERVAL);
+ return _this;
+ }
+ /**
+ * Create the time tooltip DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = TimeTooltip.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-time-tooltip'
+ }, {
+ 'aria-hidden': 'true'
+ });
+ }
+ /**
+ * Updates the position of the time tooltip relative to the `SeekBar`.
+ *
+ * @param {Object} seekBarRect
+ * The `ClientRect` for the {@link SeekBar} element.
+ *
+ * @param {number} seekBarPoint
+ * A number from 0 to 1, representing a horizontal reference point
+ * from the left edge of the {@link SeekBar}
+ */
+ ;
+
+ _proto.update = function update(seekBarRect, seekBarPoint, content) {
+ var tooltipRect = findPosition(this.el_);
+ var playerRect = getBoundingClientRect(this.player_.el());
+ var seekBarPointPx = seekBarRect.width * seekBarPoint; // do nothing if either rect isn't available
+ // for example, if the player isn't in the DOM for testing
+
+ if (!playerRect || !tooltipRect) {
+ return;
+ } // This is the space left of the `seekBarPoint` available within the bounds
+ // of the player. We calculate any gap between the left edge of the player
+ // and the left edge of the `SeekBar` and add the number of pixels in the
+ // `SeekBar` before hitting the `seekBarPoint`
+
+
+ var spaceLeftOfPoint = seekBarRect.left - playerRect.left + seekBarPointPx; // This is the space right of the `seekBarPoint` available within the bounds
+ // of the player. We calculate the number of pixels from the `seekBarPoint`
+ // to the right edge of the `SeekBar` and add to that any gap between the
+ // right edge of the `SeekBar` and the player.
+
+ var spaceRightOfPoint = seekBarRect.width - seekBarPointPx + (playerRect.right - seekBarRect.right); // This is the number of pixels by which the tooltip will need to be pulled
+ // further to the right to center it over the `seekBarPoint`.
+
+ var pullTooltipBy = tooltipRect.width / 2; // Adjust the `pullTooltipBy` distance to the left or right depending on
+ // the results of the space calculations above.
+
+ if (spaceLeftOfPoint < pullTooltipBy) {
+ pullTooltipBy += pullTooltipBy - spaceLeftOfPoint;
+ } else if (spaceRightOfPoint < pullTooltipBy) {
+ pullTooltipBy = spaceRightOfPoint;
+ } // Due to the imprecision of decimal/ratio based calculations and varying
+ // rounding behaviors, there are cases where the spacing adjustment is off
+ // by a pixel or two. This adds insurance to these calculations.
+
+
+ if (pullTooltipBy < 0) {
+ pullTooltipBy = 0;
+ } else if (pullTooltipBy > tooltipRect.width) {
+ pullTooltipBy = tooltipRect.width;
+ } // prevent small width fluctuations within 0.4px from
+ // changing the value below.
+    // This really helps during live playback to prevent the play
+    // progress time tooltip from jittering
+
+
+ pullTooltipBy = Math.round(pullTooltipBy);
+ this.el_.style.right = "-" + pullTooltipBy + "px";
+ this.write(content);
+ }
+ /**
+ * Write the time to the tooltip DOM element.
+ *
+ * @param {string} content
+ * The formatted time for the tooltip.
+ */
+ ;
+
+ _proto.write = function write(content) {
+ textContent(this.el_, content);
+ }
+ /**
+ * Updates the position of the time tooltip relative to the `SeekBar`.
+ *
+ * @param {Object} seekBarRect
+ * The `ClientRect` for the {@link SeekBar} element.
+ *
+ * @param {number} seekBarPoint
+ * A number from 0 to 1, representing a horizontal reference point
+ * from the left edge of the {@link SeekBar}
+ *
+ * @param {number} time
+ * The time to update the tooltip to, not used during live playback
+ *
+ * @param {Function} cb
+ * A function that will be called during the request animation frame
+ * for tooltips that need to do additional animations from the default
+ */
+ ;
+
+ _proto.updateTime = function updateTime(seekBarRect, seekBarPoint, time, cb) {
+ var _this2 = this;
+
+ this.requestNamedAnimationFrame('TimeTooltip#updateTime', function () {
+ var content;
+
+ var duration = _this2.player_.duration();
+
+ if (_this2.player_.liveTracker && _this2.player_.liveTracker.isLive()) {
+ var liveWindow = _this2.player_.liveTracker.liveWindow();
+
+ var secondsBehind = liveWindow - seekBarPoint * liveWindow;
+ content = (secondsBehind < 1 ? '' : '-') + formatTime(secondsBehind, liveWindow);
+ } else {
+ content = formatTime(time, duration);
+ }
+
+ _this2.update(seekBarRect, seekBarPoint, content);
+
+ if (cb) {
+ cb();
+ }
+ });
+ };
+
+ return TimeTooltip;
+}(Component$1);
+
+Component$1.registerComponent('TimeTooltip', TimeTooltip);
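+
+// Illustrative note, not part of the upstream source: update() centers the
+// tooltip over seekBarPoint by offsetting el.style.right. For a 60px-wide
+// tooltip the default pull is 30px; if only 10px of space is available to the
+// left of the point, the pull grows to 30 + (30 - 10) = 50px so the tooltip
+// stays inside the player, and the result is clamped to [0, 60] and rounded.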
+
+/**
+ * Used by {@link SeekBar} to display media playback progress as part of the
+ * {@link ProgressControl}.
+ *
+ * @extends Component
+ */
+
+var PlayProgressBar = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](PlayProgressBar, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The {@link Player} that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function PlayProgressBar(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.update = throttle(bind(_assertThisInitialized__default['default'](_this), _this.update), UPDATE_REFRESH_INTERVAL);
+ return _this;
+ }
+ /**
+   * Create the DOM element for this class.
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = PlayProgressBar.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-play-progress vjs-slider-bar'
+ }, {
+ 'aria-hidden': 'true'
+ });
+ }
+ /**
+ * Enqueues updates to its own DOM as well as the DOM of its
+ * {@link TimeTooltip} child.
+ *
+ * @param {Object} seekBarRect
+ * The `ClientRect` for the {@link SeekBar} element.
+ *
+ * @param {number} seekBarPoint
+ * A number from 0 to 1, representing a horizontal reference point
+ * from the left edge of the {@link SeekBar}
+ */
+ ;
+
+ _proto.update = function update(seekBarRect, seekBarPoint) {
+ var timeTooltip = this.getChild('timeTooltip');
+
+ if (!timeTooltip) {
+ return;
+ }
+
+ var time = this.player_.scrubbing() ? this.player_.getCache().currentTime : this.player_.currentTime();
+ timeTooltip.updateTime(seekBarRect, seekBarPoint, time);
+ };
+
+ return PlayProgressBar;
+}(Component$1);
+/**
+ * Default options for {@link PlayProgressBar}.
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+PlayProgressBar.prototype.options_ = {
+ children: []
+}; // Time tooltips should not be added to a player on mobile devices
+
+if (!IS_IOS && !IS_ANDROID) {
+ PlayProgressBar.prototype.options_.children.push('timeTooltip');
+}
+
+Component$1.registerComponent('PlayProgressBar', PlayProgressBar);
+
+/**
+ * The {@link MouseTimeDisplay} component tracks mouse movement over the
+ * {@link ProgressControl}. It displays an indicator and a {@link TimeTooltip}
+ * indicating the time which is represented by a given point in the
+ * {@link ProgressControl}.
+ *
+ * @extends Component
+ */
+
+var MouseTimeDisplay = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](MouseTimeDisplay, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The {@link Player} that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function MouseTimeDisplay(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.update = throttle(bind(_assertThisInitialized__default['default'](_this), _this.update), UPDATE_REFRESH_INTERVAL);
+ return _this;
+ }
+ /**
+ * Create the DOM element for this class.
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = MouseTimeDisplay.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-mouse-display'
+ });
+ }
+ /**
+ * Enqueues updates to its own DOM as well as the DOM of its
+ * {@link TimeTooltip} child.
+ *
+ * @param {Object} seekBarRect
+ * The `ClientRect` for the {@link SeekBar} element.
+ *
+ * @param {number} seekBarPoint
+ * A number from 0 to 1, representing a horizontal reference point
+ * from the left edge of the {@link SeekBar}
+ */
+ ;
+
+ _proto.update = function update(seekBarRect, seekBarPoint) {
+ var _this2 = this;
+
+ var time = seekBarPoint * this.player_.duration();
+ this.getChild('timeTooltip').updateTime(seekBarRect, seekBarPoint, time, function () {
+ _this2.el_.style.left = seekBarRect.width * seekBarPoint + "px";
+ });
+ };
+
+ return MouseTimeDisplay;
+}(Component$1);
+/**
+ * Default options for `MouseTimeDisplay`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+MouseTimeDisplay.prototype.options_ = {
+ children: ['timeTooltip']
+};
+Component$1.registerComponent('MouseTimeDisplay', MouseTimeDisplay);
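+
+// Illustrative note, not part of the upstream source: update() converts the
+// hover point into a time as seekBarPoint * duration(). For example, hovering
+// at 25% of a 300-second video moves this display to 0.25 * barWidth pixels
+// from the left and asks its TimeTooltip to render formatTime(75), i.e. '1:15'.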
+
+var STEP_SECONDS = 5; // The multiplier of STEP_SECONDS that PgUp/PgDown move the timeline.
+
+var PAGE_KEY_MULTIPLIER = 12;
+/**
+ * Seek bar and container for the progress bars. Uses {@link PlayProgressBar}
+ * as its `bar`.
+ *
+ * @extends Slider
+ */
+
+var SeekBar = /*#__PURE__*/function (_Slider) {
+ _inheritsLoose__default['default'](SeekBar, _Slider);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function SeekBar(player, options) {
+ var _this;
+
+ _this = _Slider.call(this, player, options) || this;
+
+ _this.setEventHandlers_();
+
+ return _this;
+ }
+ /**
+ * Sets the event handlers
+ *
+ * @private
+ */
+
+
+ var _proto = SeekBar.prototype;
+
+ _proto.setEventHandlers_ = function setEventHandlers_() {
+ var _this2 = this;
+
+ this.update_ = bind(this, this.update);
+ this.update = throttle(this.update_, UPDATE_REFRESH_INTERVAL);
+ this.on(this.player_, ['ended', 'durationchange', 'timeupdate'], this.update);
+
+ if (this.player_.liveTracker) {
+ this.on(this.player_.liveTracker, 'liveedgechange', this.update);
+ } // when playing, let's ensure we smoothly update the play progress bar
+ // via an interval
+
+
+ this.updateInterval = null;
+
+ this.enableIntervalHandler_ = function (e) {
+ return _this2.enableInterval_(e);
+ };
+
+ this.disableIntervalHandler_ = function (e) {
+ return _this2.disableInterval_(e);
+ };
+
+ this.on(this.player_, ['playing'], this.enableIntervalHandler_);
+ this.on(this.player_, ['ended', 'pause', 'waiting'], this.disableIntervalHandler_); // we don't need to update the play progress if the document is hidden,
+ // also, this causes the CPU to spike and eventually crash the page on IE11.
+
+ if ('hidden' in document__default['default'] && 'visibilityState' in document__default['default']) {
+ this.on(document__default['default'], 'visibilitychange', this.toggleVisibility_);
+ }
+ };
+
+ _proto.toggleVisibility_ = function toggleVisibility_(e) {
+ if (document__default['default'].visibilityState === 'hidden') {
+ this.cancelNamedAnimationFrame('SeekBar#update');
+ this.cancelNamedAnimationFrame('Slider#update');
+ this.disableInterval_(e);
+ } else {
+ if (!this.player_.ended() && !this.player_.paused()) {
+ this.enableInterval_();
+ } // we just switched back to the page and someone may be looking, so, update ASAP
+
+
+ this.update();
+ }
+ };
+
+ _proto.enableInterval_ = function enableInterval_() {
+ if (this.updateInterval) {
+ return;
+ }
+
+ this.updateInterval = this.setInterval(this.update, UPDATE_REFRESH_INTERVAL);
+ };
+
+ _proto.disableInterval_ = function disableInterval_(e) {
+ if (this.player_.liveTracker && this.player_.liveTracker.isLive() && e && e.type !== 'ended') {
+ return;
+ }
+
+ if (!this.updateInterval) {
+ return;
+ }
+
+ this.clearInterval(this.updateInterval);
+ this.updateInterval = null;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ return _Slider.prototype.createEl.call(this, 'div', {
+ className: 'vjs-progress-holder'
+ }, {
+ 'aria-label': this.localize('Progress Bar')
+ });
+ }
+ /**
+ * This function updates the play progress bar and accessibility
+ * attributes to whatever is passed in.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `timeupdate` or `ended` event that caused this to run.
+ *
+ * @listens Player#timeupdate
+ *
+ * @return {number}
+   *          The current percent as a number from 0-1
+ */
+ ;
+
+ _proto.update = function update(event) {
+ var _this3 = this;
+
+ // ignore updates while the tab is hidden
+ if (document__default['default'].visibilityState === 'hidden') {
+ return;
+ }
+
+ var percent = _Slider.prototype.update.call(this);
+
+ this.requestNamedAnimationFrame('SeekBar#update', function () {
+ var currentTime = _this3.player_.ended() ? _this3.player_.duration() : _this3.getCurrentTime_();
+ var liveTracker = _this3.player_.liveTracker;
+
+ var duration = _this3.player_.duration();
+
+ if (liveTracker && liveTracker.isLive()) {
+ duration = _this3.player_.liveTracker.liveCurrentTime();
+ }
+
+ if (_this3.percent_ !== percent) {
+ // machine readable value of progress bar (percentage complete)
+ _this3.el_.setAttribute('aria-valuenow', (percent * 100).toFixed(2));
+
+ _this3.percent_ = percent;
+ }
+
+ if (_this3.currentTime_ !== currentTime || _this3.duration_ !== duration) {
+ // human readable value of progress bar (time complete)
+ _this3.el_.setAttribute('aria-valuetext', _this3.localize('progress bar timing: currentTime={1} duration={2}', [formatTime(currentTime, duration), formatTime(duration, duration)], '{1} of {2}'));
+
+ _this3.currentTime_ = currentTime;
+ _this3.duration_ = duration;
+ } // update the progress bar time tooltip with the current time
+
+
+ if (_this3.bar) {
+ _this3.bar.update(getBoundingClientRect(_this3.el()), _this3.getProgress());
+ }
+ });
+ return percent;
+ }
+ /**
+ * Prevent liveThreshold from causing seeks to seem like they
+ * are not happening from a user perspective.
+ *
+ * @param {number} ct
+ * current time to seek to
+ */
+ ;
+
+ _proto.userSeek_ = function userSeek_(ct) {
+ if (this.player_.liveTracker && this.player_.liveTracker.isLive()) {
+ this.player_.liveTracker.nextSeekedFromUser();
+ }
+
+ this.player_.currentTime(ct);
+ }
+ /**
+   * Get the value of current time, but allow for smooth scrubbing
+   * when the player can't keep up.
+ *
+ * @return {number}
+ * The current time value to display
+ *
+ * @private
+ */
+ ;
+
+ _proto.getCurrentTime_ = function getCurrentTime_() {
+ return this.player_.scrubbing() ? this.player_.getCache().currentTime : this.player_.currentTime();
+ }
+ /**
+ * Get the percentage of media played so far.
+ *
+ * @return {number}
+ * The percentage of media played so far (0 to 1).
+ */
+ ;
+
+ _proto.getPercent = function getPercent() {
+ var currentTime = this.getCurrentTime_();
+ var percent;
+ var liveTracker = this.player_.liveTracker;
+
+ if (liveTracker && liveTracker.isLive()) {
+ percent = (currentTime - liveTracker.seekableStart()) / liveTracker.liveWindow(); // prevent the percent from changing at the live edge
+
+ if (liveTracker.atLiveEdge()) {
+ percent = 1;
+ }
+ } else {
+ percent = currentTime / this.player_.duration();
+ }
+
+ return percent;
+ }
+ /**
+ * Handle mouse down on seek bar
+ *
+ * @param {EventTarget~Event} event
+ * The `mousedown` event that caused this to run.
+ *
+ * @listens mousedown
+ */
+ ;
+
+ _proto.handleMouseDown = function handleMouseDown(event) {
+ if (!isSingleLeftClick(event)) {
+ return;
+ } // Stop event propagation to prevent double fire in progress-control.js
+
+
+ event.stopPropagation();
+ this.videoWasPlaying = !this.player_.paused();
+ this.player_.pause();
+
+ _Slider.prototype.handleMouseDown.call(this, event);
+ }
+ /**
+ * Handle mouse move on seek bar
+ *
+ * @param {EventTarget~Event} event
+ * The `mousemove` event that caused this to run.
+   * @param {boolean} mouseDown A flag that should be set to true when `handleMouseMove` is called directly from `handleMouseDown`, so that work meant only for the regular mouse move handler can be skipped. Defaults to false.
+ *
+ * @listens mousemove
+ */
+ ;
+
+ _proto.handleMouseMove = function handleMouseMove(event, mouseDown) {
+ if (mouseDown === void 0) {
+ mouseDown = false;
+ }
+
+ if (!isSingleLeftClick(event)) {
+ return;
+ }
+
+ if (!mouseDown && !this.player_.scrubbing()) {
+ this.player_.scrubbing(true);
+ }
+
+ var newTime;
+ var distance = this.calculateDistance(event);
+ var liveTracker = this.player_.liveTracker;
+
+ if (!liveTracker || !liveTracker.isLive()) {
+ newTime = distance * this.player_.duration(); // Don't let video end while scrubbing.
+
+ if (newTime === this.player_.duration()) {
+ newTime = newTime - 0.1;
+ }
+ } else {
+ if (distance >= 0.99) {
+ liveTracker.seekToLiveEdge();
+ return;
+ }
+
+ var seekableStart = liveTracker.seekableStart();
+ var seekableEnd = liveTracker.liveCurrentTime();
+ newTime = seekableStart + distance * liveTracker.liveWindow(); // Don't let video end while scrubbing.
+
+ if (newTime >= seekableEnd) {
+ newTime = seekableEnd;
+ } // Compensate for precision differences so that currentTime is not less
+ // than seekable start
+
+
+ if (newTime <= seekableStart) {
+ newTime = seekableStart + 0.1;
+ } // On android seekableEnd can be Infinity sometimes,
+ // this will cause newTime to be Infinity, which is
+ // not a valid currentTime.
+
+
+ if (newTime === Infinity) {
+ return;
+ }
+ } // Set new time (tell player to seek to new time)
+
+
+ this.userSeek_(newTime);
+ };
+
+ _proto.enable = function enable() {
+ _Slider.prototype.enable.call(this);
+
+ var mouseTimeDisplay = this.getChild('mouseTimeDisplay');
+
+ if (!mouseTimeDisplay) {
+ return;
+ }
+
+ mouseTimeDisplay.show();
+ };
+
+ _proto.disable = function disable() {
+ _Slider.prototype.disable.call(this);
+
+ var mouseTimeDisplay = this.getChild('mouseTimeDisplay');
+
+ if (!mouseTimeDisplay) {
+ return;
+ }
+
+ mouseTimeDisplay.hide();
+ }
+ /**
+ * Handle mouse up on seek bar
+ *
+ * @param {EventTarget~Event} event
+ * The `mouseup` event that caused this to run.
+ *
+ * @listens mouseup
+ */
+ ;
+
+ _proto.handleMouseUp = function handleMouseUp(event) {
+ _Slider.prototype.handleMouseUp.call(this, event); // Stop event propagation to prevent double fire in progress-control.js
+
+
+ if (event) {
+ event.stopPropagation();
+ }
+
+ this.player_.scrubbing(false);
+ /**
+ * Trigger timeupdate because we're done seeking and the time has changed.
+     * This is particularly useful when the player is paused, so the time displays update.
+ *
+ * @event Tech#timeupdate
+ * @type {EventTarget~Event}
+ */
+
+ this.player_.trigger({
+ type: 'timeupdate',
+ target: this,
+ manuallyTriggered: true
+ });
+
+ if (this.videoWasPlaying) {
+ silencePromise(this.player_.play());
+ } else {
+ // We're done seeking and the time has changed.
+ // If the player is paused, make sure we display the correct time on the seek bar.
+ this.update_();
+ }
+ }
+ /**
+   * Move forward (fast forward) for keyboard-only users
+ */
+ ;
+
+ _proto.stepForward = function stepForward() {
+ this.userSeek_(this.player_.currentTime() + STEP_SECONDS);
+ }
+ /**
+   * Move back (rewind) for keyboard-only users
+ */
+ ;
+
+ _proto.stepBack = function stepBack() {
+ this.userSeek_(this.player_.currentTime() - STEP_SECONDS);
+ }
+ /**
+ * Toggles the playback state of the player
+ * This gets called when enter or space is used on the seekbar
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called
+ *
+ */
+ ;
+
+ _proto.handleAction = function handleAction(event) {
+ if (this.player_.paused()) {
+ this.player_.play();
+ } else {
+ this.player_.pause();
+ }
+ }
+ /**
+ * Called when this SeekBar has focus and a key gets pressed down.
+ * Supports the following keys:
+ *
+ * Space or Enter key fire a click event
+ * Home key moves to start of the timeline
+ * End key moves to end of the timeline
+ * Digit "0" through "9" keys move to 0%, 10% ... 80%, 90% of the timeline
+ * PageDown key moves back a larger step than ArrowDown
+   *   PageUp key moves forward a larger step than ArrowUp
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ var liveTracker = this.player_.liveTracker;
+
+ if (keycode__default['default'].isEventKey(event, 'Space') || keycode__default['default'].isEventKey(event, 'Enter')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.handleAction(event);
+ } else if (keycode__default['default'].isEventKey(event, 'Home')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.userSeek_(0);
+ } else if (keycode__default['default'].isEventKey(event, 'End')) {
+ event.preventDefault();
+ event.stopPropagation();
+
+ if (liveTracker && liveTracker.isLive()) {
+ this.userSeek_(liveTracker.liveCurrentTime());
+ } else {
+ this.userSeek_(this.player_.duration());
+ }
+ } else if (/^[0-9]$/.test(keycode__default['default'](event))) {
+ event.preventDefault();
+ event.stopPropagation();
+ var gotoFraction = (keycode__default['default'].codes[keycode__default['default'](event)] - keycode__default['default'].codes['0']) * 10.0 / 100.0;
+
+ if (liveTracker && liveTracker.isLive()) {
+ this.userSeek_(liveTracker.seekableStart() + liveTracker.liveWindow() * gotoFraction);
+ } else {
+ this.userSeek_(this.player_.duration() * gotoFraction);
+ }
+ } else if (keycode__default['default'].isEventKey(event, 'PgDn')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.userSeek_(this.player_.currentTime() - STEP_SECONDS * PAGE_KEY_MULTIPLIER);
+ } else if (keycode__default['default'].isEventKey(event, 'PgUp')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.userSeek_(this.player_.currentTime() + STEP_SECONDS * PAGE_KEY_MULTIPLIER);
+ } else {
+ // Pass keydown handling up for unsupported keys
+ _Slider.prototype.handleKeyDown.call(this, event);
+ }
+ };
+
+ _proto.dispose = function dispose() {
+ this.disableInterval_();
+ this.off(this.player_, ['ended', 'durationchange', 'timeupdate'], this.update);
+
+ if (this.player_.liveTracker) {
+ this.off(this.player_.liveTracker, 'liveedgechange', this.update);
+ }
+
+ this.off(this.player_, ['playing'], this.enableIntervalHandler_);
+ this.off(this.player_, ['ended', 'pause', 'waiting'], this.disableIntervalHandler_); // we don't need to update the play progress if the document is hidden,
+ // also, this causes the CPU to spike and eventually crash the page on IE11.
+
+ if ('hidden' in document__default['default'] && 'visibilityState' in document__default['default']) {
+ this.off(document__default['default'], 'visibilitychange', this.toggleVisibility_);
+ }
+
+ _Slider.prototype.dispose.call(this);
+ };
+
+ return SeekBar;
+}(Slider);
+/**
+ * Default options for the `SeekBar`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+SeekBar.prototype.options_ = {
+ children: ['loadProgressBar', 'playProgressBar'],
+ barName: 'playProgressBar'
+}; // MouseTimeDisplay tooltips should not be added to a player on mobile devices
+
+if (!IS_IOS && !IS_ANDROID) {
+ SeekBar.prototype.options_.children.splice(1, 0, 'mouseTimeDisplay');
+}
+
+Component$1.registerComponent('SeekBar', SeekBar);
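+
+// Illustrative note, not part of the upstream source: with the constants
+// above, arrow keys seek by STEP_SECONDS (5 seconds), PageUp/PageDown by
+// STEP_SECONDS * PAGE_KEY_MULTIPLIER (5 * 12 = 60 seconds), and the digit keys
+// 0-9 jump to 0%, 10%, ... 90% of the duration (or of the live window when
+// live). Given a reference to this component in a hypothetical variable
+// seekBar:
+//
+//   seekBar.stepForward(); // userSeek_(currentTime + 5)
+//   seekBar.stepBack();    // userSeek_(currentTime - 5)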
+
+/**
+ * The Progress Control component contains the seek bar, load progress,
+ * and play progress.
+ *
+ * @extends Component
+ */
+
+var ProgressControl = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](ProgressControl, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function ProgressControl(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.handleMouseMove = throttle(bind(_assertThisInitialized__default['default'](_this), _this.handleMouseMove), UPDATE_REFRESH_INTERVAL);
+ _this.throttledHandleMouseSeek = throttle(bind(_assertThisInitialized__default['default'](_this), _this.handleMouseSeek), UPDATE_REFRESH_INTERVAL);
+
+ _this.handleMouseUpHandler_ = function (e) {
+ return _this.handleMouseUp(e);
+ };
+
+ _this.handleMouseDownHandler_ = function (e) {
+ return _this.handleMouseDown(e);
+ };
+
+ _this.enable();
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = ProgressControl.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-progress-control vjs-control'
+ });
+ }
+ /**
+ * When the mouse moves over the `ProgressControl`, the pointer position
+ * gets passed down to the `MouseTimeDisplay` component.
+ *
+ * @param {EventTarget~Event} event
+ * The `mousemove` event that caused this function to run.
+ *
+   * @listens mousemove
+ */
+ ;
+
+ _proto.handleMouseMove = function handleMouseMove(event) {
+ var seekBar = this.getChild('seekBar');
+
+ if (!seekBar) {
+ return;
+ }
+
+ var playProgressBar = seekBar.getChild('playProgressBar');
+ var mouseTimeDisplay = seekBar.getChild('mouseTimeDisplay');
+
+ if (!playProgressBar && !mouseTimeDisplay) {
+ return;
+ }
+
+ var seekBarEl = seekBar.el();
+ var seekBarRect = findPosition(seekBarEl);
+ var seekBarPoint = getPointerPosition(seekBarEl, event).x; // The default skin has a gap on either side of the `SeekBar`. This means
+ // that it's possible to trigger this behavior outside the boundaries of
+ // the `SeekBar`. This ensures we stay within it at all times.
+
+ seekBarPoint = clamp(seekBarPoint, 0, 1);
+
+ if (mouseTimeDisplay) {
+ mouseTimeDisplay.update(seekBarRect, seekBarPoint);
+ }
+
+ if (playProgressBar) {
+ playProgressBar.update(seekBarRect, seekBar.getProgress());
+ }
+ }
+ /**
+ * A throttled version of the {@link ProgressControl#handleMouseSeek} listener.
+ *
+ * @method ProgressControl#throttledHandleMouseSeek
+ * @param {EventTarget~Event} event
+ * The `mousemove` event that caused this function to run.
+ *
+   * @listens mousemove
+   * @listens touchmove
+ */
+
+ /**
+ * Handle `mousemove` or `touchmove` events on the `ProgressControl`.
+ *
+ * @param {EventTarget~Event} event
+ * `mousedown` or `touchstart` event that triggered this function
+ *
+ * @listens mousemove
+ * @listens touchmove
+ */
+ ;
+
+ _proto.handleMouseSeek = function handleMouseSeek(event) {
+ var seekBar = this.getChild('seekBar');
+
+ if (seekBar) {
+ seekBar.handleMouseMove(event);
+ }
+ }
+ /**
+   * Are controls currently enabled for this progress control.
+ *
+ * @return {boolean}
+ * true if controls are enabled, false otherwise
+ */
+ ;
+
+ _proto.enabled = function enabled() {
+ return this.enabled_;
+ }
+ /**
+ * Disable all controls on the progress control and its children
+ */
+ ;
+
+ _proto.disable = function disable() {
+ this.children().forEach(function (child) {
+ return child.disable && child.disable();
+ });
+
+ if (!this.enabled()) {
+ return;
+ }
+
+ this.off(['mousedown', 'touchstart'], this.handleMouseDownHandler_);
+ this.off(this.el_, 'mousemove', this.handleMouseMove);
+ this.removeListenersAddedOnMousedownAndTouchstart();
+ this.addClass('disabled');
+ this.enabled_ = false; // Restore normal playback state if controls are disabled while scrubbing
+
+ if (this.player_.scrubbing()) {
+ var seekBar = this.getChild('seekBar');
+ this.player_.scrubbing(false);
+
+ if (seekBar.videoWasPlaying) {
+ silencePromise(this.player_.play());
+ }
+ }
+ }
+ /**
+ * Enable all controls on the progress control and its children
+ */
+ ;
+
+ _proto.enable = function enable() {
+ this.children().forEach(function (child) {
+ return child.enable && child.enable();
+ });
+
+ if (this.enabled()) {
+ return;
+ }
+
+ this.on(['mousedown', 'touchstart'], this.handleMouseDownHandler_);
+ this.on(this.el_, 'mousemove', this.handleMouseMove);
+ this.removeClass('disabled');
+ this.enabled_ = true;
+ }
+ /**
+ * Cleanup listeners after the user finishes interacting with the progress controls
+ */
+ ;
+
+ _proto.removeListenersAddedOnMousedownAndTouchstart = function removeListenersAddedOnMousedownAndTouchstart() {
+ var doc = this.el_.ownerDocument;
+ this.off(doc, 'mousemove', this.throttledHandleMouseSeek);
+ this.off(doc, 'touchmove', this.throttledHandleMouseSeek);
+ this.off(doc, 'mouseup', this.handleMouseUpHandler_);
+ this.off(doc, 'touchend', this.handleMouseUpHandler_);
+ }
+ /**
+ * Handle `mousedown` or `touchstart` events on the `ProgressControl`.
+ *
+ * @param {EventTarget~Event} event
+ * `mousedown` or `touchstart` event that triggered this function
+ *
+ * @listens mousedown
+ * @listens touchstart
+ */
+ ;
+
+ _proto.handleMouseDown = function handleMouseDown(event) {
+ var doc = this.el_.ownerDocument;
+ var seekBar = this.getChild('seekBar');
+
+ if (seekBar) {
+ seekBar.handleMouseDown(event);
+ }
+
+ this.on(doc, 'mousemove', this.throttledHandleMouseSeek);
+ this.on(doc, 'touchmove', this.throttledHandleMouseSeek);
+ this.on(doc, 'mouseup', this.handleMouseUpHandler_);
+ this.on(doc, 'touchend', this.handleMouseUpHandler_);
+ }
+ /**
+ * Handle `mouseup` or `touchend` events on the `ProgressControl`.
+ *
+ * @param {EventTarget~Event} event
+ * `mouseup` or `touchend` event that triggered this function.
+ *
+ * @listens touchend
+ * @listens mouseup
+ */
+ ;
+
+ _proto.handleMouseUp = function handleMouseUp(event) {
+ var seekBar = this.getChild('seekBar');
+
+ if (seekBar) {
+ seekBar.handleMouseUp(event);
+ }
+
+ this.removeListenersAddedOnMousedownAndTouchstart();
+ };
+
+ return ProgressControl;
+}(Component$1);
+/**
+ * Default options for `ProgressControl`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+ProgressControl.prototype.options_ = {
+ children: ['seekBar']
+};
+Component$1.registerComponent('ProgressControl', ProgressControl);
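+
+// Illustrative usage sketch, not part of the upstream source; it assumes the
+// stock player layout exposes these components under the child names
+// 'controlBar' and 'progressControl':
+//
+//   var progressControl = player.getChild('controlBar').getChild('progressControl');
+//   progressControl.disable(); // also disables children and restores playback if scrubbing
+//   progressControl.enable();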
+
+/**
+ * Toggle Picture-in-Picture mode
+ *
+ * @extends Button
+ */
+
+var PictureInPictureToggle = /*#__PURE__*/function (_Button) {
+ _inheritsLoose__default['default'](PictureInPictureToggle, _Button);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @listens Player#enterpictureinpicture
+ * @listens Player#leavepictureinpicture
+ */
+ function PictureInPictureToggle(player, options) {
+ var _this;
+
+ _this = _Button.call(this, player, options) || this;
+
+ _this.on(player, ['enterpictureinpicture', 'leavepictureinpicture'], function (e) {
+ return _this.handlePictureInPictureChange(e);
+ });
+
+ _this.on(player, ['disablepictureinpicturechanged', 'loadedmetadata'], function (e) {
+ return _this.handlePictureInPictureEnabledChange(e);
+ });
+
+ _this.on(player, ['loadedmetadata', 'audioonlymodechange', 'audiopostermodechange'], function () {
+ // This audio detection will not detect HLS or DASH audio-only streams because there was no reliable way to detect them at the time
+ var isSourceAudio = player.currentType().substring(0, 5) === 'audio';
+
+ if (isSourceAudio || player.audioPosterMode() || player.audioOnlyMode()) {
+ if (player.isInPictureInPicture()) {
+ player.exitPictureInPicture();
+ }
+
+ _this.hide();
+ } else {
+ _this.show();
+ }
+ }); // TODO: Deactivate button on player emptied event.
+
+
+ _this.disable();
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = PictureInPictureToggle.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-picture-in-picture-control " + _Button.prototype.buildCSSClass.call(this);
+ }
+ /**
+   * Enables or disables the button based on the document.pictureInPictureEnabled
+   * property and the value returned by the player.disablePictureInPicture() method.
+ */
+ ;
+
+ _proto.handlePictureInPictureEnabledChange = function handlePictureInPictureEnabledChange() {
+ if (document__default['default'].pictureInPictureEnabled && this.player_.disablePictureInPicture() === false) {
+ this.enable();
+ } else {
+ this.disable();
+ }
+ }
+ /**
+   * Handles enterpictureinpicture and leavepictureinpicture on the player and changes the control text accordingly.
+ *
+ * @param {EventTarget~Event} [event]
+ * The {@link Player#enterpictureinpicture} or {@link Player#leavepictureinpicture} event that caused this function to be
+ * called.
+ *
+ * @listens Player#enterpictureinpicture
+ * @listens Player#leavepictureinpicture
+ */
+ ;
+
+ _proto.handlePictureInPictureChange = function handlePictureInPictureChange(event) {
+ if (this.player_.isInPictureInPicture()) {
+ this.controlText('Exit Picture-in-Picture');
+ } else {
+ this.controlText('Picture-in-Picture');
+ }
+
+ this.handlePictureInPictureEnabledChange();
+ }
+ /**
+   * This gets called when a `PictureInPictureToggle` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ if (!this.player_.isInPictureInPicture()) {
+ this.player_.requestPictureInPicture();
+ } else {
+ this.player_.exitPictureInPicture();
+ }
+ };
+
+ return PictureInPictureToggle;
+}(Button);
+/**
+ * The text that should display over the `PictureInPictureToggle`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+PictureInPictureToggle.prototype.controlText_ = 'Picture-in-Picture';
+Component$1.registerComponent('PictureInPictureToggle', PictureInPictureToggle);
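+
+// Illustrative note, not part of the upstream source: the toggle stays
+// disabled unless document.pictureInPictureEnabled is true and
+// player.disablePictureInPicture() returns false, and it hides itself for
+// audio-only sources or while audioPosterMode / audioOnlyMode is active.
+// Clicking it calls player.requestPictureInPicture() or
+// player.exitPictureInPicture(), as in handleClick() above.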
+
+/**
+ * Toggle fullscreen video
+ *
+ * @extends Button
+ */
+
+var FullscreenToggle = /*#__PURE__*/function (_Button) {
+ _inheritsLoose__default['default'](FullscreenToggle, _Button);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function FullscreenToggle(player, options) {
+ var _this;
+
+ _this = _Button.call(this, player, options) || this;
+
+ _this.on(player, 'fullscreenchange', function (e) {
+ return _this.handleFullscreenChange(e);
+ });
+
+ if (document__default['default'][player.fsApi_.fullscreenEnabled] === false) {
+ _this.disable();
+ }
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = FullscreenToggle.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-fullscreen-control " + _Button.prototype.buildCSSClass.call(this);
+ }
+ /**
+   * Handles fullscreenchange on the player and changes the control text accordingly.
+ *
+ * @param {EventTarget~Event} [event]
+ * The {@link Player#fullscreenchange} event that caused this function to be
+ * called.
+ *
+ * @listens Player#fullscreenchange
+ */
+ ;
+
+ _proto.handleFullscreenChange = function handleFullscreenChange(event) {
+ if (this.player_.isFullscreen()) {
+ this.controlText('Non-Fullscreen');
+ } else {
+ this.controlText('Fullscreen');
+ }
+ }
+ /**
+   * This gets called when a `FullscreenToggle` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ if (!this.player_.isFullscreen()) {
+ this.player_.requestFullscreen();
+ } else {
+ this.player_.exitFullscreen();
+ }
+ };
+
+ return FullscreenToggle;
+}(Button);
+/**
+ * The text that should display over the `FullscreenToggle`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+FullscreenToggle.prototype.controlText_ = 'Fullscreen';
+Component$1.registerComponent('FullscreenToggle', FullscreenToggle);
+
+/**
+ * Check if volume control is supported and, if it isn't, hide the
+ * `Component` that was passed in, using the `vjs-hidden` class.
+ *
+ * @param {Component} self
+ * The component that should be hidden if volume is unsupported
+ *
+ * @param {Player} player
+ * A reference to the player
+ *
+ * @private
+ */
+var checkVolumeSupport = function checkVolumeSupport(self, player) {
+ // hide volume controls when they're not supported by the current tech
+ if (player.tech_ && !player.tech_.featuresVolumeControl) {
+ self.addClass('vjs-hidden');
+ }
+
+ self.on(player, 'loadstart', function () {
+ if (!player.tech_.featuresVolumeControl) {
+ self.addClass('vjs-hidden');
+ } else {
+ self.removeClass('vjs-hidden');
+ }
+ });
+};
+
+/**
+ * Shows volume level
+ *
+ * @extends Component
+ */
+
+var VolumeLevel = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](VolumeLevel, _Component);
+
+ function VolumeLevel() {
+ return _Component.apply(this, arguments) || this;
+ }
+
+ var _proto = VolumeLevel.prototype;
+
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ _proto.createEl = function createEl() {
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-volume-level'
+ });
+
+ el.appendChild(_Component.prototype.createEl.call(this, 'span', {
+ className: 'vjs-control-text'
+ }));
+ return el;
+ };
+
+ return VolumeLevel;
+}(Component$1);
+
+Component$1.registerComponent('VolumeLevel', VolumeLevel);
+
+/**
+ * Volume level tooltips display the volume level above or alongside the volume bar.
+ *
+ * @extends Component
+ */
+
+var VolumeLevelTooltip = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](VolumeLevelTooltip, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The {@link Player} that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function VolumeLevelTooltip(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.update = throttle(bind(_assertThisInitialized__default['default'](_this), _this.update), UPDATE_REFRESH_INTERVAL);
+ return _this;
+ }
+ /**
+ * Create the volume tooltip DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = VolumeLevelTooltip.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-volume-tooltip'
+ }, {
+ 'aria-hidden': 'true'
+ });
+ }
+ /**
+ * Updates the position of the tooltip relative to the `VolumeBar` and
+ * its content text.
+ *
+ * @param {Object} rangeBarRect
+ * The `ClientRect` for the {@link VolumeBar} element.
+ *
+ * @param {number} rangeBarPoint
+ * A number from 0 to 1, representing a horizontal/vertical reference point
+ * from the left edge of the {@link VolumeBar}
+ *
+ * @param {boolean} vertical
+   *        Refers to the volume control position
+   *        in the control bar {@link VolumeControl}
+ *
+ */
+ ;
+
+ _proto.update = function update(rangeBarRect, rangeBarPoint, vertical, content) {
+ if (!vertical) {
+ var tooltipRect = getBoundingClientRect(this.el_);
+ var playerRect = getBoundingClientRect(this.player_.el());
+ var volumeBarPointPx = rangeBarRect.width * rangeBarPoint;
+
+ if (!playerRect || !tooltipRect) {
+ return;
+ }
+
+ var spaceLeftOfPoint = rangeBarRect.left - playerRect.left + volumeBarPointPx;
+ var spaceRightOfPoint = rangeBarRect.width - volumeBarPointPx + (playerRect.right - rangeBarRect.right);
+ var pullTooltipBy = tooltipRect.width / 2;
+
+ if (spaceLeftOfPoint < pullTooltipBy) {
+ pullTooltipBy += pullTooltipBy - spaceLeftOfPoint;
+ } else if (spaceRightOfPoint < pullTooltipBy) {
+ pullTooltipBy = spaceRightOfPoint;
+ }
+
+ if (pullTooltipBy < 0) {
+ pullTooltipBy = 0;
+ } else if (pullTooltipBy > tooltipRect.width) {
+ pullTooltipBy = tooltipRect.width;
+ }
+
+ this.el_.style.right = "-" + pullTooltipBy + "px";
+ }
+
+ this.write(content + "%");
+ }
+ /**
+ * Write the volume to the tooltip DOM element.
+ *
+ * @param {string} content
+ * The formatted volume for the tooltip.
+ */
+ ;
+
+ _proto.write = function write(content) {
+ textContent(this.el_, content);
+ }
+ /**
+ * Updates the position of the volume tooltip relative to the `VolumeBar`.
+ *
+ * @param {Object} rangeBarRect
+ * The `ClientRect` for the {@link VolumeBar} element.
+ *
+ * @param {number} rangeBarPoint
+ * A number from 0 to 1, representing a horizontal/vertical reference point
+ * from the left edge of the {@link VolumeBar}
+ *
+ * @param {boolean} vertical
+   *        Refers to the volume control position
+   *        in the control bar {@link VolumeControl}
+ *
+ * @param {number} volume
+ * The volume level to update the tooltip to
+ *
+ * @param {Function} cb
+ * A function that will be called during the request animation frame
+ * for tooltips that need to do additional animations from the default
+ */
+ ;
+
+ _proto.updateVolume = function updateVolume(rangeBarRect, rangeBarPoint, vertical, volume, cb) {
+ var _this2 = this;
+
+ this.requestNamedAnimationFrame('VolumeLevelTooltip#updateVolume', function () {
+ _this2.update(rangeBarRect, rangeBarPoint, vertical, volume.toFixed(0));
+
+ if (cb) {
+ cb();
+ }
+ });
+ };
+
+ return VolumeLevelTooltip;
+}(Component$1);
+
+Component$1.registerComponent('VolumeLevelTooltip', VolumeLevelTooltip);
+
+/**
+ * The {@link MouseVolumeLevelDisplay} component tracks mouse movement over the
+ * {@link VolumeControl}. It displays an indicator and a {@link VolumeLevelTooltip}
+ * indicating the volume level which is represented by a given point in the
+ * {@link VolumeBar}.
+ *
+ * @extends Component
+ */
+
+var MouseVolumeLevelDisplay = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](MouseVolumeLevelDisplay, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The {@link Player} that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function MouseVolumeLevelDisplay(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.update = throttle(bind(_assertThisInitialized__default['default'](_this), _this.update), UPDATE_REFRESH_INTERVAL);
+ return _this;
+ }
+ /**
+ * Create the DOM element for this class.
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = MouseVolumeLevelDisplay.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-mouse-display'
+ });
+ }
+ /**
+   * Enqueues updates to its own DOM as well as the DOM of its
+ * {@link VolumeLevelTooltip} child.
+ *
+ * @param {Object} rangeBarRect
+ * The `ClientRect` for the {@link VolumeBar} element.
+ *
+ * @param {number} rangeBarPoint
+ * A number from 0 to 1, representing a horizontal/vertical reference point
+ * from the left edge of the {@link VolumeBar}
+ *
+ * @param {boolean} vertical
+   *        Refers to the volume control position
+   *        in the control bar {@link VolumeControl}
+ *
+ */
+ ;
+
+ _proto.update = function update(rangeBarRect, rangeBarPoint, vertical) {
+ var _this2 = this;
+
+ var volume = 100 * rangeBarPoint;
+ this.getChild('volumeLevelTooltip').updateVolume(rangeBarRect, rangeBarPoint, vertical, volume, function () {
+ if (vertical) {
+ _this2.el_.style.bottom = rangeBarRect.height * rangeBarPoint + "px";
+ } else {
+ _this2.el_.style.left = rangeBarRect.width * rangeBarPoint + "px";
+ }
+ });
+ };
+
+ return MouseVolumeLevelDisplay;
+}(Component$1);
+/**
+ * Default options for `MouseVolumeLevelDisplay`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+MouseVolumeLevelDisplay.prototype.options_ = {
+ children: ['volumeLevelTooltip']
+};
+Component$1.registerComponent('MouseVolumeLevelDisplay', MouseVolumeLevelDisplay);
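+
+// Illustrative note, not part of the upstream source: update() maps the
+// pointer position to a volume of 100 * rangeBarPoint. Hovering at 0.4 on a
+// horizontal bar renders a '40%' tooltip and moves this display to
+// left: 0.4 * barWidth px; a vertical bar uses bottom and the bar height.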
+
+/**
+ * The bar that contains the volume level and can be clicked on to adjust the level
+ *
+ * @extends Slider
+ */
+
+var VolumeBar = /*#__PURE__*/function (_Slider) {
+ _inheritsLoose__default['default'](VolumeBar, _Slider);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function VolumeBar(player, options) {
+ var _this;
+
+ _this = _Slider.call(this, player, options) || this;
+
+ _this.on('slideractive', function (e) {
+ return _this.updateLastVolume_(e);
+ });
+
+ _this.on(player, 'volumechange', function (e) {
+ return _this.updateARIAAttributes(e);
+ });
+
+ player.ready(function () {
+ return _this.updateARIAAttributes();
+ });
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = VolumeBar.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Slider.prototype.createEl.call(this, 'div', {
+ className: 'vjs-volume-bar vjs-slider-bar'
+ }, {
+ 'aria-label': this.localize('Volume Level'),
+ 'aria-live': 'polite'
+ });
+ }
+ /**
+ * Handle mouse down on volume bar
+ *
+ * @param {EventTarget~Event} event
+ * The `mousedown` event that caused this to run.
+ *
+ * @listens mousedown
+ */
+ ;
+
+ _proto.handleMouseDown = function handleMouseDown(event) {
+ if (!isSingleLeftClick(event)) {
+ return;
+ }
+
+ _Slider.prototype.handleMouseDown.call(this, event);
+ }
+ /**
+   * Handle movement events on the `VolumeBar`.
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to run.
+ *
+ * @listens mousemove
+ */
+ ;
+
+ _proto.handleMouseMove = function handleMouseMove(event) {
+ var mouseVolumeLevelDisplay = this.getChild('mouseVolumeLevelDisplay');
+
+ if (mouseVolumeLevelDisplay) {
+ var volumeBarEl = this.el();
+ var volumeBarRect = getBoundingClientRect(volumeBarEl);
+ var vertical = this.vertical();
+ var volumeBarPoint = getPointerPosition(volumeBarEl, event);
+ volumeBarPoint = vertical ? volumeBarPoint.y : volumeBarPoint.x; // The default skin has a gap on either side of the `VolumeBar`. This means
+ // that it's possible to trigger this behavior outside the boundaries of
+ // the `VolumeBar`. This ensures we stay within it at all times.
+
+ volumeBarPoint = clamp(volumeBarPoint, 0, 1);
+ mouseVolumeLevelDisplay.update(volumeBarRect, volumeBarPoint, vertical);
+ }
+
+ if (!isSingleLeftClick(event)) {
+ return;
+ }
+
+ this.checkMuted();
+ this.player_.volume(this.calculateDistance(event));
+ }
+ /**
+ * If the player is muted unmute it.
+ */
+ ;
+
+ _proto.checkMuted = function checkMuted() {
+ if (this.player_.muted()) {
+ this.player_.muted(false);
+ }
+ }
+ /**
+ * Get percent of volume level
+ *
+ * @return {number}
+ * Volume level percent as a decimal number.
+ */
+ ;
+
+ _proto.getPercent = function getPercent() {
+ if (this.player_.muted()) {
+ return 0;
+ }
+
+ return this.player_.volume();
+ }
+ /**
+ * Increase volume level for keyboard users
+ */
+ ;
+
+ _proto.stepForward = function stepForward() {
+ this.checkMuted();
+ this.player_.volume(this.player_.volume() + 0.1);
+ }
+ /**
+ * Decrease volume level for keyboard users
+ */
+ ;
+
+ _proto.stepBack = function stepBack() {
+ this.checkMuted();
+ this.player_.volume(this.player_.volume() - 0.1);
+ }
+ /**
+ * Update ARIA accessibility attributes
+ *
+ * @param {EventTarget~Event} [event]
+ * The `volumechange` event that caused this function to run.
+ *
+ * @listens Player#volumechange
+ */
+ ;
+
+ _proto.updateARIAAttributes = function updateARIAAttributes(event) {
+ var ariaValue = this.player_.muted() ? 0 : this.volumeAsPercentage_();
+ this.el_.setAttribute('aria-valuenow', ariaValue);
+ this.el_.setAttribute('aria-valuetext', ariaValue + '%');
+ }
+ /**
+ * Returns the current value of the player volume as a percentage
+ *
+ * @private
+ */
+ ;
+
+ _proto.volumeAsPercentage_ = function volumeAsPercentage_() {
+ return Math.round(this.player_.volume() * 100);
+ }
+ /**
+ * When user starts dragging the VolumeBar, store the volume and listen for
+ * the end of the drag. When the drag ends, if the volume was set to zero,
+ * set lastVolume to the stored volume.
+ *
+ * @listens slideractive
+ * @private
+ */
+ ;
+
+ _proto.updateLastVolume_ = function updateLastVolume_() {
+ var _this2 = this;
+
+ var volumeBeforeDrag = this.player_.volume();
+ this.one('sliderinactive', function () {
+ if (_this2.player_.volume() === 0) {
+ _this2.player_.lastVolume_(volumeBeforeDrag);
+ }
+ });
+ };
+
+ return VolumeBar;
+}(Slider);
+/**
+ * Default options for the `VolumeBar`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+VolumeBar.prototype.options_ = {
+ children: ['volumeLevel'],
+ barName: 'volumeLevel'
+}; // MouseVolumeLevelDisplay tooltip should not be added to a player on mobile devices
+
+if (!IS_IOS && !IS_ANDROID) {
+ VolumeBar.prototype.options_.children.splice(0, 0, 'mouseVolumeLevelDisplay');
+}
+/**
+ * Call the update event for this Slider when this event happens on the player.
+ *
+ * @type {string}
+ */
+
+
+VolumeBar.prototype.playerEvent = 'volumechange';
+Component$1.registerComponent('VolumeBar', VolumeBar);
+
+/**
+ * The component for controlling the volume level
+ *
+ * @extends Component
+ */
+
+var VolumeControl = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](VolumeControl, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ */
+ function VolumeControl(player, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ options.vertical = options.vertical || false; // Pass the vertical option down to the VolumeBar if
+ // the VolumeBar is turned on.
+
+ if (typeof options.volumeBar === 'undefined' || isPlain(options.volumeBar)) {
+ options.volumeBar = options.volumeBar || {};
+ options.volumeBar.vertical = options.vertical;
+ }
+
+ _this = _Component.call(this, player, options) || this; // hide this control if volume support is missing
+
+ checkVolumeSupport(_assertThisInitialized__default['default'](_this), player);
+ _this.throttledHandleMouseMove = throttle(bind(_assertThisInitialized__default['default'](_this), _this.handleMouseMove), UPDATE_REFRESH_INTERVAL);
+
+ _this.handleMouseUpHandler_ = function (e) {
+ return _this.handleMouseUp(e);
+ };
+
+ _this.on('mousedown', function (e) {
+ return _this.handleMouseDown(e);
+ });
+
+ _this.on('touchstart', function (e) {
+ return _this.handleMouseDown(e);
+ });
+
+ _this.on('mousemove', function (e) {
+ return _this.handleMouseMove(e);
+ }); // while the slider is active (the mouse has been pressed down and
+ // is dragging) or in focus we do not want to hide the VolumeBar
+
+
+ _this.on(_this.volumeBar, ['focus', 'slideractive'], function () {
+ _this.volumeBar.addClass('vjs-slider-active');
+
+ _this.addClass('vjs-slider-active');
+
+ _this.trigger('slideractive');
+ });
+
+ _this.on(_this.volumeBar, ['blur', 'sliderinactive'], function () {
+ _this.volumeBar.removeClass('vjs-slider-active');
+
+ _this.removeClass('vjs-slider-active');
+
+ _this.trigger('sliderinactive');
+ });
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = VolumeControl.prototype;
+
+ _proto.createEl = function createEl() {
+ var orientationClass = 'vjs-volume-horizontal';
+
+ if (this.options_.vertical) {
+ orientationClass = 'vjs-volume-vertical';
+ }
+
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: "vjs-volume-control vjs-control " + orientationClass
+ });
+ }
+ /**
+ * Handle `mousedown` or `touchstart` events on the `VolumeControl`.
+ *
+ * @param {EventTarget~Event} event
+ * `mousedown` or `touchstart` event that triggered this function
+ *
+ * @listens mousedown
+ * @listens touchstart
+ */
+ ;
+
+ _proto.handleMouseDown = function handleMouseDown(event) {
+ var doc = this.el_.ownerDocument;
+ this.on(doc, 'mousemove', this.throttledHandleMouseMove);
+ this.on(doc, 'touchmove', this.throttledHandleMouseMove);
+ this.on(doc, 'mouseup', this.handleMouseUpHandler_);
+ this.on(doc, 'touchend', this.handleMouseUpHandler_);
+ }
+ /**
+ * Handle `mouseup` or `touchend` events on the `VolumeControl`.
+ *
+ * @param {EventTarget~Event} event
+ * `mouseup` or `touchend` event that triggered this function.
+ *
+ * @listens touchend
+ * @listens mouseup
+ */
+ ;
+
+ _proto.handleMouseUp = function handleMouseUp(event) {
+ var doc = this.el_.ownerDocument;
+ this.off(doc, 'mousemove', this.throttledHandleMouseMove);
+ this.off(doc, 'touchmove', this.throttledHandleMouseMove);
+ this.off(doc, 'mouseup', this.handleMouseUpHandler_);
+ this.off(doc, 'touchend', this.handleMouseUpHandler_);
+ }
+ /**
+   * Handle `mousemove` or `touchmove` events on the `VolumeControl`.
+   *
+   * @param {EventTarget~Event} event
+   *        `mousemove` or `touchmove` event that triggered this function
+   *
+   * @listens mousemove
+   * @listens touchmove
+ */
+ ;
+
+ _proto.handleMouseMove = function handleMouseMove(event) {
+ this.volumeBar.handleMouseMove(event);
+ };
+
+ return VolumeControl;
+}(Component$1);
+/**
+ * Default options for the `VolumeControl`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+VolumeControl.prototype.options_ = {
+ children: ['volumeBar']
+};
+Component$1.registerComponent('VolumeControl', VolumeControl);
+
+/**
+ * Check if muting volume is supported and if it isn't hide the mute toggle
+ * button.
+ *
+ * @param {Component} self
+ * A reference to the mute toggle button
+ *
+ * @param {Player} player
+ * A reference to the player
+ *
+ * @private
+ */
+var checkMuteSupport = function checkMuteSupport(self, player) {
+ // hide mute toggle button if it's not supported by the current tech
+ if (player.tech_ && !player.tech_.featuresMuteControl) {
+ self.addClass('vjs-hidden');
+ }
+
+ self.on(player, 'loadstart', function () {
+ if (!player.tech_.featuresMuteControl) {
+ self.addClass('vjs-hidden');
+ } else {
+ self.removeClass('vjs-hidden');
+ }
+ });
+};
+
+/**
+ * A button component for muting the audio.
+ *
+ * @extends Button
+ */
+
+var MuteToggle = /*#__PURE__*/function (_Button) {
+ _inheritsLoose__default['default'](MuteToggle, _Button);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function MuteToggle(player, options) {
+ var _this;
+
+ _this = _Button.call(this, player, options) || this; // hide this control if volume support is missing
+
+ checkMuteSupport(_assertThisInitialized__default['default'](_this), player);
+
+ _this.on(player, ['loadstart', 'volumechange'], function (e) {
+ return _this.update(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = MuteToggle.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-mute-control " + _Button.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * This gets called when an `MuteToggle` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ var vol = this.player_.volume();
+ var lastVolume = this.player_.lastVolume_();
+
+ if (vol === 0) {
+ var volumeToSet = lastVolume < 0.1 ? 0.1 : lastVolume;
+ this.player_.volume(volumeToSet);
+ this.player_.muted(false);
+ } else {
+ this.player_.muted(this.player_.muted() ? false : true);
+ }
+ }
+ /**
+ * Update the `MuteToggle` button based on the state of `volume` and `muted`
+ * on the player.
+ *
+ * @param {EventTarget~Event} [event]
+ * The {@link Player#loadstart} event if this function was called
+ * through an event.
+ *
+ * @listens Player#loadstart
+ * @listens Player#volumechange
+ */
+ ;
+
+ _proto.update = function update(event) {
+ this.updateIcon_();
+ this.updateControlText_();
+ }
+ /**
+ * Update the appearance of the `MuteToggle` icon.
+ *
+ * Possible states (given `level` variable below):
+ * - 0: crossed out
+ * - 1: zero bars of volume
+ * - 2: one bar of volume
+ * - 3: two bars of volume
+ *
+ * @private
+ */
+ ;
+
+ _proto.updateIcon_ = function updateIcon_() {
+ var vol = this.player_.volume();
+ var level = 3; // in iOS when a player is loaded with muted attribute
+ // and volume is changed with a native mute button
+ // we want to make sure muted state is updated
+
+ if (IS_IOS && this.player_.tech_ && this.player_.tech_.el_) {
+ this.player_.muted(this.player_.tech_.el_.muted);
+ }
+
+ if (vol === 0 || this.player_.muted()) {
+ level = 0;
+ } else if (vol < 0.33) {
+ level = 1;
+ } else if (vol < 0.67) {
+ level = 2;
+ } // TODO improve muted icon classes
+
+
+ for (var i = 0; i < 4; i++) {
+ removeClass(this.el_, "vjs-vol-" + i);
+ }
+
+ addClass(this.el_, "vjs-vol-" + level);
+ }
+ /**
+ * If `muted` has changed on the player, update the control text
+ * (`title` attribute on `vjs-mute-control` element and content of
+ * `vjs-control-text` element).
+ *
+ * @private
+ */
+ ;
+
+ _proto.updateControlText_ = function updateControlText_() {
+ var soundOff = this.player_.muted() || this.player_.volume() === 0;
+ var text = soundOff ? 'Unmute' : 'Mute';
+
+ if (this.controlText() !== text) {
+ this.controlText(text);
+ }
+ };
+
+ return MuteToggle;
+}(Button);
+/**
+ * The text that should display over the `MuteToggle`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+MuteToggle.prototype.controlText_ = 'Mute';
+Component$1.registerComponent('MuteToggle', MuteToggle);
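+/*
+ * Usage sketch (illustrative only, not part of the video.js bundle): the
+ * MuteToggle above restores the last non-zero volume instead of merely
+ * unmuting when the volume sits at 0. Its click handling is roughly
+ * equivalent to:
+ *
+ *   if (player.volume() === 0) {
+ *     player.volume(Math.max(player.lastVolume_(), 0.1));
+ *     player.muted(false);
+ *   } else {
+ *     player.muted(!player.muted());
+ *   }
+ */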
+
+/**
+ * A Component to contain the MuteToggle and VolumeControl so that
+ * they can work together.
+ *
+ * @extends Component
+ */
+
+var VolumePanel = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](VolumePanel, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ */
+ function VolumePanel(player, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ if (typeof options.inline !== 'undefined') {
+ options.inline = options.inline;
+ } else {
+ options.inline = true;
+ } // pass the inline option down to the VolumeControl as vertical if
+ // the VolumeControl is on.
+
+
+ if (typeof options.volumeControl === 'undefined' || isPlain(options.volumeControl)) {
+ options.volumeControl = options.volumeControl || {};
+ options.volumeControl.vertical = !options.inline;
+ }
+
+ _this = _Component.call(this, player, options) || this; // this handler is used by mouse handler methods below
+
+ _this.handleKeyPressHandler_ = function (e) {
+ return _this.handleKeyPress(e);
+ };
+
+ _this.on(player, ['loadstart'], function (e) {
+ return _this.volumePanelState_(e);
+ });
+
+ _this.on(_this.muteToggle, 'keyup', function (e) {
+ return _this.handleKeyPress(e);
+ });
+
+ _this.on(_this.volumeControl, 'keyup', function (e) {
+ return _this.handleVolumeControlKeyUp(e);
+ });
+
+ _this.on('keydown', function (e) {
+ return _this.handleKeyPress(e);
+ });
+
+ _this.on('mouseover', function (e) {
+ return _this.handleMouseOver(e);
+ });
+
+ _this.on('mouseout', function (e) {
+ return _this.handleMouseOut(e);
+ }); // while the slider is active (the mouse has been pressed down and
+ // is dragging) we do not want to hide the VolumeBar
+
+
+ _this.on(_this.volumeControl, ['slideractive'], _this.sliderActive_);
+
+ _this.on(_this.volumeControl, ['sliderinactive'], _this.sliderInactive_);
+
+ return _this;
+ }
+ /**
+ * Add vjs-slider-active class to the VolumePanel
+ *
+ * @listens VolumeControl#slideractive
+ * @private
+ */
+
+
+ var _proto = VolumePanel.prototype;
+
+ _proto.sliderActive_ = function sliderActive_() {
+ this.addClass('vjs-slider-active');
+ }
+ /**
+   * Removes the vjs-slider-active class from the VolumePanel
+ *
+ * @listens VolumeControl#sliderinactive
+ * @private
+ */
+ ;
+
+ _proto.sliderInactive_ = function sliderInactive_() {
+ this.removeClass('vjs-slider-active');
+ }
+ /**
+ * Adds vjs-hidden or vjs-mute-toggle-only to the VolumePanel
+ * depending on MuteToggle and VolumeControl state
+ *
+ * @listens Player#loadstart
+ * @private
+ */
+ ;
+
+ _proto.volumePanelState_ = function volumePanelState_() {
+    // hide volume panel if neither the volume control nor the mute toggle
+    // is displayed
+ if (this.volumeControl.hasClass('vjs-hidden') && this.muteToggle.hasClass('vjs-hidden')) {
+ this.addClass('vjs-hidden');
+ } // if only mute toggle is visible we don't want
+ // volume panel expanding when hovered or active
+
+
+ if (this.volumeControl.hasClass('vjs-hidden') && !this.muteToggle.hasClass('vjs-hidden')) {
+ this.addClass('vjs-mute-toggle-only');
+ }
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ var orientationClass = 'vjs-volume-panel-horizontal';
+
+ if (!this.options_.inline) {
+ orientationClass = 'vjs-volume-panel-vertical';
+ }
+
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: "vjs-volume-panel vjs-control " + orientationClass
+ });
+ }
+ /**
+ * Dispose of the `volume-panel` and all child components.
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ this.handleMouseOut();
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Handles `keyup` events on the `VolumeControl`, looking for ESC, which closes
+ * the volume panel and sets focus on `MuteToggle`.
+ *
+ * @param {EventTarget~Event} event
+ * The `keyup` event that caused this function to be called.
+ *
+ * @listens keyup
+ */
+ ;
+
+ _proto.handleVolumeControlKeyUp = function handleVolumeControlKeyUp(event) {
+ if (keycode__default['default'].isEventKey(event, 'Esc')) {
+ this.muteToggle.focus();
+ }
+ }
+ /**
+ * This gets called when a `VolumePanel` gains hover via a `mouseover` event.
+   * Turns on listening for `keyup` events on the document. When they happen it
+   * calls `this.handleKeyPress`.
+ *
+ * @param {EventTarget~Event} event
+ * The `mouseover` event that caused this function to be called.
+ *
+ * @listens mouseover
+ */
+ ;
+
+ _proto.handleMouseOver = function handleMouseOver(event) {
+ this.addClass('vjs-hover');
+ on(document__default['default'], 'keyup', this.handleKeyPressHandler_);
+ }
+ /**
+   * This gets called when a `VolumePanel` loses hover via a `mouseout` event.
+   * Turns off listening for `keyup` events on the document.
+ *
+ * @param {EventTarget~Event} event
+ * The `mouseout` event that caused this function to be called.
+ *
+ * @listens mouseout
+ */
+ ;
+
+ _proto.handleMouseOut = function handleMouseOut(event) {
+ this.removeClass('vjs-hover');
+ off(document__default['default'], 'keyup', this.handleKeyPressHandler_);
+ }
+ /**
+ * Handles `keyup` event on the document or `keydown` event on the `VolumePanel`,
+ * looking for ESC, which hides the `VolumeControl`.
+ *
+ * @param {EventTarget~Event} event
+ * The keypress that triggered this event.
+ *
+ * @listens keydown | keyup
+ */
+ ;
+
+ _proto.handleKeyPress = function handleKeyPress(event) {
+ if (keycode__default['default'].isEventKey(event, 'Esc')) {
+ this.handleMouseOut();
+ }
+ };
+
+ return VolumePanel;
+}(Component$1);
+/**
+ * Default options for the `VolumePanel`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+VolumePanel.prototype.options_ = {
+ children: ['muteToggle', 'volumeControl']
+};
+Component$1.registerComponent('VolumePanel', VolumePanel);
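+/*
+ * Usage sketch (illustrative only, not part of the video.js bundle): the
+ * VolumePanel's `inline` option decides whether the VolumeControl renders as a
+ * horizontal slider (default) or as a vertical popup. A page embedding this
+ * bundle would typically configure it through the player options, e.g.:
+ *
+ *   videojs('my-player', {                 // 'my-player' is a hypothetical element id
+ *     controlBar: {
+ *       volumePanel: { inline: false }     // vertical volume slider
+ *     }
+ *   });
+ */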
+
+/**
+ * The Menu component is used to build popup menus, including subtitle and
+ * captions selection menus.
+ *
+ * @extends Component
+ */
+
+var Menu = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](Menu, _Component);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Player} player
+ * the player that this component should attach to
+ *
+ * @param {Object} [options]
+ * Object of option names and values
+ *
+ */
+ function Menu(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+
+ if (options) {
+ _this.menuButton_ = options.menuButton;
+ }
+
+ _this.focusedChild_ = -1;
+
+ _this.on('keydown', function (e) {
+ return _this.handleKeyDown(e);
+ }); // All the menu item instances share the same blur handler provided by the menu container.
+
+
+ _this.boundHandleBlur_ = function (e) {
+ return _this.handleBlur(e);
+ };
+
+ _this.boundHandleTapClick_ = function (e) {
+ return _this.handleTapClick(e);
+ };
+
+ return _this;
+ }
+ /**
+ * Add event listeners to the {@link MenuItem}.
+ *
+ * @param {Object} component
+ * The instance of the `MenuItem` to add listeners to.
+ *
+ */
+
+
+ var _proto = Menu.prototype;
+
+ _proto.addEventListenerForItem = function addEventListenerForItem(component) {
+ if (!(component instanceof Component$1)) {
+ return;
+ }
+
+ this.on(component, 'blur', this.boundHandleBlur_);
+ this.on(component, ['tap', 'click'], this.boundHandleTapClick_);
+ }
+ /**
+ * Remove event listeners from the {@link MenuItem}.
+ *
+ * @param {Object} component
+ * The instance of the `MenuItem` to remove listeners.
+ *
+ */
+ ;
+
+ _proto.removeEventListenerForItem = function removeEventListenerForItem(component) {
+ if (!(component instanceof Component$1)) {
+ return;
+ }
+
+ this.off(component, 'blur', this.boundHandleBlur_);
+ this.off(component, ['tap', 'click'], this.boundHandleTapClick_);
+ }
+ /**
+   * This method is called indirectly when a component that was previously added
+   * to another menu is added to this menu instance via `addItem`.
+   * In that case, the original menu instance removes the component
+   * by calling `removeChild`.
+ *
+ * @param {Object} component
+ * The instance of the `MenuItem`
+ */
+ ;
+
+ _proto.removeChild = function removeChild(component) {
+ if (typeof component === 'string') {
+ component = this.getChild(component);
+ }
+
+ this.removeEventListenerForItem(component);
+
+ _Component.prototype.removeChild.call(this, component);
+ }
+ /**
+ * Add a {@link MenuItem} to the menu.
+ *
+ * @param {Object|string} component
+ * The name or instance of the `MenuItem` to add.
+ *
+ */
+ ;
+
+ _proto.addItem = function addItem(component) {
+ var childComponent = this.addChild(component);
+
+ if (childComponent) {
+ this.addEventListenerForItem(childComponent);
+ }
+ }
+ /**
+ * Create the `Menu`s DOM element.
+ *
+ * @return {Element}
+ * the element that was created
+ */
+ ;
+
+ _proto.createEl = function createEl$1() {
+ var contentElType = this.options_.contentElType || 'ul';
+ this.contentEl_ = createEl(contentElType, {
+ className: 'vjs-menu-content'
+ });
+ this.contentEl_.setAttribute('role', 'menu');
+
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ append: this.contentEl_,
+ className: 'vjs-menu'
+ });
+
+ el.appendChild(this.contentEl_); // Prevent clicks from bubbling up. Needed for Menu Buttons,
+ // where a click on the parent is significant
+
+ on(el, 'click', function (event) {
+ event.preventDefault();
+ event.stopImmediatePropagation();
+ });
+ return el;
+ };
+
+ _proto.dispose = function dispose() {
+ this.contentEl_ = null;
+ this.boundHandleBlur_ = null;
+ this.boundHandleTapClick_ = null;
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Called when a `MenuItem` loses focus.
+ *
+ * @param {EventTarget~Event} event
+ * The `blur` event that caused this function to be called.
+ *
+ * @listens blur
+ */
+ ;
+
+ _proto.handleBlur = function handleBlur(event) {
+ var relatedTarget = event.relatedTarget || document__default['default'].activeElement; // Close menu popup when a user clicks outside the menu
+
+ if (!this.children().some(function (element) {
+ return element.el() === relatedTarget;
+ })) {
+ var btn = this.menuButton_;
+
+ if (btn && btn.buttonPressed_ && relatedTarget !== btn.el().firstChild) {
+ btn.unpressButton();
+ }
+ }
+ }
+ /**
+ * Called when a `MenuItem` gets clicked or tapped.
+ *
+ * @param {EventTarget~Event} event
+ * The `click` or `tap` event that caused this function to be called.
+ *
+ * @listens click,tap
+ */
+ ;
+
+ _proto.handleTapClick = function handleTapClick(event) {
+ // Unpress the associated MenuButton, and move focus back to it
+ if (this.menuButton_) {
+ this.menuButton_.unpressButton();
+ var childComponents = this.children();
+
+ if (!Array.isArray(childComponents)) {
+ return;
+ }
+
+ var foundComponent = childComponents.filter(function (component) {
+ return component.el() === event.target;
+ })[0];
+
+ if (!foundComponent) {
+ return;
+ } // don't focus menu button if item is a caption settings item
+ // because focus will move elsewhere
+
+
+ if (foundComponent.name() !== 'CaptionSettingsMenuItem') {
+ this.menuButton_.focus();
+ }
+ }
+ }
+ /**
+ * Handle a `keydown` event on this menu. This listener is added in the constructor.
+ *
+ * @param {EventTarget~Event} event
+ * A `keydown` event that happened on the menu.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Left and Down Arrows
+ if (keycode__default['default'].isEventKey(event, 'Left') || keycode__default['default'].isEventKey(event, 'Down')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.stepForward(); // Up and Right Arrows
+ } else if (keycode__default['default'].isEventKey(event, 'Right') || keycode__default['default'].isEventKey(event, 'Up')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.stepBack();
+ }
+ }
+ /**
+ * Move to next (lower) menu item for keyboard users.
+ */
+ ;
+
+ _proto.stepForward = function stepForward() {
+ var stepChild = 0;
+
+ if (this.focusedChild_ !== undefined) {
+ stepChild = this.focusedChild_ + 1;
+ }
+
+ this.focus(stepChild);
+ }
+ /**
+ * Move to previous (higher) menu item for keyboard users.
+ */
+ ;
+
+ _proto.stepBack = function stepBack() {
+ var stepChild = 0;
+
+ if (this.focusedChild_ !== undefined) {
+ stepChild = this.focusedChild_ - 1;
+ }
+
+ this.focus(stepChild);
+ }
+ /**
+ * Set focus on a {@link MenuItem} in the `Menu`.
+ *
+ * @param {Object|string} [item=0]
+ * Index of child item set focus on.
+ */
+ ;
+
+ _proto.focus = function focus(item) {
+ if (item === void 0) {
+ item = 0;
+ }
+
+ var children = this.children().slice();
+ var haveTitle = children.length && children[0].hasClass('vjs-menu-title');
+
+ if (haveTitle) {
+ children.shift();
+ }
+
+ if (children.length > 0) {
+ if (item < 0) {
+ item = 0;
+ } else if (item >= children.length) {
+ item = children.length - 1;
+ }
+
+ this.focusedChild_ = item;
+ children[item].el_.focus();
+ }
+ };
+
+ return Menu;
+}(Component$1);
+
+Component$1.registerComponent('Menu', Menu);
+
+/**
+ * A `MenuButton` class for any popup {@link Menu}.
+ *
+ * @extends Component
+ */
+
+var MenuButton = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](MenuButton, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ */
+ function MenuButton(player, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _Component.call(this, player, options) || this;
+ _this.menuButton_ = new Button(player, options);
+
+ _this.menuButton_.controlText(_this.controlText_);
+
+ _this.menuButton_.el_.setAttribute('aria-haspopup', 'true'); // Add buildCSSClass values to the button, not the wrapper
+
+
+ var buttonClass = Button.prototype.buildCSSClass();
+ _this.menuButton_.el_.className = _this.buildCSSClass() + ' ' + buttonClass;
+
+ _this.menuButton_.removeClass('vjs-control');
+
+ _this.addChild(_this.menuButton_);
+
+ _this.update();
+
+ _this.enabled_ = true;
+
+ var handleClick = function handleClick(e) {
+ return _this.handleClick(e);
+ };
+
+ _this.handleMenuKeyUp_ = function (e) {
+ return _this.handleMenuKeyUp(e);
+ };
+
+ _this.on(_this.menuButton_, 'tap', handleClick);
+
+ _this.on(_this.menuButton_, 'click', handleClick);
+
+ _this.on(_this.menuButton_, 'keydown', function (e) {
+ return _this.handleKeyDown(e);
+ });
+
+ _this.on(_this.menuButton_, 'mouseenter', function () {
+ _this.addClass('vjs-hover');
+
+ _this.menu.show();
+
+ on(document__default['default'], 'keyup', _this.handleMenuKeyUp_);
+ });
+
+ _this.on('mouseleave', function (e) {
+ return _this.handleMouseLeave(e);
+ });
+
+ _this.on('keydown', function (e) {
+ return _this.handleSubmenuKeyDown(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Update the menu based on the current state of its items.
+ */
+
+
+ var _proto = MenuButton.prototype;
+
+ _proto.update = function update() {
+ var menu = this.createMenu();
+
+ if (this.menu) {
+ this.menu.dispose();
+ this.removeChild(this.menu);
+ }
+
+ this.menu = menu;
+ this.addChild(menu);
+ /**
+ * Track the state of the menu button
+ *
+ * @type {Boolean}
+ * @private
+ */
+
+ this.buttonPressed_ = false;
+ this.menuButton_.el_.setAttribute('aria-expanded', 'false');
+
+ if (this.items && this.items.length <= this.hideThreshold_) {
+ this.hide();
+ this.menu.contentEl_.removeAttribute('role');
+ } else {
+ this.show();
+ this.menu.contentEl_.setAttribute('role', 'menu');
+ }
+ }
+ /**
+ * Create the menu and add all items to it.
+ *
+ * @return {Menu}
+ * The constructed menu
+ */
+ ;
+
+ _proto.createMenu = function createMenu() {
+ var menu = new Menu(this.player_, {
+ menuButton: this
+ });
+ /**
+ * Hide the menu if the number of items is less than or equal to this threshold. This defaults
+ * to 0 and whenever we add items which can be hidden to the menu we'll increment it. We list
+ * it here because every time we run `createMenu` we need to reset the value.
+ *
+ * @protected
+ * @type {Number}
+ */
+
+ this.hideThreshold_ = 0; // Add a title list item to the top
+
+ if (this.options_.title) {
+ var titleEl = createEl('li', {
+ className: 'vjs-menu-title',
+ textContent: toTitleCase$1(this.options_.title),
+ tabIndex: -1
+ });
+ var titleComponent = new Component$1(this.player_, {
+ el: titleEl
+ });
+ menu.addItem(titleComponent);
+ }
+
+ this.items = this.createItems();
+
+ if (this.items) {
+ // Add menu items to the menu
+ for (var i = 0; i < this.items.length; i++) {
+ menu.addItem(this.items[i]);
+ }
+ }
+
+ return menu;
+ }
+ /**
+ * Create the list of menu items. Specific to each subclass.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.createItems = function createItems() {}
+ /**
+ * Create the `MenuButtons`s DOM element.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: this.buildWrapperCSSClass()
+ }, {});
+ }
+ /**
+ * Allow sub components to stack CSS class names for the wrapper element
+ *
+ * @return {string}
+ * The constructed wrapper DOM `className`
+ */
+ ;
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ var menuButtonClass = 'vjs-menu-button'; // If the inline option is passed, we want to use different styles altogether.
+
+ if (this.options_.inline === true) {
+ menuButtonClass += '-inline';
+ } else {
+ menuButtonClass += '-popup';
+ } // TODO: Fix the CSS so that this isn't necessary
+
+
+ var buttonClass = Button.prototype.buildCSSClass();
+ return "vjs-menu-button " + menuButtonClass + " " + buttonClass + " " + _Component.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ ;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ var menuButtonClass = 'vjs-menu-button'; // If the inline option is passed, we want to use different styles altogether.
+
+ if (this.options_.inline === true) {
+ menuButtonClass += '-inline';
+ } else {
+ menuButtonClass += '-popup';
+ }
+
+ return "vjs-menu-button " + menuButtonClass + " " + _Component.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Get or set the localized control text that will be used for accessibility.
+ *
+ * > NOTE: This will come from the internal `menuButton_` element.
+ *
+ * @param {string} [text]
+ * Control text for element.
+ *
+ * @param {Element} [el=this.menuButton_.el()]
+ * Element to set the title on.
+ *
+ * @return {string}
+ * - The control text when getting
+ */
+ ;
+
+ _proto.controlText = function controlText(text, el) {
+ if (el === void 0) {
+ el = this.menuButton_.el();
+ }
+
+ return this.menuButton_.controlText(text, el);
+ }
+ /**
+ * Dispose of the `menu-button` and all child components.
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ this.handleMouseLeave();
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Handle a click on a `MenuButton`.
+ * See {@link ClickableComponent#handleClick} for instances where this is called.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ if (this.buttonPressed_) {
+ this.unpressButton();
+ } else {
+ this.pressButton();
+ }
+ }
+ /**
+ * Handle `mouseleave` for `MenuButton`.
+ *
+ * @param {EventTarget~Event} event
+ * The `mouseleave` event that caused this function to be called.
+ *
+ * @listens mouseleave
+ */
+ ;
+
+ _proto.handleMouseLeave = function handleMouseLeave(event) {
+ this.removeClass('vjs-hover');
+ off(document__default['default'], 'keyup', this.handleMenuKeyUp_);
+ }
+ /**
+ * Set the focus to the actual button, not to this element
+ */
+ ;
+
+ _proto.focus = function focus() {
+ this.menuButton_.focus();
+ }
+ /**
+ * Remove the focus from the actual button, not this element
+ */
+ ;
+
+ _proto.blur = function blur() {
+ this.menuButton_.blur();
+ }
+ /**
+ * Handle tab, escape, down arrow, and up arrow keys for `MenuButton`. See
+ * {@link ClickableComponent#handleKeyDown} for instances where this is called.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Escape or Tab unpress the 'button'
+ if (keycode__default['default'].isEventKey(event, 'Esc') || keycode__default['default'].isEventKey(event, 'Tab')) {
+ if (this.buttonPressed_) {
+ this.unpressButton();
+ } // Don't preventDefault for Tab key - we still want to lose focus
+
+
+ if (!keycode__default['default'].isEventKey(event, 'Tab')) {
+ event.preventDefault(); // Set focus back to the menu button's button
+
+ this.menuButton_.focus();
+ } // Up Arrow or Down Arrow also 'press' the button to open the menu
+
+ } else if (keycode__default['default'].isEventKey(event, 'Up') || keycode__default['default'].isEventKey(event, 'Down')) {
+ if (!this.buttonPressed_) {
+ event.preventDefault();
+ this.pressButton();
+ }
+ }
+ }
+ /**
+ * Handle a `keyup` event on a `MenuButton`. The listener for this is added in
+ * the constructor.
+ *
+ * @param {EventTarget~Event} event
+ * Key press event
+ *
+ * @listens keyup
+ */
+ ;
+
+ _proto.handleMenuKeyUp = function handleMenuKeyUp(event) {
+ // Escape hides popup menu
+ if (keycode__default['default'].isEventKey(event, 'Esc') || keycode__default['default'].isEventKey(event, 'Tab')) {
+ this.removeClass('vjs-hover');
+ }
+ }
+ /**
+   * This method now delegates to `handleSubmenuKeyDown`, so existing callers
+   * of `handleSubmenuKeyPress` keep working.
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to be called.
+ */
+ ;
+
+ _proto.handleSubmenuKeyPress = function handleSubmenuKeyPress(event) {
+ this.handleSubmenuKeyDown(event);
+ }
+ /**
+ * Handle a `keydown` event on a sub-menu. The listener for this is added in
+ * the constructor.
+ *
+ * @param {EventTarget~Event} event
+ * Key press event
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleSubmenuKeyDown = function handleSubmenuKeyDown(event) {
+ // Escape or Tab unpress the 'button'
+ if (keycode__default['default'].isEventKey(event, 'Esc') || keycode__default['default'].isEventKey(event, 'Tab')) {
+ if (this.buttonPressed_) {
+ this.unpressButton();
+ } // Don't preventDefault for Tab key - we still want to lose focus
+
+
+ if (!keycode__default['default'].isEventKey(event, 'Tab')) {
+ event.preventDefault(); // Set focus back to the menu button's button
+
+ this.menuButton_.focus();
+ }
+ }
+ }
+ /**
+ * Put the current `MenuButton` into a pressed state.
+ */
+ ;
+
+ _proto.pressButton = function pressButton() {
+ if (this.enabled_) {
+ this.buttonPressed_ = true;
+ this.menu.show();
+ this.menu.lockShowing();
+ this.menuButton_.el_.setAttribute('aria-expanded', 'true'); // set the focus into the submenu, except on iOS where it is resulting in
+ // undesired scrolling behavior when the player is in an iframe
+
+ if (IS_IOS && isInFrame()) {
+ // Return early so that the menu isn't focused
+ return;
+ }
+
+ this.menu.focus();
+ }
+ }
+ /**
+ * Take the current `MenuButton` out of a pressed state.
+ */
+ ;
+
+ _proto.unpressButton = function unpressButton() {
+ if (this.enabled_) {
+ this.buttonPressed_ = false;
+ this.menu.unlockShowing();
+ this.menu.hide();
+ this.menuButton_.el_.setAttribute('aria-expanded', 'false');
+ }
+ }
+ /**
+ * Disable the `MenuButton`. Don't allow it to be clicked.
+ */
+ ;
+
+ _proto.disable = function disable() {
+ this.unpressButton();
+ this.enabled_ = false;
+ this.addClass('vjs-disabled');
+ this.menuButton_.disable();
+ }
+ /**
+ * Enable the `MenuButton`. Allow it to be clicked.
+ */
+ ;
+
+ _proto.enable = function enable() {
+ this.enabled_ = true;
+ this.removeClass('vjs-disabled');
+ this.menuButton_.enable();
+ };
+
+ return MenuButton;
+}(Component$1);
+
+Component$1.registerComponent('MenuButton', MenuButton);
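+/*
+ * Usage sketch (illustrative only, not part of the video.js bundle): custom
+ * menus are usually built by subclassing MenuButton and overriding
+ * `createItems`. A minimal sketch with hard-coded, hypothetical labels:
+ *
+ *   var MenuButton = videojs.getComponent('MenuButton');
+ *   var MenuItem = videojs.getComponent('MenuItem');
+ *   var ExampleMenuButton = videojs.extend(MenuButton, {
+ *     createItems: function () {
+ *       var player = this.player();
+ *       return ['One', 'Two', 'Three'].map(function (label) {
+ *         return new MenuItem(player, { label: label, selectable: true, multiSelectable: false });
+ *       });
+ *     }
+ *   });
+ *   videojs.registerComponent('ExampleMenuButton', ExampleMenuButton);
+ */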
+
+/**
+ * The base class for buttons that toggle specific track types (e.g. subtitles).
+ *
+ * @extends MenuButton
+ */
+
+var TrackButton = /*#__PURE__*/function (_MenuButton) {
+ _inheritsLoose__default['default'](TrackButton, _MenuButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function TrackButton(player, options) {
+ var _this;
+
+ var tracks = options.tracks;
+ _this = _MenuButton.call(this, player, options) || this;
+
+ if (_this.items.length <= 1) {
+ _this.hide();
+ }
+
+ if (!tracks) {
+ return _assertThisInitialized__default['default'](_this);
+ }
+
+ var updateHandler = bind(_assertThisInitialized__default['default'](_this), _this.update);
+ tracks.addEventListener('removetrack', updateHandler);
+ tracks.addEventListener('addtrack', updateHandler);
+ tracks.addEventListener('labelchange', updateHandler);
+
+ _this.player_.on('ready', updateHandler);
+
+ _this.player_.on('dispose', function () {
+ tracks.removeEventListener('removetrack', updateHandler);
+ tracks.removeEventListener('addtrack', updateHandler);
+ tracks.removeEventListener('labelchange', updateHandler);
+ });
+
+ return _this;
+ }
+
+ return TrackButton;
+}(MenuButton);
+
+Component$1.registerComponent('TrackButton', TrackButton);
+
+/**
+ * @file menu-keys.js
+ */
+
+/**
+ * All keys used for operation of a menu (`MenuButton`, `Menu`, and `MenuItem`)
+ * Note that 'Enter' and 'Space' are not included here (otherwise they would
+ * prevent the `MenuButton` and `MenuItem` from being keyboard-clickable)
+ * @typedef MenuKeys
+ * @array
+ */
+var MenuKeys = ['Tab', 'Esc', 'Up', 'Down', 'Right', 'Left'];
+
+/**
+ * The component for a menu item. `<li>`
+ *
+ * @extends ClickableComponent
+ */
+
+var MenuItem = /*#__PURE__*/function (_ClickableComponent) {
+ _inheritsLoose__default['default'](MenuItem, _ClickableComponent);
+
+ /**
+   * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ *
+ */
+ function MenuItem(player, options) {
+ var _this;
+
+ _this = _ClickableComponent.call(this, player, options) || this;
+ _this.selectable = options.selectable;
+ _this.isSelected_ = options.selected || false;
+ _this.multiSelectable = options.multiSelectable;
+
+ _this.selected(_this.isSelected_);
+
+ if (_this.selectable) {
+ if (_this.multiSelectable) {
+ _this.el_.setAttribute('role', 'menuitemcheckbox');
+ } else {
+ _this.el_.setAttribute('role', 'menuitemradio');
+ }
+ } else {
+ _this.el_.setAttribute('role', 'menuitem');
+ }
+
+ return _this;
+ }
+ /**
+ * Create the `MenuItem's DOM element
+ *
+ * @param {string} [type=li]
+ * Element's node type, not actually used, always set to `li`.
+ *
+ * @param {Object} [props={}]
+ * An object of properties that should be set on the element
+ *
+ * @param {Object} [attrs={}]
+ * An object of attributes that should be set on the element
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+
+
+ var _proto = MenuItem.prototype;
+
+ _proto.createEl = function createEl$1(type, props, attrs) {
+ // The control is textual, not just an icon
+ this.nonIconControl = true;
+
+ var el = _ClickableComponent.prototype.createEl.call(this, 'li', assign({
+ className: 'vjs-menu-item',
+ tabIndex: -1
+ }, props), attrs); // swap icon with menu item text.
+
+
+ el.replaceChild(createEl('span', {
+ className: 'vjs-menu-item-text',
+ textContent: this.localize(this.options_.label)
+ }), el.querySelector('.vjs-icon-placeholder'));
+ return el;
+ }
+ /**
+ * Ignore keys which are used by the menu, but pass any other ones up. See
+ * {@link ClickableComponent#handleKeyDown} for instances where this is called.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ if (!MenuKeys.some(function (key) {
+ return keycode__default['default'].isEventKey(event, key);
+ })) {
+ // Pass keydown handling up for unused keys
+ _ClickableComponent.prototype.handleKeyDown.call(this, event);
+ }
+ }
+ /**
+ * Any click on a `MenuItem` puts it into the selected state.
+ * See {@link ClickableComponent#handleClick} for instances where this is called.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ this.selected(true);
+ }
+ /**
+ * Set the state for this menu item as selected or not.
+ *
+ * @param {boolean} selected
+ * if the menu item is selected or not
+ */
+ ;
+
+ _proto.selected = function selected(_selected) {
+ if (this.selectable) {
+ if (_selected) {
+ this.addClass('vjs-selected');
+ this.el_.setAttribute('aria-checked', 'true'); // aria-checked isn't fully supported by browsers/screen readers,
+ // so indicate selected state to screen reader in the control text.
+
+ this.controlText(', selected');
+ this.isSelected_ = true;
+ } else {
+ this.removeClass('vjs-selected');
+ this.el_.setAttribute('aria-checked', 'false'); // Indicate un-selected state to screen reader
+
+ this.controlText('');
+ this.isSelected_ = false;
+ }
+ }
+ };
+
+ return MenuItem;
+}(ClickableComponent);
+
+Component$1.registerComponent('MenuItem', MenuItem);
+
+/**
+ * The specific menu item type for selecting a language within a text track kind
+ *
+ * @extends MenuItem
+ */
+
+var TextTrackMenuItem = /*#__PURE__*/function (_MenuItem) {
+ _inheritsLoose__default['default'](TextTrackMenuItem, _MenuItem);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function TextTrackMenuItem(player, options) {
+ var _this;
+
+ var track = options.track;
+ var tracks = player.textTracks(); // Modify options for parent MenuItem class's init.
+
+ options.label = track.label || track.language || 'Unknown';
+ options.selected = track.mode === 'showing';
+ _this = _MenuItem.call(this, player, options) || this;
+ _this.track = track; // Determine the relevant kind(s) of tracks for this component and filter
+ // out empty kinds.
+
+ _this.kinds = (options.kinds || [options.kind || _this.track.kind]).filter(Boolean);
+
+ var changeHandler = function changeHandler() {
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ _this.handleTracksChange.apply(_assertThisInitialized__default['default'](_this), args);
+ };
+
+ var selectedLanguageChangeHandler = function selectedLanguageChangeHandler() {
+ for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
+ args[_key2] = arguments[_key2];
+ }
+
+ _this.handleSelectedLanguageChange.apply(_assertThisInitialized__default['default'](_this), args);
+ };
+
+ player.on(['loadstart', 'texttrackchange'], changeHandler);
+ tracks.addEventListener('change', changeHandler);
+ tracks.addEventListener('selectedlanguagechange', selectedLanguageChangeHandler);
+
+ _this.on('dispose', function () {
+ player.off(['loadstart', 'texttrackchange'], changeHandler);
+ tracks.removeEventListener('change', changeHandler);
+ tracks.removeEventListener('selectedlanguagechange', selectedLanguageChangeHandler);
+ }); // iOS7 doesn't dispatch change events to TextTrackLists when an
+ // associated track's mode changes. Without something like
+ // Object.observe() (also not present on iOS7), it's not
+ // possible to detect changes to the mode attribute and polyfill
+ // the change event. As a poor substitute, we manually dispatch
+ // change events whenever the controls modify the mode.
+
+
+ if (tracks.onchange === undefined) {
+ var event;
+
+ _this.on(['tap', 'click'], function () {
+ if (typeof window__default['default'].Event !== 'object') {
+ // Android 2.3 throws an Illegal Constructor error for window.Event
+ try {
+ event = new window__default['default'].Event('change');
+ } catch (err) {// continue regardless of error
+ }
+ }
+
+ if (!event) {
+ event = document__default['default'].createEvent('Event');
+ event.initEvent('change', true, true);
+ }
+
+ tracks.dispatchEvent(event);
+ });
+ } // set the default state based on current tracks
+
+
+ _this.handleTracksChange();
+
+ return _this;
+ }
+ /**
+ * This gets called when an `TextTrackMenuItem` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+
+
+ var _proto = TextTrackMenuItem.prototype;
+
+ _proto.handleClick = function handleClick(event) {
+ var referenceTrack = this.track;
+ var tracks = this.player_.textTracks();
+
+ _MenuItem.prototype.handleClick.call(this, event);
+
+ if (!tracks) {
+ return;
+ }
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i]; // If the track from the text tracks list is not of the right kind,
+ // skip it. We do not want to affect tracks of incompatible kind(s).
+
+ if (this.kinds.indexOf(track.kind) === -1) {
+ continue;
+ } // If this text track is the component's track and it is not showing,
+ // set it to showing.
+
+
+ if (track === referenceTrack) {
+ if (track.mode !== 'showing') {
+ track.mode = 'showing';
+ } // If this text track is not the component's track and it is not
+ // disabled, set it to disabled.
+
+ } else if (track.mode !== 'disabled') {
+ track.mode = 'disabled';
+ }
+ }
+ }
+ /**
+ * Handle text track list change
+ *
+ * @param {EventTarget~Event} event
+ * The `change` event that caused this function to be called.
+ *
+ * @listens TextTrackList#change
+ */
+ ;
+
+ _proto.handleTracksChange = function handleTracksChange(event) {
+ var shouldBeSelected = this.track.mode === 'showing'; // Prevent redundant selected() calls because they may cause
+ // screen readers to read the appended control text unnecessarily
+
+ if (shouldBeSelected !== this.isSelected_) {
+ this.selected(shouldBeSelected);
+ }
+ };
+
+ _proto.handleSelectedLanguageChange = function handleSelectedLanguageChange(event) {
+ if (this.track.mode === 'showing') {
+ var selectedLanguage = this.player_.cache_.selectedLanguage; // Don't replace the kind of track across the same language
+
+ if (selectedLanguage && selectedLanguage.enabled && selectedLanguage.language === this.track.language && selectedLanguage.kind !== this.track.kind) {
+ return;
+ }
+
+ this.player_.cache_.selectedLanguage = {
+ enabled: true,
+ language: this.track.language,
+ kind: this.track.kind
+ };
+ }
+ };
+
+ _proto.dispose = function dispose() {
+ // remove reference to track object on dispose
+ this.track = null;
+
+ _MenuItem.prototype.dispose.call(this);
+ };
+
+ return TextTrackMenuItem;
+}(MenuItem);
+
+Component$1.registerComponent('TextTrackMenuItem', TextTrackMenuItem);
+
+/**
+ * A special menu item for turning off a specific type of text track
+ *
+ * @extends TextTrackMenuItem
+ */
+
+var OffTextTrackMenuItem = /*#__PURE__*/function (_TextTrackMenuItem) {
+ _inheritsLoose__default['default'](OffTextTrackMenuItem, _TextTrackMenuItem);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function OffTextTrackMenuItem(player, options) {
+ // Create pseudo track info
+ // Requires options['kind']
+ options.track = {
+ player: player,
+ // it is no longer necessary to store `kind` or `kinds` on the track itself
+ // since they are now stored in the `kinds` property of all instances of
+ // TextTrackMenuItem, but this will remain for backwards compatibility
+ kind: options.kind,
+ kinds: options.kinds,
+ "default": false,
+ mode: 'disabled'
+ };
+
+ if (!options.kinds) {
+ options.kinds = [options.kind];
+ }
+
+ if (options.label) {
+ options.track.label = options.label;
+ } else {
+ options.track.label = options.kinds.join(' and ') + ' off';
+ } // MenuItem is selectable
+
+
+ options.selectable = true; // MenuItem is NOT multiSelectable (i.e. only one can be marked "selected" at a time)
+
+ options.multiSelectable = false;
+ return _TextTrackMenuItem.call(this, player, options) || this;
+ }
+ /**
+ * Handle text track change
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to run
+ */
+
+
+ var _proto = OffTextTrackMenuItem.prototype;
+
+ _proto.handleTracksChange = function handleTracksChange(event) {
+ var tracks = this.player().textTracks();
+ var shouldBeSelected = true;
+
+ for (var i = 0, l = tracks.length; i < l; i++) {
+ var track = tracks[i];
+
+ if (this.options_.kinds.indexOf(track.kind) > -1 && track.mode === 'showing') {
+ shouldBeSelected = false;
+ break;
+ }
+ } // Prevent redundant selected() calls because they may cause
+ // screen readers to read the appended control text unnecessarily
+
+
+ if (shouldBeSelected !== this.isSelected_) {
+ this.selected(shouldBeSelected);
+ }
+ };
+
+ _proto.handleSelectedLanguageChange = function handleSelectedLanguageChange(event) {
+ var tracks = this.player().textTracks();
+ var allHidden = true;
+
+ for (var i = 0, l = tracks.length; i < l; i++) {
+ var track = tracks[i];
+
+ if (['captions', 'descriptions', 'subtitles'].indexOf(track.kind) > -1 && track.mode === 'showing') {
+ allHidden = false;
+ break;
+ }
+ }
+
+ if (allHidden) {
+ this.player_.cache_.selectedLanguage = {
+ enabled: false
+ };
+ }
+ };
+
+ return OffTextTrackMenuItem;
+}(TextTrackMenuItem);
+
+Component$1.registerComponent('OffTextTrackMenuItem', OffTextTrackMenuItem);
+
+/**
+ * The base class for buttons that toggle specific text track types (e.g. subtitles)
+ *
+ * @extends MenuButton
+ */
+
+var TextTrackButton = /*#__PURE__*/function (_TrackButton) {
+ _inheritsLoose__default['default'](TextTrackButton, _TrackButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ */
+ function TextTrackButton(player, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ options.tracks = player.textTracks();
+ return _TrackButton.call(this, player, options) || this;
+ }
+ /**
+ * Create a menu item for each text track
+ *
+ * @param {TextTrackMenuItem[]} [items=[]]
+ * Existing array of items to use during creation
+ *
+ * @return {TextTrackMenuItem[]}
+ * Array of menu items that were created
+ */
+
+
+ var _proto = TextTrackButton.prototype;
+
+ _proto.createItems = function createItems(items, TrackMenuItem) {
+ if (items === void 0) {
+ items = [];
+ }
+
+ if (TrackMenuItem === void 0) {
+ TrackMenuItem = TextTrackMenuItem;
+ }
+
+ // Label is an override for the [track] off label
+    // Used to localise captions/subtitles
+ var label;
+
+ if (this.label_) {
+ label = this.label_ + " off";
+ } // Add an OFF menu item to turn all tracks off
+
+
+ items.push(new OffTextTrackMenuItem(this.player_, {
+ kinds: this.kinds_,
+ kind: this.kind_,
+ label: label
+ }));
+ this.hideThreshold_ += 1;
+ var tracks = this.player_.textTracks();
+
+ if (!Array.isArray(this.kinds_)) {
+ this.kinds_ = [this.kind_];
+ }
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i]; // only add tracks that are of an appropriate kind and have a label
+
+ if (this.kinds_.indexOf(track.kind) > -1) {
+ var item = new TrackMenuItem(this.player_, {
+ track: track,
+ kinds: this.kinds_,
+ kind: this.kind_,
+ // MenuItem is selectable
+ selectable: true,
+ // MenuItem is NOT multiSelectable (i.e. only one can be marked "selected" at a time)
+ multiSelectable: false
+ });
+ item.addClass("vjs-" + track.kind + "-menu-item");
+ items.push(item);
+ }
+ }
+
+ return items;
+ };
+
+ return TextTrackButton;
+}(TrackButton);
+
+Component$1.registerComponent('TextTrackButton', TextTrackButton);
+
+/**
+ * The chapter track menu item
+ *
+ * @extends MenuItem
+ */
+
+var ChaptersTrackMenuItem = /*#__PURE__*/function (_MenuItem) {
+ _inheritsLoose__default['default'](ChaptersTrackMenuItem, _MenuItem);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function ChaptersTrackMenuItem(player, options) {
+ var _this;
+
+ var track = options.track;
+ var cue = options.cue;
+ var currentTime = player.currentTime(); // Modify options for parent MenuItem class's init.
+
+ options.selectable = true;
+ options.multiSelectable = false;
+ options.label = cue.text;
+ options.selected = cue.startTime <= currentTime && currentTime < cue.endTime;
+ _this = _MenuItem.call(this, player, options) || this;
+ _this.track = track;
+ _this.cue = cue;
+ return _this;
+ }
+ /**
+ * This gets called when an `ChaptersTrackMenuItem` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+
+
+ var _proto = ChaptersTrackMenuItem.prototype;
+
+ _proto.handleClick = function handleClick(event) {
+ _MenuItem.prototype.handleClick.call(this);
+
+ this.player_.currentTime(this.cue.startTime);
+ };
+
+ return ChaptersTrackMenuItem;
+}(MenuItem);
+
+Component$1.registerComponent('ChaptersTrackMenuItem', ChaptersTrackMenuItem);
+
+/**
+ * The button component for toggling and selecting chapters
+ * Chapters behave quite differently from other text tracks:
+ * their cues are used for navigation rather than for alternative languages
+ *
+ * @extends TextTrackButton
+ */
+
+var ChaptersButton = /*#__PURE__*/function (_TextTrackButton) {
+ _inheritsLoose__default['default'](ChaptersButton, _TextTrackButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * The function to call when this function is ready.
+ */
+ function ChaptersButton(player, options, ready) {
+ var _this;
+
+ _this = _TextTrackButton.call(this, player, options, ready) || this;
+
+ _this.selectCurrentItem_ = function () {
+ _this.items.forEach(function (item) {
+ item.selected(_this.track_.activeCues[0] === item.cue);
+ });
+ };
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = ChaptersButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-chapters-button " + _TextTrackButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-chapters-button " + _TextTrackButton.prototype.buildWrapperCSSClass.call(this);
+ }
+ /**
+ * Update the menu based on the current state of its items.
+ *
+ * @param {EventTarget~Event} [event]
+ * An event that triggered this function to run.
+ *
+ * @listens TextTrackList#addtrack
+ * @listens TextTrackList#removetrack
+ * @listens TextTrackList#change
+ */
+ ;
+
+ _proto.update = function update(event) {
+ if (event && event.track && event.track.kind !== 'chapters') {
+ return;
+ }
+
+ var track = this.findChaptersTrack();
+
+ if (track !== this.track_) {
+ this.setTrack(track);
+
+ _TextTrackButton.prototype.update.call(this);
+ } else if (!this.items || track && track.cues && track.cues.length !== this.items.length) {
+ // Update the menu initially or if the number of cues has changed since set
+ _TextTrackButton.prototype.update.call(this);
+ }
+ }
+ /**
+ * Set the currently selected track for the chapters button.
+ *
+ * @param {TextTrack} track
+ * The new track to select. Nothing will change if this is the currently selected
+ * track.
+ */
+ ;
+
+ _proto.setTrack = function setTrack(track) {
+ if (this.track_ === track) {
+ return;
+ }
+
+ if (!this.updateHandler_) {
+ this.updateHandler_ = this.update.bind(this);
+ } // here this.track_ refers to the old track instance
+
+
+ if (this.track_) {
+ var remoteTextTrackEl = this.player_.remoteTextTrackEls().getTrackElementByTrack_(this.track_);
+
+ if (remoteTextTrackEl) {
+ remoteTextTrackEl.removeEventListener('load', this.updateHandler_);
+ }
+
+ this.track_.removeEventListener('cuechange', this.selectCurrentItem_);
+ this.track_ = null;
+ }
+
+ this.track_ = track; // here this.track_ refers to the new track instance
+
+ if (this.track_) {
+ this.track_.mode = 'hidden';
+
+ var _remoteTextTrackEl = this.player_.remoteTextTrackEls().getTrackElementByTrack_(this.track_);
+
+ if (_remoteTextTrackEl) {
+ _remoteTextTrackEl.addEventListener('load', this.updateHandler_);
+ }
+
+ this.track_.addEventListener('cuechange', this.selectCurrentItem_);
+ }
+ }
+ /**
+ * Find the track object that is currently in use by this ChaptersButton
+ *
+ * @return {TextTrack|undefined}
+ * The current track or undefined if none was found.
+ */
+ ;
+
+ _proto.findChaptersTrack = function findChaptersTrack() {
+ var tracks = this.player_.textTracks() || [];
+
+ for (var i = tracks.length - 1; i >= 0; i--) {
+ // We will always choose the last track as our chaptersTrack
+ var track = tracks[i];
+
+ if (track.kind === this.kind_) {
+ return track;
+ }
+ }
+ }
+ /**
+ * Get the caption for the ChaptersButton based on the track label. This will also
+ * use the current track's localized kind as a fallback if a label does not exist.
+ *
+ * @return {string}
+ * The track's current label or the localized track kind.
+ */
+ ;
+
+ _proto.getMenuCaption = function getMenuCaption() {
+ if (this.track_ && this.track_.label) {
+ return this.track_.label;
+ }
+
+ return this.localize(toTitleCase$1(this.kind_));
+ }
+ /**
+ * Create menu from chapter track
+ *
+ * @return {Menu}
+ * New menu for the chapter buttons
+ */
+ ;
+
+ _proto.createMenu = function createMenu() {
+ this.options_.title = this.getMenuCaption();
+ return _TextTrackButton.prototype.createMenu.call(this);
+ }
+ /**
+ * Create a menu item for each text track
+ *
+ * @return {TextTrackMenuItem[]}
+ * Array of menu items
+ */
+ ;
+
+ _proto.createItems = function createItems() {
+ var items = [];
+
+ if (!this.track_) {
+ return items;
+ }
+
+ var cues = this.track_.cues;
+
+ if (!cues) {
+ return items;
+ }
+
+ for (var i = 0, l = cues.length; i < l; i++) {
+ var cue = cues[i];
+ var mi = new ChaptersTrackMenuItem(this.player_, {
+ track: this.track_,
+ cue: cue
+ });
+ items.push(mi);
+ }
+
+ return items;
+ };
+
+ return ChaptersButton;
+}(TextTrackButton);
+/**
+ * `kind` of TextTrack to look for to associate it with this menu.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+ChaptersButton.prototype.kind_ = 'chapters';
+/**
+ * The text that should display over the `ChaptersButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+ChaptersButton.prototype.controlText_ = 'Chapters';
+Component$1.registerComponent('ChaptersButton', ChaptersButton);
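+
+// Usage sketch (assumes `videojs` is available globally; the chapters WebVTT
+// URL and element id are hypothetical): the ChaptersButton only renders a menu
+// once a text track with kind "chapters" is attached, e.g. via the standard
+// `addRemoteTextTrack` API:
+//
+//   var player = videojs('my-video');
+//   player.addRemoteTextTrack({
+//     kind: 'chapters',
+//     src: 'chapters.vtt',
+//     srclang: 'en',
+//     label: 'Chapters'
+//   }, false);
+//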
+
+/**
+ * The button component for toggling and selecting descriptions
+ *
+ * @extends TextTrackButton
+ */
+
+var DescriptionsButton = /*#__PURE__*/function (_TextTrackButton) {
+ _inheritsLoose__default['default'](DescriptionsButton, _TextTrackButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * The function to call when this component is ready.
+ */
+ function DescriptionsButton(player, options, ready) {
+ var _this;
+
+ _this = _TextTrackButton.call(this, player, options, ready) || this;
+ var tracks = player.textTracks();
+ var changeHandler = bind(_assertThisInitialized__default['default'](_this), _this.handleTracksChange);
+ tracks.addEventListener('change', changeHandler);
+
+ _this.on('dispose', function () {
+ tracks.removeEventListener('change', changeHandler);
+ });
+
+ return _this;
+ }
+ /**
+ * Handle text track change
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to run
+ *
+ * @listens TextTrackList#change
+ */
+
+
+ var _proto = DescriptionsButton.prototype;
+
+ _proto.handleTracksChange = function handleTracksChange(event) {
+ var tracks = this.player().textTracks();
+ var disabled = false; // Check whether a track of a different kind is showing
+
+ for (var i = 0, l = tracks.length; i < l; i++) {
+ var track = tracks[i];
+
+ if (track.kind !== this.kind_ && track.mode === 'showing') {
+ disabled = true;
+ break;
+ }
+ } // If another track is showing, disable this menu button
+
+
+ if (disabled) {
+ this.disable();
+ } else {
+ this.enable();
+ }
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ ;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-descriptions-button " + _TextTrackButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-descriptions-button " + _TextTrackButton.prototype.buildWrapperCSSClass.call(this);
+ };
+
+ return DescriptionsButton;
+}(TextTrackButton);
+/**
+ * `kind` of TextTrack to look for to associate it with this menu.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+DescriptionsButton.prototype.kind_ = 'descriptions';
+/**
+ * The text that should display over the `DescriptionsButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+DescriptionsButton.prototype.controlText_ = 'Descriptions';
+Component$1.registerComponent('DescriptionsButton', DescriptionsButton);
+
+/**
+ * The button component for toggling and selecting subtitles
+ *
+ * @extends TextTrackButton
+ */
+
+var SubtitlesButton = /*#__PURE__*/function (_TextTrackButton) {
+ _inheritsLoose__default['default'](SubtitlesButton, _TextTrackButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * The function to call when this component is ready.
+ */
+ function SubtitlesButton(player, options, ready) {
+ return _TextTrackButton.call(this, player, options, ready) || this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = SubtitlesButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-subtitles-button " + _TextTrackButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-subtitles-button " + _TextTrackButton.prototype.buildWrapperCSSClass.call(this);
+ };
+
+ return SubtitlesButton;
+}(TextTrackButton);
+/**
+ * `kind` of TextTrack to look for to associate it with this menu.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+SubtitlesButton.prototype.kind_ = 'subtitles';
+/**
+ * The text that should display over the `SubtitlesButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+SubtitlesButton.prototype.controlText_ = 'Subtitles';
+Component$1.registerComponent('SubtitlesButton', SubtitlesButton);
+
+/**
+ * The menu item for caption track settings menu
+ *
+ * @extends TextTrackMenuItem
+ */
+
+var CaptionSettingsMenuItem = /*#__PURE__*/function (_TextTrackMenuItem) {
+ _inheritsLoose__default['default'](CaptionSettingsMenuItem, _TextTrackMenuItem);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function CaptionSettingsMenuItem(player, options) {
+ var _this;
+
+ options.track = {
+ player: player,
+ kind: options.kind,
+ label: options.kind + ' settings',
+ selectable: false,
+ "default": false,
+ mode: 'disabled'
+ }; // CaptionSettingsMenuItem has no concept of 'selected'
+
+ options.selectable = false;
+ options.name = 'CaptionSettingsMenuItem';
+ _this = _TextTrackMenuItem.call(this, player, options) || this;
+
+ _this.addClass('vjs-texttrack-settings');
+
+ _this.controlText(', opens ' + options.kind + ' settings dialog');
+
+ return _this;
+ }
+ /**
+ * This gets called when a `CaptionSettingsMenuItem` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+
+
+ var _proto = CaptionSettingsMenuItem.prototype;
+
+ _proto.handleClick = function handleClick(event) {
+ this.player().getChild('textTrackSettings').open();
+ };
+
+ return CaptionSettingsMenuItem;
+}(TextTrackMenuItem);
+
+Component$1.registerComponent('CaptionSettingsMenuItem', CaptionSettingsMenuItem);
+
+/**
+ * The button component for toggling and selecting captions
+ *
+ * @extends TextTrackButton
+ */
+
+var CaptionsButton = /*#__PURE__*/function (_TextTrackButton) {
+ _inheritsLoose__default['default'](CaptionsButton, _TextTrackButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * The function to call when this component is ready.
+ */
+ function CaptionsButton(player, options, ready) {
+ return _TextTrackButton.call(this, player, options, ready) || this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = CaptionsButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-captions-button " + _TextTrackButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-captions-button " + _TextTrackButton.prototype.buildWrapperCSSClass.call(this);
+ }
+ /**
+ * Create caption menu items
+ *
+ * @return {CaptionSettingsMenuItem[]}
+ * The array of current menu items.
+ */
+ ;
+
+ _proto.createItems = function createItems() {
+ var items = [];
+
+ if (!(this.player().tech_ && this.player().tech_.featuresNativeTextTracks) && this.player().getChild('textTrackSettings')) {
+ items.push(new CaptionSettingsMenuItem(this.player_, {
+ kind: this.kind_
+ }));
+ this.hideThreshold_ += 1;
+ }
+
+ return _TextTrackButton.prototype.createItems.call(this, items);
+ };
+
+ return CaptionsButton;
+}(TextTrackButton);
+/**
+ * `kind` of TextTrack to look for to associate it with this menu.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+CaptionsButton.prototype.kind_ = 'captions';
+/**
+ * The text that should display over the `CaptionsButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+CaptionsButton.prototype.controlText_ = 'Captions';
+Component$1.registerComponent('CaptionsButton', CaptionsButton);
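+
+// Usage sketch (assumes `videojs` is available globally; the caption file URL
+// and element id are hypothetical): the CaptionsButton lists caption tracks
+// and, when the tech does not use native text tracks, prepends a
+// CaptionSettingsMenuItem that opens the TextTrackSettings dialog:
+//
+//   var player = videojs('my-video');
+//   player.addRemoteTextTrack({
+//     kind: 'captions',
+//     src: 'captions.en.vtt',
+//     srclang: 'en',
+//     label: 'English'
+//   }, false);
+//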
+
+/**
+ * SubsCapsMenuItem has a [cc] icon to distinguish captions from subtitles
+ * in the SubsCapsMenu.
+ *
+ * @extends TextTrackMenuItem
+ */
+
+var SubsCapsMenuItem = /*#__PURE__*/function (_TextTrackMenuItem) {
+ _inheritsLoose__default['default'](SubsCapsMenuItem, _TextTrackMenuItem);
+
+ function SubsCapsMenuItem() {
+ return _TextTrackMenuItem.apply(this, arguments) || this;
+ }
+
+ var _proto = SubsCapsMenuItem.prototype;
+
+ _proto.createEl = function createEl$1(type, props, attrs) {
+ var el = _TextTrackMenuItem.prototype.createEl.call(this, type, props, attrs);
+
+ var parentSpan = el.querySelector('.vjs-menu-item-text');
+
+ if (this.options_.track.kind === 'captions') {
+ parentSpan.appendChild(createEl('span', {
+ className: 'vjs-icon-placeholder'
+ }, {
+ 'aria-hidden': true
+ }));
+ parentSpan.appendChild(createEl('span', {
+ className: 'vjs-control-text',
+ // space added as the text will visually flow with the
+ // label
+ textContent: " " + this.localize('Captions')
+ }));
+ }
+
+ return el;
+ };
+
+ return SubsCapsMenuItem;
+}(TextTrackMenuItem);
+
+Component$1.registerComponent('SubsCapsMenuItem', SubsCapsMenuItem);
+
+/**
+ * The button component for toggling and selecting captions and/or subtitles
+ *
+ * @extends TextTrackButton
+ */
+
+var SubsCapsButton = /*#__PURE__*/function (_TextTrackButton) {
+ _inheritsLoose__default['default'](SubsCapsButton, _TextTrackButton);
+
+ function SubsCapsButton(player, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _TextTrackButton.call(this, player, options) || this; // Although North America uses "captions" in most cases for
+ // "captions and subtitles" other locales use "subtitles"
+
+ _this.label_ = 'subtitles';
+
+ if (['en', 'en-us', 'en-ca', 'fr-ca'].indexOf(_this.player_.language_) > -1) {
+ _this.label_ = 'captions';
+ }
+
+ _this.menuButton_.controlText(toTitleCase$1(_this.label_));
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = SubsCapsButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-subs-caps-button " + _TextTrackButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-subs-caps-button " + _TextTrackButton.prototype.buildWrapperCSSClass.call(this);
+ }
+ /**
+ * Create caption/subtitles menu items
+ *
+ * @return {CaptionSettingsMenuItem[]}
+ * The array of current menu items.
+ */
+ ;
+
+ _proto.createItems = function createItems() {
+ var items = [];
+
+ if (!(this.player().tech_ && this.player().tech_.featuresNativeTextTracks) && this.player().getChild('textTrackSettings')) {
+ items.push(new CaptionSettingsMenuItem(this.player_, {
+ kind: this.label_
+ }));
+ this.hideThreshold_ += 1;
+ }
+
+ items = _TextTrackButton.prototype.createItems.call(this, items, SubsCapsMenuItem);
+ return items;
+ };
+
+ return SubsCapsButton;
+}(TextTrackButton);
+/**
+ * `kind`s of TextTrack to look for to associate it with this menu.
+ *
+ * @type {array}
+ * @private
+ */
+
+
+SubsCapsButton.prototype.kinds_ = ['captions', 'subtitles'];
+/**
+ * The text that should display over the `SubsCapsButton`s controls.
+ *
+ *
+ * @type {string}
+ * @private
+ */
+
+SubsCapsButton.prototype.controlText_ = 'Subtitles';
+Component$1.registerComponent('SubsCapsButton', SubsCapsButton);
+
+/**
+ * An {@link AudioTrack} {@link MenuItem}
+ *
+ * @extends MenuItem
+ */
+
+var AudioTrackMenuItem = /*#__PURE__*/function (_MenuItem) {
+ _inheritsLoose__default['default'](AudioTrackMenuItem, _MenuItem);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function AudioTrackMenuItem(player, options) {
+ var _this;
+
+ var track = options.track;
+ var tracks = player.audioTracks(); // Modify options for parent MenuItem class's init.
+
+ options.label = track.label || track.language || 'Unknown';
+ options.selected = track.enabled;
+ _this = _MenuItem.call(this, player, options) || this;
+ _this.track = track;
+
+ _this.addClass("vjs-" + track.kind + "-menu-item");
+
+ var changeHandler = function changeHandler() {
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ _this.handleTracksChange.apply(_assertThisInitialized__default['default'](_this), args);
+ };
+
+ tracks.addEventListener('change', changeHandler);
+
+ _this.on('dispose', function () {
+ tracks.removeEventListener('change', changeHandler);
+ });
+
+ return _this;
+ }
+
+ var _proto = AudioTrackMenuItem.prototype;
+
+ _proto.createEl = function createEl$1(type, props, attrs) {
+ var el = _MenuItem.prototype.createEl.call(this, type, props, attrs);
+
+ var parentSpan = el.querySelector('.vjs-menu-item-text');
+
+ if (this.options_.track.kind === 'main-desc') {
+ parentSpan.appendChild(createEl('span', {
+ className: 'vjs-icon-placeholder'
+ }, {
+ 'aria-hidden': true
+ }));
+ parentSpan.appendChild(createEl('span', {
+ className: 'vjs-control-text',
+ textContent: ' ' + this.localize('Descriptions')
+ }));
+ }
+
+ return el;
+ }
+ /**
+ * This gets called when an `AudioTrackMenuItem` is "clicked". See {@link ClickableComponent}
+ * for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ _MenuItem.prototype.handleClick.call(this, event); // the audio track list will automatically toggle other tracks
+ // off for us.
+
+
+ this.track.enabled = true; // when native audio tracks are used, we want to make sure that other tracks are turned off
+
+ if (this.player_.tech_.featuresNativeAudioTracks) {
+ var tracks = this.player_.audioTracks();
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i]; // skip the current track since we enabled it above
+
+ if (track === this.track) {
+ continue;
+ }
+
+ track.enabled = track === this.track;
+ }
+ }
+ }
+ /**
+ * Handle any {@link AudioTrack} change.
+ *
+ * @param {EventTarget~Event} [event]
+ * The {@link AudioTrackList#change} event that caused this to run.
+ *
+ * @listens AudioTrackList#change
+ */
+ ;
+
+ _proto.handleTracksChange = function handleTracksChange(event) {
+ this.selected(this.track.enabled);
+ };
+
+ return AudioTrackMenuItem;
+}(MenuItem);
+
+Component$1.registerComponent('AudioTrackMenuItem', AudioTrackMenuItem);
+
+/**
+ * The base class for buttons that toggle specific {@link AudioTrack} types.
+ *
+ * @extends TrackButton
+ */
+
+var AudioTrackButton = /*#__PURE__*/function (_TrackButton) {
+ _inheritsLoose__default['default'](AudioTrackButton, _TrackButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ */
+ function AudioTrackButton(player, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ options.tracks = player.audioTracks();
+ return _TrackButton.call(this, player, options) || this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = AudioTrackButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-audio-button " + _TrackButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-audio-button " + _TrackButton.prototype.buildWrapperCSSClass.call(this);
+ }
+ /**
+ * Create a menu item for each audio track
+ *
+ * @param {AudioTrackMenuItem[]} [items=[]]
+ * An array of existing menu items to use.
+ *
+ * @return {AudioTrackMenuItem[]}
+ * An array of menu items
+ */
+ ;
+
+ _proto.createItems = function createItems(items) {
+ if (items === void 0) {
+ items = [];
+ }
+
+ // if there's only one audio track, there's no point in showing it
+ this.hideThreshold_ = 1;
+ var tracks = this.player_.audioTracks();
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i];
+ items.push(new AudioTrackMenuItem(this.player_, {
+ track: track,
+ // MenuItem is selectable
+ selectable: true,
+ // MenuItem is NOT multiSelectable (i.e. only one can be marked "selected" at a time)
+ multiSelectable: false
+ }));
+ }
+
+ return items;
+ };
+
+ return AudioTrackButton;
+}(TrackButton);
+/**
+ * The text that should display over the `AudioTrackButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+AudioTrackButton.prototype.controlText_ = 'Audio Track';
+Component$1.registerComponent('AudioTrackButton', AudioTrackButton);
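+
+// Usage sketch (assumes an existing `player` whose source exposes multiple
+// audio tracks): the menu mirrors `player.audioTracks()`, and enabling a track
+// programmatically has the same effect as clicking its AudioTrackMenuItem:
+//
+//   var tracks = player.audioTracks();
+//   for (var i = 0; i < tracks.length; i++) {
+//     if (tracks[i].language === 'en') {
+//       tracks[i].enabled = true; // the track list toggles the others off
+//     }
+//   }
+//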
+
+/**
+ * The specific menu item type for selecting a playback rate.
+ *
+ * @extends MenuItem
+ */
+
+var PlaybackRateMenuItem = /*#__PURE__*/function (_MenuItem) {
+ _inheritsLoose__default['default'](PlaybackRateMenuItem, _MenuItem);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function PlaybackRateMenuItem(player, options) {
+ var _this;
+
+ var label = options.rate;
+ var rate = parseFloat(label); // Modify options for parent MenuItem class's init.
+
+ options.label = label;
+ options.selected = rate === player.playbackRate();
+ options.selectable = true;
+ options.multiSelectable = false;
+ _this = _MenuItem.call(this, player, options) || this;
+ _this.label = label;
+ _this.rate = rate;
+
+ _this.on(player, 'ratechange', function (e) {
+ return _this.update(e);
+ });
+
+ return _this;
+ }
+ /**
+ * This gets called when a `PlaybackRateMenuItem` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+
+
+ var _proto = PlaybackRateMenuItem.prototype;
+
+ _proto.handleClick = function handleClick(event) {
+ _MenuItem.prototype.handleClick.call(this);
+
+ this.player().playbackRate(this.rate);
+ }
+ /**
+ * Update the PlaybackRateMenuItem when the playbackrate changes.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `ratechange` event that caused this function to run.
+ *
+ * @listens Player#ratechange
+ */
+ ;
+
+ _proto.update = function update(event) {
+ this.selected(this.player().playbackRate() === this.rate);
+ };
+
+ return PlaybackRateMenuItem;
+}(MenuItem);
+/**
+ * The DOM element type used for the `PlaybackRateMenuItem`s content element.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+PlaybackRateMenuItem.prototype.contentElType = 'button';
+Component$1.registerComponent('PlaybackRateMenuItem', PlaybackRateMenuItem);
+
+/**
+ * The component for controlling the playback rate.
+ *
+ * @extends MenuButton
+ */
+
+var PlaybackRateMenuButton = /*#__PURE__*/function (_MenuButton) {
+ _inheritsLoose__default['default'](PlaybackRateMenuButton, _MenuButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function PlaybackRateMenuButton(player, options) {
+ var _this;
+
+ _this = _MenuButton.call(this, player, options) || this;
+
+ _this.menuButton_.el_.setAttribute('aria-describedby', _this.labelElId_);
+
+ _this.updateVisibility();
+
+ _this.updateLabel();
+
+ _this.on(player, 'loadstart', function (e) {
+ return _this.updateVisibility(e);
+ });
+
+ _this.on(player, 'ratechange', function (e) {
+ return _this.updateLabel(e);
+ });
+
+ _this.on(player, 'playbackrateschange', function (e) {
+ return _this.handlePlaybackRateschange(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = PlaybackRateMenuButton.prototype;
+
+ _proto.createEl = function createEl$1() {
+ var el = _MenuButton.prototype.createEl.call(this);
+
+ this.labelElId_ = 'vjs-playback-rate-value-label-' + this.id_;
+ this.labelEl_ = createEl('div', {
+ className: 'vjs-playback-rate-value',
+ id: this.labelElId_,
+ textContent: '1x'
+ });
+ el.appendChild(this.labelEl_);
+ return el;
+ };
+
+ _proto.dispose = function dispose() {
+ this.labelEl_ = null;
+
+ _MenuButton.prototype.dispose.call(this);
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ ;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-playback-rate " + _MenuButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-playback-rate " + _MenuButton.prototype.buildWrapperCSSClass.call(this);
+ }
+ /**
+ * Create the list of menu items. Specific to each subclass.
+ *
+ */
+ ;
+
+ _proto.createItems = function createItems() {
+ var rates = this.playbackRates();
+ var items = [];
+
+ for (var i = rates.length - 1; i >= 0; i--) {
+ items.push(new PlaybackRateMenuItem(this.player(), {
+ rate: rates[i] + 'x'
+ }));
+ }
+
+ return items;
+ }
+ /**
+ * Updates ARIA accessibility attributes
+ */
+ ;
+
+ _proto.updateARIAAttributes = function updateARIAAttributes() {
+ // Current playback rate
+ this.el().setAttribute('aria-valuenow', this.player().playbackRate());
+ }
+ /**
+ * This gets called when a `PlaybackRateMenuButton` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ // select next rate option
+ var currentRate = this.player().playbackRate();
+ var rates = this.playbackRates();
+ var currentIndex = rates.indexOf(currentRate); // this gets the next rate and will select the first one if the last one is currently selected
+
+ var newIndex = (currentIndex + 1) % rates.length;
+ this.player().playbackRate(rates[newIndex]);
+ }
+ /**
+ * On playbackrateschange, update the menu to account for the new items.
+ *
+ * @listens Player#playbackrateschange
+ */
+ ;
+
+ _proto.handlePlaybackRateschange = function handlePlaybackRateschange(event) {
+ this.update();
+ }
+ /**
+ * Get possible playback rates
+ *
+ * @return {Array}
+ * All possible playback rates
+ */
+ ;
+
+ _proto.playbackRates = function playbackRates() {
+ var player = this.player();
+ return player.playbackRates && player.playbackRates() || [];
+ }
+ /**
+ * Get whether playback rates is supported by the tech
+ * and an array of playback rates exists
+ *
+ * @return {boolean}
+ * Whether changing playback rate is supported
+ */
+ ;
+
+ _proto.playbackRateSupported = function playbackRateSupported() {
+ return this.player().tech_ && this.player().tech_.featuresPlaybackRate && this.playbackRates() && this.playbackRates().length > 0;
+ }
+ /**
+ * Hide playback rate controls when there are no playback rate options to select
+ *
+ * @param {EventTarget~Event} [event]
+ * The event that caused this function to run.
+ *
+ * @listens Player#loadstart
+ */
+ ;
+
+ _proto.updateVisibility = function updateVisibility(event) {
+ if (this.playbackRateSupported()) {
+ this.removeClass('vjs-hidden');
+ } else {
+ this.addClass('vjs-hidden');
+ }
+ }
+ /**
+ * Update button label when rate changed
+ *
+ * @param {EventTarget~Event} [event]
+ * The event that caused this function to run.
+ *
+ * @listens Player#ratechange
+ */
+ ;
+
+ _proto.updateLabel = function updateLabel(event) {
+ if (this.playbackRateSupported()) {
+ this.labelEl_.textContent = this.player().playbackRate() + 'x';
+ }
+ };
+
+ return PlaybackRateMenuButton;
+}(MenuButton);
+/**
+ * The text that should display over the `PlaybackRateMenuButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+PlaybackRateMenuButton.prototype.controlText_ = 'Playback Rate';
+Component$1.registerComponent('PlaybackRateMenuButton', PlaybackRateMenuButton);
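+
+// Usage sketch (element id is hypothetical): the button stays hidden unless
+// the tech supports playback rates and the player was given some, e.g. via the
+// standard `playbackRates` option:
+//
+//   var player = videojs('my-video', {
+//     playbackRates: [0.5, 1, 1.5, 2]
+//   });
+//   player.playbackRate(1.5); // ratechange updates the button label to "1.5x"
+//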
+
+/**
+ * Just an empty spacer element that can be used as an append point for plugins, etc.
+ * Also can be used to create space between elements when necessary.
+ *
+ * @extends Component
+ */
+
+var Spacer = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](Spacer, _Component);
+
+ function Spacer() {
+ return _Component.apply(this, arguments) || this;
+ }
+
+ var _proto = Spacer.prototype;
+
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-spacer " + _Component.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ ;
+
+ _proto.createEl = function createEl(tag, props, attributes) {
+ if (tag === void 0) {
+ tag = 'div';
+ }
+
+ if (props === void 0) {
+ props = {};
+ }
+
+ if (attributes === void 0) {
+ attributes = {};
+ }
+
+ if (!props.className) {
+ props.className = this.buildCSSClass();
+ }
+
+ return _Component.prototype.createEl.call(this, tag, props, attributes);
+ };
+
+ return Spacer;
+}(Component$1);
+
+Component$1.registerComponent('Spacer', Spacer);
+
+/**
+ * Spacer specifically meant to be used as an insertion point for new plugins, etc.
+ *
+ * @extends Spacer
+ */
+
+var CustomControlSpacer = /*#__PURE__*/function (_Spacer) {
+ _inheritsLoose__default['default'](CustomControlSpacer, _Spacer);
+
+ function CustomControlSpacer() {
+ return _Spacer.apply(this, arguments) || this;
+ }
+
+ var _proto = CustomControlSpacer.prototype;
+
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-custom-control-spacer " + _Spacer.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ return _Spacer.prototype.createEl.call(this, 'div', {
+ className: this.buildCSSClass(),
+ // No-flex/table-cell mode requires there be some content
+ // in the cell to fill the remaining space of the table.
+ textContent: "\xA0"
+ });
+ };
+
+ return CustomControlSpacer;
+}(Spacer);
+
+Component$1.registerComponent('CustomControlSpacer', CustomControlSpacer);
+
+/**
+ * Container of main controls.
+ *
+ * @extends Component
+ */
+
+var ControlBar = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](ControlBar, _Component);
+
+ function ControlBar() {
+ return _Component.apply(this, arguments) || this;
+ }
+
+ var _proto = ControlBar.prototype;
+
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-control-bar',
+ dir: 'ltr'
+ });
+ };
+
+ return ControlBar;
+}(Component$1);
+/**
+ * Default options for `ControlBar`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+ControlBar.prototype.options_ = {
+ children: ['playToggle', 'volumePanel', 'currentTimeDisplay', 'timeDivider', 'durationDisplay', 'progressControl', 'liveDisplay', 'seekToLive', 'remainingTimeDisplay', 'customControlSpacer', 'playbackRateMenuButton', 'chaptersButton', 'descriptionsButton', 'subsCapsButton', 'audioTrackButton', 'fullscreenToggle']
+};
+
+if ('exitPictureInPicture' in document__default['default']) {
+ ControlBar.prototype.options_.children.splice(ControlBar.prototype.options_.children.length - 1, 0, 'pictureInPictureToggle');
+}
+
+Component$1.registerComponent('ControlBar', ControlBar);
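+
+// Configuration sketch (element id is hypothetical): because ControlBar
+// children are ordinary component options, individual controls can be
+// reordered or dropped when creating the player:
+//
+//   var player = videojs('my-video', {
+//     controlBar: {
+//       children: ['playToggle', 'volumePanel', 'progressControl', 'fullscreenToggle']
+//     }
+//   });
+//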
+
+/**
+ * A display that indicates an error has occurred. This means that the video
+ * is unplayable.
+ *
+ * @extends ModalDialog
+ */
+
+var ErrorDisplay = /*#__PURE__*/function (_ModalDialog) {
+ _inheritsLoose__default['default'](ErrorDisplay, _ModalDialog);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function ErrorDisplay(player, options) {
+ var _this;
+
+ _this = _ModalDialog.call(this, player, options) || this;
+
+ _this.on(player, 'error', function (e) {
+ return _this.open(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ *
+ * @deprecated Since version 5.
+ */
+
+
+ var _proto = ErrorDisplay.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-error-display " + _ModalDialog.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Gets the localized error message based on the `Player`s error.
+ *
+ * @return {string}
+ * The `Player`s error message localized or an empty string.
+ */
+ ;
+
+ _proto.content = function content() {
+ var error = this.player().error();
+ return error ? this.localize(error.message) : '';
+ };
+
+ return ErrorDisplay;
+}(ModalDialog);
+/**
+ * The default options for an `ErrorDisplay`.
+ *
+ * @private
+ */
+
+
+ErrorDisplay.prototype.options_ = _extends__default['default']({}, ModalDialog.prototype.options_, {
+ pauseOnOpen: false,
+ fillAlways: true,
+ temporary: false,
+ uncloseable: true
+});
+Component$1.registerComponent('ErrorDisplay', ErrorDisplay);
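+
+// Behaviour sketch (assumes an existing `player`): the ErrorDisplay modal
+// opens whenever the player emits an `error` event, so setting a MediaError on
+// the player surfaces the dialog:
+//
+//   player.error({ code: 4, message: 'No compatible source was found.' });
+//   player.error();     // returns the current MediaError
+//   player.error(null);  // clears it
+//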
+
+var LOCAL_STORAGE_KEY$1 = 'vjs-text-track-settings';
+var COLOR_BLACK = ['#000', 'Black'];
+var COLOR_BLUE = ['#00F', 'Blue'];
+var COLOR_CYAN = ['#0FF', 'Cyan'];
+var COLOR_GREEN = ['#0F0', 'Green'];
+var COLOR_MAGENTA = ['#F0F', 'Magenta'];
+var COLOR_RED = ['#F00', 'Red'];
+var COLOR_WHITE = ['#FFF', 'White'];
+var COLOR_YELLOW = ['#FF0', 'Yellow'];
+var OPACITY_OPAQUE = ['1', 'Opaque'];
+var OPACITY_SEMI = ['0.5', 'Semi-Transparent'];
+var OPACITY_TRANS = ['0', 'Transparent']; // Configuration for the various elements in the DOM of this component.
+//
+// Possible keys include:
+//
+// `default`:
+// The default option index. Only needs to be provided if not zero.
+// `parser`:
+// A function which is used to parse the value from the selected option in
+// a customized way.
+// `selector`:
+// The selector used to find the associated element.
+
+var selectConfigs = {
+ backgroundColor: {
+ selector: '.vjs-bg-color > select',
+ id: 'captions-background-color-%s',
+ label: 'Color',
+ options: [COLOR_BLACK, COLOR_WHITE, COLOR_RED, COLOR_GREEN, COLOR_BLUE, COLOR_YELLOW, COLOR_MAGENTA, COLOR_CYAN]
+ },
+ backgroundOpacity: {
+ selector: '.vjs-bg-opacity > select',
+ id: 'captions-background-opacity-%s',
+ label: 'Transparency',
+ options: [OPACITY_OPAQUE, OPACITY_SEMI, OPACITY_TRANS]
+ },
+ color: {
+ selector: '.vjs-fg-color > select',
+ id: 'captions-foreground-color-%s',
+ label: 'Color',
+ options: [COLOR_WHITE, COLOR_BLACK, COLOR_RED, COLOR_GREEN, COLOR_BLUE, COLOR_YELLOW, COLOR_MAGENTA, COLOR_CYAN]
+ },
+ edgeStyle: {
+ selector: '.vjs-edge-style > select',
+ id: '%s',
+ label: 'Text Edge Style',
+ options: [['none', 'None'], ['raised', 'Raised'], ['depressed', 'Depressed'], ['uniform', 'Uniform'], ['dropshadow', 'Dropshadow']]
+ },
+ fontFamily: {
+ selector: '.vjs-font-family > select',
+ id: 'captions-font-family-%s',
+ label: 'Font Family',
+ options: [['proportionalSansSerif', 'Proportional Sans-Serif'], ['monospaceSansSerif', 'Monospace Sans-Serif'], ['proportionalSerif', 'Proportional Serif'], ['monospaceSerif', 'Monospace Serif'], ['casual', 'Casual'], ['script', 'Script'], ['small-caps', 'Small Caps']]
+ },
+ fontPercent: {
+ selector: '.vjs-font-percent > select',
+ id: 'captions-font-size-%s',
+ label: 'Font Size',
+ options: [['0.50', '50%'], ['0.75', '75%'], ['1.00', '100%'], ['1.25', '125%'], ['1.50', '150%'], ['1.75', '175%'], ['2.00', '200%'], ['3.00', '300%'], ['4.00', '400%']],
+ "default": 2,
+ parser: function parser(v) {
+ return v === '1.00' ? null : Number(v);
+ }
+ },
+ textOpacity: {
+ selector: '.vjs-text-opacity > select',
+ id: 'captions-foreground-opacity-%s',
+ label: 'Transparency',
+ options: [OPACITY_OPAQUE, OPACITY_SEMI]
+ },
+ // Options for this object are defined below.
+ windowColor: {
+ selector: '.vjs-window-color > select',
+ id: 'captions-window-color-%s',
+ label: 'Color'
+ },
+ // Options for this object are defined below.
+ windowOpacity: {
+ selector: '.vjs-window-opacity > select',
+ id: 'captions-window-opacity-%s',
+ label: 'Transparency',
+ options: [OPACITY_TRANS, OPACITY_SEMI, OPACITY_OPAQUE]
+ }
+};
+selectConfigs.windowColor.options = selectConfigs.backgroundColor.options;
+/**
+ * Get the actual value of an option.
+ *
+ * @param {string} value
+ * The value to get
+ *
+ * @param {Function} [parser]
+ * Optional function to adjust the value.
+ *
+ * @return {Mixed}
+ * - Will be `undefined` if no value exists
+ * - Will be `undefined` if the given value is "none".
+ * - Will be the actual value otherwise.
+ *
+ * @private
+ */
+
+function parseOptionValue(value, parser) {
+ if (parser) {
+ value = parser(value);
+ }
+
+ if (value && value !== 'none') {
+ return value;
+ }
+}
+/**
+ * Gets the value of the selected <option> element within a <select> element.
+ *
+ * @param {Element} el
+ * the element to look in
+ *
+ * @param {Function} [parser]
+ * Optional function to adjust the value.
+ *
+ * @return {Mixed}
+ * - Will be `undefined` if no value exists
+ * - Will be `undefined` if the given value is "none".
+ * - Will be the actual value otherwise.
+ *
+ * @private
+ */
+
+
+function getSelectedOptionValue(el, parser) {
+ var value = el.options[el.options.selectedIndex].value;
+ return parseOptionValue(value, parser);
+}
+/**
+ * Sets the selected <option> element within a <select> element based on a
+ * given value.
+ *
+ * @param {Element} el
+ * The element to look in.
+ *
+ * @param {string} value
+ * the property to look on.
+ *
+ * @param {Function} [parser]
+ * Optional function to adjust the value before comparing.
+ *
+ * @private
+ */
+
+
+function setSelectedOption(el, value, parser) {
+ if (!value) {
+ return;
+ }
+
+ for (var i = 0; i < el.options.length; i++) {
+ if (parseOptionValue(el.options[i].value, parser) === value) {
+ el.selectedIndex = i;
+ break;
+ }
+ }
+}
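+
+// Behaviour note for the option helpers above (illustrative values only):
+// `parseOptionValue` treats both falsy values and the literal string 'none' as
+// "no setting", so parseOptionValue('none') and parseOptionValue('') return
+// undefined, while parseOptionValue('0.75', Number) returns 0.75.
+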
+/**
+ * Manipulate Text Tracks settings.
+ *
+ * @extends ModalDialog
+ */
+
+
+var TextTrackSettings = /*#__PURE__*/function (_ModalDialog) {
+ _inheritsLoose__default['default'](TextTrackSettings, _ModalDialog);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function TextTrackSettings(player, options) {
+ var _this;
+
+ options.temporary = false;
+ _this = _ModalDialog.call(this, player, options) || this;
+ _this.updateDisplay = _this.updateDisplay.bind(_assertThisInitialized__default['default'](_this)); // fill the modal and pretend we have opened it
+
+ _this.fill();
+
+ _this.hasBeenOpened_ = _this.hasBeenFilled_ = true;
+ _this.endDialog = createEl('p', {
+ className: 'vjs-control-text',
+ textContent: _this.localize('End of dialog window.')
+ });
+
+ _this.el().appendChild(_this.endDialog);
+
+ _this.setDefaults(); // Grab `persistTextTrackSettings` from the player options if not passed in child options
+
+
+ if (options.persistTextTrackSettings === undefined) {
+ _this.options_.persistTextTrackSettings = _this.options_.playerOptions.persistTextTrackSettings;
+ }
+
+ _this.on(_this.$('.vjs-done-button'), 'click', function () {
+ _this.saveSettings();
+
+ _this.close();
+ });
+
+ _this.on(_this.$('.vjs-default-button'), 'click', function () {
+ _this.setDefaults();
+
+ _this.updateDisplay();
+ });
+
+ each(selectConfigs, function (config) {
+ _this.on(_this.$(config.selector), 'change', _this.updateDisplay);
+ });
+
+ if (_this.options_.persistTextTrackSettings) {
+ _this.restoreSettings();
+ }
+
+ return _this;
+ }
+
+ var _proto = TextTrackSettings.prototype;
+
+ _proto.dispose = function dispose() {
+ this.endDialog = null;
+
+ _ModalDialog.prototype.dispose.call(this);
+ }
+ /**
+ * Create a <select> element with configured options.
+ *
+ * @param {string} key
+ * Configuration key to use during creation.
+ *
+ * @return {string}
+ * An HTML string.
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElSelect_ = function createElSelect_(key, legendId, type) {
+ var _this2 = this;
+
+ if (legendId === void 0) {
+ legendId = '';
+ }
+
+ if (type === void 0) {
+ type = 'label';
+ }
+
+ var config = selectConfigs[key];
+ var id = config.id.replace('%s', this.id_);
+ var selectLabelledbyIds = [legendId, id].join(' ').trim();
+ return ["<" + type + " id=\"" + id + "\" class=\"" + (type === 'label' ? 'vjs-label' : '') + "\">", this.localize(config.label), "" + type + ">", ""].concat(config.options.map(function (o) {
+ var optionId = id + '-' + o[1].replace(/\W+/g, '');
+ return ["", _this2.localize(o[1]), ' '].join('');
+ })).concat(' ').join('');
+ }
+ /**
+ * Create foreground color element for the component
+ *
+ * @return {string}
+ * An HTML string.
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElFgColor_ = function createElFgColor_() {
+ var legendId = "captions-text-legend-" + this.id_;
+ return ['', "", this.localize('Text'), ' ', this.createElSelect_('color', legendId), '', this.createElSelect_('textOpacity', legendId), ' ', ' '].join('');
+ }
+ /**
+ * Create background color element for the component
+ *
+ * @return {string}
+ * An HTML string.
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElBgColor_ = function createElBgColor_() {
+ var legendId = "captions-background-" + this.id_;
+ return ['', "", this.localize('Background'), ' ', this.createElSelect_('backgroundColor', legendId), '', this.createElSelect_('backgroundOpacity', legendId), ' ', ' '].join('');
+ }
+ /**
+ * Create window color element for the component
+ *
+ * @return {string}
+ * An HTML string.
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElWinColor_ = function createElWinColor_() {
+ var legendId = "captions-window-" + this.id_;
+ return ['', "", this.localize('Window'), ' ', this.createElSelect_('windowColor', legendId), '', this.createElSelect_('windowOpacity', legendId), ' ', ' '].join('');
+ }
+ /**
+ * Create color elements for the component
+ *
+ * @return {Element}
+ * The element that was created
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElColors_ = function createElColors_() {
+ return createEl('div', {
+ className: 'vjs-track-settings-colors',
+ innerHTML: [this.createElFgColor_(), this.createElBgColor_(), this.createElWinColor_()].join('')
+ });
+ }
+ /**
+ * Create font elements for the component
+ *
+ * @return {Element}
+ * The element that was created.
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElFont_ = function createElFont_() {
+ return createEl('div', {
+ className: 'vjs-track-settings-font',
+ innerHTML: ['<fieldset class="vjs-font-percent vjs-track-setting">', this.createElSelect_('fontPercent', '', 'legend'), '</fieldset>', '<fieldset class="vjs-edge-style vjs-track-setting">', this.createElSelect_('edgeStyle', '', 'legend'), '</fieldset>', '<fieldset class="vjs-font-family vjs-track-setting">', this.createElSelect_('fontFamily', '', 'legend'), '</fieldset>'].join('')
+ });
+ }
+ /**
+ * Create controls for the component
+ *
+ * @return {Element}
+ * The element that was created.
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElControls_ = function createElControls_() {
+ var defaultsDescription = this.localize('restore all settings to the default values');
+ return createEl('div', {
+ className: 'vjs-track-settings-controls',
+ innerHTML: ["", this.localize('Reset'), " " + defaultsDescription + " ", ' ', "" + this.localize('Done') + " "].join('')
+ });
+ };
+
+ _proto.content = function content() {
+ return [this.createElColors_(), this.createElFont_(), this.createElControls_()];
+ };
+
+ _proto.label = function label() {
+ return this.localize('Caption Settings Dialog');
+ };
+
+ _proto.description = function description() {
+ return this.localize('Beginning of dialog window. Escape will cancel and close the window.');
+ };
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return _ModalDialog.prototype.buildCSSClass.call(this) + ' vjs-text-track-settings';
+ }
+ /**
+ * Gets an object of text track settings (or null).
+ *
+ * @return {Object}
+ * An object with config values parsed from the DOM or localStorage.
+ */
+ ;
+
+ _proto.getValues = function getValues() {
+ var _this3 = this;
+
+ return reduce(selectConfigs, function (accum, config, key) {
+ var value = getSelectedOptionValue(_this3.$(config.selector), config.parser);
+
+ if (value !== undefined) {
+ accum[key] = value;
+ }
+
+ return accum;
+ }, {});
+ }
+ /**
+ * Sets text track settings from an object of values.
+ *
+ * @param {Object} values
+ * An object with config values parsed from the DOM or localStorage.
+ */
+ ;
+
+ _proto.setValues = function setValues(values) {
+ var _this4 = this;
+
+ each(selectConfigs, function (config, key) {
+ setSelectedOption(_this4.$(config.selector), values[key], config.parser);
+ });
+ }
+ /**
+ * Sets all <select> elements to their default values.
+ */
+ ;
+
+ _proto.setDefaults = function setDefaults() {
+ var _this5 = this;
+
+ each(selectConfigs, function (config) {
+ var index = config.hasOwnProperty('default') ? config["default"] : 0;
+ _this5.$(config.selector).selectedIndex = index;
+ });
+ }
+ /**
+ * Restore text track settings from localStorage
+ */
+ ;
+
+ _proto.restoreSettings = function restoreSettings() {
+ var values;
+
+ try {
+ values = JSON.parse(window__default['default'].localStorage.getItem(LOCAL_STORAGE_KEY$1));
+ } catch (err) {
+ log$1.warn(err);
+ }
+
+ if (values) {
+ this.setValues(values);
+ }
+ }
+ /**
+ * Save text track settings to localStorage
+ */
+ ;
+
+ _proto.saveSettings = function saveSettings() {
+ if (!this.options_.persistTextTrackSettings) {
+ return;
+ }
+
+ var values = this.getValues();
+
+ try {
+ if (Object.keys(values).length) {
+ window__default['default'].localStorage.setItem(LOCAL_STORAGE_KEY$1, JSON.stringify(values));
+ } else {
+ window__default['default'].localStorage.removeItem(LOCAL_STORAGE_KEY$1);
+ }
+ } catch (err) {
+ log$1.warn(err);
+ }
+ }
+ /**
+ * Update display of text track settings
+ */
+ ;
+
+ _proto.updateDisplay = function updateDisplay() {
+ var ttDisplay = this.player_.getChild('textTrackDisplay');
+
+ if (ttDisplay) {
+ ttDisplay.updateDisplay();
+ }
+ }
+ /**
+ * conditionally blur the element and refocus the captions button
+ *
+ * @private
+ */
+ ;
+
+ _proto.conditionalBlur_ = function conditionalBlur_() {
+ this.previouslyActiveEl_ = null;
+ var cb = this.player_.controlBar;
+ var subsCapsBtn = cb && cb.subsCapsButton;
+ var ccBtn = cb && cb.captionsButton;
+
+ if (subsCapsBtn) {
+ subsCapsBtn.focus();
+ } else if (ccBtn) {
+ ccBtn.focus();
+ }
+ };
+
+ return TextTrackSettings;
+}(ModalDialog);
+
+Component$1.registerComponent('TextTrackSettings', TextTrackSettings);
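+
+// Usage sketch (element id is hypothetical): with the standard
+// `persistTextTrackSettings` player option, the dialog's values are saved to
+// localStorage under 'vjs-text-track-settings' and restored on the next load:
+//
+//   var player = videojs('my-video', {
+//     persistTextTrackSettings: true
+//   });
+//   player.getChild('textTrackSettings').open(); // show the dialog
+//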
+
+/**
+ * A Resize Manager. It is in charge of triggering `playerresize` on the player in the right conditions.
+ *
+ * It'll either create an iframe and use a debounced resize handler on it or use the new {@link https://wicg.github.io/ResizeObserver/|ResizeObserver}.
+ *
+ * If the ResizeObserver is available natively, it will be used. A polyfill can be passed in as an option.
+ * If a `playerresize` event is not needed, the ResizeManager component can be removed from the player; see the example below.
+ * @example How to disable the resize manager
+ * const player = videojs('#vid', {
+ * resizeManager: false
+ * });
+ *
+ * @see {@link https://wicg.github.io/ResizeObserver/|ResizeObserver specification}
+ *
+ * @extends Component
+ */
+
+var ResizeManager = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](ResizeManager, _Component);
+
+ /**
+ * Create the ResizeManager.
+ *
+ * @param {Object} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of ResizeManager options.
+ *
+ * @param {Object} [options.ResizeObserver]
+ * A polyfill for ResizeObserver can be passed in here.
+ * If this is set to null, the native ResizeObserver will be ignored and the iframe fallback will be used instead.
+ */
+ function ResizeManager(player, options) {
+ var _this;
+
+ var RESIZE_OBSERVER_AVAILABLE = options.ResizeObserver || window__default['default'].ResizeObserver; // if `null` was passed, we want to disable the ResizeObserver
+
+ if (options.ResizeObserver === null) {
+ RESIZE_OBSERVER_AVAILABLE = false;
+ } // Only create an element when ResizeObserver isn't available
+
+
+ var options_ = mergeOptions$3({
+ createEl: !RESIZE_OBSERVER_AVAILABLE,
+ reportTouchActivity: false
+ }, options);
+ _this = _Component.call(this, player, options_) || this;
+ _this.ResizeObserver = options.ResizeObserver || window__default['default'].ResizeObserver;
+ _this.loadListener_ = null;
+ _this.resizeObserver_ = null;
+ _this.debouncedHandler_ = debounce(function () {
+ _this.resizeHandler();
+ }, 100, false, _assertThisInitialized__default['default'](_this));
+
+ if (RESIZE_OBSERVER_AVAILABLE) {
+ _this.resizeObserver_ = new _this.ResizeObserver(_this.debouncedHandler_);
+
+ _this.resizeObserver_.observe(player.el());
+ } else {
+ _this.loadListener_ = function () {
+ if (!_this.el_ || !_this.el_.contentWindow) {
+ return;
+ }
+
+ var debouncedHandler_ = _this.debouncedHandler_;
+
+ var unloadListener_ = _this.unloadListener_ = function () {
+ off(this, 'resize', debouncedHandler_);
+ off(this, 'unload', unloadListener_);
+ unloadListener_ = null;
+ }; // safari and edge can unload the iframe before resizemanager dispose
+ // we have to dispose of event handlers correctly before that happens
+
+
+ on(_this.el_.contentWindow, 'unload', unloadListener_);
+ on(_this.el_.contentWindow, 'resize', debouncedHandler_);
+ };
+
+ _this.one('load', _this.loadListener_);
+ }
+
+ return _this;
+ }
+
+ var _proto = ResizeManager.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'iframe', {
+ className: 'vjs-resize-manager',
+ tabIndex: -1,
+ title: this.localize('No content')
+ }, {
+ 'aria-hidden': 'true'
+ });
+ }
+ /**
+ * Called when a resize is triggered on the iframe or a resize is observed via the ResizeObserver
+ *
+ * @fires Player#playerresize
+ */
+ ;
+
+ _proto.resizeHandler = function resizeHandler() {
+ /**
+ * Called when the player size has changed
+ *
+ * @event Player#playerresize
+ * @type {EventTarget~Event}
+ */
+ // make sure player is still around to trigger
+ // prevents this from causing an error after dispose
+ if (!this.player_ || !this.player_.trigger) {
+ return;
+ }
+
+ this.player_.trigger('playerresize');
+ };
+
+ _proto.dispose = function dispose() {
+ if (this.debouncedHandler_) {
+ this.debouncedHandler_.cancel();
+ }
+
+ if (this.resizeObserver_) {
+ if (this.player_.el()) {
+ this.resizeObserver_.unobserve(this.player_.el());
+ }
+
+ this.resizeObserver_.disconnect();
+ }
+
+ if (this.loadListener_) {
+ this.off('load', this.loadListener_);
+ }
+
+ if (this.el_ && this.el_.contentWindow && this.unloadListener_) {
+ this.unloadListener_.call(this.el_.contentWindow);
+ }
+
+ this.ResizeObserver = null;
+ this.resizeObserver = null;
+ this.debouncedHandler_ = null;
+ this.loadListener_ = null;
+
+ _Component.prototype.dispose.call(this);
+ };
+
+ return ResizeManager;
+}(Component$1);
+
+Component$1.registerComponent('ResizeManager', ResizeManager);
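+
+// Usage sketch (element id and polyfill identifier are hypothetical):
+// consumers typically just listen for the `playerresize` event this component
+// triggers; a ResizeObserver polyfill can be supplied through its options:
+//
+//   var player = videojs('my-video', {
+//     resizeManager: { ResizeObserver: ResizeObserverPolyfill }
+//   });
+//   player.on('playerresize', function () {
+//     console.log('player dimensions changed');
+//   });
+//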
+
+var defaults = {
+ trackingThreshold: 20,
+ liveTolerance: 15
+};
+/*
+ track when we are at the live edge, and other helpers for live playback */
+
+/**
+ * A class for checking live current time and determining when the player
+ * is at or behind the live edge.
+ */
+
+var LiveTracker = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](LiveTracker, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {number} [options.trackingThreshold=20]
+ * Number of seconds of live window (seekableEnd - seekableStart) that
+ * media needs to have before the liveui will be shown.
+ *
+ * @param {number} [options.liveTolerance=15]
+ * Number of seconds behind live that we have to be
+ * before we will be considered non-live. Note that this will only
+ * be used when playing at the live edge. This allows large seekable end
+ * changes to not affect whether we are live or not.
+ */
+ function LiveTracker(player, options) {
+ var _this;
+
+ // LiveTracker does not need an element
+ var options_ = mergeOptions$3(defaults, options, {
+ createEl: false
+ });
+ _this = _Component.call(this, player, options_) || this;
+
+ _this.handleVisibilityChange_ = function (e) {
+ return _this.handleVisibilityChange(e);
+ };
+
+ _this.trackLiveHandler_ = function () {
+ return _this.trackLive_();
+ };
+
+ _this.handlePlay_ = function (e) {
+ return _this.handlePlay(e);
+ };
+
+ _this.handleFirstTimeupdate_ = function (e) {
+ return _this.handleFirstTimeupdate(e);
+ };
+
+ _this.handleSeeked_ = function (e) {
+ return _this.handleSeeked(e);
+ };
+
+ _this.seekToLiveEdge_ = function (e) {
+ return _this.seekToLiveEdge(e);
+ };
+
+ _this.reset_();
+
+ _this.on(_this.player_, 'durationchange', function (e) {
+ return _this.handleDurationchange(e);
+ }); // we should try to toggle tracking on canplay as native playback engines, like Safari
+ // may not have the proper values for things like seekableEnd until then
+
+
+ _this.on(_this.player_, 'canplay', function () {
+ return _this.toggleTracking();
+ }); // we don't need to track live playback if the document is hidden,
+ // also, tracking when the document is hidden can
+ // cause the CPU to spike and eventually crash the page on IE11.
+
+
+ if (IE_VERSION && 'hidden' in document__default['default'] && 'visibilityState' in document__default['default']) {
+ _this.on(document__default['default'], 'visibilitychange', _this.handleVisibilityChange_);
+ }
+
+ return _this;
+ }
+ /**
+ * toggle tracking based on document visibility
+ */
+
+
+ var _proto = LiveTracker.prototype;
+
+ _proto.handleVisibilityChange = function handleVisibilityChange() {
+ if (this.player_.duration() !== Infinity) {
+ return;
+ }
+
+ if (document__default['default'].hidden) {
+ this.stopTracking();
+ } else {
+ this.startTracking();
+ }
+ }
+ /**
+ * all the functionality for tracking when seek end changes
+ * and for tracking how far past seek end we should be
+ */
+ ;
+
+ _proto.trackLive_ = function trackLive_() {
+ var seekable = this.player_.seekable(); // skip undefined seekable
+
+ if (!seekable || !seekable.length) {
+ return;
+ }
+
+ var newTime = Number(window__default['default'].performance.now().toFixed(4));
+ var deltaTime = this.lastTime_ === -1 ? 0 : (newTime - this.lastTime_) / 1000;
+ this.lastTime_ = newTime;
+ this.pastSeekEnd_ = this.pastSeekEnd() + deltaTime;
+ var liveCurrentTime = this.liveCurrentTime();
+ var currentTime = this.player_.currentTime(); // we are behind live if any are true
+ // 1. the player is paused
+ // 2. the user seeked to a location 2 seconds away from live
+ // 3. the difference between live and current time is greater than
+ // liveTolerance, which defaults to 15s
+
+ var isBehind = this.player_.paused() || this.seekedBehindLive_ || Math.abs(liveCurrentTime - currentTime) > this.options_.liveTolerance; // we cannot be behind if
+ // 1. we have not seen a timeupdate yet
+ // 2. liveCurrentTime is Infinity, which happens on Android and Native Safari
+
+ if (!this.timeupdateSeen_ || liveCurrentTime === Infinity) {
+ isBehind = false;
+ }
+
+ if (isBehind !== this.behindLiveEdge_) {
+ this.behindLiveEdge_ = isBehind;
+ this.trigger('liveedgechange');
+ }
+ }
+ /**
+ * handle a durationchange event on the player
+ * and start/stop tracking accordingly.
+ */
+ ;
+
+ _proto.handleDurationchange = function handleDurationchange() {
+ this.toggleTracking();
+ }
+ /**
+ * start/stop tracking
+ */
+ ;
+
+ _proto.toggleTracking = function toggleTracking() {
+ if (this.player_.duration() === Infinity && this.liveWindow() >= this.options_.trackingThreshold) {
+ if (this.player_.options_.liveui) {
+ this.player_.addClass('vjs-liveui');
+ }
+
+ this.startTracking();
+ } else {
+ this.player_.removeClass('vjs-liveui');
+ this.stopTracking();
+ }
+ }
+ /**
+ * start tracking live playback
+ */
+ ;
+
+ _proto.startTracking = function startTracking() {
+ if (this.isTracking()) {
+ return;
+ } // If we haven't seen a timeupdate, we need to check whether playback
+ // began before this component started tracking. This can happen commonly
+ // when using autoplay.
+
+
+ if (!this.timeupdateSeen_) {
+ this.timeupdateSeen_ = this.player_.hasStarted();
+ }
+
+ this.trackingInterval_ = this.setInterval(this.trackLiveHandler_, UPDATE_REFRESH_INTERVAL);
+ this.trackLive_();
+ this.on(this.player_, ['play', 'pause'], this.trackLiveHandler_);
+
+ if (!this.timeupdateSeen_) {
+ this.one(this.player_, 'play', this.handlePlay_);
+ this.one(this.player_, 'timeupdate', this.handleFirstTimeupdate_);
+ } else {
+ this.on(this.player_, 'seeked', this.handleSeeked_);
+ }
+ }
+ /**
+ * handle the first timeupdate on the player if it wasn't already playing
+ * when live tracker started tracking.
+ */
+ ;
+
+ _proto.handleFirstTimeupdate = function handleFirstTimeupdate() {
+ this.timeupdateSeen_ = true;
+ this.on(this.player_, 'seeked', this.handleSeeked_);
+ }
+ /**
+ * Keep track of what time a seek starts, and listen for seeked
+ * to find where a seek ends.
+ */
+ ;
+
+ _proto.handleSeeked = function handleSeeked() {
+ var timeDiff = Math.abs(this.liveCurrentTime() - this.player_.currentTime());
+ this.seekedBehindLive_ = this.nextSeekedFromUser_ && timeDiff > 2;
+ this.nextSeekedFromUser_ = false;
+ this.trackLive_();
+ }
+ /**
+ * handle the first play on the player, and make sure that we seek
+ * right to the live edge.
+ */
+ ;
+
+ _proto.handlePlay = function handlePlay() {
+ this.one(this.player_, 'timeupdate', this.seekToLiveEdge_);
+ }
+ /**
+ * Stop tracking, and set all internal variables to
+ * their initial value.
+ */
+ ;
+
+ _proto.reset_ = function reset_() {
+ this.lastTime_ = -1;
+ this.pastSeekEnd_ = 0;
+ this.lastSeekEnd_ = -1;
+ this.behindLiveEdge_ = true;
+ this.timeupdateSeen_ = false;
+ this.seekedBehindLive_ = false;
+ this.nextSeekedFromUser_ = false;
+ this.clearInterval(this.trackingInterval_);
+ this.trackingInterval_ = null;
+ this.off(this.player_, ['play', 'pause'], this.trackLiveHandler_);
+ this.off(this.player_, 'seeked', this.handleSeeked_);
+ this.off(this.player_, 'play', this.handlePlay_);
+ this.off(this.player_, 'timeupdate', this.handleFirstTimeupdate_);
+ this.off(this.player_, 'timeupdate', this.seekToLiveEdge_);
+ }
+ /**
+ * The next seeked event is from the user. Meaning that any seek
+ * > 2s behind live will be considered behind live for real and
+ * liveTolerance will be ignored.
+ */
+ ;
+
+ _proto.nextSeekedFromUser = function nextSeekedFromUser() {
+ this.nextSeekedFromUser_ = true;
+ }
+ /**
+ * stop tracking live playback
+ */
+ ;
+
+ _proto.stopTracking = function stopTracking() {
+ if (!this.isTracking()) {
+ return;
+ }
+
+ this.reset_();
+ this.trigger('liveedgechange');
+ }
+ /**
+ * A helper to get the player seekable end
+ * so that we don't have to null check everywhere
+ *
+ * @return {number}
+ * The furthest seekable end or Infinity.
+ */
+ ;
+
+ _proto.seekableEnd = function seekableEnd() {
+ var seekable = this.player_.seekable();
+ var seekableEnds = [];
+ var i = seekable ? seekable.length : 0;
+
+ while (i--) {
+ seekableEnds.push(seekable.end(i));
+ } // grab the furthest seekable end after sorting, or if there are none
+ // default to Infinity
+
+
+ return seekableEnds.length ? seekableEnds.sort()[seekableEnds.length - 1] : Infinity;
+ }
+ /**
+ * A helper to get the player seekable start
+ * so that we don't have to null check everywhere
+ *
+ * @return {number}
+ * The earliest seekable start or 0.
+ */
+ ;
+
+ _proto.seekableStart = function seekableStart() {
+ var seekable = this.player_.seekable();
+ var seekableStarts = [];
+ var i = seekable ? seekable.length : 0;
+
+ while (i--) {
+ seekableStarts.push(seekable.start(i));
+ } // grab the first seekable start after sorting, or if there are none
+ // default to 0
+
+
+ return seekableStarts.length ? seekableStarts.sort()[0] : 0;
+ }
+ /**
+ * Get the live time window aka
+ * the amount of time between seekable start and
+ * live current time.
+ *
+ * @return {number}
+ * The amount of seconds that are seekable in
+ * the live video.
+ */
+ ;
+
+ _proto.liveWindow = function liveWindow() {
+ var liveCurrentTime = this.liveCurrentTime(); // if liveCurrentTime is Infinity then we don't have a liveWindow at all
+
+ if (liveCurrentTime === Infinity) {
+ return 0;
+ }
+
+ return liveCurrentTime - this.seekableStart();
+ }
+ /**
+ * Determines if the player is live, only checks if this component
+ * is tracking live playback or not
+ *
+ * @return {boolean}
+ * Whether liveTracker is tracking
+ */
+ ;
+
+ _proto.isLive = function isLive() {
+ return this.isTracking();
+ }
+ /**
+ * Determines if currentTime is at the live edge and won't fall behind
+ * on each seekableendchange
+ *
+ * @return {boolean}
+ * Whether playback is at the live edge
+ */
+ ;
+
+ _proto.atLiveEdge = function atLiveEdge() {
+ return !this.behindLiveEdge();
+ }
+ /**
+ * get what we expect the live current time to be
+ *
+ * @return {number}
+ * The expected live current time
+ */
+ ;
+
+ _proto.liveCurrentTime = function liveCurrentTime() {
+ return this.pastSeekEnd() + this.seekableEnd();
+ }
+ /**
+ * The number of seconds that have occurred after seekable end
+ * changed. This will be reset to 0 once seekable end changes.
+ *
+ * @return {number}
+ * Seconds past the current seekable end
+ */
+ ;
+
+ _proto.pastSeekEnd = function pastSeekEnd() {
+ var seekableEnd = this.seekableEnd();
+
+ if (this.lastSeekEnd_ !== -1 && seekableEnd !== this.lastSeekEnd_) {
+ this.pastSeekEnd_ = 0;
+ }
+
+ this.lastSeekEnd_ = seekableEnd;
+ return this.pastSeekEnd_;
+ }
+ /**
+ * If we are currently behind the live edge, aka currentTime will be
+ * behind on a seekableendchange
+ *
+ * @return {boolean}
+ * If we are behind the live edge
+ */
+ ;
+
+ _proto.behindLiveEdge = function behindLiveEdge() {
+ return this.behindLiveEdge_;
+ }
+ /**
+ * Whether live tracker is currently tracking or not.
+ */
+ ;
+
+ _proto.isTracking = function isTracking() {
+ return typeof this.trackingInterval_ === 'number';
+ }
+ /**
+ * Seek to the live edge if we are behind the live edge
+ */
+ ;
+
+ _proto.seekToLiveEdge = function seekToLiveEdge() {
+ this.seekedBehindLive_ = false;
+
+ if (this.atLiveEdge()) {
+ return;
+ }
+
+ this.nextSeekedFromUser_ = false;
+ this.player_.currentTime(this.liveCurrentTime());
+ }
+ /**
+ * Dispose of liveTracker
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ this.off(document__default['default'], 'visibilitychange', this.handleVisibilityChange_);
+ this.stopTracking();
+
+ _Component.prototype.dispose.call(this);
+ };
+
+ return LiveTracker;
+}(Component$1);
+
+Component$1.registerComponent('LiveTracker', LiveTracker);
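+
+// Usage sketch (assumes the standard video.js player API; the player id and option values are
+// placeholders, not taken from the MediaCMS frontend). Because 'LiveTracker' is registered as a
+// component above, video.js exposes it as `player.liveTracker`, and the event and methods used
+// here are the ones defined in this component:
+//
+//   var player = videojs('live-player', { liveui: true });
+//   player.ready(function () {
+//     var tracker = player.liveTracker;
+//     tracker.on('liveedgechange', function () {
+//       // fired by trackLive_() whenever behindLiveEdge() flips
+//       if (tracker.behindLiveEdge()) {
+//         tracker.seekToLiveEdge(); // seeks to liveCurrentTime()
+//       }
+//     });
+//   });
+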
+
+/**
+ * This function is used to fire a sourceset when there is something
+ * similar to `mediaEl.load()` being called. It will try to find the source via
+ * the `src` attribute and then the `<source>` elements. It will then fire `sourceset`
+ * with the source that was found or empty string if we cannot know. If it cannot
+ * find a source then `sourceset` will not be fired.
+ *
+ * @param {Html5} tech
+ * The tech object that sourceset was setup on
+ *
+ * @return {boolean}
+ * returns false if the sourceset was not fired and true otherwise.
+ */
+
+var sourcesetLoad = function sourcesetLoad(tech) {
+ var el = tech.el(); // if `el.src` is set, that source will be loaded.
+
+ if (el.hasAttribute('src')) {
+ tech.triggerSourceset(el.src);
+ return true;
+ }
+ /**
+ * Since there isn't a src property on the media element, source elements will be used for
+ * implementing the source selection algorithm. This happens asynchronously and
+ * for most cases where there is more than one source we cannot tell what source will
+ * be loaded, without re-implementing the source selection algorithm. At this time we are not
+ * going to do that. There are three special cases that we do handle here though:
+ *
+ * 1. If there are no sources, do not fire `sourceset`.
+ * 2. If there is only one `<source>` with a `src` property/attribute, that is our `src`.
+ * 3. If there is more than one `<source>` but all of them have the same `src` url.
+ * That will be our src.
+ */
+
+
+ var sources = tech.$$('source');
+ var srcUrls = [];
+ var src = ''; // if there are no sources, do not fire sourceset
+
+ if (!sources.length) {
+ return false;
+ } // only count valid/non-duplicate source elements
+
+
+ for (var i = 0; i < sources.length; i++) {
+ var url = sources[i].src;
+
+ if (url && srcUrls.indexOf(url) === -1) {
+ srcUrls.push(url);
+ }
+ } // there were no valid sources
+
+
+ if (!srcUrls.length) {
+ return false;
+ } // there is only one valid source element url
+ // use that
+
+
+ if (srcUrls.length === 1) {
+ src = srcUrls[0];
+ }
+
+ tech.triggerSourceset(src);
+ return true;
+};
+/**
+ * our implementation of an `innerHTML` descriptor for browsers
+ * that do not have one.
+ */
+
+
+var innerHTMLDescriptorPolyfill = Object.defineProperty({}, 'innerHTML', {
+ get: function get() {
+ return this.cloneNode(true).innerHTML;
+ },
+ set: function set(v) {
+ // make a dummy node to use innerHTML on
+ var dummy = document__default['default'].createElement(this.nodeName.toLowerCase()); // set innerHTML to the value provided
+
+ dummy.innerHTML = v; // make a document fragment to hold the nodes from dummy
+
+ var docFrag = document__default['default'].createDocumentFragment(); // copy all of the nodes created by the innerHTML on dummy
+ // to the document fragment
+
+ while (dummy.childNodes.length) {
+ docFrag.appendChild(dummy.childNodes[0]);
+ } // remove content
+
+
+ this.innerText = ''; // now we add all of that html in one by appending the
+ // document fragment. This is how innerHTML does it.
+
+ window__default['default'].Element.prototype.appendChild.call(this, docFrag); // then return the result that innerHTML's setter would
+
+ return this.innerHTML;
+ }
+});
+/**
+ * Get a property descriptor given a list of priorities and the
+ * property to get.
+ */
+
+var getDescriptor = function getDescriptor(priority, prop) {
+ var descriptor = {};
+
+ for (var i = 0; i < priority.length; i++) {
+ descriptor = Object.getOwnPropertyDescriptor(priority[i], prop);
+
+ if (descriptor && descriptor.set && descriptor.get) {
+ break;
+ }
+ }
+
+ descriptor.enumerable = true;
+ descriptor.configurable = true;
+ return descriptor;
+};
+
+var getInnerHTMLDescriptor = function getInnerHTMLDescriptor(tech) {
+ return getDescriptor([tech.el(), window__default['default'].HTMLMediaElement.prototype, window__default['default'].Element.prototype, innerHTMLDescriptorPolyfill], 'innerHTML');
+};
+/**
+ * Patches browser internal functions so that we can tell synchronously
+ * if a `<source>` was appended to the media element. For some reason this
+ * causes a `sourceset` if the media element is ready and has no source.
+ * This happens when:
+ * - The page has just loaded and the media element does not have a source.
+ * - The media element was emptied of all sources, then `load()` was called.
+ *
+ * It does this by patching the following functions/properties when they are supported:
+ *
+ * - `append()` - can be used to add a `<source>` element to the media element
+ * - `appendChild()` - can be used to add a `<source>` element to the media element
+ * - `insertAdjacentHTML()` - can be used to add a `<source>` element to the media element
+ * - `innerHTML` - can be used to add a `<source>` element to the media element
+ *
+ * @param {Html5} tech
+ * The tech object that sourceset is being setup on.
+ */
+
+
+var firstSourceWatch = function firstSourceWatch(tech) {
+ var el = tech.el(); // make sure firstSourceWatch isn't setup twice.
+
+ if (el.resetSourceWatch_) {
+ return;
+ }
+
+ var old = {};
+ var innerDescriptor = getInnerHTMLDescriptor(tech);
+
+ var appendWrapper = function appendWrapper(appendFn) {
+ return function () {
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ var retval = appendFn.apply(el, args);
+ sourcesetLoad(tech);
+ return retval;
+ };
+ };
+
+ ['append', 'appendChild', 'insertAdjacentHTML'].forEach(function (k) {
+ if (!el[k]) {
+ return;
+ } // store the old function
+
+
+ old[k] = el[k]; // call the old function with a sourceset if a source
+ // was loaded
+
+ el[k] = appendWrapper(old[k]);
+ });
+ Object.defineProperty(el, 'innerHTML', mergeOptions$3(innerDescriptor, {
+ set: appendWrapper(innerDescriptor.set)
+ }));
+
+ el.resetSourceWatch_ = function () {
+ el.resetSourceWatch_ = null;
+ Object.keys(old).forEach(function (k) {
+ el[k] = old[k];
+ });
+ Object.defineProperty(el, 'innerHTML', innerDescriptor);
+ }; // on the first sourceset, we need to revert our changes
+
+
+ tech.one('sourceset', el.resetSourceWatch_);
+};
+/**
+ * our implementation of a `src` descriptor for browsers
+ * that do not have one.
+ */
+
+
+var srcDescriptorPolyfill = Object.defineProperty({}, 'src', {
+ get: function get() {
+ if (this.hasAttribute('src')) {
+ return getAbsoluteURL(window__default['default'].Element.prototype.getAttribute.call(this, 'src'));
+ }
+
+ return '';
+ },
+ set: function set(v) {
+ window__default['default'].Element.prototype.setAttribute.call(this, 'src', v);
+ return v;
+ }
+});
+
+var getSrcDescriptor = function getSrcDescriptor(tech) {
+ return getDescriptor([tech.el(), window__default['default'].HTMLMediaElement.prototype, srcDescriptorPolyfill], 'src');
+};
+/**
+ * setup `sourceset` handling on the `Html5` tech. This function
+ * patches the following element properties/functions:
+ *
+ * - `src` - to determine when `src` is set
+ * - `setAttribute()` - to determine when `src` is set
+ * - `load()` - this re-triggers the source selection algorithm, and can
+ * cause a sourceset.
+ *
+ * If there is no source when we are adding `sourceset` support or during a `load()`
+ * we also patch the functions listed in `firstSourceWatch`.
+ *
+ * @param {Html5} tech
+ * The tech to patch
+ */
+
+
+var setupSourceset = function setupSourceset(tech) {
+ if (!tech.featuresSourceset) {
+ return;
+ }
+
+ var el = tech.el(); // make sure sourceset isn't setup twice.
+
+ if (el.resetSourceset_) {
+ return;
+ }
+
+ var srcDescriptor = getSrcDescriptor(tech);
+ var oldSetAttribute = el.setAttribute;
+ var oldLoad = el.load;
+ Object.defineProperty(el, 'src', mergeOptions$3(srcDescriptor, {
+ set: function set(v) {
+ var retval = srcDescriptor.set.call(el, v); // we use the getter here to get the actual value set on src
+
+ tech.triggerSourceset(el.src);
+ return retval;
+ }
+ }));
+
+ el.setAttribute = function (n, v) {
+ var retval = oldSetAttribute.call(el, n, v);
+
+ if (/src/i.test(n)) {
+ tech.triggerSourceset(el.src);
+ }
+
+ return retval;
+ };
+
+ el.load = function () {
+ var retval = oldLoad.call(el); // if load was called, but there was no source to fire
+ // sourceset on. We have to watch for a source append
+ // as that can trigger a `sourceset` when the media element
+ // has no source
+
+ if (!sourcesetLoad(tech)) {
+ tech.triggerSourceset('');
+ firstSourceWatch(tech);
+ }
+
+ return retval;
+ };
+
+ if (el.currentSrc) {
+ tech.triggerSourceset(el.currentSrc);
+ } else if (!sourcesetLoad(tech)) {
+ firstSourceWatch(tech);
+ }
+
+ el.resetSourceset_ = function () {
+ el.resetSourceset_ = null;
+ el.load = oldLoad;
+ el.setAttribute = oldSetAttribute;
+ Object.defineProperty(el, 'src', srcDescriptor);
+
+ if (el.resetSourceWatch_) {
+ el.resetSourceWatch_();
+ }
+ };
+};
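+
+// Usage sketch: with the patches above in place, `el.src = ...`, `el.setAttribute('src', ...)`,
+// `el.load()` and <source> appends all funnel into `tech.triggerSourceset(...)`. Assuming the
+// player re-emits the tech's `sourceset` event (as video.js 7 does when `featuresSourceset`
+// is true), application code can listen like this:
+//
+//   player.on('sourceset', function (e) {
+//     // e.src is the detected source URL, or '' when it could not be determined
+//     console.log('source changed to', e.src);
+//   });
+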
+
+/**
+ * Object.defineProperty but "lazy", which means that the value is only set after
+ * it retrieved the first time, rather than being set right away.
+ *
+ * @param {Object} obj the object to set the property on
+ * @param {string} key the key for the property to set
+ * @param {Function} getValue the function used to get the value when it is needed.
+ * @param {boolean} setter whether a setter should be allowed or not
+ */
+var defineLazyProperty = function defineLazyProperty(obj, key, getValue, setter) {
+ if (setter === void 0) {
+ setter = true;
+ }
+
+ var set = function set(value) {
+ return Object.defineProperty(obj, key, {
+ value: value,
+ enumerable: true,
+ writable: true
+ });
+ };
+
+ var options = {
+ configurable: true,
+ enumerable: true,
+ get: function get() {
+ var value = getValue();
+ set(value);
+ return value;
+ }
+ };
+
+ if (setter) {
+ options.set = set;
+ }
+
+ return Object.defineProperty(obj, key, options);
+};
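+
+// Behaviour sketch: the first read runs `getValue()` and then swaps the accessor for a plain
+// writable data property, so later reads skip the computation. The object and key names below
+// are illustrative only:
+//
+//   var cache = {};
+//   defineLazyProperty(cache, 'answer', function () {
+//     console.log('computed once');
+//     return 42;
+//   });
+//   cache.answer; // logs 'computed once', returns 42
+//   cache.answer; // returns 42 without recomputing
+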
+
+/**
+ * HTML5 Media Controller - Wrapper for HTML5 Media API
+ *
+ * @mixes Tech~SourceHandlerAdditions
+ * @extends Tech
+ */
+
+var Html5 = /*#__PURE__*/function (_Tech) {
+ _inheritsLoose__default['default'](Html5, _Tech);
+
+ /**
+ * Create an instance of this Tech.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} ready
+ * Callback function to call when the `HTML5` Tech is ready.
+ */
+ function Html5(options, ready) {
+ var _this;
+
+ _this = _Tech.call(this, options, ready) || this;
+ var source = options.source;
+ var crossoriginTracks = false;
+ _this.featuresVideoFrameCallback = _this.featuresVideoFrameCallback && _this.el_.tagName === 'VIDEO'; // Set the source if one is provided
+ // 1) Check if the source is new (if not, we want to keep the original so playback isn't interrupted)
+ // 2) Check to see if the network state of the tag was failed at init, and if so, reset the source
+ // anyway so the error gets fired.
+
+ if (source && (_this.el_.currentSrc !== source.src || options.tag && options.tag.initNetworkState_ === 3)) {
+ _this.setSource(source);
+ } else {
+ _this.handleLateInit_(_this.el_);
+ } // setup sourceset after late sourceset/init
+
+
+ if (options.enableSourceset) {
+ _this.setupSourcesetHandling_();
+ }
+
+ _this.isScrubbing_ = false;
+
+ if (_this.el_.hasChildNodes()) {
+ var nodes = _this.el_.childNodes;
+ var nodesLength = nodes.length;
+ var removeNodes = [];
+
+ while (nodesLength--) {
+ var node = nodes[nodesLength];
+ var nodeName = node.nodeName.toLowerCase();
+
+ if (nodeName === 'track') {
+ if (!_this.featuresNativeTextTracks) {
+ // Empty video tag tracks so the built-in player doesn't use them also.
+ // This may not be fast enough to stop HTML5 browsers from reading the tags
+ // so we'll need to turn off any default tracks if we're manually doing
+ // captions and subtitles. videoElement.textTracks
+ removeNodes.push(node);
+ } else {
+ // store HTMLTrackElement and TextTrack to remote list
+ _this.remoteTextTrackEls().addTrackElement_(node);
+
+ _this.remoteTextTracks().addTrack(node.track);
+
+ _this.textTracks().addTrack(node.track);
+
+ if (!crossoriginTracks && !_this.el_.hasAttribute('crossorigin') && isCrossOrigin(node.src)) {
+ crossoriginTracks = true;
+ }
+ }
+ }
+ }
+
+ for (var i = 0; i < removeNodes.length; i++) {
+ _this.el_.removeChild(removeNodes[i]);
+ }
+ }
+
+ _this.proxyNativeTracks_();
+
+ if (_this.featuresNativeTextTracks && crossoriginTracks) {
+ log$1.warn('Text Tracks are being loaded from another origin but the crossorigin attribute isn\'t used.\n' + 'This may prevent text tracks from loading.');
+ } // prevent iOS Safari from disabling metadata text tracks during native playback
+
+
+ _this.restoreMetadataTracksInIOSNativePlayer_(); // Determine if native controls should be used
+ // Our goal should be to get the custom controls on mobile solid everywhere
+ // so we can remove this all together. Right now this will block custom
+ // controls on touch enabled laptops like the Chrome Pixel
+
+
+ if ((TOUCH_ENABLED || IS_IPHONE || IS_NATIVE_ANDROID) && options.nativeControlsForTouch === true) {
+ _this.setControls(true);
+ } // on iOS, we want to proxy `webkitbeginfullscreen` and `webkitendfullscreen`
+ // into a `fullscreenchange` event
+
+
+ _this.proxyWebkitFullscreen_();
+
+ _this.triggerReady();
+
+ return _this;
+ }
+ /**
+ * Dispose of `HTML5` media element and remove all tracks.
+ */
+
+
+ var _proto = Html5.prototype;
+
+ _proto.dispose = function dispose() {
+ if (this.el_ && this.el_.resetSourceset_) {
+ this.el_.resetSourceset_();
+ }
+
+ Html5.disposeMediaElement(this.el_);
+ this.options_ = null; // tech will handle clearing of the emulated track list
+
+ _Tech.prototype.dispose.call(this);
+ }
+ /**
+ * Modify the media element so that we can detect when
+ * the source is changed. Fires `sourceset` just after the source has changed
+ */
+ ;
+
+ _proto.setupSourcesetHandling_ = function setupSourcesetHandling_() {
+ setupSourceset(this);
+ }
+ /**
+ * When a captions track is enabled in the iOS Safari native player, all other
+ * tracks are disabled (including metadata tracks), which nulls all of their
+ * associated cue points. This will restore metadata tracks to their pre-fullscreen
+ * state in those cases so that cue points are not needlessly lost.
+ *
+ * @private
+ */
+ ;
+
+ _proto.restoreMetadataTracksInIOSNativePlayer_ = function restoreMetadataTracksInIOSNativePlayer_() {
+ var textTracks = this.textTracks();
+ var metadataTracksPreFullscreenState; // captures a snapshot of every metadata track's current state
+
+ var takeMetadataTrackSnapshot = function takeMetadataTrackSnapshot() {
+ metadataTracksPreFullscreenState = [];
+
+ for (var i = 0; i < textTracks.length; i++) {
+ var track = textTracks[i];
+
+ if (track.kind === 'metadata') {
+ metadataTracksPreFullscreenState.push({
+ track: track,
+ storedMode: track.mode
+ });
+ }
+ }
+ }; // snapshot each metadata track's initial state, and update the snapshot
+ // each time there is a track 'change' event
+
+
+ takeMetadataTrackSnapshot();
+ textTracks.addEventListener('change', takeMetadataTrackSnapshot);
+ this.on('dispose', function () {
+ return textTracks.removeEventListener('change', takeMetadataTrackSnapshot);
+ });
+
+ var restoreTrackMode = function restoreTrackMode() {
+ for (var i = 0; i < metadataTracksPreFullscreenState.length; i++) {
+ var storedTrack = metadataTracksPreFullscreenState[i];
+
+ if (storedTrack.track.mode === 'disabled' && storedTrack.track.mode !== storedTrack.storedMode) {
+ storedTrack.track.mode = storedTrack.storedMode;
+ }
+ } // we only want this handler to be executed on the first 'change' event
+
+
+ textTracks.removeEventListener('change', restoreTrackMode);
+ }; // when we enter fullscreen playback, stop updating the snapshot and
+ // restore all track modes to their pre-fullscreen state
+
+
+ this.on('webkitbeginfullscreen', function () {
+ textTracks.removeEventListener('change', takeMetadataTrackSnapshot); // remove the listener before adding it just in case it wasn't previously removed
+
+ textTracks.removeEventListener('change', restoreTrackMode);
+ textTracks.addEventListener('change', restoreTrackMode);
+ }); // start updating the snapshot again after leaving fullscreen
+
+ this.on('webkitendfullscreen', function () {
+ // remove the listener before adding it just in case it wasn't previously removed
+ textTracks.removeEventListener('change', takeMetadataTrackSnapshot);
+ textTracks.addEventListener('change', takeMetadataTrackSnapshot); // remove the restoreTrackMode handler in case it wasn't triggered during fullscreen playback
+
+ textTracks.removeEventListener('change', restoreTrackMode);
+ });
+ }
+ /**
+ * Attempt to force override of tracks for the given type
+ *
+ * @param {string} type - Track type to override, possible values include 'Audio',
+ * 'Video', and 'Text'.
+ * @param {boolean} override - If set to true native audio/video will be overridden,
+ * otherwise native audio/video will potentially be used.
+ * @private
+ */
+ ;
+
+ _proto.overrideNative_ = function overrideNative_(type, override) {
+ var _this2 = this;
+
+ // If there is no behavioral change don't add/remove listeners
+ if (override !== this["featuresNative" + type + "Tracks"]) {
+ return;
+ }
+
+ var lowerCaseType = type.toLowerCase();
+
+ if (this[lowerCaseType + "TracksListeners_"]) {
+ Object.keys(this[lowerCaseType + "TracksListeners_"]).forEach(function (eventName) {
+ var elTracks = _this2.el()[lowerCaseType + "Tracks"];
+
+ elTracks.removeEventListener(eventName, _this2[lowerCaseType + "TracksListeners_"][eventName]);
+ });
+ }
+
+ this["featuresNative" + type + "Tracks"] = !override;
+ this[lowerCaseType + "TracksListeners_"] = null;
+ this.proxyNativeTracksForType_(lowerCaseType);
+ }
+ /**
+ * Attempt to force override of native audio tracks.
+ *
+ * @param {boolean} override - If set to true native audio will be overridden,
+ * otherwise native audio will potentially be used.
+ */
+ ;
+
+ _proto.overrideNativeAudioTracks = function overrideNativeAudioTracks(override) {
+ this.overrideNative_('Audio', override);
+ }
+ /**
+ * Attempt to force override of native video tracks.
+ *
+ * @param {boolean} override - If set to true native video will be overridden,
+ * otherwise native video will potentially be used.
+ */
+ ;
+
+ _proto.overrideNativeVideoTracks = function overrideNativeVideoTracks(override) {
+ this.overrideNative_('Video', override);
+ }
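+
+// Usage sketch: both overrides above are reachable from application code through the tech.
+// Grabbing the tech directly is discouraged inside plugins, so treat this as an escape hatch
+// rather than a recommended pattern:
+//
+//   var tech = player.tech(true); // a truthy argument silences the "don't use the tech" warning
+//   tech.overrideNativeAudioTracks(true); // stop proxying native AudioTracks
+//   tech.overrideNativeVideoTracks(true); // stop proxying native VideoTracks
+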
+ /**
+ * Proxy native track list events for the given type to our track
+ * lists if the browser we are playing in supports that type of track list.
+ *
+ * @param {string} name - Track type; values include 'audio', 'video', and 'text'
+ * @private
+ */
+ ;
+
+ _proto.proxyNativeTracksForType_ = function proxyNativeTracksForType_(name) {
+ var _this3 = this;
+
+ var props = NORMAL[name];
+ var elTracks = this.el()[props.getterName];
+ var techTracks = this[props.getterName]();
+
+ if (!this["featuresNative" + props.capitalName + "Tracks"] || !elTracks || !elTracks.addEventListener) {
+ return;
+ }
+
+ var listeners = {
+ change: function change(e) {
+ var event = {
+ type: 'change',
+ target: techTracks,
+ currentTarget: techTracks,
+ srcElement: techTracks
+ };
+ techTracks.trigger(event); // if we are a text track change event, we should also notify the
+ // remote text track list. This can potentially cause a false positive
+ // if we were to get a change event on a non-remote track and
+ // we triggered the event on the remote text track list which doesn't
+ // contain that track. However, best practices mean looping through the
+ // list of tracks and searching for the appropriate mode value, so,
+ // this shouldn't pose an issue
+
+ if (name === 'text') {
+ _this3[REMOTE.remoteText.getterName]().trigger(event);
+ }
+ },
+ addtrack: function addtrack(e) {
+ techTracks.addTrack(e.track);
+ },
+ removetrack: function removetrack(e) {
+ techTracks.removeTrack(e.track);
+ }
+ };
+
+ var removeOldTracks = function removeOldTracks() {
+ var removeTracks = [];
+
+ for (var i = 0; i < techTracks.length; i++) {
+ var found = false;
+
+ for (var j = 0; j < elTracks.length; j++) {
+ if (elTracks[j] === techTracks[i]) {
+ found = true;
+ break;
+ }
+ }
+
+ if (!found) {
+ removeTracks.push(techTracks[i]);
+ }
+ }
+
+ while (removeTracks.length) {
+ techTracks.removeTrack(removeTracks.shift());
+ }
+ };
+
+ this[props.getterName + 'Listeners_'] = listeners;
+ Object.keys(listeners).forEach(function (eventName) {
+ var listener = listeners[eventName];
+ elTracks.addEventListener(eventName, listener);
+
+ _this3.on('dispose', function (e) {
+ return elTracks.removeEventListener(eventName, listener);
+ });
+ }); // Remove (native) tracks that are not used anymore
+
+ this.on('loadstart', removeOldTracks);
+ this.on('dispose', function (e) {
+ return _this3.off('loadstart', removeOldTracks);
+ });
+ }
+ /**
+ * Proxy all native track list events to our track lists if the browser we are playing
+ * in supports that type of track list.
+ *
+ * @private
+ */
+ ;
+
+ _proto.proxyNativeTracks_ = function proxyNativeTracks_() {
+ var _this4 = this;
+
+ NORMAL.names.forEach(function (name) {
+ _this4.proxyNativeTracksForType_(name);
+ });
+ }
+ /**
+ * Create the `Html5` Tech's DOM element.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ var el = this.options_.tag; // Check if this browser supports moving the element into the box.
+ // On the iPhone video will break if you move the element,
+ // So we have to create a brand new element.
+ // If we ingested the player div, we do not need to move the media element.
+
+ if (!el || !(this.options_.playerElIngest || this.movingMediaElementInDOM)) {
+ // If the original tag is still there, clone and remove it.
+ if (el) {
+ var clone = el.cloneNode(true);
+
+ if (el.parentNode) {
+ el.parentNode.insertBefore(clone, el);
+ }
+
+ Html5.disposeMediaElement(el);
+ el = clone;
+ } else {
+ el = document__default['default'].createElement('video'); // determine if native controls should be used
+
+ var tagAttributes = this.options_.tag && getAttributes(this.options_.tag);
+ var attributes = mergeOptions$3({}, tagAttributes);
+
+ if (!TOUCH_ENABLED || this.options_.nativeControlsForTouch !== true) {
+ delete attributes.controls;
+ }
+
+ setAttributes(el, assign(attributes, {
+ id: this.options_.techId,
+ "class": 'vjs-tech'
+ }));
+ }
+
+ el.playerId = this.options_.playerId;
+ }
+
+ if (typeof this.options_.preload !== 'undefined') {
+ setAttribute(el, 'preload', this.options_.preload);
+ }
+
+ if (this.options_.disablePictureInPicture !== undefined) {
+ el.disablePictureInPicture = this.options_.disablePictureInPicture;
+ } // Update specific tag settings, in case they were overridden
+ // `autoplay` has to be *last* so that `muted` and `playsinline` are present
+ // when iOS/Safari or other browsers attempt to autoplay.
+
+
+ var settingsAttrs = ['loop', 'muted', 'playsinline', 'autoplay'];
+
+ for (var i = 0; i < settingsAttrs.length; i++) {
+ var attr = settingsAttrs[i];
+ var value = this.options_[attr];
+
+ if (typeof value !== 'undefined') {
+ if (value) {
+ setAttribute(el, attr, attr);
+ } else {
+ removeAttribute(el, attr);
+ }
+
+ el[attr] = value;
+ }
+ }
+
+ return el;
+ }
+ /**
+ * This will be triggered if the loadstart event has already fired, before videojs was
+ * ready. Two known examples of when this can happen are:
+ * 1. If we're loading the playback object after it has started loading
+ * 2. The media is already playing (often with autoplay on) by the time videojs is ready
+ *
+ * This function will fire another loadstart so that videojs can catchup.
+ *
+ * @fires Tech#loadstart
+ *
+ * @return {undefined}
+ * returns nothing.
+ */
+ ;
+
+ _proto.handleLateInit_ = function handleLateInit_(el) {
+ if (el.networkState === 0 || el.networkState === 3) {
+ // The video element hasn't started loading the source yet
+ // or didn't find a source
+ return;
+ }
+
+ if (el.readyState === 0) {
+ // NetworkState is set synchronously BUT loadstart is fired at the
+ // end of the current stack, usually before setInterval(fn, 0).
+ // So at this point we know loadstart may have already fired or is
+ // about to fire, and either way the player hasn't seen it yet.
+ // We don't want to fire loadstart prematurely here and cause a
+ // double loadstart so we'll wait and see if it happens between now
+ // and the next loop, and fire it if not.
+ // HOWEVER, we also want to make sure it fires before loadedmetadata
+ // which could also happen between now and the next loop, so we'll
+ // watch for that also.
+ var loadstartFired = false;
+
+ var setLoadstartFired = function setLoadstartFired() {
+ loadstartFired = true;
+ };
+
+ this.on('loadstart', setLoadstartFired);
+
+ var triggerLoadstart = function triggerLoadstart() {
+ // We did miss the original loadstart. Make sure the player
+ // sees loadstart before loadedmetadata
+ if (!loadstartFired) {
+ this.trigger('loadstart');
+ }
+ };
+
+ this.on('loadedmetadata', triggerLoadstart);
+ this.ready(function () {
+ this.off('loadstart', setLoadstartFired);
+ this.off('loadedmetadata', triggerLoadstart);
+
+ if (!loadstartFired) {
+ // We did miss the original native loadstart. Fire it now.
+ this.trigger('loadstart');
+ }
+ });
+ return;
+ } // From here on we know that loadstart already fired and we missed it.
+ // The other readyState events aren't as much of a problem if we double
+ // them, so not going to go to as much trouble as loadstart to prevent
+ // that unless we find reason to.
+
+
+ var eventsToTrigger = ['loadstart']; // loadedmetadata: newly equal to HAVE_METADATA (1) or greater
+
+ eventsToTrigger.push('loadedmetadata'); // loadeddata: newly increased to HAVE_CURRENT_DATA (2) or greater
+
+ if (el.readyState >= 2) {
+ eventsToTrigger.push('loadeddata');
+ } // canplay: newly increased to HAVE_FUTURE_DATA (3) or greater
+
+
+ if (el.readyState >= 3) {
+ eventsToTrigger.push('canplay');
+ } // canplaythrough: newly equal to HAVE_ENOUGH_DATA (4)
+
+
+ if (el.readyState >= 4) {
+ eventsToTrigger.push('canplaythrough');
+ } // We still need to give the player time to add event listeners
+
+
+ this.ready(function () {
+ eventsToTrigger.forEach(function (type) {
+ this.trigger(type);
+ }, this);
+ });
+ }
+ /**
+ * Set whether we are scrubbing or not.
+ * This is used to decide whether we should use `fastSeek` or not.
+ * `fastSeek` is used to provide trick play on Safari browsers.
+ *
+ * @param {boolean} isScrubbing
+ * - true for we are currently scrubbing
+ * - false for we are no longer scrubbing
+ */
+ ;
+
+ _proto.setScrubbing = function setScrubbing(isScrubbing) {
+ this.isScrubbing_ = isScrubbing;
+ }
+ /**
+ * Get whether we are scrubbing or not.
+ *
+ * @return {boolean} isScrubbing
+ * - true for we are currently scrubbing
+ * - false for we are no longer scrubbing
+ */
+ ;
+
+ _proto.scrubbing = function scrubbing() {
+ return this.isScrubbing_;
+ }
+ /**
+ * Set current time for the `HTML5` tech.
+ *
+ * @param {number} seconds
+ * Set the current time of the media to this.
+ */
+ ;
+
+ _proto.setCurrentTime = function setCurrentTime(seconds) {
+ try {
+ if (this.isScrubbing_ && this.el_.fastSeek && IS_ANY_SAFARI) {
+ this.el_.fastSeek(seconds);
+ } else {
+ this.el_.currentTime = seconds;
+ }
+ } catch (e) {
+ log$1(e, 'Video is not ready. (Video.js)'); // this.warning(VideoJS.warnings.videoNotReady);
+ }
+ }
+ /**
+ * Get the current duration of the HTML5 media element.
+ *
+ * @return {number}
+ * The duration of the media or 0 if there is no duration.
+ */
+ ;
+
+ _proto.duration = function duration() {
+ var _this5 = this;
+
+ // Android Chrome will report duration as Infinity for VOD HLS until after
+ // playback has started, which triggers the live display erroneously.
+ // Return NaN if playback has not started and trigger a durationchange once
+ // the duration can be reliably known.
+ if (this.el_.duration === Infinity && IS_ANDROID && IS_CHROME && this.el_.currentTime === 0) {
+ // Wait for the first `timeupdate` with currentTime > 0 - there may be
+ // several with 0
+ var checkProgress = function checkProgress() {
+ if (_this5.el_.currentTime > 0) {
+ // Trigger durationchange for genuinely live video
+ if (_this5.el_.duration === Infinity) {
+ _this5.trigger('durationchange');
+ }
+
+ _this5.off('timeupdate', checkProgress);
+ }
+ };
+
+ this.on('timeupdate', checkProgress);
+ return NaN;
+ }
+
+ return this.el_.duration || NaN;
+ }
+ /**
+ * Get the current width of the HTML5 media element.
+ *
+ * @return {number}
+ * The width of the HTML5 media element.
+ */
+ ;
+
+ _proto.width = function width() {
+ return this.el_.offsetWidth;
+ }
+ /**
+ * Get the current height of the HTML5 media element.
+ *
+ * @return {number}
+ * The height of the HTML5 media element.
+ */
+ ;
+
+ _proto.height = function height() {
+ return this.el_.offsetHeight;
+ }
+ /**
+ * Proxy iOS `webkitbeginfullscreen` and `webkitendfullscreen` into
+ * `fullscreenchange` event.
+ *
+ * @private
+ * @fires fullscreenchange
+ * @listens webkitendfullscreen
+ * @listens webkitbeginfullscreen
+ * @listens webkitbeginfullscreen
+ */
+ ;
+
+ _proto.proxyWebkitFullscreen_ = function proxyWebkitFullscreen_() {
+ var _this6 = this;
+
+ if (!('webkitDisplayingFullscreen' in this.el_)) {
+ return;
+ }
+
+ var endFn = function endFn() {
+ this.trigger('fullscreenchange', {
+ isFullscreen: false
+ }); // Safari will sometimes set controls on the video element when exiting fullscreen.
+
+ if (this.el_.controls && !this.options_.nativeControlsForTouch && this.controls()) {
+ this.el_.controls = false;
+ }
+ };
+
+ var beginFn = function beginFn() {
+ if ('webkitPresentationMode' in this.el_ && this.el_.webkitPresentationMode !== 'picture-in-picture') {
+ this.one('webkitendfullscreen', endFn);
+ this.trigger('fullscreenchange', {
+ isFullscreen: true,
+ // set a flag in case another tech triggers fullscreenchange
+ nativeIOSFullscreen: true
+ });
+ }
+ };
+
+ this.on('webkitbeginfullscreen', beginFn);
+ this.on('dispose', function () {
+ _this6.off('webkitbeginfullscreen', beginFn);
+
+ _this6.off('webkitendfullscreen', endFn);
+ });
+ }
+ /**
+ * Check if fullscreen is supported on the current playback device.
+ *
+ * @return {boolean}
+ * - True if fullscreen is supported.
+ * - False if fullscreen is not supported.
+ */
+ ;
+
+ _proto.supportsFullScreen = function supportsFullScreen() {
+ if (typeof this.el_.webkitEnterFullScreen === 'function') {
+ var userAgent = window__default['default'].navigator && window__default['default'].navigator.userAgent || ''; // Seems to be broken in Chromium/Chrome && Safari in Leopard
+
+ if (/Android/.test(userAgent) || !/Chrome|Mac OS X 10.5/.test(userAgent)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+ /**
+ * Request that the `HTML5` Tech enter fullscreen.
+ */
+ ;
+
+ _proto.enterFullScreen = function enterFullScreen() {
+ var video = this.el_;
+
+ if (video.paused && video.networkState <= video.HAVE_METADATA) {
+ // attempt to prime the video element for programmatic access
+ // this isn't necessary on the desktop but shouldn't hurt
+ silencePromise(this.el_.play()); // playing and pausing synchronously during the transition to fullscreen
+ // can get iOS ~6.1 devices into a play/pause loop
+
+ this.setTimeout(function () {
+ video.pause();
+
+ try {
+ video.webkitEnterFullScreen();
+ } catch (e) {
+ this.trigger('fullscreenerror', e);
+ }
+ }, 0);
+ } else {
+ try {
+ video.webkitEnterFullScreen();
+ } catch (e) {
+ this.trigger('fullscreenerror', e);
+ }
+ }
+ }
+ /**
+ * Request that the `HTML5` Tech exit fullscreen.
+ */
+ ;
+
+ _proto.exitFullScreen = function exitFullScreen() {
+ if (!this.el_.webkitDisplayingFullscreen) {
+ this.trigger('fullscreenerror', new Error('The video is not fullscreen'));
+ return;
+ }
+
+ this.el_.webkitExitFullScreen();
+ }
+ /**
+ * Create a floating video window always on top of other windows so that users may
+ * continue consuming media while they interact with other content sites, or
+ * applications on their device.
+ *
+ * @see [Spec]{@link https://wicg.github.io/picture-in-picture}
+ *
+ * @return {Promise}
+ * A promise with a Picture-in-Picture window.
+ */
+ ;
+
+ _proto.requestPictureInPicture = function requestPictureInPicture() {
+ return this.el_.requestPictureInPicture();
+ }
+ /**
+ * Native requestVideoFrameCallback if supported by browser/tech, or fallback
+ * Don't use rVFC (requestVideoFrameCallback) on Safari when DRM is playing, as it doesn't fire
+ * Needs to be checked later than the constructor
+ * This will be a false positive for clear sources loaded after a Fairplay source
+ *
+ * @param {function} cb function to call
+ * @return {number} id of request
+ */
+ ;
+
+ _proto.requestVideoFrameCallback = function requestVideoFrameCallback(cb) {
+ if (this.featuresVideoFrameCallback && !this.el_.webkitKeys) {
+ return this.el_.requestVideoFrameCallback(cb);
+ }
+
+ return _Tech.prototype.requestVideoFrameCallback.call(this, cb);
+ }
+ /**
+ * Native or fallback cancelVideoFrameCallback
+ *
+ * @param {number} id request id to cancel
+ */
+ ;
+
+ _proto.cancelVideoFrameCallback = function cancelVideoFrameCallback(id) {
+ if (this.featuresVideoFrameCallback && !this.el_.webkitKeys) {
+ this.el_.cancelVideoFrameCallback(id);
+ } else {
+ _Tech.prototype.cancelVideoFrameCallback.call(this, id);
+ }
+ }
+ /**
+ * A getter/setter for the `Html5` Tech's source object.
+ * > Note: Please use {@link Html5#setSource}
+ *
+ * @param {Tech~SourceObject} [src]
+ * The source object you want to set on the `HTML5` techs element.
+ *
+ * @return {Tech~SourceObject|undefined}
+ * - The current source object when a source is not passed in.
+ * - undefined when setting
+ *
+ * @deprecated Since version 5.
+ */
+ ;
+
+ _proto.src = function src(_src) {
+ if (_src === undefined) {
+ return this.el_.src;
+ } // Setting src through `src` instead of `setSrc` will be deprecated
+
+
+ this.setSrc(_src);
+ }
+ /**
+ * Reset the tech by removing all sources and then calling
+ * {@link Html5.resetMediaElement}.
+ */
+ ;
+
+ _proto.reset = function reset() {
+ Html5.resetMediaElement(this.el_);
+ }
+ /**
+ * Get the current source on the `HTML5` Tech. Falls back to returning the source from
+ * the HTML5 media element.
+ *
+ * @return {Tech~SourceObject}
+ * The current source object from the HTML5 tech. With a fallback to the
+ * elements source.
+ */
+ ;
+
+ _proto.currentSrc = function currentSrc() {
+ if (this.currentSource_) {
+ return this.currentSource_.src;
+ }
+
+ return this.el_.currentSrc;
+ }
+ /**
+ * Set controls attribute for the HTML5 media Element.
+ *
+ * @param {string} val
+ * Value to set the controls attribute to
+ */
+ ;
+
+ _proto.setControls = function setControls(val) {
+ this.el_.controls = !!val;
+ }
+ /**
+ * Create and returns a remote {@link TextTrack} object.
+ *
+ * @param {string} kind
+ * `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata)
+ *
+ * @param {string} [label]
+ * Label to identify the text track
+ *
+ * @param {string} [language]
+ * Two letter language abbreviation
+ *
+ * @return {TextTrack}
+ * The TextTrack that gets created.
+ */
+ ;
+
+ _proto.addTextTrack = function addTextTrack(kind, label, language) {
+ if (!this.featuresNativeTextTracks) {
+ return _Tech.prototype.addTextTrack.call(this, kind, label, language);
+ }
+
+ return this.el_.addTextTrack(kind, label, language);
+ }
+ /**
+ * Creates either native TextTrack or an emulated TextTrack depending
+ * on the value of `featuresNativeTextTracks`
+ *
+ * @param {Object} options
+ * The object should contain the options to initialize the TextTrack with.
+ *
+ * @param {string} [options.kind]
+ * `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata).
+ *
+ * @param {string} [options.label]
+ * Label to identify the text track
+ *
+ * @param {string} [options.language]
+ * Two letter language abbreviation.
+ *
+ * @param {boolean} [options.default]
+ * Default this track to on.
+ *
+ * @param {string} [options.id]
+ * The internal id to assign this track.
+ *
+ * @param {string} [options.src]
+ * A source url for the track.
+ *
+ * @return {HTMLTrackElement}
+ * The track element that gets created.
+ */
+ ;
+
+ _proto.createRemoteTextTrack = function createRemoteTextTrack(options) {
+ if (!this.featuresNativeTextTracks) {
+ return _Tech.prototype.createRemoteTextTrack.call(this, options);
+ }
+
+ var htmlTrackElement = document__default['default'].createElement('track');
+
+ if (options.kind) {
+ htmlTrackElement.kind = options.kind;
+ }
+
+ if (options.label) {
+ htmlTrackElement.label = options.label;
+ }
+
+ if (options.language || options.srclang) {
+ htmlTrackElement.srclang = options.language || options.srclang;
+ }
+
+ if (options["default"]) {
+ htmlTrackElement["default"] = options["default"];
+ }
+
+ if (options.id) {
+ htmlTrackElement.id = options.id;
+ }
+
+ if (options.src) {
+ htmlTrackElement.src = options.src;
+ }
+
+ return htmlTrackElement;
+ }
+ /**
+ * Creates a remote text track object and returns an html track element.
+ *
+ * @param {Object} options The object should contain values for
+ * kind, language, label, and src (location of the WebVTT file)
+ * @param {boolean} [manualCleanup=true] if set to false, the TextTrack will be
+ * automatically removed from the video element whenever the source changes
+ * @return {HTMLTrackElement} An Html Track Element.
+ * This can be an emulated {@link HTMLTrackElement} or a native one.
+ * @deprecated The default value of the "manualCleanup" parameter will default
+ * to "false" in upcoming versions of Video.js
+ */
+ ;
+
+ _proto.addRemoteTextTrack = function addRemoteTextTrack(options, manualCleanup) {
+ var htmlTrackElement = _Tech.prototype.addRemoteTextTrack.call(this, options, manualCleanup);
+
+ if (this.featuresNativeTextTracks) {
+ this.el().appendChild(htmlTrackElement);
+ }
+
+ return htmlTrackElement;
+ }
+ /**
+ * Remove remote `TextTrack` from `TextTrackList` object
+ *
+ * @param {TextTrack} track
+ * `TextTrack` object to remove
+ */
+ ;
+
+ _proto.removeRemoteTextTrack = function removeRemoteTextTrack(track) {
+ _Tech.prototype.removeRemoteTextTrack.call(this, track);
+
+ if (this.featuresNativeTextTracks) {
+ var tracks = this.$$('track');
+ var i = tracks.length;
+
+ while (i--) {
+ if (track === tracks[i] || track === tracks[i].track) {
+ this.el().removeChild(tracks[i]);
+ }
+ }
+ }
+ }
+ /**
+ * Gets available media playback quality metrics as specified by the W3C's Media
+ * Playback Quality API.
+ *
+ * @see [Spec]{@link https://wicg.github.io/media-playback-quality}
+ *
+ * @return {Object}
+ * An object with supported media playback quality metrics
+ */
+ ;
+
+ _proto.getVideoPlaybackQuality = function getVideoPlaybackQuality() {
+ if (typeof this.el().getVideoPlaybackQuality === 'function') {
+ return this.el().getVideoPlaybackQuality();
+ }
+
+ var videoPlaybackQuality = {};
+
+ if (typeof this.el().webkitDroppedFrameCount !== 'undefined' && typeof this.el().webkitDecodedFrameCount !== 'undefined') {
+ videoPlaybackQuality.droppedVideoFrames = this.el().webkitDroppedFrameCount;
+ videoPlaybackQuality.totalVideoFrames = this.el().webkitDecodedFrameCount;
+ }
+
+ if (window__default['default'].performance && typeof window__default['default'].performance.now === 'function') {
+ videoPlaybackQuality.creationTime = window__default['default'].performance.now();
+ } else if (window__default['default'].performance && window__default['default'].performance.timing && typeof window__default['default'].performance.timing.navigationStart === 'number') {
+ videoPlaybackQuality.creationTime = window__default['default'].Date.now() - window__default['default'].performance.timing.navigationStart;
+ }
+
+ return videoPlaybackQuality;
+ };
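+
+  // Shape sketch of the fallback object built above when the native getVideoPlaybackQuality()
+  // API is missing but the WebKit frame counters exist (the numbers are illustrative):
+  //
+  //   {
+  //     droppedVideoFrames: 3,    // from el.webkitDroppedFrameCount
+  //     totalVideoFrames: 1200,   // from el.webkitDecodedFrameCount
+  //     creationTime: 4523.7      // performance.now() at call time, when available
+  //   }
+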
+
+ return Html5;
+}(Tech);
+/* HTML5 Support Testing ---------------------------------------------------- */
+
+/**
+ * Element for testing browser HTML5 media capabilities
+ *
+ * @type {Element}
+ * @constant
+ * @private
+ */
+
+
+defineLazyProperty(Html5, 'TEST_VID', function () {
+ if (!isReal()) {
+ return;
+ }
+
+ var video = document__default['default'].createElement('video');
+ var track = document__default['default'].createElement('track');
+ track.kind = 'captions';
+ track.srclang = 'en';
+ track.label = 'English';
+ video.appendChild(track);
+ return video;
+});
+/**
+ * Check if HTML5 media is supported by this browser/device.
+ *
+ * @return {boolean}
+ * - True if HTML5 media is supported.
+ * - False if HTML5 media is not supported.
+ */
+
+Html5.isSupported = function () {
+ // IE with no Media Player is a LIAR! (#984)
+ try {
+ Html5.TEST_VID.volume = 0.5;
+ } catch (e) {
+ return false;
+ }
+
+ return !!(Html5.TEST_VID && Html5.TEST_VID.canPlayType);
+};
+/**
+ * Check if the tech can support the given type
+ *
+ * @param {string} type
+ * The mimetype to check
+ * @return {string} 'probably', 'maybe', or '' (empty string)
+ */
+
+
+Html5.canPlayType = function (type) {
+ return Html5.TEST_VID.canPlayType(type);
+};
+/**
+ * Check if the tech can support the given source
+ *
+ * @param {Object} srcObj
+ * The source object
+ * @param {Object} options
+ * The options passed to the tech
+ * @return {string} 'probably', 'maybe', or '' (empty string)
+ */
+
+
+Html5.canPlaySource = function (srcObj, options) {
+ return Html5.canPlayType(srcObj.type);
+};
+/**
+ * Check if the volume can be changed in this browser/device.
+ * Volume cannot be changed on a lot of mobile devices.
+ * Specifically, it can't be changed from 1 on iOS.
+ *
+ * @return {boolean}
+ * - True if volume can be controlled
+ * - False otherwise
+ */
+
+
+Html5.canControlVolume = function () {
+ // IE will error if Windows Media Player not installed #3315
+ try {
+ var volume = Html5.TEST_VID.volume;
+ Html5.TEST_VID.volume = volume / 2 + 0.1;
+ var canControl = volume !== Html5.TEST_VID.volume; // With the introduction of iOS 15, there are cases where the volume is read as
+ // changed but reverts back to its original state at the start of the next tick.
+ // To determine whether volume can be controlled on iOS,
+ // a timeout is set and the volume is checked asynchronously.
+ // Since `features` doesn't currently work asynchronously, the value is manually set.
+
+ if (canControl && IS_IOS) {
+ window__default['default'].setTimeout(function () {
+ if (Html5 && Html5.prototype) {
+ Html5.prototype.featuresVolumeControl = volume !== Html5.TEST_VID.volume;
+ }
+ }); // default iOS to false, which will be updated in the timeout above.
+
+ return false;
+ }
+
+ return canControl;
+ } catch (e) {
+ return false;
+ }
+};
+/**
+ * Check if the volume can be muted in this browser/device.
+ * Some devices, e.g. iOS, don't allow changing volume
+ * but permit muting/unmuting.
+ *
+ * @return {boolean}
+ * - True if volume can be muted
+ * - False otherwise
+ */
+
+
+Html5.canMuteVolume = function () {
+ try {
+ var muted = Html5.TEST_VID.muted; // in some versions of iOS muted property doesn't always
+ // work, so we want to set both property and attribute
+
+ Html5.TEST_VID.muted = !muted;
+
+ if (Html5.TEST_VID.muted) {
+ setAttribute(Html5.TEST_VID, 'muted', 'muted');
+ } else {
+ removeAttribute(Html5.TEST_VID, 'muted', 'muted');
+ }
+
+ return muted !== Html5.TEST_VID.muted;
+ } catch (e) {
+ return false;
+ }
+};
+/**
+ * Check if the playback rate can be changed in this browser/device.
+ *
+ * @return {boolean}
+ * - True if playback rate can be controlled
+ * - False otherwise
+ */
+
+
+Html5.canControlPlaybackRate = function () {
+ // Playback rate API is implemented in Android Chrome, but doesn't do anything
+ // https://github.com/videojs/video.js/issues/3180
+ if (IS_ANDROID && IS_CHROME && CHROME_VERSION < 58) {
+ return false;
+ } // IE will error if Windows Media Player not installed #3315
+
+
+ try {
+ var playbackRate = Html5.TEST_VID.playbackRate;
+ Html5.TEST_VID.playbackRate = playbackRate / 2 + 0.1;
+ return playbackRate !== Html5.TEST_VID.playbackRate;
+ } catch (e) {
+ return false;
+ }
+};
+/**
+ * Check if we can override a video/audio elements attributes, with
+ * Object.defineProperty.
+ *
+ * @return {boolean}
+ * - True if builtin attributes can be overridden
+ * - False otherwise
+ */
+
+
+Html5.canOverrideAttributes = function () {
+ // if we cannot overwrite the src/innerHTML property, there is no support
+ // iOS 7 safari for instance cannot do this.
+ try {
+ var noop = function noop() {};
+
+ Object.defineProperty(document__default['default'].createElement('video'), 'src', {
+ get: noop,
+ set: noop
+ });
+ Object.defineProperty(document__default['default'].createElement('audio'), 'src', {
+ get: noop,
+ set: noop
+ });
+ Object.defineProperty(document__default['default'].createElement('video'), 'innerHTML', {
+ get: noop,
+ set: noop
+ });
+ Object.defineProperty(document__default['default'].createElement('audio'), 'innerHTML', {
+ get: noop,
+ set: noop
+ });
+ } catch (e) {
+ return false;
+ }
+
+ return true;
+};
+/**
+ * Check to see if native `TextTrack`s are supported by this browser/device.
+ *
+ * @return {boolean}
+ * - True if native `TextTrack`s are supported.
+ * - False otherwise
+ */
+
+
+Html5.supportsNativeTextTracks = function () {
+ return IS_ANY_SAFARI || IS_IOS && IS_CHROME;
+};
+/**
+ * Check to see if native `VideoTrack`s are supported by this browser/device
+ *
+ * @return {boolean}
+ * - True if native `VideoTrack`s are supported.
+ * - False otherwise
+ */
+
+
+Html5.supportsNativeVideoTracks = function () {
+ return !!(Html5.TEST_VID && Html5.TEST_VID.videoTracks);
+};
+/**
+ * Check to see if native `AudioTrack`s are supported by this browser/device
+ *
+ * @return {boolean}
+ * - True if native `AudioTrack`s are supported.
+ * - False otherwise
+ */
+
+
+Html5.supportsNativeAudioTracks = function () {
+ return !!(Html5.TEST_VID && Html5.TEST_VID.audioTracks);
+};
+/**
+ * An array of events available on the Html5 tech.
+ *
+ * @private
+ * @type {Array}
+ */
+
+
+Html5.Events = ['loadstart', 'suspend', 'abort', 'error', 'emptied', 'stalled', 'loadedmetadata', 'loadeddata', 'canplay', 'canplaythrough', 'playing', 'waiting', 'seeking', 'seeked', 'ended', 'durationchange', 'timeupdate', 'progress', 'play', 'pause', 'ratechange', 'resize', 'volumechange'];
+/**
+ * Boolean indicating whether the `Tech` supports volume control.
+ *
+ * @type {boolean}
+ * @default {@link Html5.canControlVolume}
+ */
+
+/**
+ * Boolean indicating whether the `Tech` supports muting volume.
+ *
+ * @type {boolean}
+ * @default {@link Html5.canMuteVolume}
+ */
+
+/**
+ * Boolean indicating whether the `Tech` supports changing the speed at which the media
+ * plays. Examples:
+ * - Set player to play 2x (twice) as fast
+ * - Set player to play 0.5x (half) as fast
+ *
+ * @type {boolean}
+ * @default {@link Html5.canControlPlaybackRate}
+ */
+
+/**
+ * Boolean indicating whether the `Tech` supports the `sourceset` event.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports native `TextTrack`s.
+ *
+ * @type {boolean}
+ * @default {@link Html5.supportsNativeTextTracks}
+ */
+
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports native `VideoTrack`s.
+ *
+ * @type {boolean}
+ * @default {@link Html5.supportsNativeVideoTracks}
+ */
+
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports native `AudioTrack`s.
+ *
+ * @type {boolean}
+ * @default {@link Html5.supportsNativeAudioTracks}
+ */
+
+[['featuresMuteControl', 'canMuteVolume'], ['featuresPlaybackRate', 'canControlPlaybackRate'], ['featuresSourceset', 'canOverrideAttributes'], ['featuresNativeTextTracks', 'supportsNativeTextTracks'], ['featuresNativeVideoTracks', 'supportsNativeVideoTracks'], ['featuresNativeAudioTracks', 'supportsNativeAudioTracks']].forEach(function (_ref) {
+ var key = _ref[0],
+ fn = _ref[1];
+ defineLazyProperty(Html5.prototype, key, function () {
+ return Html5[fn]();
+ }, true);
+});
+Html5.prototype.featuresVolumeControl = Html5.canControlVolume();
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports the media element
+ * moving in the DOM. iOS breaks if you move the media element, so this is set to
+ * false there. Everywhere else this should be true.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Html5.prototype.movingMediaElementInDOM = !IS_IOS; // TODO: Previous comment: No longer appears to be used. Can probably be removed.
+// Is this true?
+
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports automatic media resize
+ * when going into fullscreen.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Html5.prototype.featuresFullscreenResize = true;
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports the progress event.
+ * If this is false, manual `progress` events will be triggered instead.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Html5.prototype.featuresProgressEvents = true;
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports the timeupdate event.
+ * If this is false, manual `timeupdate` events will be triggered instead.
+ *
+ * @default
+ */
+
+Html5.prototype.featuresTimeupdateEvents = true;
+/**
+ * Whether the HTML5 el supports `requestVideoFrameCallback`
+ *
+ * @type {boolean}
+ */
+
+Html5.prototype.featuresVideoFrameCallback = !!(Html5.TEST_VID && Html5.TEST_VID.requestVideoFrameCallback); // HTML5 Feature detection and Device Fixes --------------------------------- //
+
+var canPlayType;
+
+Html5.patchCanPlayType = function () {
+ // Android 4.0 and above can play HLS to some extent but it reports being unable to do so
+ // Firefox and Chrome report correctly
+ if (ANDROID_VERSION >= 4.0 && !IS_FIREFOX && !IS_CHROME) {
+ canPlayType = Html5.TEST_VID && Html5.TEST_VID.constructor.prototype.canPlayType;
+
+ Html5.TEST_VID.constructor.prototype.canPlayType = function (type) {
+ var mpegurlRE = /^application\/(?:x-|vnd\.apple\.)mpegurl/i;
+
+ if (type && mpegurlRE.test(type)) {
+ return 'maybe';
+ }
+
+ return canPlayType.call(this, type);
+ };
+ }
+};
+
+Html5.unpatchCanPlayType = function () {
+ var r = Html5.TEST_VID.constructor.prototype.canPlayType;
+
+ if (canPlayType) {
+ Html5.TEST_VID.constructor.prototype.canPlayType = canPlayType;
+ }
+
+ return r;
+}; // by default, patch the media element
+
+
+Html5.patchCanPlayType();
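+
+// Effect sketch: on Android 4+ stock browsers (not Chrome or Firefox) the patch above makes
+// HLS types report as playable, roughly:
+//
+//   document.createElement('video').canPlayType('application/x-mpegURL'); // -> 'maybe'
+//
+// Html5.unpatchCanPlayType() restores the original prototype method and returns whichever
+// canPlayType was in place at the time of the call.
+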
+
+Html5.disposeMediaElement = function (el) {
+ if (!el) {
+ return;
+ }
+
+ if (el.parentNode) {
+ el.parentNode.removeChild(el);
+ } // remove any child track or source nodes to prevent their loading
+
+
+ while (el.hasChildNodes()) {
+ el.removeChild(el.firstChild);
+ } // remove any src reference. not setting `src=''` because that causes a warning
+ // in firefox
+
+
+ el.removeAttribute('src'); // force the media element to update its loading state by calling load()
+ // however IE on Windows 7N has a bug that throws an error so need a try/catch (#793)
+
+ if (typeof el.load === 'function') {
+ // wrapping in an iife so it's not deoptimized (#1060#discussion_r10324473)
+ (function () {
+ try {
+ el.load();
+ } catch (e) {// not supported
+ }
+ })();
+ }
+};
+
+Html5.resetMediaElement = function (el) {
+ if (!el) {
+ return;
+ }
+
+ var sources = el.querySelectorAll('source');
+ var i = sources.length;
+
+ while (i--) {
+ el.removeChild(sources[i]);
+ } // remove any src reference.
+ // not setting `src=''` because that throws an error
+
+
+ el.removeAttribute('src');
+
+ if (typeof el.load === 'function') {
+ // wrapping in an iife so it's not deoptimized (#1060#discussion_r10324473)
+ (function () {
+ try {
+ el.load();
+ } catch (e) {// satisfy linter
+ }
+ })();
+ }
+};
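+// Illustrative sketch (assumption: `videojs` is the bundle's export): both helpers take a
+// raw media element and are safe to call with a null/undefined argument:
+//
+//   var Html5Tech = videojs.getTech('Html5');
+//   var vid = document.createElement('video');
+//   Html5Tech.disposeMediaElement(vid); // detaches it, strips children and src, calls load()
+//   Html5Tech.resetMediaElement(vid);   // removes <source> children and the src attribute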
+/* Native HTML5 element property wrapping ----------------------------------- */
+// Wrap native boolean attributes with getters that check both property and attribute
+// The list is as follows:
+// muted, defaultMuted, autoplay, controls, loop, playsinline
+
+
+[
+/**
+ * Get the value of `muted` from the media element. `muted` indicates
+ * that the volume for the media should be set to silent. This does not actually change
+ * the `volume` attribute.
+ *
+ * @method Html5#muted
+ * @return {boolean}
+ * - True if the value of `volume` should be ignored and the audio set to silent.
+ * - False if the value of `volume` should be used.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-muted}
+ */
+'muted',
+/**
+ * Get the value of `defaultMuted` from the media element. `defaultMuted` indicates
+ * whether the media should start muted or not. Only changes the default state of the
+ * media. `muted` and `defaultMuted` can have different values. {@link Html5#muted} indicates the
+ * current state.
+ *
+ * @method Html5#defaultMuted
+ * @return {boolean}
+ * - The value of `defaultMuted` from the media element.
+ * - True indicates that the media should start muted.
+ * - False indicates that the media should not start muted
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-defaultmuted}
+ */
+'defaultMuted',
+/**
+ * Get the value of `autoplay` from the media element. `autoplay` indicates
+ * that the media should start to play as soon as the page is ready.
+ *
+ * @method Html5#autoplay
+ * @return {boolean}
+ * - The value of `autoplay` from the media element.
+ * - True indicates that the media should start as soon as the page loads.
+ * - False indicates that the media should not start as soon as the page loads.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-autoplay}
+ */
+'autoplay',
+/**
+ * Get the value of `controls` from the media element. `controls` indicates
+ * whether the native media controls should be shown or hidden.
+ *
+ * @method Html5#controls
+ * @return {boolean}
+ * - The value of `controls` from the media element.
+ * - True indicates that native controls should be showing.
+ * - False indicates that native controls should be hidden.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-controls}
+ */
+'controls',
+/**
+ * Get the value of `loop` from the media element. `loop` indicates
+ * that the media should return to the start of the media and continue playing once
+ * it reaches the end.
+ *
+ * @method Html5#loop
+ * @return {boolean}
+ * - The value of `loop` from the media element.
+ * - True indicates that playback should seek back to start once
+ * the end of a media is reached.
+ * - False indicates that playback should not loop back to the start when the
+ * end of the media is reached.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-loop}
+ */
+'loop',
+/**
+ * Get the value of `playsinline` from the media element. `playsinline` indicates
+ * to the browser that non-fullscreen playback is preferred when fullscreen
+ * playback is the native default, such as in iOS Safari.
+ *
+ * @method Html5#playsinline
+ * @return {boolean}
+ * - The value of `playsinline` from the media element.
+ * - True indicates that the media should play inline.
+ * - False indicates that the media should not play inline.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/#attr-video-playsinline}
+ */
+'playsinline'].forEach(function (prop) {
+ Html5.prototype[prop] = function () {
+ return this.el_[prop] || this.el_.hasAttribute(prop);
+ };
+}); // Wrap native boolean attributes with setters that set both property and attribute
+// The list is as follows:
+// setMuted, setDefaultMuted, setAutoplay, setLoop, setPlaysinline
+// setControls is special-cased above
+
+[
+/**
+ * Set the value of `muted` on the media element. `muted` indicates that the current
+ * audio level should be silent.
+ *
+ * @method Html5#setMuted
+ * @param {boolean} muted
+ * - True if the audio should be set to silent
+ * - False otherwise
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-muted}
+ */
+'muted',
+/**
+ * Set the value of `defaultMuted` on the media element. `defaultMuted` indicates that the current
+ * audio level should be silent, but will only affect the muted state on initial playback.
+ *
+ * @method Html5.prototype.setDefaultMuted
+ * @param {boolean} defaultMuted
+ * - True if the audio should be set to silent
+ * - False otherwise
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-defaultmuted}
+ */
+'defaultMuted',
+/**
+ * Set the value of `autoplay` on the media element. `autoplay` indicates
+ * that the media should start to play as soon as the page is ready.
+ *
+ * @method Html5#setAutoplay
+ * @param {boolean} autoplay
+ * - True indicates that the media should start as soon as the page loads.
+ * - False indicates that the media should not start as soon as the page loads.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-autoplay}
+ */
+'autoplay',
+/**
+ * Set the value of `loop` on the media element. `loop` indicates
+ * that the media should return to the start of the media and continue playing once
+ * it reaches the end.
+ *
+ * @method Html5#setLoop
+ * @param {boolean} loop
+ * - True indicates that playback should seek back to start once
+ * the end of a media is reached.
+ * - False indicates that playback should not loop back to the start when the
+ * end of the media is reached.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-loop}
+ */
+'loop',
+/**
+ * Set the value of `playsinline` on the media element. `playsinline` indicates
+ * to the browser that non-fullscreen playback is preferred when fullscreen
+ * playback is the native default, such as in iOS Safari.
+ *
+ * @method Html5#setPlaysinline
+ * @param {boolean} playsinline
+ * - True indicates that the media should play inline.
+ * - False indicates that the media should not play inline.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/#attr-video-playsinline}
+ */
+'playsinline'].forEach(function (prop) {
+ Html5.prototype['set' + toTitleCase$1(prop)] = function (v) {
+ this.el_[prop] = v;
+
+ if (v) {
+ this.el_.setAttribute(prop, prop);
+ } else {
+ this.el_.removeAttribute(prop);
+ }
+ };
+}); // Wrap native properties with a getter
+// The list is as follows
+// paused, currentTime, buffered, volume, poster, preload, error, seeking
+// seekable, ended, playbackRate, defaultPlaybackRate, disablePictureInPicture
+// played, networkState, readyState, videoWidth, videoHeight, crossOrigin
+
+[
+/**
+ * Get the value of `paused` from the media element. `paused` indicates whether the media element
+ * is currently paused or not.
+ *
+ * @method Html5#paused
+ * @return {boolean}
+ * The value of `paused` from the media element.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-paused}
+ */
+'paused',
+/**
+ * Get the value of `currentTime` from the media element. `currentTime` indicates
+ * the current second that the media is at in playback.
+ *
+ * @method Html5#currentTime
+ * @return {number}
+ * The value of `currentTime` from the media element.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-currenttime}
+ */
+'currentTime',
+/**
+ * Get the value of `buffered` from the media element. `buffered` is a `TimeRange`
+ * object that represents the parts of the media that are already downloaded and
+ * available for playback.
+ *
+ * @method Html5#buffered
+ * @return {TimeRange}
+ * The value of `buffered` from the media element.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-buffered}
+ */
+'buffered',
+/**
+ * Get the value of `volume` from the media element. `volume` indicates
+ * the current playback volume of audio for a media. `volume` will be a value from 0
+ * (silent) to 1 (loudest and default).
+ *
+ * @method Html5#volume
+ * @return {number}
+ * The value of `volume` from the media element. Value will be between 0-1.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-a-volume}
+ */
+'volume',
+/**
+ * Get the value of `poster` from the media element. `poster` indicates
+ * the url of an image file that can/will be shown when no media data is available.
+ *
+ * @method Html5#poster
+ * @return {string}
+ * The value of `poster` from the media element. Value will be a url to an
+ * image.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-video-poster}
+ */
+'poster',
+/**
+ * Get the value of `preload` from the media element. `preload` indicates
+ * what should download before the media is interacted with. It can have the following
+ * values:
+ * - none: nothing should be downloaded
+ * - metadata: poster and the first few frames of the media may be downloaded to get
+ * media dimensions and other metadata
+ * - auto: allow the media and metadata for the media to be downloaded before
+ * interaction
+ *
+ * @method Html5#preload
+ * @return {string}
+ * The value of `preload` from the media element. Will be 'none', 'metadata',
+ * or 'auto'.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-preload}
+ */
+'preload',
+/**
+ * Get the value of the `error` from the media element. `error` indicates any
+ * MediaError that may have occurred during playback. If error returns null there is no
+ * current error.
+ *
+ * @method Html5#error
+ * @return {MediaError|null}
+ * The value of `error` from the media element. Will be `MediaError` if there
+ * is a current error and null otherwise.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-error}
+ */
+'error',
+/**
+ * Get the value of `seeking` from the media element. `seeking` indicates whether the
+ * media is currently seeking to a new position or not.
+ *
+ * @method Html5#seeking
+ * @return {boolean}
+ * - The value of `seeking` from the media element.
+ * - True indicates that the media is currently seeking to a new position.
+ * - False indicates that the media is not seeking to a new position at this time.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-seeking}
+ */
+'seeking',
+/**
+ * Get the value of `seekable` from the media element. `seekable` returns a
+ * `TimeRange` object indicating ranges of time that can currently be `seeked` to.
+ *
+ * @method Html5#seekable
+ * @return {TimeRange}
+ * The value of `seekable` from the media element. A `TimeRange` object
+ * indicating the current ranges of time that can be seeked to.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-seekable}
+ */
+'seekable',
+/**
+ * Get the value of `ended` from the media element. `ended` indicates whether
+ * the media has reached the end or not.
+ *
+ * @method Html5#ended
+ * @return {boolean}
+ * - The value of `ended` from the media element.
+ * - True indicates that the media has ended.
+ * - False indicates that the media has not ended.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-ended}
+ */
+'ended',
+/**
+ * Get the value of `playbackRate` from the media element. `playbackRate` indicates
+ * the rate at which the media is currently playing back. Examples:
+ * - if playbackRate is set to 2, media will play twice as fast.
+ * - if playbackRate is set to 0.5, media will play half as fast.
+ *
+ * @method Html5#playbackRate
+ * @return {number}
+ * The value of `playbackRate` from the media element. A number indicating
+ * the current playback speed of the media, where 1 is normal speed.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-playbackrate}
+ */
+'playbackRate',
+/**
+ * Get the value of `defaultPlaybackRate` from the media element. `defaultPlaybackRate` indicates
+ * the rate at which the media is currently playing back. This value will not indicate the current
+ * `playbackRate` after playback has started, use {@link Html5#playbackRate} for that.
+ *
+ * Examples:
+ * - if defaultPlaybackRate is set to 2, media will play twice as fast.
+ * - if defaultPlaybackRate is set to 0.5, media will play half as fast.
+ *
+ * @method Html5.prototype.defaultPlaybackRate
+ * @return {number}
+ * The value of `defaultPlaybackRate` from the media element. A number indicating
+ * the current playback speed of the media, where 1 is normal speed.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-playbackrate}
+ */
+'defaultPlaybackRate',
+/**
+ * Get the value of 'disablePictureInPicture' from the video element.
+ *
+ * @method Html5#disablePictureInPicture
+ * @return {boolean} value
+ * - The value of `disablePictureInPicture` from the video element.
+ * - True indicates that the video can't be played in Picture-In-Picture mode
+ * - False indicates that the video can be played in Picture-In-Picture mode
+ *
+ * @see [Spec]{@link https://w3c.github.io/picture-in-picture/#disable-pip}
+ */
+'disablePictureInPicture',
+/**
+ * Get the value of `played` from the media element. `played` returns a `TimeRange`
+ * object representing points in the media timeline that have been played.
+ *
+ * @method Html5#played
+ * @return {TimeRange}
+ * The value of `played` from the media element. A `TimeRange` object indicating
+ * the ranges of time that have been played.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-played}
+ */
+'played',
+/**
+ * Get the value of `networkState` from the media element. `networkState` indicates
+ * the current network state. It returns an enumeration from the following list:
+ * - 0: NETWORK_EMPTY
+ * - 1: NETWORK_IDLE
+ * - 2: NETWORK_LOADING
+ * - 3: NETWORK_NO_SOURCE
+ *
+ * @method Html5#networkState
+ * @return {number}
+ * The value of `networkState` from the media element. This will be a number
+ * from the list in the description.
+ *
+ * @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-networkstate}
+ */
+'networkState',
+/**
+ * Get the value of `readyState` from the media element. `readyState` indicates
+ * the current state of the media element. It returns an enumeration from the
+ * following list:
+ * - 0: HAVE_NOTHING
+ * - 1: HAVE_METADATA
+ * - 2: HAVE_CURRENT_DATA
+ * - 3: HAVE_FUTURE_DATA
+ * - 4: HAVE_ENOUGH_DATA
+ *
+ * @method Html5#readyState
+ * @return {number}
+ * The value of `readyState` from the media element. This will be a number
+ * from the list in the description.
+ *
+ * @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#ready-states}
+ */
+'readyState',
+/**
+ * Get the value of `videoWidth` from the video element. `videoWidth` indicates
+ * the current width of the video in css pixels.
+ *
+ * @method Html5#videoWidth
+ * @return {number}
+ * The value of `videoWidth` from the video element. This will be a number
+ * in css pixels.
+ *
+ * @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-video-videowidth}
+ */
+'videoWidth',
+/**
+ * Get the value of `videoHeight` from the video element. `videoHeight` indicates
+ * the current height of the video in css pixels.
+ *
+ * @method Html5#videoHeight
+ * @return {number}
+ * The value of `videoHeight` from the video element. This will be a number
+ * in css pixels.
+ *
+ * @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-video-videoheight}
+ */
+'videoHeight',
+/**
+ * Get the value of `crossOrigin` from the media element. `crossOrigin` indicates
+ * whether the browser should send cookies along with the requests for the
+ * different assets/playlists.
+ *
+ * @method Html5#crossOrigin
+ * @return {string}
+ *         - anonymous indicates that the media should not send cookies.
+ *         - use-credentials indicates that the media should send cookies along with the requests.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/#attr-media-crossorigin}
+ */
+'crossOrigin'].forEach(function (prop) {
+ Html5.prototype[prop] = function () {
+ return this.el_[prop];
+ };
+}); // Wrap native properties with a setter in this format:
+// set + toTitleCase(name)
+// The list is as follows:
+// setVolume, setSrc, setPoster, setPreload, setPlaybackRate, setDefaultPlaybackRate,
+// setDisablePictureInPicture, setCrossOrigin
+
+[
+/**
+ * Set the value of `volume` on the media element. `volume` indicates the current
+ * audio level as a percentage in decimal form. This means that 1 is 100%, 0.5 is 50%, and
+ * so on.
+ *
+ * @method Html5#setVolume
+ * @param {number} percentAsDecimal
+ * The volume percent as a decimal. Valid range is from 0-1.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-a-volume}
+ */
+'volume',
+/**
+ * Set the value of `src` on the media element. `src` indicates the current
+ * {@link Tech~SourceObject} for the media.
+ *
+ * @method Html5#setSrc
+ * @param {Tech~SourceObject} src
+ * The source object to set as the current source.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-src}
+ */
+'src',
+/**
+ * Set the value of `poster` on the media element. `poster` is the url to
+ * an image file that can/will be shown when no media data is available.
+ *
+ * @method Html5#setPoster
+ * @param {string} poster
+ * The url to an image that should be used as the `poster` for the media
+ * element.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-poster}
+ */
+'poster',
+/**
+ * Set the value of `preload` on the media element. `preload` indicates
+ * what should download before the media is interacted with. It can have the following
+ * values:
+ * - none: nothing should be downloaded
+ * - metadata: poster and the first few frames of the media may be downloaded to get
+ * media dimensions and other metadata
+ * - auto: allow the media and metadata for the media to be downloaded before
+ * interaction
+ *
+ * @method Html5#setPreload
+ * @param {string} preload
+ * The value of `preload` to set on the media element. Must be 'none', 'metadata',
+ * or 'auto'.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-preload}
+ */
+'preload',
+/**
+ * Set the value of `playbackRate` on the media element. `playbackRate` indicates
+ * the rate at which the media should play back. Examples:
+ * - if playbackRate is set to 2, media will play twice as fast.
+ * - if playbackRate is set to 0.5, media will play half as fast.
+ *
+ * @method Html5#setPlaybackRate
+ * @param {number} playbackRate
+ *        The value of `playbackRate` to set on the media element. A number indicating
+ *        the playback speed of the media, where 1 is normal speed.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-playbackrate}
+ */
+'playbackRate',
+/**
+ * Set the value of `defaultPlaybackRate` on the media element. `defaultPlaybackRate` indicates
+ * the rate at which the media should play back upon initial startup. Changing this value
+ * after a video has started will do nothing. Instead you should use {@link Html5#setPlaybackRate}.
+ *
+ * Example Values:
+ * - if defaultPlaybackRate is set to 2, media will play twice as fast.
+ * - if defaultPlaybackRate is set to 0.5, media will play half as fast.
+ *
+ * @method Html5.prototype.setDefaultPlaybackRate
+ * @param {number} defaultPlaybackRate
+ *        The value of `defaultPlaybackRate` to set on the media element. A number indicating
+ *        the default playback speed of the media, where 1 is normal speed.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-defaultplaybackrate}
+ */
+'defaultPlaybackRate',
+/**
+ * Prevents the browser from suggesting a Picture-in-Picture context menu
+ * or to request Picture-in-Picture automatically in some cases.
+ *
+ * @method Html5#setDisablePictureInPicture
+ * @param {boolean} value
+ * The true value will disable Picture-in-Picture mode.
+ *
+ * @see [Spec]{@link https://w3c.github.io/picture-in-picture/#disable-pip}
+ */
+'disablePictureInPicture',
+/**
+ * Set the value of `crossOrigin` on the media element. `crossOrigin` indicates
+ * whether the browser should send cookies along with the requests for the
+ * different assets/playlists.
+ *
+ * @method Html5#setCrossOrigin
+ * @param {string} crossOrigin
+ *         - anonymous indicates that the media should not send cookies.
+ *         - use-credentials indicates that the media should send cookies along with the requests.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/#attr-media-crossorigin}
+ */
+'crossOrigin'].forEach(function (prop) {
+ Html5.prototype['set' + toTitleCase$1(prop)] = function (v) {
+ this.el_[prop] = v;
+ };
+}); // wrap native functions with a function
+// The list is as follows:
+// pause, load, play
+
+[
+/**
+ * A wrapper around the media element's `pause` function. This will call the `HTML5`
+ * media element's `pause` function.
+ *
+ * @method Html5#pause
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-pause}
+ */
+'pause',
+/**
+ * A wrapper around the media element's `load` function. This will call the `HTML5`
+ * media element's `load` function.
+ *
+ * @method Html5#load
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-load}
+ */
+'load',
+/**
+ * A wrapper around the media element's `play` function. This will call the `HTML5`
+ * media element's `play` function.
+ *
+ * @method Html5#play
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-play}
+ */
+'play'].forEach(function (prop) {
+ Html5.prototype[prop] = function () {
+ return this.el_[prop]();
+ };
+});
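+// Illustrative sketch (not part of the library): the wrappers generated above make an
+// Html5 tech instance proxy straight to its media element. Assuming `tech` is the active
+// Html5 tech of a player:
+//
+//   tech.setMuted(true);      // sets el_.muted and mirrors the `muted` attribute
+//   tech.muted();             // true (checks the property OR the attribute)
+//   tech.setPlaybackRate(2);  // el_.playbackRate = 2
+//   tech.playbackRate();      // 2
+//   tech.pause();             // forwards to el_.pause()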
+Tech.withSourceHandlers(Html5);
+/**
+ * Native source handler for Html5, simply passes the source to the media element.
+ *
+ * @property {Tech~SourceObject} source
+ * The source object
+ *
+ * @property {Html5} tech
+ * The instance of the HTML5 tech.
+ */
+
+Html5.nativeSourceHandler = {};
+/**
+ * Check if the media element can play the given mime type.
+ *
+ * @param {string} type
+ * The mimetype to check
+ *
+ * @return {string}
+ * 'probably', 'maybe', or '' (empty string)
+ */
+
+Html5.nativeSourceHandler.canPlayType = function (type) {
+ // IE without MediaPlayer throws an error (#519)
+ try {
+ return Html5.TEST_VID.canPlayType(type);
+ } catch (e) {
+ return '';
+ }
+};
+/**
+ * Check if the media element can handle a source natively.
+ *
+ * @param {Tech~SourceObject} source
+ * The source object
+ *
+ * @param {Object} [options]
+ * Options to be passed to the tech.
+ *
+ * @return {string}
+ * 'probably', 'maybe', or '' (empty string).
+ */
+
+
+Html5.nativeSourceHandler.canHandleSource = function (source, options) {
+ // If a type was provided we should rely on that
+ if (source.type) {
+ return Html5.nativeSourceHandler.canPlayType(source.type); // If no type, fall back to checking 'video/[EXTENSION]'
+ } else if (source.src) {
+ var ext = getFileExtension(source.src);
+ return Html5.nativeSourceHandler.canPlayType("video/" + ext);
+ }
+
+ return '';
+};
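+// Illustrative sketch: canHandleSource defers to canPlayType, falling back to a
+// 'video/<extension>' guess when only a src is given (the URL below is hypothetical).
+// Assuming a browser with native MP4 support:
+//
+//   Html5.nativeSourceHandler.canHandleSource({ type: 'video/mp4' });                   // 'maybe' or 'probably'
+//   Html5.nativeSourceHandler.canHandleSource({ src: 'https://example.com/clip.mp4' }); // checked as 'video/mp4'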
+/**
+ * Pass the source to the native media element.
+ *
+ * @param {Tech~SourceObject} source
+ * The source object
+ *
+ * @param {Html5} tech
+ * The instance of the Html5 tech
+ *
+ * @param {Object} [options]
+ * The options to pass to the source
+ */
+
+
+Html5.nativeSourceHandler.handleSource = function (source, tech, options) {
+ tech.setSrc(source.src);
+};
+/**
+ * A noop for the native dispose function, as cleanup is not needed.
+ */
+
+
+Html5.nativeSourceHandler.dispose = function () {}; // Register the native source handler
+
+
+Html5.registerSourceHandler(Html5.nativeSourceHandler);
+Tech.registerTech('Html5', Html5);
+
+// Tech events that are simply re-triggered
+// on the player when they happen
+
+var TECH_EVENTS_RETRIGGER = [
+/**
+ * Fired while the user agent is downloading media data.
+ *
+ * @event Player#progress
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Retrigger the `progress` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechProgress_
+ * @fires Player#progress
+ * @listens Tech#progress
+ */
+'progress',
+/**
+ * Fires when the loading of an audio/video is aborted.
+ *
+ * @event Player#abort
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Retrigger the `abort` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechAbort_
+ * @fires Player#abort
+ * @listens Tech#abort
+ */
+'abort',
+/**
+ * Fires when the browser is intentionally not getting media data.
+ *
+ * @event Player#suspend
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Retrigger the `suspend` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechSuspend_
+ * @fires Player#suspend
+ * @listens Tech#suspend
+ */
+'suspend',
+/**
+ * Fires when the current playlist is empty.
+ *
+ * @event Player#emptied
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Retrigger the `emptied` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechEmptied_
+ * @fires Player#emptied
+ * @listens Tech#emptied
+ */
+'emptied',
+/**
+ * Fires when the browser is trying to get media data, but data is not available.
+ *
+ * @event Player#stalled
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Retrigger the `stalled` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechStalled_
+ * @fires Player#stalled
+ * @listens Tech#stalled
+ */
+'stalled',
+/**
+ * Fires when the browser has loaded meta data for the audio/video.
+ *
+ * @event Player#loadedmetadata
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Retrigger the `loadedmetadata` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechLoadedmetadata_
+ * @fires Player#loadedmetadata
+ * @listens Tech#loadedmetadata
+ */
+'loadedmetadata',
+/**
+ * Fires when the browser has loaded the current frame of the audio/video.
+ *
+ * @event Player#loadeddata
+ * @type {event}
+ */
+
+/**
+ * Retrigger the `loadeddata` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechLoaddeddata_
+ * @fires Player#loadeddata
+ * @listens Tech#loadeddata
+ */
+'loadeddata',
+/**
+ * Fires when the current playback position has changed.
+ *
+ * @event Player#timeupdate
+ * @type {event}
+ */
+
+/**
+ * Retrigger the `timeupdate` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechTimeUpdate_
+ * @fires Player#timeupdate
+ * @listens Tech#timeupdate
+ */
+'timeupdate',
+/**
+ * Fires when the video's intrinsic dimensions change
+ *
+ * @event Player#resize
+ * @type {event}
+ */
+
+/**
+ * Retrigger the `resize` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechResize_
+ * @fires Player#resize
+ * @listens Tech#resize
+ */
+'resize',
+/**
+ * Fires when the volume has been changed
+ *
+ * @event Player#volumechange
+ * @type {event}
+ */
+
+/**
+ * Retrigger the `volumechange` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechVolumechange_
+ * @fires Player#volumechange
+ * @listens Tech#volumechange
+ */
+'volumechange',
+/**
+ * Fires when the text track has been changed
+ *
+ * @event Player#texttrackchange
+ * @type {event}
+ */
+
+/**
+ * Retrigger the `texttrackchange` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechTexttrackchange_
+ * @fires Player#texttrackchange
+ * @listens Tech#texttrackchange
+ */
+'texttrackchange']; // events to queue when playback rate is zero
+// this is a hash for the sole purpose of mapping non-camel-cased event names
+// to camel-cased function names
+
+var TECH_EVENTS_QUEUE = {
+ canplay: 'CanPlay',
+ canplaythrough: 'CanPlayThrough',
+ playing: 'Playing',
+ seeked: 'Seeked'
+};
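+// Illustrative usage (assuming `player` is a Player instance created elsewhere): every
+// event in TECH_EVENTS_RETRIGGER fired by the tech is re-emitted on the player, so
+// consumers only need to listen on the player:
+//
+//   player.on('timeupdate', function () {
+//     console.log('currentTime', player.currentTime());
+//   });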
+var BREAKPOINT_ORDER = ['tiny', 'xsmall', 'small', 'medium', 'large', 'xlarge', 'huge'];
+var BREAKPOINT_CLASSES = {}; // grep: vjs-layout-tiny
+// grep: vjs-layout-x-small
+// grep: vjs-layout-small
+// grep: vjs-layout-medium
+// grep: vjs-layout-large
+// grep: vjs-layout-x-large
+// grep: vjs-layout-huge
+
+BREAKPOINT_ORDER.forEach(function (k) {
+ var v = k.charAt(0) === 'x' ? "x-" + k.substring(1) : k;
+ BREAKPOINT_CLASSES[k] = "vjs-layout-" + v;
+});
+var DEFAULT_BREAKPOINTS = {
+ tiny: 210,
+ xsmall: 320,
+ small: 425,
+ medium: 768,
+ large: 1440,
+ xlarge: 2560,
+ huge: Infinity
+};
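+// Illustrative sketch: these defaults can be overridden per player with the `breakpoints`
+// option or method once responsive mode is enabled (assuming `player` exists; the widths
+// below are examples only):
+//
+//   player.responsive(true);
+//   player.breakpoints({ tiny: 300, xsmall: 400, small: 500, medium: 600, large: 700, xlarge: 800, huge: Infinity });
+//   // the matching vjs-layout-* class is applied as the player crosses each width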
+/**
+ * An instance of the `Player` class is created when any of the Video.js setup methods
+ * are used to initialize a video.
+ *
+ * After an instance has been created it can be accessed globally in two ways:
+ * 1. By calling `videojs('example_video_1');`
+ * 2. By using it directly via `videojs.players.example_video_1;`
+ *
+ * @extends Component
+ */
+
+var Player = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](Player, _Component);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Element} tag
+ * The original video DOM element used for configuring options.
+ *
+ * @param {Object} [options]
+ * Object of option names and values.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * Ready callback function.
+ */
+ function Player(tag, options, ready) {
+ var _this;
+
+ // Make sure tag ID exists
+ tag.id = tag.id || options.id || "vjs_video_" + newGUID(); // Set Options
+ // The options argument overrides options set in the video tag
+ // which overrides globally set options.
+ // This latter part coincides with the load order
+ // (tag must exist before Player)
+
+ options = assign(Player.getTagSettings(tag), options); // Delay the initialization of children because we need to set up
+ // player properties first, and can't use `this` before `super()`
+
+ options.initChildren = false; // Same with creating the element
+
+ options.createEl = false; // don't auto mixin the evented mixin
+
+ options.evented = false; // we don't want the player to report touch activity on itself
+ // see enableTouchActivity in Component
+
+ options.reportTouchActivity = false; // If language is not set, get the closest lang attribute
+
+ if (!options.language) {
+ if (typeof tag.closest === 'function') {
+ var closest = tag.closest('[lang]');
+
+ if (closest && closest.getAttribute) {
+ options.language = closest.getAttribute('lang');
+ }
+ } else {
+ var element = tag;
+
+ while (element && element.nodeType === 1) {
+ if (getAttributes(element).hasOwnProperty('lang')) {
+ options.language = element.getAttribute('lang');
+ break;
+ }
+
+ element = element.parentNode;
+ }
+ }
+ } // Run base component initializing with new options
+
+
+ _this = _Component.call(this, null, options, ready) || this; // Create bound methods for document listeners.
+
+ _this.boundDocumentFullscreenChange_ = function (e) {
+ return _this.documentFullscreenChange_(e);
+ };
+
+ _this.boundFullWindowOnEscKey_ = function (e) {
+ return _this.fullWindowOnEscKey(e);
+ };
+
+ _this.boundUpdateStyleEl_ = function (e) {
+ return _this.updateStyleEl_(e);
+ };
+
+ _this.boundApplyInitTime_ = function (e) {
+ return _this.applyInitTime_(e);
+ };
+
+ _this.boundUpdateCurrentBreakpoint_ = function (e) {
+ return _this.updateCurrentBreakpoint_(e);
+ };
+
+ _this.boundHandleTechClick_ = function (e) {
+ return _this.handleTechClick_(e);
+ };
+
+ _this.boundHandleTechDoubleClick_ = function (e) {
+ return _this.handleTechDoubleClick_(e);
+ };
+
+ _this.boundHandleTechTouchStart_ = function (e) {
+ return _this.handleTechTouchStart_(e);
+ };
+
+ _this.boundHandleTechTouchMove_ = function (e) {
+ return _this.handleTechTouchMove_(e);
+ };
+
+ _this.boundHandleTechTouchEnd_ = function (e) {
+ return _this.handleTechTouchEnd_(e);
+ };
+
+ _this.boundHandleTechTap_ = function (e) {
+ return _this.handleTechTap_(e);
+ }; // default isFullscreen_ to false
+
+
+ _this.isFullscreen_ = false; // create logger
+
+ _this.log = createLogger(_this.id_); // Hold our own reference to fullscreen api so it can be mocked in tests
+
+ _this.fsApi_ = FullscreenApi; // Tracks when a tech changes the poster
+
+ _this.isPosterFromTech_ = false; // Holds callback info that gets queued when playback rate is zero
+ // and a seek is happening
+
+ _this.queuedCallbacks_ = []; // Turn off API access because we're loading a new tech that might load asynchronously
+
+ _this.isReady_ = false; // Init state hasStarted_
+
+ _this.hasStarted_ = false; // Init state userActive_
+
+ _this.userActive_ = false; // Init debugEnabled_
+
+ _this.debugEnabled_ = false; // Init state audioOnlyMode_
+
+ _this.audioOnlyMode_ = false; // Init state audioPosterMode_
+
+ _this.audioPosterMode_ = false; // Init state audioOnlyCache_
+
+ _this.audioOnlyCache_ = {
+ playerHeight: null,
+ hiddenChildren: []
+ }; // if the global option object was accidentally blown away by
+ // someone, bail early with an informative error
+
+ if (!_this.options_ || !_this.options_.techOrder || !_this.options_.techOrder.length) {
+ throw new Error('No techOrder specified. Did you overwrite ' + 'videojs.options instead of just changing the ' + 'properties you want to override?');
+ } // Store the original tag used to set options
+
+
+ _this.tag = tag; // Store the tag attributes used to restore html5 element
+
+ _this.tagAttributes = tag && getAttributes(tag); // Update current language
+
+ _this.language(_this.options_.language); // Update Supported Languages
+
+
+ if (options.languages) {
+ // Normalise player option languages to lowercase
+ var languagesToLower = {};
+ Object.getOwnPropertyNames(options.languages).forEach(function (name) {
+ languagesToLower[name.toLowerCase()] = options.languages[name];
+ });
+ _this.languages_ = languagesToLower;
+ } else {
+ _this.languages_ = Player.prototype.options_.languages;
+ }
+
+ _this.resetCache_(); // Set poster
+
+
+ _this.poster_ = options.poster || ''; // Set controls
+
+ _this.controls_ = !!options.controls; // Original tag settings stored in options
+ // now remove immediately so native controls don't flash.
+ // May be turned back on by HTML5 tech if nativeControlsForTouch is true
+
+ tag.controls = false;
+ tag.removeAttribute('controls');
+ _this.changingSrc_ = false;
+ _this.playCallbacks_ = [];
+ _this.playTerminatedQueue_ = []; // the attribute overrides the option
+
+ if (tag.hasAttribute('autoplay')) {
+ _this.autoplay(true);
+ } else {
+ // otherwise use the setter to validate and
+ // set the correct value.
+ _this.autoplay(_this.options_.autoplay);
+ } // check plugins
+
+
+ if (options.plugins) {
+ Object.keys(options.plugins).forEach(function (name) {
+ if (typeof _this[name] !== 'function') {
+ throw new Error("plugin \"" + name + "\" does not exist");
+ }
+ });
+ }
+ /*
+ * Store the internal state of scrubbing
+ *
+ * @private
+ * @return {Boolean} True if the user is scrubbing
+ */
+
+
+ _this.scrubbing_ = false;
+ _this.el_ = _this.createEl(); // Make this an evented object and use `el_` as its event bus.
+
+ evented(_assertThisInitialized__default['default'](_this), {
+ eventBusKey: 'el_'
+ }); // listen to document and player fullscreenchange handlers so we receive those events
+ // before a user can receive them so we can update isFullscreen appropriately.
+ // make sure that we listen to fullscreenchange events before everything else to make sure that
+ // our isFullscreen method is updated properly for internal components as well as external.
+
+ if (_this.fsApi_.requestFullscreen) {
+ on(document__default['default'], _this.fsApi_.fullscreenchange, _this.boundDocumentFullscreenChange_);
+
+ _this.on(_this.fsApi_.fullscreenchange, _this.boundDocumentFullscreenChange_);
+ }
+
+ if (_this.fluid_) {
+ _this.on(['playerreset', 'resize'], _this.boundUpdateStyleEl_);
+ } // We also want to pass the original player options to each component and plugin
+ // as well so they don't need to reach back into the player for options later.
+ // We also need to do another copy of this.options_ so we don't end up with
+ // an infinite loop.
+
+
+ var playerOptionsCopy = mergeOptions$3(_this.options_); // Load plugins
+
+ if (options.plugins) {
+ Object.keys(options.plugins).forEach(function (name) {
+ _this[name](options.plugins[name]);
+ });
+ } // Enable debug mode to fire debugon event for all plugins.
+
+
+ if (options.debug) {
+ _this.debug(true);
+ }
+
+ _this.options_.playerOptions = playerOptionsCopy;
+ _this.middleware_ = [];
+
+ _this.playbackRates(options.playbackRates);
+
+ _this.initChildren(); // Set isAudio based on whether or not an audio tag was used
+
+
+ _this.isAudio(tag.nodeName.toLowerCase() === 'audio'); // Update controls className. Can't do this when the controls are initially
+ // set because the element doesn't exist yet.
+
+
+ if (_this.controls()) {
+ _this.addClass('vjs-controls-enabled');
+ } else {
+ _this.addClass('vjs-controls-disabled');
+ } // Set ARIA label and region role depending on player type
+
+
+ _this.el_.setAttribute('role', 'region');
+
+ if (_this.isAudio()) {
+ _this.el_.setAttribute('aria-label', _this.localize('Audio Player'));
+ } else {
+ _this.el_.setAttribute('aria-label', _this.localize('Video Player'));
+ }
+
+ if (_this.isAudio()) {
+ _this.addClass('vjs-audio');
+ }
+
+ if (_this.flexNotSupported_()) {
+ _this.addClass('vjs-no-flex');
+ } // TODO: Make this smarter. Toggle user state between touching/mousing
+ // using events, since devices can have both touch and mouse events.
+ // TODO: Make this check be performed again when the window switches between monitors
+ // (See https://github.com/videojs/video.js/issues/5683)
+
+
+ if (TOUCH_ENABLED) {
+ _this.addClass('vjs-touch-enabled');
+ } // iOS Safari has broken hover handling
+
+
+ if (!IS_IOS) {
+ _this.addClass('vjs-workinghover');
+ } // Make player easily findable by ID
+
+
+ Player.players[_this.id_] = _assertThisInitialized__default['default'](_this); // Add a major version class to aid css in plugins
+
+ var majorVersion = version$5.split('.')[0];
+
+ _this.addClass("vjs-v" + majorVersion); // When the player is first initialized, trigger activity so components
+ // like the control bar show themselves if needed
+
+
+ _this.userActive(true);
+
+ _this.reportUserActivity();
+
+ _this.one('play', function (e) {
+ return _this.listenForUserActivity_(e);
+ });
+
+ _this.on('stageclick', function (e) {
+ return _this.handleStageClick_(e);
+ });
+
+ _this.on('keydown', function (e) {
+ return _this.handleKeyDown(e);
+ });
+
+ _this.on('languagechange', function (e) {
+ return _this.handleLanguagechange(e);
+ });
+
+ _this.breakpoints(_this.options_.breakpoints);
+
+ _this.responsive(_this.options_.responsive); // Calling both the audio mode methods after the player is fully
+ // setup to be able to listen to the events triggered by them
+
+
+ _this.on('ready', function () {
+ // Calling the audioPosterMode method first so that
+ // the audioOnlyMode can take precedence when both options are set to true
+ _this.audioPosterMode(_this.options_.audioPosterMode);
+
+ _this.audioOnlyMode(_this.options_.audioOnlyMode);
+ });
+
+ return _this;
+ }
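+  // Illustrative usage of this constructor via the public setup function (assuming a
+  // <video id="my-player" class="video-js"> element exists; the id and options are
+  // examples only):
+  //
+  //   var player = videojs('my-player', { controls: true, preload: 'auto' }, function () {
+  //     this.play();
+  //   });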
+ /**
+ * Destroys the video player and does any necessary cleanup.
+ *
+ * This is especially helpful if you are dynamically adding and removing videos
+ * to/from the DOM.
+ *
+ * @fires Player#dispose
+ */
+
+
+ var _proto = Player.prototype;
+
+ _proto.dispose = function dispose() {
+ var _this2 = this;
+
+ /**
+ * Called when the player is being disposed of.
+ *
+ * @event Player#dispose
+ * @type {EventTarget~Event}
+ */
+ this.trigger('dispose'); // prevent dispose from being called twice
+
+ this.off('dispose'); // Make sure all player-specific document listeners are unbound. This is
+
+ off(document__default['default'], this.fsApi_.fullscreenchange, this.boundDocumentFullscreenChange_);
+ off(document__default['default'], 'keydown', this.boundFullWindowOnEscKey_);
+
+ if (this.styleEl_ && this.styleEl_.parentNode) {
+ this.styleEl_.parentNode.removeChild(this.styleEl_);
+ this.styleEl_ = null;
+ } // Kill reference to this player
+
+
+ Player.players[this.id_] = null;
+
+ if (this.tag && this.tag.player) {
+ this.tag.player = null;
+ }
+
+ if (this.el_ && this.el_.player) {
+ this.el_.player = null;
+ }
+
+ if (this.tech_) {
+ this.tech_.dispose();
+ this.isPosterFromTech_ = false;
+ this.poster_ = '';
+ }
+
+ if (this.playerElIngest_) {
+ this.playerElIngest_ = null;
+ }
+
+ if (this.tag) {
+ this.tag = null;
+ }
+
+ clearCacheForPlayer(this); // remove all event handlers for track lists
+ // all tracks and track listeners are removed on
+ // tech dispose
+
+ ALL.names.forEach(function (name) {
+ var props = ALL[name];
+
+ var list = _this2[props.getterName](); // if it is not a native list
+ // we have to manually remove event listeners
+
+
+ if (list && list.off) {
+ list.off();
+ }
+    }); // the actual .el_ is removed here, or replaced if restoreEl is set
+
+ _Component.prototype.dispose.call(this, {
+ restoreEl: this.options_.restoreEl
+ });
+ }
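+  // Illustrative usage (assuming `player` is an existing instance that was added to the
+  // page dynamically):
+  //
+  //   player.dispose(); // unbinds document listeners, disposes the tech, removes the element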
+ /**
+ * Create the `Player`'s DOM element.
+ *
+ * @return {Element}
+ * The DOM element that gets created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ var tag = this.tag;
+ var el;
+ var playerElIngest = this.playerElIngest_ = tag.parentNode && tag.parentNode.hasAttribute && tag.parentNode.hasAttribute('data-vjs-player');
+ var divEmbed = this.tag.tagName.toLowerCase() === 'video-js';
+
+ if (playerElIngest) {
+ el = this.el_ = tag.parentNode;
+ } else if (!divEmbed) {
+ el = this.el_ = _Component.prototype.createEl.call(this, 'div');
+ } // Copy over all the attributes from the tag, including ID and class
+ // ID will now reference player box, not the video tag
+
+
+ var attrs = getAttributes(tag);
+
+ if (divEmbed) {
+ el = this.el_ = tag;
+ tag = this.tag = document__default['default'].createElement('video');
+
+ while (el.children.length) {
+ tag.appendChild(el.firstChild);
+ }
+
+ if (!hasClass(el, 'video-js')) {
+ addClass(el, 'video-js');
+ }
+
+ el.appendChild(tag);
+ playerElIngest = this.playerElIngest_ = el; // move properties over from our custom `video-js` element
+ // to our new `video` element. This will move things like
+ // `src` or `controls` that were set via js before the player
+ // was initialized.
+
+ Object.keys(el).forEach(function (k) {
+ try {
+ tag[k] = el[k];
+        } catch (e) {// we got a property like outerHTML which we can't actually copy, ignore it
+ }
+ });
+ } // set tabindex to -1 to remove the video element from the focus order
+
+
+ tag.setAttribute('tabindex', '-1');
+ attrs.tabindex = '-1'; // Workaround for #4583 (JAWS+IE doesn't announce BPB or play button), and
+ // for the same issue with Chrome (on Windows) with JAWS.
+ // See https://github.com/FreedomScientific/VFO-standards-support/issues/78
+ // Note that we can't detect if JAWS is being used, but this ARIA attribute
+ // doesn't change behavior of IE11 or Chrome if JAWS is not being used
+
+ if (IE_VERSION || IS_CHROME && IS_WINDOWS) {
+ tag.setAttribute('role', 'application');
+ attrs.role = 'application';
+ } // Remove width/height attrs from tag so CSS can make it 100% width/height
+
+
+ tag.removeAttribute('width');
+ tag.removeAttribute('height');
+
+ if ('width' in attrs) {
+ delete attrs.width;
+ }
+
+ if ('height' in attrs) {
+ delete attrs.height;
+ }
+
+ Object.getOwnPropertyNames(attrs).forEach(function (attr) {
+ // don't copy over the class attribute to the player element when we're in a div embed
+ // the class is already set up properly in the divEmbed case
+ // and we want to make sure that the `video-js` class doesn't get lost
+ if (!(divEmbed && attr === 'class')) {
+ el.setAttribute(attr, attrs[attr]);
+ }
+
+ if (divEmbed) {
+ tag.setAttribute(attr, attrs[attr]);
+ }
+ }); // Update tag id/class for use as HTML5 playback tech
+ // Might think we should do this after embedding in container so .vjs-tech class
+ // doesn't flash 100% width/height, but class only applies with .video-js parent
+
+ tag.playerId = tag.id;
+ tag.id += '_html5_api';
+ tag.className = 'vjs-tech'; // Make player findable on elements
+
+ tag.player = el.player = this; // Default state of video is paused
+
+ this.addClass('vjs-paused'); // Add a style element in the player that we'll use to set the width/height
+ // of the player in a way that's still overrideable by CSS, just like the
+ // video element
+
+ if (window__default['default'].VIDEOJS_NO_DYNAMIC_STYLE !== true) {
+ this.styleEl_ = createStyleElement('vjs-styles-dimensions');
+ var defaultsStyleEl = $('.vjs-styles-defaults');
+ var head = $('head');
+ head.insertBefore(this.styleEl_, defaultsStyleEl ? defaultsStyleEl.nextSibling : head.firstChild);
+ }
+
+ this.fill_ = false;
+ this.fluid_ = false; // Pass in the width/height/aspectRatio options which will update the style el
+
+ this.width(this.options_.width);
+ this.height(this.options_.height);
+ this.fill(this.options_.fill);
+ this.fluid(this.options_.fluid);
+ this.aspectRatio(this.options_.aspectRatio); // support both crossOrigin and crossorigin to reduce confusion and issues around the name
+
+ this.crossOrigin(this.options_.crossOrigin || this.options_.crossorigin); // Hide any links within the video/audio tag,
+ // because IE doesn't hide them completely from screen readers.
+
+ var links = tag.getElementsByTagName('a');
+
+ for (var i = 0; i < links.length; i++) {
+ var linkEl = links.item(i);
+ addClass(linkEl, 'vjs-hidden');
+ linkEl.setAttribute('hidden', 'hidden');
+ } // insertElFirst seems to cause the networkState to flicker from 3 to 2, so
+ // keep track of the original for later so we can know if the source originally failed
+
+
+ tag.initNetworkState_ = tag.networkState; // Wrap video tag in div (el/box) container
+
+ if (tag.parentNode && !playerElIngest) {
+ tag.parentNode.insertBefore(el, tag);
+ } // insert the tag as the first child of the player element
+ // then manually add it to the children array so that this.addChild
+ // will work properly for other components
+ //
+ // Breaks iPhone, fixed in HTML5 setup.
+
+
+ prependTo(tag, el);
+    this.children_.unshift(tag); // Set lang attr on player to ensure CSS :lang() is consistent with player
+ // if it's been set to something different to the doc
+
+ this.el_.setAttribute('lang', this.language_);
+ this.el_.setAttribute('translate', 'no');
+ this.el_ = el;
+ return el;
+ }
+ /**
+ * Get or set the `Player`'s crossOrigin option. For the HTML5 player, this
+ * sets the `crossOrigin` property on the `` tag to control the CORS
+   * sets the `crossOrigin` property on the `<video>` tag to control the CORS
+ *
+ * @see [Video Element Attributes]{@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video#attr-crossorigin}
+ *
+ * @param {string} [value]
+ * The value to set the `Player`'s crossOrigin to. If an argument is
+ * given, must be one of `anonymous` or `use-credentials`.
+ *
+ * @return {string|undefined}
+ * - The current crossOrigin value of the `Player` when getting.
+ * - undefined when setting
+ */
+ ;
+
+ _proto.crossOrigin = function crossOrigin(value) {
+ if (!value) {
+ return this.techGet_('crossOrigin');
+ }
+
+ if (value !== 'anonymous' && value !== 'use-credentials') {
+ log$1.warn("crossOrigin must be \"anonymous\" or \"use-credentials\", given \"" + value + "\"");
+ return;
+ }
+
+ this.techCall_('setCrossOrigin', value);
+ return;
+ }
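+  // Illustrative usage (assuming `player` is using the Html5 tech):
+  //
+  //   player.crossOrigin('use-credentials'); // send cookies with media/asset requests
+  //   player.crossOrigin();                  // -> 'use-credentials'
+  //   player.crossOrigin('foo');             // logs a warning and is ignored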
+ /**
+ * A getter/setter for the `Player`'s width. Returns the player's configured value.
+ * To get the current width use `currentWidth()`.
+ *
+ * @param {number} [value]
+ * The value to set the `Player`'s width to.
+ *
+ * @return {number}
+ * The current width of the `Player` when getting.
+ */
+ ;
+
+ _proto.width = function width(value) {
+ return this.dimension('width', value);
+ }
+ /**
+ * A getter/setter for the `Player`'s height. Returns the player's configured value.
+   * To get the current height use `currentHeight()`.
+   *
+   * @param {number} [value]
+   *        The value to set the `Player`'s height to.
+ *
+ * @return {number}
+ * The current height of the `Player` when getting.
+ */
+ ;
+
+ _proto.height = function height(value) {
+ return this.dimension('height', value);
+ }
+ /**
+ * A getter/setter for the `Player`'s width & height.
+ *
+ * @param {string} dimension
+ * This string can be:
+ * - 'width'
+ * - 'height'
+ *
+ * @param {number} [value]
+ * Value for dimension specified in the first argument.
+ *
+ * @return {number}
+ * The dimension arguments value when getting (width/height).
+ */
+ ;
+
+ _proto.dimension = function dimension(_dimension, value) {
+ var privDimension = _dimension + '_';
+
+ if (value === undefined) {
+ return this[privDimension] || 0;
+ }
+
+ if (value === '' || value === 'auto') {
+ // If an empty string is given, reset the dimension to be automatic
+ this[privDimension] = undefined;
+ this.updateStyleEl_();
+ return;
+ }
+
+ var parsedVal = parseFloat(value);
+
+ if (isNaN(parsedVal)) {
+      log$1.error("Improper value \"" + value + "\" supplied for " + _dimension);
+ return;
+ }
+
+ this[privDimension] = parsedVal;
+ this.updateStyleEl_();
+ }
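+  // Illustrative usage (assuming `player` exists):
+  //
+  //   player.width(640);          // stores the configured width and refreshes the style element
+  //   player.width();             // -> 640
+  //   player.height('auto');      // clears the configured height (back to automatic)
+  //   player.dimension('width');  // same getter path as player.width()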
+ /**
+ * A getter/setter/toggler for the vjs-fluid `className` on the `Player`.
+ *
+ * Turning this on will turn off fill mode.
+ *
+ * @param {boolean} [bool]
+ * - A value of true adds the class.
+ * - A value of false removes the class.
+ * - No value will be a getter.
+ *
+ * @return {boolean|undefined}
+ * - The value of fluid when getting.
+ * - `undefined` when setting.
+ */
+ ;
+
+ _proto.fluid = function fluid(bool) {
+ var _this3 = this;
+
+ if (bool === undefined) {
+ return !!this.fluid_;
+ }
+
+ this.fluid_ = !!bool;
+
+ if (isEvented(this)) {
+ this.off(['playerreset', 'resize'], this.boundUpdateStyleEl_);
+ }
+
+ if (bool) {
+ this.addClass('vjs-fluid');
+ this.fill(false);
+ addEventedCallback(this, function () {
+ _this3.on(['playerreset', 'resize'], _this3.boundUpdateStyleEl_);
+ });
+ } else {
+ this.removeClass('vjs-fluid');
+ }
+
+ this.updateStyleEl_();
+ }
+ /**
+ * A getter/setter/toggler for the vjs-fill `className` on the `Player`.
+ *
+ * Turning this on will turn off fluid mode.
+ *
+ * @param {boolean} [bool]
+ * - A value of true adds the class.
+ * - A value of false removes the class.
+ * - No value will be a getter.
+ *
+ * @return {boolean|undefined}
+   *         - The value of fill when getting.
+ * - `undefined` when setting.
+ */
+ ;
+
+ _proto.fill = function fill(bool) {
+ if (bool === undefined) {
+ return !!this.fill_;
+ }
+
+ this.fill_ = !!bool;
+
+ if (bool) {
+ this.addClass('vjs-fill');
+ this.fluid(false);
+ } else {
+ this.removeClass('vjs-fill');
+ }
+ }
+ /**
+ * Get/Set the aspect ratio
+ *
+ * @param {string} [ratio]
+ * Aspect ratio for player
+ *
+ * @return {string|undefined}
+ * returns the current aspect ratio when getting
+ */
+
+ /**
+ * A getter/setter for the `Player`'s aspect ratio.
+ *
+ * @param {string} [ratio]
+ * The value to set the `Player`'s aspect ratio to.
+ *
+ * @return {string|undefined}
+ * - The current aspect ratio of the `Player` when getting.
+ * - undefined when setting
+ */
+ ;
+
+ _proto.aspectRatio = function aspectRatio(ratio) {
+ if (ratio === undefined) {
+ return this.aspectRatio_;
+ } // Check for width:height format
+
+
+ if (!/^\d+\:\d+$/.test(ratio)) {
+ throw new Error('Improper value supplied for aspect ratio. The format should be width:height, for example 16:9.');
+ }
+
+ this.aspectRatio_ = ratio; // We're assuming if you set an aspect ratio you want fluid mode,
+ // because in fixed mode you could calculate width and height yourself.
+
+ this.fluid(true);
+ this.updateStyleEl_();
+ }
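+  // Illustrative usage: fluid, fill and aspectRatio interact (assuming `player` exists):
+  //
+  //   player.aspectRatio('16:9'); // also switches the player into fluid mode
+  //   player.fluid();             // -> true
+  //   player.fill(true);          // turns fluid back off and adds vjs-fill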
+ /**
+ * Update styles of the `Player` element (height, width and aspect ratio).
+ *
+ * @private
+ * @listens Tech#loadedmetadata
+ */
+ ;
+
+ _proto.updateStyleEl_ = function updateStyleEl_() {
+ if (window__default['default'].VIDEOJS_NO_DYNAMIC_STYLE === true) {
+ var _width = typeof this.width_ === 'number' ? this.width_ : this.options_.width;
+
+ var _height = typeof this.height_ === 'number' ? this.height_ : this.options_.height;
+
+ var techEl = this.tech_ && this.tech_.el();
+
+ if (techEl) {
+ if (_width >= 0) {
+ techEl.width = _width;
+ }
+
+ if (_height >= 0) {
+ techEl.height = _height;
+ }
+ }
+
+ return;
+ }
+
+ var width;
+ var height;
+ var aspectRatio;
+ var idClass; // The aspect ratio is either used directly or to calculate width and height.
+
+ if (this.aspectRatio_ !== undefined && this.aspectRatio_ !== 'auto') {
+ // Use any aspectRatio that's been specifically set
+ aspectRatio = this.aspectRatio_;
+ } else if (this.videoWidth() > 0) {
+ // Otherwise try to get the aspect ratio from the video metadata
+ aspectRatio = this.videoWidth() + ':' + this.videoHeight();
+ } else {
+ // Or use a default. The video element's is 2:1, but 16:9 is more common.
+ aspectRatio = '16:9';
+ } // Get the ratio as a decimal we can use to calculate dimensions
+
+
+ var ratioParts = aspectRatio.split(':');
+ var ratioMultiplier = ratioParts[1] / ratioParts[0];
+
+ if (this.width_ !== undefined) {
+ // Use any width that's been specifically set
+ width = this.width_;
+ } else if (this.height_ !== undefined) {
+      // Or calculate the width from the aspect ratio if a height has been set
+ width = this.height_ / ratioMultiplier;
+ } else {
+ // Or use the video's metadata, or use the video el's default of 300
+ width = this.videoWidth() || 300;
+ }
+
+ if (this.height_ !== undefined) {
+ // Use any height that's been specifically set
+ height = this.height_;
+ } else {
+ // Otherwise calculate the height from the ratio and the width
+ height = width * ratioMultiplier;
+ } // Ensure the CSS class is valid by starting with an alpha character
+
+
+ if (/^[^a-zA-Z]/.test(this.id())) {
+ idClass = 'dimensions-' + this.id();
+ } else {
+ idClass = this.id() + '-dimensions';
+ } // Ensure the right class is still on the player for the style element
+
+
+ this.addClass(idClass);
+ setTextContent(this.styleEl_, "\n ." + idClass + " {\n width: " + width + "px;\n height: " + height + "px;\n }\n\n ." + idClass + ".vjs-fluid:not(.vjs-audio-only-mode) {\n padding-top: " + ratioMultiplier * 100 + "%;\n }\n ");
+ }
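+  // Illustrative note: setting the flag checked above before any player is created skips
+  // the generated vjs-styles-dimensions <style> element; dimensions are then written
+  // directly onto the tech's element:
+  //
+  //   window.VIDEOJS_NO_DYNAMIC_STYLE = true;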
+ /**
+ * Load/Create an instance of playback {@link Tech} including element
+ * and API methods. Then append the `Tech` element in `Player` as a child.
+ *
+ * @param {string} techName
+ * name of the playback technology
+ *
+ * @param {string} source
+ * video source
+ *
+ * @private
+ */
+ ;
+
+ _proto.loadTech_ = function loadTech_(techName, source) {
+ var _this4 = this;
+
+ // Pause and remove current playback technology
+ if (this.tech_) {
+ this.unloadTech_();
+ }
+
+ var titleTechName = toTitleCase$1(techName);
+ var camelTechName = techName.charAt(0).toLowerCase() + techName.slice(1); // get rid of the HTML5 video tag as soon as we are using another tech
+
+ if (titleTechName !== 'Html5' && this.tag) {
+ Tech.getTech('Html5').disposeMediaElement(this.tag);
+ this.tag.player = null;
+ this.tag = null;
+ }
+
+ this.techName_ = titleTechName; // Turn off API access because we're loading a new tech that might load asynchronously
+
+ this.isReady_ = false;
+ var autoplay = this.autoplay(); // if autoplay is a string (or `true` with normalizeAutoplay: true) we pass false to the tech
+ // because the player is going to handle autoplay on `loadstart`
+
+ if (typeof this.autoplay() === 'string' || this.autoplay() === true && this.options_.normalizeAutoplay) {
+ autoplay = false;
+ } // Grab tech-specific options from player options and add source and parent element to use.
+
+
+ var techOptions = {
+ source: source,
+ autoplay: autoplay,
+ 'nativeControlsForTouch': this.options_.nativeControlsForTouch,
+ 'playerId': this.id(),
+ 'techId': this.id() + "_" + camelTechName + "_api",
+ 'playsinline': this.options_.playsinline,
+ 'preload': this.options_.preload,
+ 'loop': this.options_.loop,
+ 'disablePictureInPicture': this.options_.disablePictureInPicture,
+ 'muted': this.options_.muted,
+ 'poster': this.poster(),
+ 'language': this.language(),
+ 'playerElIngest': this.playerElIngest_ || false,
+ 'vtt.js': this.options_['vtt.js'],
+ 'canOverridePoster': !!this.options_.techCanOverridePoster,
+ 'enableSourceset': this.options_.enableSourceset,
+ 'Promise': this.options_.Promise
+ };
+ ALL.names.forEach(function (name) {
+ var props = ALL[name];
+ techOptions[props.getterName] = _this4[props.privateName];
+ });
+ assign(techOptions, this.options_[titleTechName]);
+ assign(techOptions, this.options_[camelTechName]);
+ assign(techOptions, this.options_[techName.toLowerCase()]);
+
+ if (this.tag) {
+ techOptions.tag = this.tag;
+ }
+
+ if (source && source.src === this.cache_.src && this.cache_.currentTime > 0) {
+ techOptions.startTime = this.cache_.currentTime;
+ } // Initialize tech instance
+
+
+ var TechClass = Tech.getTech(techName);
+
+ if (!TechClass) {
+ throw new Error("No Tech named '" + titleTechName + "' exists! '" + titleTechName + "' should be registered using videojs.registerTech()'");
+ }
+
+ this.tech_ = new TechClass(techOptions); // player.triggerReady is always async, so don't need this to be async
+
+ this.tech_.ready(bind(this, this.handleTechReady_), true);
+ textTrackConverter.jsonToTextTracks(this.textTracksJson_ || [], this.tech_); // Listen to all HTML5-defined events and trigger them on the player
+
+ TECH_EVENTS_RETRIGGER.forEach(function (event) {
+ _this4.on(_this4.tech_, event, function (e) {
+ return _this4["handleTech" + toTitleCase$1(event) + "_"](e);
+ });
+ });
+ Object.keys(TECH_EVENTS_QUEUE).forEach(function (event) {
+ _this4.on(_this4.tech_, event, function (eventObj) {
+ if (_this4.tech_.playbackRate() === 0 && _this4.tech_.seeking()) {
+ _this4.queuedCallbacks_.push({
+ callback: _this4["handleTech" + TECH_EVENTS_QUEUE[event] + "_"].bind(_this4),
+ event: eventObj
+ });
+
+ return;
+ }
+
+ _this4["handleTech" + TECH_EVENTS_QUEUE[event] + "_"](eventObj);
+ });
+ });
+ this.on(this.tech_, 'loadstart', function (e) {
+ return _this4.handleTechLoadStart_(e);
+ });
+ this.on(this.tech_, 'sourceset', function (e) {
+ return _this4.handleTechSourceset_(e);
+ });
+ this.on(this.tech_, 'waiting', function (e) {
+ return _this4.handleTechWaiting_(e);
+ });
+ this.on(this.tech_, 'ended', function (e) {
+ return _this4.handleTechEnded_(e);
+ });
+ this.on(this.tech_, 'seeking', function (e) {
+ return _this4.handleTechSeeking_(e);
+ });
+ this.on(this.tech_, 'play', function (e) {
+ return _this4.handleTechPlay_(e);
+ });
+ this.on(this.tech_, 'firstplay', function (e) {
+ return _this4.handleTechFirstPlay_(e);
+ });
+ this.on(this.tech_, 'pause', function (e) {
+ return _this4.handleTechPause_(e);
+ });
+ this.on(this.tech_, 'durationchange', function (e) {
+ return _this4.handleTechDurationChange_(e);
+ });
+ this.on(this.tech_, 'fullscreenchange', function (e, data) {
+ return _this4.handleTechFullscreenChange_(e, data);
+ });
+ this.on(this.tech_, 'fullscreenerror', function (e, err) {
+ return _this4.handleTechFullscreenError_(e, err);
+ });
+ this.on(this.tech_, 'enterpictureinpicture', function (e) {
+ return _this4.handleTechEnterPictureInPicture_(e);
+ });
+ this.on(this.tech_, 'leavepictureinpicture', function (e) {
+ return _this4.handleTechLeavePictureInPicture_(e);
+ });
+ this.on(this.tech_, 'error', function (e) {
+ return _this4.handleTechError_(e);
+ });
+ this.on(this.tech_, 'posterchange', function (e) {
+ return _this4.handleTechPosterChange_(e);
+ });
+ this.on(this.tech_, 'textdata', function (e) {
+ return _this4.handleTechTextData_(e);
+ });
+ this.on(this.tech_, 'ratechange', function (e) {
+ return _this4.handleTechRateChange_(e);
+ });
+ this.on(this.tech_, 'loadedmetadata', this.boundUpdateStyleEl_);
+ this.usingNativeControls(this.techGet_('controls'));
+
+ if (this.controls() && !this.usingNativeControls()) {
+ this.addTechControlsListeners_();
+ } // Add the tech element in the DOM if it was not already there
+ // Make sure to not insert the original video element if using Html5
+
+
+ if (this.tech_.el().parentNode !== this.el() && (titleTechName !== 'Html5' || !this.tag)) {
+ prependTo(this.tech_.el(), this.el());
+ } // Get rid of the original video tag reference after the first tech is loaded
+
+
+ if (this.tag) {
+ this.tag.player = null;
+ this.tag = null;
+ }
+ }
+ /**
+ * Unload and dispose of the current playback {@link Tech}.
+ *
+ * @private
+ */
+ ;
+
+ _proto.unloadTech_ = function unloadTech_() {
+ var _this5 = this;
+
+ // Save the current text tracks so that we can reuse the same text tracks with the next tech
+ ALL.names.forEach(function (name) {
+ var props = ALL[name];
+ _this5[props.privateName] = _this5[props.getterName]();
+ });
+ this.textTracksJson_ = textTrackConverter.textTracksToJson(this.tech_);
+ this.isReady_ = false;
+ this.tech_.dispose();
+ this.tech_ = false;
+
+ if (this.isPosterFromTech_) {
+ this.poster_ = '';
+ this.trigger('posterchange');
+ }
+
+ this.isPosterFromTech_ = false;
+ }
+ /**
+ * Return a reference to the current {@link Tech}.
+ * It will print a warning by default about the danger of using the tech directly,
+ * but any argument that is passed in will silence the warning.
+ *
+ * @param {*} [safety]
+ * Anything passed in to silence the warning
+ *
+ * @return {Tech}
+ * The Tech
+ */
+ ;
+
+ _proto.tech = function tech(safety) {
+ if (safety === undefined) {
+ log$1.warn('Using the tech directly can be dangerous. I hope you know what you\'re doing.\n' + 'See https://github.com/videojs/video.js/issues/2617 for more info.\n');
+ }
+
+ return this.tech_;
+ }
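+
+ // A minimal usage sketch for the `tech()` getter above, assuming a player
+ // element with id 'my-player' exists; any defined argument silences the warning:
+ //
+ //   var player = videojs('my-player');
+ //   var tech = player.tech(true); // acknowledge the warning and get the Tech
+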
+ /**
+ * Set up click and touch listeners for the playback element
+ *
+ * - On desktops: a click on the video itself will toggle playback
+ * - On mobile devices: a click on the video toggles controls
+ * which is done by toggling the user state between active and
+ * inactive
+ * - A tap can signal that a user has become active or has become inactive
+ * e.g. a quick tap on an iPhone movie should reveal the controls. Another
+ * quick tap should hide them again (signaling the user is in an inactive
+ * viewing state)
+ * - In addition to this, we still want the user to be considered inactive after
+ * a few seconds of inactivity.
+ *
+ * > Note: the only part of iOS interaction we can't mimic with this setup
+ * is a touch and hold on the video element counting as activity in order to
+ * keep the controls showing, but that shouldn't be an issue. A touch and hold
+ * on any controls will still keep the user active
+ *
+ * @private
+ */
+ ;
+
+ _proto.addTechControlsListeners_ = function addTechControlsListeners_() {
+ // Make sure to remove all the previous listeners in case we are called multiple times.
+ this.removeTechControlsListeners_();
+ this.on(this.tech_, 'click', this.boundHandleTechClick_);
+ this.on(this.tech_, 'dblclick', this.boundHandleTechDoubleClick_); // If the controls were hidden we don't want that to change without a tap event
+ // so we'll check if the controls were already showing before reporting user
+ // activity
+
+ this.on(this.tech_, 'touchstart', this.boundHandleTechTouchStart_);
+ this.on(this.tech_, 'touchmove', this.boundHandleTechTouchMove_);
+ this.on(this.tech_, 'touchend', this.boundHandleTechTouchEnd_); // The tap listener needs to come after the touchend listener because the tap
+ // listener cancels out any reportedUserActivity when setting userActive(false)
+
+ this.on(this.tech_, 'tap', this.boundHandleTechTap_);
+ }
+ /**
+ * Remove the listeners used for click and tap controls. This is needed for
+ * toggling to controls disabled, where a tap/touch should do nothing.
+ *
+ * @private
+ */
+ ;
+
+ _proto.removeTechControlsListeners_ = function removeTechControlsListeners_() {
+ // We don't want to just use `this.off()` because there might be other needed
+ // listeners added by techs that extend this.
+ this.off(this.tech_, 'tap', this.boundHandleTechTap_);
+ this.off(this.tech_, 'touchstart', this.boundHandleTechTouchStart_);
+ this.off(this.tech_, 'touchmove', this.boundHandleTechTouchMove_);
+ this.off(this.tech_, 'touchend', this.boundHandleTechTouchEnd_);
+ this.off(this.tech_, 'click', this.boundHandleTechClick_);
+ this.off(this.tech_, 'dblclick', this.boundHandleTechDoubleClick_);
+ }
+ /**
+ * Player waits for the tech to be ready
+ *
+ * @private
+ */
+ ;
+
+ _proto.handleTechReady_ = function handleTechReady_() {
+ this.triggerReady(); // Keep the same volume as before
+
+ if (this.cache_.volume) {
+ this.techCall_('setVolume', this.cache_.volume);
+ } // Look if the tech found a higher resolution poster while loading
+
+
+ this.handleTechPosterChange_(); // Update the duration if available
+
+ this.handleTechDurationChange_();
+ }
+ /**
+ * Retrigger the `loadstart` event that was triggered by the {@link Tech}. This
+ * function will also trigger {@link Player#firstplay} if it is the first loadstart
+ * for a video.
+ *
+ * @fires Player#loadstart
+ * @fires Player#firstplay
+ * @listens Tech#loadstart
+ * @private
+ */
+ ;
+
+ _proto.handleTechLoadStart_ = function handleTechLoadStart_() {
+ // TODO: Update to use `emptied` event instead. See #1277.
+ this.removeClass('vjs-ended');
+ this.removeClass('vjs-seeking'); // reset the error state
+
+ this.error(null); // Update the duration
+
+ this.handleTechDurationChange_(); // If it's already playing we want to trigger a firstplay event now.
+ // The firstplay event relies on both the play and loadstart events
+ // which can happen in any order for a new source
+
+ if (!this.paused()) {
+ /**
+ * Fired when the user agent begins looking for media data
+ *
+ * @event Player#loadstart
+ * @type {EventTarget~Event}
+ */
+ this.trigger('loadstart');
+ this.trigger('firstplay');
+ } else {
+ // reset the hasStarted state
+ this.hasStarted(false);
+ this.trigger('loadstart');
+ } // autoplay happens after loadstart for the browser,
+ // so we mimic that behavior
+
+
+ this.manualAutoplay_(this.autoplay() === true && this.options_.normalizeAutoplay ? 'play' : this.autoplay());
+ }
+ /**
+ * Handle autoplay string values, rather than the typical boolean
+ * values that should be handled by the tech. Note that this is not
+ * part of any specification. Valid values and what they do can be
+ * found on the autoplay getter at Player#autoplay()
+ */
+ ;
+
+ _proto.manualAutoplay_ = function manualAutoplay_(type) {
+ var _this6 = this;
+
+ if (!this.tech_ || typeof type !== 'string') {
+ return;
+ } // Save original muted() value, set muted to true, and attempt to play().
+ // On promise rejection, restore muted from saved value
+
+
+ var resolveMuted = function resolveMuted() {
+ var previouslyMuted = _this6.muted();
+
+ _this6.muted(true);
+
+ var restoreMuted = function restoreMuted() {
+ _this6.muted(previouslyMuted);
+ }; // restore muted on play termination
+
+
+ _this6.playTerminatedQueue_.push(restoreMuted);
+
+ var mutedPromise = _this6.play();
+
+ if (!isPromise(mutedPromise)) {
+ return;
+ }
+
+ return mutedPromise["catch"](function (err) {
+ restoreMuted();
+ throw new Error("Rejection at manualAutoplay. Restoring muted value. " + (err ? err : ''));
+ });
+ };
+
+ var promise; // if muted defaults to true
+ // the only thing we can do is call play
+
+ if (type === 'any' && !this.muted()) {
+ promise = this.play();
+
+ if (isPromise(promise)) {
+ promise = promise["catch"](resolveMuted);
+ }
+ } else if (type === 'muted' && !this.muted()) {
+ promise = resolveMuted();
+ } else {
+ promise = this.play();
+ }
+
+ if (!isPromise(promise)) {
+ return;
+ }
+
+ return promise.then(function () {
+ _this6.trigger({
+ type: 'autoplay-success',
+ autoplay: type
+ });
+ })["catch"](function () {
+ _this6.trigger({
+ type: 'autoplay-failure',
+ autoplay: type
+ });
+ });
+ }
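+
+ // A usage sketch for the string autoplay values handled above: 'muted' mutes
+ // then plays, 'any' tries normal playback and falls back to muted playback,
+ // 'play' simply calls play(). The element id 'my-player' is an assumption.
+ //
+ //   var player = videojs('my-player', { autoplay: 'muted' });
+ //   player.on('autoplay-success', function (e) { console.log('autoplayed as', e.autoplay); });
+ //   player.on('autoplay-failure', function (e) { console.log('autoplay blocked for', e.autoplay); });
+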
+ /**
+ * Update the internal source caches so that we return the correct source from
+ * `src()`, `currentSource()`, and `currentSources()`.
+ *
+ * > Note: `currentSources` will not be updated if the source that is passed in exists
+ * in the current `currentSources` cache.
+ *
+ *
+ * @param {Tech~SourceObject} srcObj
+ * A string or object source to update our caches to.
+ */
+ ;
+
+ _proto.updateSourceCaches_ = function updateSourceCaches_(srcObj) {
+ if (srcObj === void 0) {
+ srcObj = '';
+ }
+
+ var src = srcObj;
+ var type = '';
+
+ if (typeof src !== 'string') {
+ src = srcObj.src;
+ type = srcObj.type;
+ } // make sure all the caches are set to default values
+ // to prevent null checking
+
+
+ this.cache_.source = this.cache_.source || {};
+ this.cache_.sources = this.cache_.sources || []; // try to get the type of the src that was passed in
+
+ if (src && !type) {
+ type = findMimetype(this, src);
+ } // update `currentSource` cache always
+
+
+ this.cache_.source = mergeOptions$3({}, srcObj, {
+ src: src,
+ type: type
+ });
+ var matchingSources = this.cache_.sources.filter(function (s) {
+ return s.src && s.src === src;
+ });
+ var sourceElSources = [];
+ var sourceEls = this.$$('source');
+ var matchingSourceEls = [];
+
+ for (var i = 0; i < sourceEls.length; i++) {
+ var sourceObj = getAttributes(sourceEls[i]);
+ sourceElSources.push(sourceObj);
+
+ if (sourceObj.src && sourceObj.src === src) {
+ matchingSourceEls.push(sourceObj.src);
+ }
+ } // if we have matching source els but not matching sources
+ // the current source cache is not up to date
+
+
+ if (matchingSourceEls.length && !matchingSources.length) {
+ this.cache_.sources = sourceElSources; // if we don't have matching source or source els set the
+ // sources cache to the `currentSource` cache
+ } else if (!matchingSources.length) {
+ this.cache_.sources = [this.cache_.source];
+ } // update the tech `src` cache
+
+
+ this.cache_.src = src;
+ }
+ /**
+ * *EXPERIMENTAL* Fired when the source is set or changed on the {@link Tech}
+ * causing the media element to reload.
+ *
+ * It will fire for the initial source and each subsequent source.
+ * This event is a custom event from Video.js and is triggered by the {@link Tech}.
+ *
+ * The event object for this event contains a `src` property that will contain the source
+ * that was available when the event was triggered. This is generally only necessary if Video.js
+ * is switching techs while the source was being changed.
+ *
+ * It is also fired when `load` is called on the player (or media element)
+ * because the {@link https://html.spec.whatwg.org/multipage/media.html#dom-media-load|specification for `load`}
+ * says that the resource selection algorithm needs to be aborted and restarted.
+ * In this case, it is very likely that the `src` property will be set to the
+ * empty string `""` to indicate we do not know what the source will be but
+ * that it is changing.
+ *
+ * *This event is currently still experimental and may change in minor releases.*
+ * __To use this, pass `enableSourceset` option to the player.__
+ *
+ * @event Player#sourceset
+ * @type {EventTarget~Event}
+ * @prop {string} src
+ * The source url available when the `sourceset` was triggered.
+ * It will be an empty string if we cannot know what the source is
+ * but know that the source will change.
+ */
+
+ /**
+ * Retrigger the `sourceset` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#sourceset
+ * @listens Tech#sourceset
+ * @private
+ */
+ ;
+
+ _proto.handleTechSourceset_ = function handleTechSourceset_(event) {
+ var _this7 = this;
+
+ // only update the source cache when the source
+ // was not updated using the player api
+ if (!this.changingSrc_) {
+ var updateSourceCaches = function updateSourceCaches(src) {
+ return _this7.updateSourceCaches_(src);
+ };
+
+ var playerSrc = this.currentSource().src;
+ var eventSrc = event.src; // if we have a playerSrc that is not a blob, and a tech src that is a blob
+
+ if (playerSrc && !/^blob:/.test(playerSrc) && /^blob:/.test(eventSrc)) {
+ // if both the tech source and the player source were updated we assume
+ // something like @videojs/http-streaming did the sourceset and skip updating the source cache.
+ if (!this.lastSource_ || this.lastSource_.tech !== eventSrc && this.lastSource_.player !== playerSrc) {
+ updateSourceCaches = function updateSourceCaches() {};
+ }
+ } // update the source to the initial source right away
+ // in some cases this will be empty string
+
+
+ updateSourceCaches(eventSrc); // if the `sourceset` `src` was an empty string
+ // wait for a `loadstart` to update the cache to `currentSrc`.
+ // If a sourceset happens before a `loadstart`, we reset the state
+
+ if (!event.src) {
+ this.tech_.any(['sourceset', 'loadstart'], function (e) {
+ // if a sourceset happens before a `loadstart` there
+ // is nothing to do as this `handleTechSourceset_`
+ // will be called again and this will be handled there.
+ if (e.type === 'sourceset') {
+ return;
+ }
+
+ var techSrc = _this7.techGet('currentSrc');
+
+ _this7.lastSource_.tech = techSrc;
+
+ _this7.updateSourceCaches_(techSrc);
+ });
+ }
+ }
+
+ this.lastSource_ = {
+ player: this.currentSource().src,
+ tech: event.src
+ };
+ this.trigger({
+ src: event.src,
+ type: 'sourceset'
+ });
+ }
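+
+ // A usage sketch for the experimental `sourceset` event documented above; it
+ // only fires when the `enableSourceset` option is set. The element id
+ // 'my-player' is an assumption.
+ //
+ //   var player = videojs('my-player', { enableSourceset: true });
+ //   player.on('sourceset', function (e) {
+ //     console.log('source changed to:', e.src); // may be '' if the source is not yet known
+ //   });
+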
+ /**
+ * Add/remove the vjs-has-started class
+ *
+ * @fires Player#firstplay
+ *
+ * @param {boolean} request
+ * - true: adds the class
+ * - false: remove the class
+ *
+ * @return {boolean}
+ * the boolean value of hasStarted_
+ */
+ ;
+
+ _proto.hasStarted = function hasStarted(request) {
+ if (request === undefined) {
+ // act as getter, if we have no request to change
+ return this.hasStarted_;
+ }
+
+ if (request === this.hasStarted_) {
+ return;
+ }
+
+ this.hasStarted_ = request;
+
+ if (this.hasStarted_) {
+ this.addClass('vjs-has-started');
+ this.trigger('firstplay');
+ } else {
+ this.removeClass('vjs-has-started');
+ }
+ }
+ /**
+ * Fired whenever the media begins or resumes playback
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-play}
+ * @fires Player#play
+ * @listens Tech#play
+ * @private
+ */
+ ;
+
+ _proto.handleTechPlay_ = function handleTechPlay_() {
+ this.removeClass('vjs-ended');
+ this.removeClass('vjs-paused');
+ this.addClass('vjs-playing'); // hide the poster when the user hits play
+
+ this.hasStarted(true);
+ /**
+ * Triggered whenever an {@link Tech#play} event happens. Indicates that
+ * playback has started or resumed.
+ *
+ * @event Player#play
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('play');
+ }
+ /**
+ * Retrigger the `ratechange` event that was triggered by the {@link Tech}.
+ *
+ * If there were any events queued while the playback rate was zero, fire
+ * those events now.
+ *
+ * @private
+ * @method Player#handleTechRateChange_
+ * @fires Player#ratechange
+ * @listens Tech#ratechange
+ */
+ ;
+
+ _proto.handleTechRateChange_ = function handleTechRateChange_() {
+ if (this.tech_.playbackRate() > 0 && this.cache_.lastPlaybackRate === 0) {
+ this.queuedCallbacks_.forEach(function (queued) {
+ return queued.callback(queued.event);
+ });
+ this.queuedCallbacks_ = [];
+ }
+
+ this.cache_.lastPlaybackRate = this.tech_.playbackRate();
+ /**
+ * Fires when the playing speed of the audio/video is changed
+ *
+ * @event Player#ratechange
+ * @type {event}
+ */
+
+ this.trigger('ratechange');
+ }
+ /**
+ * Retrigger the `waiting` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#waiting
+ * @listens Tech#waiting
+ * @private
+ */
+ ;
+
+ _proto.handleTechWaiting_ = function handleTechWaiting_() {
+ var _this8 = this;
+
+ this.addClass('vjs-waiting');
+ /**
+ * A readyState change on the DOM element has caused playback to stop.
+ *
+ * @event Player#waiting
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('waiting'); // Browsers may emit a timeupdate event after a waiting event. In order to prevent
+ // premature removal of the waiting class, wait for the time to change.
+
+ var timeWhenWaiting = this.currentTime();
+
+ var timeUpdateListener = function timeUpdateListener() {
+ if (timeWhenWaiting !== _this8.currentTime()) {
+ _this8.removeClass('vjs-waiting');
+
+ _this8.off('timeupdate', timeUpdateListener);
+ }
+ };
+
+ this.on('timeupdate', timeUpdateListener);
+ }
+ /**
+ * Retrigger the `canplay` event that was triggered by the {@link Tech}.
+ * > Note: This is not consistent between browsers. See #1351
+ *
+ * @fires Player#canplay
+ * @listens Tech#canplay
+ * @private
+ */
+ ;
+
+ _proto.handleTechCanPlay_ = function handleTechCanPlay_() {
+ this.removeClass('vjs-waiting');
+ /**
+ * The media has a readyState of HAVE_FUTURE_DATA or greater.
+ *
+ * @event Player#canplay
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('canplay');
+ }
+ /**
+ * Retrigger the `canplaythrough` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#canplaythrough
+ * @listens Tech#canplaythrough
+ * @private
+ */
+ ;
+
+ _proto.handleTechCanPlayThrough_ = function handleTechCanPlayThrough_() {
+ this.removeClass('vjs-waiting');
+ /**
+ * The media has a readyState of HAVE_ENOUGH_DATA or greater. This means that the
+ * entire media file can be played without buffering.
+ *
+ * @event Player#canplaythrough
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('canplaythrough');
+ }
+ /**
+ * Retrigger the `playing` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#playing
+ * @listens Tech#playing
+ * @private
+ */
+ ;
+
+ _proto.handleTechPlaying_ = function handleTechPlaying_() {
+ this.removeClass('vjs-waiting');
+ /**
+ * The media is no longer blocked from playback, and has started playing.
+ *
+ * @event Player#playing
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('playing');
+ }
+ /**
+ * Retrigger the `seeking` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#seeking
+ * @listens Tech#seeking
+ * @private
+ */
+ ;
+
+ _proto.handleTechSeeking_ = function handleTechSeeking_() {
+ this.addClass('vjs-seeking');
+ /**
+ * Fired whenever the player is jumping to a new time
+ *
+ * @event Player#seeking
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('seeking');
+ }
+ /**
+ * Retrigger the `seeked` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#seeked
+ * @listens Tech#seeked
+ * @private
+ */
+ ;
+
+ _proto.handleTechSeeked_ = function handleTechSeeked_() {
+ this.removeClass('vjs-seeking');
+ this.removeClass('vjs-ended');
+ /**
+ * Fired when the player has finished jumping to a new time
+ *
+ * @event Player#seeked
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('seeked');
+ }
+ /**
+ * Retrigger the `firstplay` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#firstplay
+ * @listens Tech#firstplay
+ * @deprecated As of 6.0 firstplay event is deprecated.
+ * As of 6.0 passing the `starttime` option to the player and the firstplay event are deprecated.
+ * @private
+ */
+ ;
+
+ _proto.handleTechFirstPlay_ = function handleTechFirstPlay_() {
+ // If the first starttime attribute is specified
+ // then we will start at the given offset in seconds
+ if (this.options_.starttime) {
+ log$1.warn('Passing the `starttime` option to the player will be deprecated in 6.0');
+ this.currentTime(this.options_.starttime);
+ }
+
+ this.addClass('vjs-has-started');
+ /**
+ * Fired the first time a video is played. Not part of the HLS spec, and this is
+ * probably not the best implementation yet, so use sparingly. If you don't have a
+ * reason to prevent playback, use `myPlayer.one('play');` instead.
+ *
+ * @event Player#firstplay
+ * @deprecated As of 6.0 firstplay event is deprecated.
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('firstplay');
+ }
+ /**
+ * Retrigger the `pause` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#pause
+ * @listens Tech#pause
+ * @private
+ */
+ ;
+
+ _proto.handleTechPause_ = function handleTechPause_() {
+ this.removeClass('vjs-playing');
+ this.addClass('vjs-paused');
+ /**
+ * Fired whenever the media has been paused
+ *
+ * @event Player#pause
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('pause');
+ }
+ /**
+ * Retrigger the `ended` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#ended
+ * @listens Tech#ended
+ * @private
+ */
+ ;
+
+ _proto.handleTechEnded_ = function handleTechEnded_() {
+ this.addClass('vjs-ended');
+ this.removeClass('vjs-waiting');
+
+ if (this.options_.loop) {
+ this.currentTime(0);
+ this.play();
+ } else if (!this.paused()) {
+ this.pause();
+ }
+ /**
+ * Fired when the end of the media resource is reached (currentTime == duration)
+ *
+ * @event Player#ended
+ * @type {EventTarget~Event}
+ */
+
+
+ this.trigger('ended');
+ }
+ /**
+ * Fired when the duration of the media resource is first known or changed
+ *
+ * @listens Tech#durationchange
+ * @private
+ */
+ ;
+
+ _proto.handleTechDurationChange_ = function handleTechDurationChange_() {
+ this.duration(this.techGet_('duration'));
+ }
+ /**
+ * Handle a click on the media element to play/pause
+ *
+ * @param {EventTarget~Event} event
+ * the event that caused this function to trigger
+ *
+ * @listens Tech#click
+ * @private
+ */
+ ;
+
+ _proto.handleTechClick_ = function handleTechClick_(event) {
+ // When controls are disabled a click should not toggle playback because
+ // the click is considered a control
+ if (!this.controls_) {
+ return;
+ }
+
+ if (this.options_ === undefined || this.options_.userActions === undefined || this.options_.userActions.click === undefined || this.options_.userActions.click !== false) {
+ if (this.options_ !== undefined && this.options_.userActions !== undefined && typeof this.options_.userActions.click === 'function') {
+ this.options_.userActions.click.call(this, event);
+ } else if (this.paused()) {
+ silencePromise(this.play());
+ } else {
+ this.pause();
+ }
+ }
+ }
+ /**
+ * Handle a double-click on the media element to enter/exit fullscreen
+ *
+ * @param {EventTarget~Event} event
+ * the event that caused this function to trigger
+ *
+ * @listens Tech#dblclick
+ * @private
+ */
+ ;
+
+ _proto.handleTechDoubleClick_ = function handleTechDoubleClick_(event) {
+ if (!this.controls_) {
+ return;
+ } // we do not want to toggle fullscreen state
+ // when double-clicking inside a control bar or a modal
+
+
+ var inAllowedEls = Array.prototype.some.call(this.$$('.vjs-control-bar, .vjs-modal-dialog'), function (el) {
+ return el.contains(event.target);
+ });
+
+ if (!inAllowedEls) {
+ /*
+ * options.userActions.doubleClick
+ *
+ * If `undefined` or `true`, double-click toggles fullscreen if controls are present
+ * Set to `false` to disable double-click handling
+ * Set to a function to substitute an external double-click handler
+ */
+ if (this.options_ === undefined || this.options_.userActions === undefined || this.options_.userActions.doubleClick === undefined || this.options_.userActions.doubleClick !== false) {
+ if (this.options_ !== undefined && this.options_.userActions !== undefined && typeof this.options_.userActions.doubleClick === 'function') {
+ this.options_.userActions.doubleClick.call(this, event);
+ } else if (this.isFullscreen()) {
+ this.exitFullscreen();
+ } else {
+ this.requestFullscreen();
+ }
+ }
+ }
+ }
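+
+ // A configuration sketch for the `userActions.click` and `userActions.doubleClick`
+ // options consulted by the two handlers above. The element id 'my-player' is an
+ // assumption.
+ //
+ //   videojs('my-player', {
+ //     userActions: {
+ //       click: false,                   // do not toggle playback on single click
+ //       doubleClick: function (event) { // replace the default fullscreen toggle
+ //         this.pause();
+ //       }
+ //     }
+ //   });
+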
+ /**
+ * Handle a tap on the media element. It will toggle the user
+ * activity state, which hides and shows the controls.
+ *
+ * @listens Tech#tap
+ * @private
+ */
+ ;
+
+ _proto.handleTechTap_ = function handleTechTap_() {
+ this.userActive(!this.userActive());
+ }
+ /**
+ * Handle touch to start
+ *
+ * @listens Tech#touchstart
+ * @private
+ */
+ ;
+
+ _proto.handleTechTouchStart_ = function handleTechTouchStart_() {
+ this.userWasActive = this.userActive();
+ }
+ /**
+ * Handle touch to move
+ *
+ * @listens Tech#touchmove
+ * @private
+ */
+ ;
+
+ _proto.handleTechTouchMove_ = function handleTechTouchMove_() {
+ if (this.userWasActive) {
+ this.reportUserActivity();
+ }
+ }
+ /**
+ * Handle touch to end
+ *
+ * @param {EventTarget~Event} event
+ * the touchend event that triggered
+ * this function
+ *
+ * @listens Tech#touchend
+ * @private
+ */
+ ;
+
+ _proto.handleTechTouchEnd_ = function handleTechTouchEnd_(event) {
+ // Stop the mouse events from also happening
+ if (event.cancelable) {
+ event.preventDefault();
+ }
+ }
+ /**
+ * Native click events on the SWF aren't triggered on IE11 or Win8.1 RT,
+ * so use stageclick events triggered from inside the SWF instead
+ *
+ * @private
+ * @listens stageclick
+ */
+ ;
+
+ _proto.handleStageClick_ = function handleStageClick_() {
+ this.reportUserActivity();
+ }
+ /**
+ * @private
+ */
+ ;
+
+ _proto.toggleFullscreenClass_ = function toggleFullscreenClass_() {
+ if (this.isFullscreen()) {
+ this.addClass('vjs-fullscreen');
+ } else {
+ this.removeClass('vjs-fullscreen');
+ }
+ }
+ /**
+ * when the document fschange event triggers it calls this
+ */
+ ;
+
+ _proto.documentFullscreenChange_ = function documentFullscreenChange_(e) {
+ var targetPlayer = e.target.player; // if another player was fullscreen
+ // do a null check for targetPlayer because older Firefox versions would use document as e.target
+
+ if (targetPlayer && targetPlayer !== this) {
+ return;
+ }
+
+ var el = this.el();
+ var isFs = document__default['default'][this.fsApi_.fullscreenElement] === el;
+
+ if (!isFs && el.matches) {
+ isFs = el.matches(':' + this.fsApi_.fullscreen);
+ } else if (!isFs && el.msMatchesSelector) {
+ isFs = el.msMatchesSelector(':' + this.fsApi_.fullscreen);
+ }
+
+ this.isFullscreen(isFs);
+ }
+ /**
+ * Handle Tech Fullscreen Change
+ *
+ * @param {EventTarget~Event} event
+ * the fullscreenchange event that triggered this function
+ *
+ * @param {Object} data
+ * the data that was sent with the event
+ *
+ * @private
+ * @listens Tech#fullscreenchange
+ * @fires Player#fullscreenchange
+ */
+ ;
+
+ _proto.handleTechFullscreenChange_ = function handleTechFullscreenChange_(event, data) {
+ var _this9 = this;
+
+ if (data) {
+ if (data.nativeIOSFullscreen) {
+ this.addClass('vjs-ios-native-fs');
+ this.tech_.one('webkitendfullscreen', function () {
+ _this9.removeClass('vjs-ios-native-fs');
+ });
+ }
+
+ this.isFullscreen(data.isFullscreen);
+ }
+ };
+
+ _proto.handleTechFullscreenError_ = function handleTechFullscreenError_(event, err) {
+ this.trigger('fullscreenerror', err);
+ }
+ /**
+ * @private
+ */
+ ;
+
+ _proto.togglePictureInPictureClass_ = function togglePictureInPictureClass_() {
+ if (this.isInPictureInPicture()) {
+ this.addClass('vjs-picture-in-picture');
+ } else {
+ this.removeClass('vjs-picture-in-picture');
+ }
+ }
+ /**
+ * Handle Tech Enter Picture-in-Picture.
+ *
+ * @param {EventTarget~Event} event
+ * the enterpictureinpicture event that triggered this function
+ *
+ * @private
+ * @listens Tech#enterpictureinpicture
+ */
+ ;
+
+ _proto.handleTechEnterPictureInPicture_ = function handleTechEnterPictureInPicture_(event) {
+ this.isInPictureInPicture(true);
+ }
+ /**
+ * Handle Tech Leave Picture-in-Picture.
+ *
+ * @param {EventTarget~Event} event
+ * the leavepictureinpicture event that triggered this function
+ *
+ * @private
+ * @listens Tech#leavepictureinpicture
+ */
+ ;
+
+ _proto.handleTechLeavePictureInPicture_ = function handleTechLeavePictureInPicture_(event) {
+ this.isInPictureInPicture(false);
+ }
+ /**
+ * Fires when an error occurred during the loading of an audio/video.
+ *
+ * @private
+ * @listens Tech#error
+ */
+ ;
+
+ _proto.handleTechError_ = function handleTechError_() {
+ var error = this.tech_.error();
+ this.error(error);
+ }
+ /**
+ * Retrigger the `textdata` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#textdata
+ * @listens Tech#textdata
+ * @private
+ */
+ ;
+
+ _proto.handleTechTextData_ = function handleTechTextData_() {
+ var data = null;
+
+ if (arguments.length > 1) {
+ data = arguments[1];
+ }
+ /**
+ * Fires when we get a textdata event from tech
+ *
+ * @event Player#textdata
+ * @type {EventTarget~Event}
+ */
+
+
+ this.trigger('textdata', data);
+ }
+ /**
+ * Get object for cached values.
+ *
+ * @return {Object}
+ * get the current object cache
+ */
+ ;
+
+ _proto.getCache = function getCache() {
+ return this.cache_;
+ }
+ /**
+ * Resets the internal cache object.
+ *
+ * Using this function outside the player constructor or reset method may
+ * have unintended side-effects.
+ *
+ * @private
+ */
+ ;
+
+ _proto.resetCache_ = function resetCache_() {
+ this.cache_ = {
+ // Right now, the currentTime is not _really_ cached because it is always
+ // retrieved from the tech (see: currentTime). However, for completeness,
+ // we set it to zero here to ensure that if we do start actually caching
+ // it, we reset it along with everything else.
+ currentTime: 0,
+ initTime: 0,
+ inactivityTimeout: this.options_.inactivityTimeout,
+ duration: NaN,
+ lastVolume: 1,
+ lastPlaybackRate: this.defaultPlaybackRate(),
+ media: null,
+ src: '',
+ source: {},
+ sources: [],
+ playbackRates: [],
+ volume: 1
+ };
+ }
+ /**
+ * Pass values to the playback tech
+ *
+ * @param {string} [method]
+ * the method to call
+ *
+ * @param {Object} arg
+ * the argument to pass
+ *
+ * @private
+ */
+ ;
+
+ _proto.techCall_ = function techCall_(method, arg) {
+ // If it's not ready yet, call method when it is
+ this.ready(function () {
+ if (method in allowedSetters) {
+ return set(this.middleware_, this.tech_, method, arg);
+ } else if (method in allowedMediators) {
+ return mediate(this.middleware_, this.tech_, method, arg);
+ }
+
+ try {
+ if (this.tech_) {
+ this.tech_[method](arg);
+ }
+ } catch (e) {
+ log$1(e);
+ throw e;
+ }
+ }, true);
+ }
+ /**
+ * Get calls can't wait for the tech, and sometimes don't need to.
+ *
+ * @param {string} method
+ * Tech method
+ *
+ * @return {*}
+ * the value returned by the tech method, or undefined
+ *
+ * @private
+ */
+ ;
+
+ _proto.techGet_ = function techGet_(method) {
+ if (!this.tech_ || !this.tech_.isReady_) {
+ return;
+ }
+
+ if (method in allowedGetters) {
+ return get(this.middleware_, this.tech_, method);
+ } else if (method in allowedMediators) {
+ return mediate(this.middleware_, this.tech_, method);
+ } // Flash likes to die and reload when you hide or reposition it.
+ // In these cases the object methods go away and we get errors.
+ // TODO: Is this needed for techs other than Flash?
+ // When that happens we'll catch the errors and inform tech that it's not ready any more.
+
+
+ try {
+ return this.tech_[method]();
+ } catch (e) {
+ // When building additional tech libs, an expected method may not be defined yet
+ if (this.tech_[method] === undefined) {
+ log$1("Video.js: " + method + " method not defined for " + this.techName_ + " playback technology.", e);
+ throw e;
+ } // When a method isn't available on the object it throws a TypeError
+
+
+ if (e.name === 'TypeError') {
+ log$1("Video.js: " + method + " unavailable on " + this.techName_ + " playback technology element.", e);
+ this.tech_.isReady_ = false;
+ throw e;
+ } // If error unknown, just log and throw
+
+
+ log$1(e);
+ throw e;
+ }
+ }
+ /**
+ * Attempt to begin playback at the first opportunity.
+ *
+ * @return {Promise|undefined}
+ * Returns a promise if the browser supports Promises (or one
+ * was passed in as an option). This promise will be resolved on
+ * the return value of play. If that value is undefined, the promise chain
+ * fulfills immediately; otherwise it fulfills when the promise
+ * returned by play fulfills.
+ */
+ ;
+
+ _proto.play = function play() {
+ var _this10 = this;
+
+ var PromiseClass = this.options_.Promise || window__default['default'].Promise;
+
+ if (PromiseClass) {
+ return new PromiseClass(function (resolve) {
+ _this10.play_(resolve);
+ });
+ }
+
+ return this.play_();
+ }
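+
+ // A usage sketch for `play()` above: when a Promise implementation is available
+ // it returns a promise that may reject (for example when autoplay is blocked).
+ // `player` is assumed to be an existing Player instance.
+ //
+ //   var playPromise = player.play();
+ //   if (playPromise && typeof playPromise.then === 'function') {
+ //     playPromise.catch(function (err) {
+ //       console.log('playback could not start:', err);
+ //     });
+ //   }
+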
+ /**
+ * The actual logic for play, takes a callback that will be resolved on the
+ * return value of play. This allows us to resolve to the play promise if there
+ * is one on modern browsers.
+ *
+ * @private
+ * @param {Function} [callback]
+ * The callback that should be called when the techs play is actually called
+ */
+ ;
+
+ _proto.play_ = function play_(callback) {
+ var _this11 = this;
+
+ if (callback === void 0) {
+ callback = silencePromise;
+ }
+
+ this.playCallbacks_.push(callback);
+ var isSrcReady = Boolean(!this.changingSrc_ && (this.src() || this.currentSrc())); // treat calls to play_ somewhat like the `one` event function
+
+ if (this.waitToPlay_) {
+ this.off(['ready', 'loadstart'], this.waitToPlay_);
+ this.waitToPlay_ = null;
+ } // if the player/tech is not ready or the src itself is not ready
+ // queue up a call to play on `ready` or `loadstart`
+
+
+ if (!this.isReady_ || !isSrcReady) {
+ this.waitToPlay_ = function (e) {
+ _this11.play_();
+ };
+
+ this.one(['ready', 'loadstart'], this.waitToPlay_); // if we are in Safari, there is a high chance that loadstart will trigger after the gesture time period
+ // in that case, we need to prime the video element by calling load so it'll be ready in time
+
+ if (!isSrcReady && (IS_ANY_SAFARI || IS_IOS)) {
+ this.load();
+ }
+
+ return;
+ } // If the player/tech is ready and we have a source, we can attempt playback.
+
+
+ var val = this.techGet_('play'); // play was terminated if the returned value is null
+
+ if (val === null) {
+ this.runPlayTerminatedQueue_();
+ } else {
+ this.runPlayCallbacks_(val);
+ }
+ }
+ /**
+ * These functions will be run if play is terminated. If
+ * runPlayCallbacks_ is run, these functions will not be run. This allows us
+ * to differentiate between a terminated play and an actual call to play.
+ */
+ ;
+
+ _proto.runPlayTerminatedQueue_ = function runPlayTerminatedQueue_() {
+ var queue = this.playTerminatedQueue_.slice(0);
+ this.playTerminatedQueue_ = [];
+ queue.forEach(function (q) {
+ q();
+ });
+ }
+ /**
+ * When a callback to play is delayed we have to run these
+ * callbacks when play is actually called on the tech. This function
+ * runs the callbacks that were delayed and accepts the return value
+ * from the tech.
+ *
+ * @param {undefined|Promise} val
+ * The return value from the tech.
+ */
+ ;
+
+ _proto.runPlayCallbacks_ = function runPlayCallbacks_(val) {
+ var callbacks = this.playCallbacks_.slice(0);
+ this.playCallbacks_ = []; // clear play terminatedQueue since we finished a real play
+
+ this.playTerminatedQueue_ = [];
+ callbacks.forEach(function (cb) {
+ cb(val);
+ });
+ }
+ /**
+ * Pause the video playback
+ *
+ * @return {Player}
+ * A reference to the player object this function was called on
+ */
+ ;
+
+ _proto.pause = function pause() {
+ this.techCall_('pause');
+ }
+ /**
+ * Check if the player is paused or has yet to play
+ *
+ * @return {boolean}
+ * - false: if the media is currently playing
+ * - true: if media is not currently playing
+ */
+ ;
+
+ _proto.paused = function paused() {
+ // The initial state of paused should be true (in Safari it's actually false)
+ return this.techGet_('paused') === false ? false : true;
+ }
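+
+ // A minimal play/pause toggle built on `paused()` above; `player` is assumed
+ // to be an existing Player instance.
+ //
+ //   if (player.paused()) {
+ //     player.play();
+ //   } else {
+ //     player.pause();
+ //   }
+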
+ /**
+ * Get a TimeRange object representing the current ranges of time that the user
+ * has played.
+ *
+ * @return {TimeRange}
+ * A time range object that represents all the increments of time that have
+ * been played.
+ */
+ ;
+
+ _proto.played = function played() {
+ return this.techGet_('played') || createTimeRanges(0, 0);
+ }
+ /**
+ * Returns whether or not the user is "scrubbing". Scrubbing is
+ * when the user has clicked the progress bar handle and is
+ * dragging it along the progress bar.
+ *
+ * @param {boolean} [isScrubbing]
+ * whether the user is or is not scrubbing
+ *
+ * @return {boolean}
+ * The value of scrubbing when getting
+ */
+ ;
+
+ _proto.scrubbing = function scrubbing(isScrubbing) {
+ if (typeof isScrubbing === 'undefined') {
+ return this.scrubbing_;
+ }
+
+ this.scrubbing_ = !!isScrubbing;
+ this.techCall_('setScrubbing', this.scrubbing_);
+
+ if (isScrubbing) {
+ this.addClass('vjs-scrubbing');
+ } else {
+ this.removeClass('vjs-scrubbing');
+ }
+ }
+ /**
+ * Get or set the current time (in seconds)
+ *
+ * @param {number|string} [seconds]
+ * The time to seek to in seconds
+ *
+ * @return {number}
+ * - the current time in seconds when getting
+ */
+ ;
+
+ _proto.currentTime = function currentTime(seconds) {
+ if (typeof seconds !== 'undefined') {
+ if (seconds < 0) {
+ seconds = 0;
+ }
+
+ if (!this.isReady_ || this.changingSrc_ || !this.tech_ || !this.tech_.isReady_) {
+ this.cache_.initTime = seconds;
+ this.off('canplay', this.boundApplyInitTime_);
+ this.one('canplay', this.boundApplyInitTime_);
+ return;
+ }
+
+ this.techCall_('setCurrentTime', seconds);
+ this.cache_.initTime = 0;
+ return;
+ } // cache last currentTime and return. default to 0 seconds
+ //
+ // Caching the currentTime is meant to prevent a massive amount of reads on the tech's
+ // currentTime when scrubbing, but may not provide much performance benefit after all.
+ // Should be tested. Also something has to read the actual current time or the cache will
+ // never get updated.
+
+
+ this.cache_.currentTime = this.techGet_('currentTime') || 0;
+ return this.cache_.currentTime;
+ }
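+
+ // A usage sketch for `currentTime()` above; a seek requested before the tech is
+ // ready is cached as initTime and applied on `canplay`. `player` is assumed to
+ // be an existing Player instance.
+ //
+ //   player.currentTime(30);              // seek to 30 seconds
+ //   var position = player.currentTime(); // read the current time in seconds
+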
+ /**
+ * Apply the value of initTime stored in cache as currentTime.
+ *
+ * @private
+ */
+ ;
+
+ _proto.applyInitTime_ = function applyInitTime_() {
+ this.currentTime(this.cache_.initTime);
+ }
+ /**
+ * Normally gets the length in time of the video in seconds;
+ * in all but the rarest use cases an argument will NOT be passed to the method
+ *
+ * > **NOTE**: The video must have started loading before the duration can be
+ * known, and depending on preload behaviour may not be known until the video starts
+ * playing.
+ *
+ * @fires Player#durationchange
+ *
+ * @param {number} [seconds]
+ * The duration of the video to set in seconds
+ *
+ * @return {number}
+ * - The duration of the video in seconds when getting
+ */
+ ;
+
+ _proto.duration = function duration(seconds) {
+ if (seconds === undefined) {
+ // return NaN if the duration is not known
+ return this.cache_.duration !== undefined ? this.cache_.duration : NaN;
+ }
+
+ seconds = parseFloat(seconds); // Standardize on Infinity for signaling video is live
+
+ if (seconds < 0) {
+ seconds = Infinity;
+ }
+
+ if (seconds !== this.cache_.duration) {
+ // Cache the last set value for optimized scrubbing (esp. Flash)
+ // TODO: Required for techs other than Flash?
+ this.cache_.duration = seconds;
+
+ if (seconds === Infinity) {
+ this.addClass('vjs-live');
+ } else {
+ this.removeClass('vjs-live');
+ }
+
+ if (!isNaN(seconds)) {
+ // Do not fire durationchange unless the duration value is known.
+ // @see [Spec]{@link https://www.w3.org/TR/2011/WD-html5-20110113/video.html#media-element-load-algorithm}
+
+ /**
+ * @event Player#durationchange
+ * @type {EventTarget~Event}
+ */
+ this.trigger('durationchange');
+ }
+ }
+ }
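+
+ // A usage sketch for `duration()` above: NaN means the duration is not yet
+ // known and Infinity signals a live stream. `player` is assumed to be an
+ // existing Player instance.
+ //
+ //   var d = player.duration();
+ //   if (isNaN(d)) {
+ //     console.log('duration not known yet');
+ //   } else if (d === Infinity) {
+ //     console.log('live stream');
+ //   }
+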
+ /**
+ * Calculates how much time is left in the video. Not part
+ * of the native video API.
+ *
+ * @return {number}
+ * The time remaining in seconds
+ */
+ ;
+
+ _proto.remainingTime = function remainingTime() {
+ return this.duration() - this.currentTime();
+ }
+ /**
+ * A remaining time function that is intended to be used when
+ * the time is to be displayed directly to the user.
+ *
+ * @return {number}
+ * The rounded time remaining in seconds
+ */
+ ;
+
+ _proto.remainingTimeDisplay = function remainingTimeDisplay() {
+ return Math.floor(this.duration()) - Math.floor(this.currentTime());
+ } //
+ // Kind of like an array of portions of the video that have been downloaded.
+
+ /**
+ * Get a TimeRange object with an array of the times of the video
+ * that have been downloaded. If you just want the percent of the
+ * video that's been downloaded, use bufferedPercent.
+ *
+ * @see [Buffered Spec]{@link http://dev.w3.org/html5/spec/video.html#dom-media-buffered}
+ *
+ * @return {TimeRange}
+ * A mock TimeRange object (following HTML spec)
+ */
+ ;
+
+ _proto.buffered = function buffered() {
+ var buffered = this.techGet_('buffered');
+
+ if (!buffered || !buffered.length) {
+ buffered = createTimeRanges(0, 0);
+ }
+
+ return buffered;
+ }
+ /**
+ * Get the percent (as a decimal) of the video that's been downloaded.
+ * This method is not a part of the native HTML video API.
+ *
+ * @return {number}
+ * A decimal between 0 and 1 representing the percent
+ * that is buffered 0 being 0% and 1 being 100%
+ */
+ ;
+
+ _proto.bufferedPercent = function bufferedPercent$1() {
+ return bufferedPercent(this.buffered(), this.duration());
+ }
+ /**
+ * Get the ending time of the last buffered time range
+ * This is used in the progress bar to encapsulate all time ranges.
+ *
+ * @return {number}
+ * The end of the last buffered time range
+ */
+ ;
+
+ _proto.bufferedEnd = function bufferedEnd() {
+ var buffered = this.buffered();
+ var duration = this.duration();
+ var end = buffered.end(buffered.length - 1);
+
+ if (end > duration) {
+ end = duration;
+ }
+
+ return end;
+ }
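+
+ // A usage sketch for the buffer getters above; `player` is assumed to be an
+ // existing Player instance.
+ //
+ //   var ranges = player.buffered();              // TimeRanges of downloaded media
+ //   var loaded = player.bufferedPercent();       // decimal between 0 and 1
+ //   var lastLoadedSecond = player.bufferedEnd(); // clamped to duration()
+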
+ /**
+ * Get or set the current volume of the media
+ *
+ * @param {number} [percentAsDecimal]
+ * The new volume as a decimal percent:
+ * - 0 is muted/0%/off
+ * - 1.0 is 100%/full
+ * - 0.5 is half volume or 50%
+ *
+ * @return {number}
+ * The current volume as a percent when getting
+ */
+ ;
+
+ _proto.volume = function volume(percentAsDecimal) {
+ var vol;
+
+ if (percentAsDecimal !== undefined) {
+ // Force value to between 0 and 1
+ vol = Math.max(0, Math.min(1, parseFloat(percentAsDecimal)));
+ this.cache_.volume = vol;
+ this.techCall_('setVolume', vol);
+
+ if (vol > 0) {
+ this.lastVolume_(vol);
+ }
+
+ return;
+ } // Default to 1 when returning current volume.
+
+
+ vol = parseFloat(this.techGet_('volume'));
+ return isNaN(vol) ? 1 : vol;
+ }
+ /**
+ * Get the current muted state, or turn mute on or off
+ *
+ * @param {boolean} [muted]
+ * - true to mute
+ * - false to unmute
+ *
+ * @return {boolean}
+ * - true if mute is on and getting
+ * - false if mute is off and getting
+ */
+ ;
+
+ _proto.muted = function muted(_muted) {
+ if (_muted !== undefined) {
+ this.techCall_('setMuted', _muted);
+ return;
+ }
+
+ return this.techGet_('muted') || false;
+ }
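+
+ // A usage sketch for `volume()` and `muted()` above; volume values are clamped
+ // to the 0..1 range. `player` is assumed to be an existing Player instance.
+ //
+ //   player.volume(0.5);           // set volume to 50%
+ //   player.muted(true);           // mute without changing the volume value
+ //   var isMuted = player.muted(); // read the muted state
+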
+ /**
+ * Get the current defaultMuted state, or turn defaultMuted on or off. defaultMuted
+ * indicates the state of muted on initial playback.
+ *
+ * ```js
+ * var myPlayer = videojs('some-player-id');
+ *
+ * myPlayer.src("http://www.example.com/path/to/video.mp4");
+ *
+ * // get, should be false
+ * console.log(myPlayer.defaultMuted());
+ * // set to true
+ * myPlayer.defaultMuted(true);
+ * // get should be true
+ * console.log(myPlayer.defaultMuted());
+ * ```
+ *
+ * @param {boolean} [defaultMuted]
+ * - true to mute
+ * - false to unmute
+ *
+ * @return {boolean|Player}
+ * - true if defaultMuted is on and getting
+ * - false if defaultMuted is off and getting
+ * - A reference to the current player when setting
+ */
+ ;
+
+ _proto.defaultMuted = function defaultMuted(_defaultMuted) {
+ if (_defaultMuted !== undefined) {
+ return this.techCall_('setDefaultMuted', _defaultMuted);
+ }
+
+ return this.techGet_('defaultMuted') || false;
+ }
+ /**
+ * Get the last volume, or set it
+ *
+ * @param {number} [percentAsDecimal]
+ * The new last volume as a decimal percent:
+ * - 0 is muted/0%/off
+ * - 1.0 is 100%/full
+ * - 0.5 is half volume or 50%
+ *
+ * @return {number}
+ * the current value of lastVolume as a percent when getting
+ *
+ * @private
+ */
+ ;
+
+ _proto.lastVolume_ = function lastVolume_(percentAsDecimal) {
+ if (percentAsDecimal !== undefined && percentAsDecimal !== 0) {
+ this.cache_.lastVolume = percentAsDecimal;
+ return;
+ }
+
+ return this.cache_.lastVolume;
+ }
+ /**
+ * Check if current tech can support native fullscreen
+ * (e.g. with built in controls like iOS)
+ *
+ * @return {boolean}
+ * if native fullscreen is supported
+ */
+ ;
+
+ _proto.supportsFullScreen = function supportsFullScreen() {
+ return this.techGet_('supportsFullScreen') || false;
+ }
+ /**
+ * Check if the player is in fullscreen mode or tell the player that it
+ * is or is not in fullscreen mode.
+ *
+ * > NOTE: As of the latest HTML5 spec, isFullscreen is no longer an official
+ * property and instead document.fullscreenElement is used. But isFullscreen is
+ * still a valuable property for internal player workings.
+ *
+ * @param {boolean} [isFS]
+ * Set the players current fullscreen state
+ *
+ * @return {boolean}
+ * - true if fullscreen is on and getting
+ * - false if fullscreen is off and getting
+ */
+ ;
+
+ _proto.isFullscreen = function isFullscreen(isFS) {
+ if (isFS !== undefined) {
+ var oldValue = this.isFullscreen_;
+ this.isFullscreen_ = Boolean(isFS); // if we changed fullscreen state and we're in prefixed mode, trigger fullscreenchange
+ // this is the only place where we trigger fullscreenchange events for older browsers
+ // fullWindow mode is treated as a prefixed event and will get a fullscreenchange event as well
+
+ if (this.isFullscreen_ !== oldValue && this.fsApi_.prefixed) {
+ /**
+ * @event Player#fullscreenchange
+ * @type {EventTarget~Event}
+ */
+ this.trigger('fullscreenchange');
+ }
+
+ this.toggleFullscreenClass_();
+ return;
+ }
+
+ return this.isFullscreen_;
+ }
+ /**
+ * Increase the size of the video to full screen
+ * In some browsers, full screen is not supported natively, so it enters
+ * "full window mode", where the video fills the browser window.
+ * In browsers and devices that support native full screen, sometimes the
+ * browser's default controls will be shown, and not the Video.js custom skin.
+ * This includes most mobile devices (iOS, Android) and older versions of
+ * Safari.
+ *
+ * @param {Object} [fullscreenOptions]
+ * Override the player fullscreen options
+ *
+ * @fires Player#fullscreenchange
+ */
+ ;
+
+ _proto.requestFullscreen = function requestFullscreen(fullscreenOptions) {
+ var PromiseClass = this.options_.Promise || window__default['default'].Promise;
+
+ if (PromiseClass) {
+ var self = this;
+ return new PromiseClass(function (resolve, reject) {
+ function offHandler() {
+ self.off('fullscreenerror', errorHandler);
+ self.off('fullscreenchange', changeHandler);
+ }
+
+ function changeHandler() {
+ offHandler();
+ resolve();
+ }
+
+ function errorHandler(e, err) {
+ offHandler();
+ reject(err);
+ }
+
+ self.one('fullscreenchange', changeHandler);
+ self.one('fullscreenerror', errorHandler);
+ var promise = self.requestFullscreenHelper_(fullscreenOptions);
+
+ if (promise) {
+ promise.then(offHandler, offHandler);
+ promise.then(resolve, reject);
+ }
+ });
+ }
+
+ return this.requestFullscreenHelper_();
+ };
+
+ _proto.requestFullscreenHelper_ = function requestFullscreenHelper_(fullscreenOptions) {
+ var _this12 = this;
+
+ var fsOptions; // Only pass fullscreen options to requestFullscreen in spec-compliant browsers.
+ // Use defaults or player configured option unless passed directly to this method.
+
+ if (!this.fsApi_.prefixed) {
+ fsOptions = this.options_.fullscreen && this.options_.fullscreen.options || {};
+
+ if (fullscreenOptions !== undefined) {
+ fsOptions = fullscreenOptions;
+ }
+ } // This method works as follows:
+ // 1. if a fullscreen api is available, use it
+ // 1. call requestFullscreen with potential options
+ // 2. if we got a promise from above, use it to update isFullscreen()
+ // 2. otherwise, if the tech supports fullscreen, call `enterFullScreen` on it.
+ // This is particularly used for iPhones, older iPads, and non-Safari browsers on iOS.
+ // 3. otherwise, use "fullWindow" mode
+
+
+ if (this.fsApi_.requestFullscreen) {
+ var promise = this.el_[this.fsApi_.requestFullscreen](fsOptions);
+
+ if (promise) {
+ promise.then(function () {
+ return _this12.isFullscreen(true);
+ }, function () {
+ return _this12.isFullscreen(false);
+ });
+ }
+
+ return promise;
+ } else if (this.tech_.supportsFullScreen() && !this.options_.preferFullWindow === true) {
+ // we can't take the video.js controls fullscreen but we can go fullscreen
+ // with native controls
+ this.techCall_('enterFullScreen');
+ } else {
+ // fullscreen isn't supported so we'll just stretch the video element to
+ // fill the viewport
+ this.enterFullWindow();
+ }
+ }
+ /**
+ * Return the video to its normal size after having been in full screen mode
+ *
+ * @fires Player#fullscreenchange
+ */
+ ;
+
+ _proto.exitFullscreen = function exitFullscreen() {
+ var PromiseClass = this.options_.Promise || window__default['default'].Promise;
+
+ if (PromiseClass) {
+ var self = this;
+ return new PromiseClass(function (resolve, reject) {
+ function offHandler() {
+ self.off('fullscreenerror', errorHandler);
+ self.off('fullscreenchange', changeHandler);
+ }
+
+ function changeHandler() {
+ offHandler();
+ resolve();
+ }
+
+ function errorHandler(e, err) {
+ offHandler();
+ reject(err);
+ }
+
+ self.one('fullscreenchange', changeHandler);
+ self.one('fullscreenerror', errorHandler);
+ var promise = self.exitFullscreenHelper_();
+
+ if (promise) {
+ promise.then(offHandler, offHandler); // map the promise to our resolve/reject methods
+
+ promise.then(resolve, reject);
+ }
+ });
+ }
+
+ return this.exitFullscreenHelper_();
+ };
+
+ _proto.exitFullscreenHelper_ = function exitFullscreenHelper_() {
+ var _this13 = this;
+
+ if (this.fsApi_.requestFullscreen) {
+ var promise = document__default['default'][this.fsApi_.exitFullscreen]();
+
+ if (promise) {
+ // we're splitting the promise here, so, we want to catch the
+ // potential error so that this chain doesn't have unhandled errors
+ silencePromise(promise.then(function () {
+ return _this13.isFullscreen(false);
+ }));
+ }
+
+ return promise;
+ } else if (this.tech_.supportsFullScreen() && !this.options_.preferFullWindow === true) {
+ this.techCall_('exitFullScreen');
+ } else {
+ this.exitFullWindow();
+ }
+ }
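+
+ // A usage sketch for the fullscreen API above: when a Promise implementation is
+ // available, requestFullscreen() and exitFullscreen() return promises that
+ // reject on `fullscreenerror`. `player` is assumed to be an existing Player
+ // instance.
+ //
+ //   player.requestFullscreen().then(function () {
+ //     console.log('entered fullscreen');
+ //   }).catch(function (err) {
+ //     console.log('fullscreen request failed:', err);
+ //   });
+ //
+ //   // later
+ //   player.exitFullscreen();
+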
+ /**
+ * When fullscreen isn't supported we can stretch the
+ * video container to as wide as the browser will let us.
+ *
+ * @fires Player#enterFullWindow
+ */
+ ;
+
+ _proto.enterFullWindow = function enterFullWindow() {
+ this.isFullscreen(true);
+ this.isFullWindow = true; // Storing original doc overflow value to return to when fullscreen is off
+
+ this.docOrigOverflow = document__default['default'].documentElement.style.overflow; // Add listener for esc key to exit fullscreen
+
+ on(document__default['default'], 'keydown', this.boundFullWindowOnEscKey_); // Hide any scroll bars
+
+ document__default['default'].documentElement.style.overflow = 'hidden'; // Apply fullscreen styles
+
+ addClass(document__default['default'].body, 'vjs-full-window');
+ /**
+ * @event Player#enterFullWindow
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('enterFullWindow');
+ }
+ /**
+ * Check for call to either exit full window or
+ * full screen on ESC key
+ *
+ * @param {string} event
+ * Event to check for key press
+ */
+ ;
+
+ _proto.fullWindowOnEscKey = function fullWindowOnEscKey(event) {
+ if (keycode__default['default'].isEventKey(event, 'Esc')) {
+ if (this.isFullscreen() === true) {
+ if (!this.isFullWindow) {
+ this.exitFullscreen();
+ } else {
+ this.exitFullWindow();
+ }
+ }
+ }
+ }
+ /**
+ * Exit full window
+ *
+ * @fires Player#exitFullWindow
+ */
+ ;
+
+ _proto.exitFullWindow = function exitFullWindow() {
+ this.isFullscreen(false);
+ this.isFullWindow = false;
+ off(document__default['default'], 'keydown', this.boundFullWindowOnEscKey_); // Unhide scroll bars.
+
+ document__default['default'].documentElement.style.overflow = this.docOrigOverflow; // Remove fullscreen styles
+
+ removeClass(document__default['default'].body, 'vjs-full-window'); // Resize the box, controller, and poster to original sizes
+ // this.positionAll();
+
+ /**
+ * @event Player#exitFullWindow
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('exitFullWindow');
+ }
+ /**
+ * Disable Picture-in-Picture mode.
+ *
+ * @param {boolean} value
+ * - true will disable Picture-in-Picture mode
+ * - false will enable Picture-in-Picture mode
+ */
+ ;
+
+ _proto.disablePictureInPicture = function disablePictureInPicture(value) {
+ if (value === undefined) {
+ return this.techGet_('disablePictureInPicture');
+ }
+
+ this.techCall_('setDisablePictureInPicture', value);
+ this.options_.disablePictureInPicture = value;
+ this.trigger('disablepictureinpicturechanged');
+ }
+ /**
+ * Check if the player is in Picture-in-Picture mode or tell the player that it
+ * is or is not in Picture-in-Picture mode.
+ *
+ * @param {boolean} [isPiP]
+ * Set the players current Picture-in-Picture state
+ *
+ * @return {boolean}
+ * - true if Picture-in-Picture is on and getting
+ * - false if Picture-in-Picture is off and getting
+ */
+ ;
+
+ _proto.isInPictureInPicture = function isInPictureInPicture(isPiP) {
+ if (isPiP !== undefined) {
+ this.isInPictureInPicture_ = !!isPiP;
+ this.togglePictureInPictureClass_();
+ return;
+ }
+
+ return !!this.isInPictureInPicture_;
+ }
+ /**
+ * Create a floating video window always on top of other windows so that users may
+ * continue consuming media while they interact with other content sites, or
+ * applications on their device.
+ *
+ * @see [Spec]{@link https://wicg.github.io/picture-in-picture}
+ *
+ * @fires Player#enterpictureinpicture
+ *
+ * @return {Promise}
+ * A promise with a Picture-in-Picture window.
+ */
+ ;
+
+ _proto.requestPictureInPicture = function requestPictureInPicture() {
+ if ('pictureInPictureEnabled' in document__default['default'] && this.disablePictureInPicture() === false) {
+ /**
+ * This event fires when the player enters picture in picture mode
+ *
+ * @event Player#enterpictureinpicture
+ * @type {EventTarget~Event}
+ */
+ return this.techGet_('requestPictureInPicture');
+ }
+ }
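+ // Illustrative usage sketch: wiring a Picture-in-Picture toggle to a button,
+ // using the PiP methods defined here. The 'pip-button' element and the
+ // 'my-video' player id are assumptions for the example only.
+ //
+ //   var player = videojs('my-video');
+ //   document.getElementById('pip-button').addEventListener('click', function () {
+ //     if (player.isInPictureInPicture()) {
+ //       player.exitPictureInPicture();
+ //     } else if (player.disablePictureInPicture() === false) {
+ //       // returns a Promise in browsers that support the PiP API
+ //       player.requestPictureInPicture();
+ //     }
+ //   });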
+ /**
+ * Exit Picture-in-Picture mode.
+ *
+ * @see [Spec]{@link https://wicg.github.io/picture-in-picture}
+ *
+ * @fires Player#leavepictureinpicture
+ *
+ * @return {Promise}
+ * A promise.
+ */
+ ;
+
+ _proto.exitPictureInPicture = function exitPictureInPicture() {
+ if ('pictureInPictureEnabled' in document__default['default']) {
+ /**
+ * This event fires when the player leaves picture in picture mode
+ *
+ * @event Player#leavepictureinpicture
+ * @type {EventTarget~Event}
+ */
+ return document__default['default'].exitPictureInPicture();
+ }
+ }
+ /**
+ * Called when this Player has focus and a key gets pressed down, or when
+ * any Component of this player receives a key press that it doesn't handle.
+ * This allows player-wide hotkeys (either as defined below, or optionally
+ * by an external function).
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ var userActions = this.options_.userActions; // Bail out if hotkeys are not configured.
+
+ if (!userActions || !userActions.hotkeys) {
+ return;
+ } // Function that determines whether or not to exclude an element from
+ // hotkeys handling.
+
+
+ var excludeElement = function excludeElement(el) {
+ var tagName = el.tagName.toLowerCase(); // The first and easiest test is for `contenteditable` elements.
+
+ if (el.isContentEditable) {
+ return true;
+ } // Inputs matching these types will still trigger hotkey handling as
+ // they are not text inputs.
+
+
+ var allowedInputTypes = ['button', 'checkbox', 'hidden', 'radio', 'reset', 'submit'];
+
+ if (tagName === 'input') {
+ return allowedInputTypes.indexOf(el.type) === -1;
+ } // The final test is by tag name. These tags will be excluded entirely.
+
+
+ var excludedTags = ['textarea'];
+ return excludedTags.indexOf(tagName) !== -1;
+ }; // Bail out if the user is focused on an interactive form element.
+
+
+ if (excludeElement(this.el_.ownerDocument.activeElement)) {
+ return;
+ }
+
+ if (typeof userActions.hotkeys === 'function') {
+ userActions.hotkeys.call(this, event);
+ } else {
+ this.handleHotkeys(event);
+ }
+ }
+ /**
+ * Called when this Player receives a hotkey keydown event.
+ * Supported player-wide hotkeys are:
+ *
+ * f - toggle fullscreen
+ * m - toggle mute
+ * k or Space - toggle play/pause
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ */
+ ;
+
+ _proto.handleHotkeys = function handleHotkeys(event) {
+ var hotkeys = this.options_.userActions ? this.options_.userActions.hotkeys : {}; // set fullscreenKey, muteKey, playPauseKey from `hotkeys`, use defaults if not set
+
+ var _hotkeys$fullscreenKe = hotkeys.fullscreenKey,
+ fullscreenKey = _hotkeys$fullscreenKe === void 0 ? function (keydownEvent) {
+ return keycode__default['default'].isEventKey(keydownEvent, 'f');
+ } : _hotkeys$fullscreenKe,
+ _hotkeys$muteKey = hotkeys.muteKey,
+ muteKey = _hotkeys$muteKey === void 0 ? function (keydownEvent) {
+ return keycode__default['default'].isEventKey(keydownEvent, 'm');
+ } : _hotkeys$muteKey,
+ _hotkeys$playPauseKey = hotkeys.playPauseKey,
+ playPauseKey = _hotkeys$playPauseKey === void 0 ? function (keydownEvent) {
+ return keycode__default['default'].isEventKey(keydownEvent, 'k') || keycode__default['default'].isEventKey(keydownEvent, 'Space');
+ } : _hotkeys$playPauseKey;
+
+ if (fullscreenKey.call(this, event)) {
+ event.preventDefault();
+ event.stopPropagation();
+ var FSToggle = Component$1.getComponent('FullscreenToggle');
+
+ if (document__default['default'][this.fsApi_.fullscreenEnabled] !== false) {
+ FSToggle.prototype.handleClick.call(this, event);
+ }
+ } else if (muteKey.call(this, event)) {
+ event.preventDefault();
+ event.stopPropagation();
+ var MuteToggle = Component$1.getComponent('MuteToggle');
+ MuteToggle.prototype.handleClick.call(this, event);
+ } else if (playPauseKey.call(this, event)) {
+ event.preventDefault();
+ event.stopPropagation();
+ var PlayToggle = Component$1.getComponent('PlayToggle');
+ PlayToggle.prototype.handleClick.call(this, event);
+ }
+ }
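+ // Illustrative usage sketch: overriding the default hotkeys through the
+ // `userActions.hotkeys` option read above. The 'my-video' element id and the
+ // chosen keys are assumptions.
+ //
+ //   videojs('my-video', {
+ //     userActions: {
+ //       hotkeys: {
+ //         // use 'p' instead of 'k'/Space for play/pause
+ //         playPauseKey: function (keydownEvent) {
+ //           return keydownEvent.key === 'p';
+ //         },
+ //         muteKey: function (keydownEvent) {
+ //           return keydownEvent.key === 'm';
+ //         },
+ //         fullscreenKey: function (keydownEvent) {
+ //           return keydownEvent.key === 'f';
+ //         }
+ //       }
+ //     }
+ //   });
+ //
+ // Passing a single function as `hotkeys` instead of an object hands every
+ // keydown to that function, as handleKeyDown above shows.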
+ /**
+ * Check whether the player can play a given mimetype
+ *
+ * @see https://www.w3.org/TR/2011/WD-html5-20110113/video.html#dom-navigator-canplaytype
+ *
+ * @param {string} type
+ * The mimetype to check
+ *
+ * @return {string}
+ * 'probably', 'maybe', or '' (empty string)
+ */
+ ;
+
+ _proto.canPlayType = function canPlayType(type) {
+ var can; // Loop through each playback technology in the options order
+
+ for (var i = 0, j = this.options_.techOrder; i < j.length; i++) {
+ var techName = j[i];
+ var tech = Tech.getTech(techName); // Support old behavior of techs being registered as components.
+ // Remove once that deprecated behavior is removed.
+
+ if (!tech) {
+ tech = Component$1.getComponent(techName);
+ } // Check if the current tech is defined before continuing
+
+
+ if (!tech) {
+ log$1.error("The \"" + techName + "\" tech is undefined. Skipped browser support check for that tech.");
+ continue;
+ } // Check if the browser supports this technology
+
+
+ if (tech.isSupported()) {
+ can = tech.canPlayType(type);
+
+ if (can) {
+ return can;
+ }
+ }
+ }
+
+ return '';
+ }
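+ // Illustrative usage sketch: probing format support before picking a source.
+ // The file paths are assumptions.
+ //
+ //   var player = videojs('my-video');
+ //   if (player.canPlayType('video/webm') !== '') {
+ //     player.src({ src: '/media/clip.webm', type: 'video/webm' });
+ //   } else {
+ //     player.src({ src: '/media/clip.mp4', type: 'video/mp4' });
+ //   }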
+ /**
+ * Select source based on tech-order or source-order
+ * Uses source-order selection if `options.sourceOrder` is truthy. Otherwise,
+ * defaults to tech-order selection
+ *
+ * @param {Array} sources
+ * The sources for a media asset
+ *
+ * @return {Object|boolean}
+ * Object of source and tech order or false
+ */
+ ;
+
+ _proto.selectSource = function selectSource(sources) {
+ var _this14 = this;
+
+ // Get only the techs specified in `techOrder` that exist and are supported by the
+ // current platform
+ var techs = this.options_.techOrder.map(function (techName) {
+ return [techName, Tech.getTech(techName)];
+ }).filter(function (_ref) {
+ var techName = _ref[0],
+ tech = _ref[1];
+
+ // Check if the current tech is defined before continuing
+ if (tech) {
+ // Check if the browser supports this technology
+ return tech.isSupported();
+ }
+
+ log$1.error("The \"" + techName + "\" tech is undefined. Skipped browser support check for that tech.");
+ return false;
+ }); // Iterate over each `innerArray` element once per `outerArray` element and execute
+ // `tester` with both. If `tester` returns a non-falsy value, exit early and return
+ // that value.
+
+ var findFirstPassingTechSourcePair = function findFirstPassingTechSourcePair(outerArray, innerArray, tester) {
+ var found;
+ outerArray.some(function (outerChoice) {
+ return innerArray.some(function (innerChoice) {
+ found = tester(outerChoice, innerChoice);
+
+ if (found) {
+ return true;
+ }
+ });
+ });
+ return found;
+ };
+
+ var foundSourceAndTech;
+
+ var flip = function flip(fn) {
+ return function (a, b) {
+ return fn(b, a);
+ };
+ };
+
+ var finder = function finder(_ref2, source) {
+ var techName = _ref2[0],
+ tech = _ref2[1];
+
+ if (tech.canPlaySource(source, _this14.options_[techName.toLowerCase()])) {
+ return {
+ source: source,
+ tech: techName
+ };
+ }
+ }; // Depending on the truthiness of `options.sourceOrder`, we swap the order of techs and sources
+ // to select from them based on their priority.
+
+
+ if (this.options_.sourceOrder) {
+ // Source-first ordering
+ foundSourceAndTech = findFirstPassingTechSourcePair(sources, techs, flip(finder));
+ } else {
+ // Tech-first ordering
+ foundSourceAndTech = findFirstPassingTechSourcePair(techs, sources, finder);
+ }
+
+ return foundSourceAndTech || false;
+ }
+ /**
+ * Executes source setting and getting logic
+ *
+ * @param {Tech~SourceObject|Tech~SourceObject[]|string} [source]
+ * A SourceObject, an array of SourceObjects, or a string referencing
+ * a URL to a media source. It is _highly recommended_ that an object
+ * or array of objects is used here, so that source selection
+ * algorithms can take the `type` into account.
+ *
+ * If not provided, this method acts as a getter.
+ * @param {boolean} isRetry
+ * Indicates whether this is being called internally as a result of a retry
+ *
+ * @return {string|undefined}
+ * If the `source` argument is missing, returns the current source
+ * URL. Otherwise, returns nothing/undefined.
+ */
+ ;
+
+ _proto.handleSrc_ = function handleSrc_(source, isRetry) {
+ var _this15 = this;
+
+ // getter usage
+ if (typeof source === 'undefined') {
+ return this.cache_.src || '';
+ } // Reset retry behavior for new source
+
+
+ if (this.resetRetryOnError_) {
+ this.resetRetryOnError_();
+ } // filter out invalid sources and turn our source into
+ // an array of source objects
+
+
+ var sources = filterSource(source); // if a source was passed in then it is invalid because
+ // it was filtered to a zero length Array. So we have to
+ // show an error
+
+ if (!sources.length) {
+ this.setTimeout(function () {
+ this.error({
+ code: 4,
+ message: this.options_.notSupportedMessage
+ });
+ }, 0);
+ return;
+ } // initial sources
+
+
+ this.changingSrc_ = true; // Only update the cached source list if we are not retrying a new source after error,
+ // since in that case we want to include the failed source(s) in the cache
+
+ if (!isRetry) {
+ this.cache_.sources = sources;
+ }
+
+ this.updateSourceCaches_(sources[0]); // middlewareSource is the source after it has been changed by middleware
+
+ setSource(this, sources[0], function (middlewareSource, mws) {
+ _this15.middleware_ = mws; // since sourceSet is async we have to update the cache again after we select a source since
+ // the source that is selected could be out of order from the cache update above this callback.
+
+ if (!isRetry) {
+ _this15.cache_.sources = sources;
+ }
+
+ _this15.updateSourceCaches_(middlewareSource);
+
+ var err = _this15.src_(middlewareSource);
+
+ if (err) {
+ if (sources.length > 1) {
+ return _this15.handleSrc_(sources.slice(1));
+ }
+
+ _this15.changingSrc_ = false; // We need to wrap this in a timeout to give folks a chance to add error event handlers
+
+ _this15.setTimeout(function () {
+ this.error({
+ code: 4,
+ message: this.options_.notSupportedMessage
+ });
+ }, 0); // we could not find an appropriate tech, but let's still notify the delegate that this is it
+ // this needs a better comment about why this is needed
+
+
+ _this15.triggerReady();
+
+ return;
+ }
+
+ setTech(mws, _this15.tech_);
+ }); // Try another available source if this one fails before playback.
+
+ if (this.options_.retryOnError && sources.length > 1) {
+ var retry = function retry() {
+ // Remove the error modal
+ _this15.error(null);
+
+ _this15.handleSrc_(sources.slice(1), true);
+ };
+
+ var stopListeningForErrors = function stopListeningForErrors() {
+ _this15.off('error', retry);
+ };
+
+ this.one('error', retry);
+ this.one('playing', stopListeningForErrors);
+
+ this.resetRetryOnError_ = function () {
+ _this15.off('error', retry);
+
+ _this15.off('playing', stopListeningForErrors);
+ };
+ }
+ }
+ /**
+ * Get or set the video source.
+ *
+ * @param {Tech~SourceObject|Tech~SourceObject[]|string} [source]
+ * A SourceObject, an array of SourceObjects, or a string referencing
+ * a URL to a media source. It is _highly recommended_ that an object
+ * or array of objects is used here, so that source selection
+ * algorithms can take the `type` into account.
+ *
+ * If not provided, this method acts as a getter.
+ *
+ * @return {string|undefined}
+ * If the `source` argument is missing, returns the current source
+ * URL. Otherwise, returns nothing/undefined.
+ */
+ ;
+
+ _proto.src = function src(source) {
+ return this.handleSrc_(source, false);
+ }
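+ // Illustrative usage sketch: passing typed source objects so selectSource()
+ // can pick the best tech/source pair. URLs are assumptions.
+ //
+ //   var player = videojs('my-video');
+ //   player.src([
+ //     { src: '/media/clip.webm', type: 'video/webm' },
+ //     { src: '/media/clip.mp4', type: 'video/mp4' }
+ //   ]);
+ //   player.src(); // getter form: URL of the currently selected source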
+ /**
+ * Set the source object on the tech, returns a boolean that indicates whether
+ * there is a tech that can play the source or not
+ *
+ * @param {Tech~SourceObject} source
+ * The source object to set on the Tech
+ *
+ * @return {boolean}
+ * - True if there is no Tech that can play back this source
+ * - False otherwise
+ *
+ * @private
+ */
+ ;
+
+ _proto.src_ = function src_(source) {
+ var _this16 = this;
+
+ var sourceTech = this.selectSource([source]);
+
+ if (!sourceTech) {
+ return true;
+ }
+
+ if (!titleCaseEquals(sourceTech.tech, this.techName_)) {
+ this.changingSrc_ = true; // load this technology with the chosen source
+
+ this.loadTech_(sourceTech.tech, sourceTech.source);
+ this.tech_.ready(function () {
+ _this16.changingSrc_ = false;
+ });
+ return false;
+ } // wait until the tech is ready to set the source
+ // and set it synchronously if possible (#2326)
+
+
+ this.ready(function () {
+ // The setSource tech method was added with source handlers
+ // so older techs won't support it
+ // We need to check the direct prototype for the case where subclasses
+ // of the tech do not support source handlers
+ if (this.tech_.constructor.prototype.hasOwnProperty('setSource')) {
+ this.techCall_('setSource', source);
+ } else {
+ this.techCall_('src', source.src);
+ }
+
+ this.changingSrc_ = false;
+ }, true);
+ return false;
+ }
+ /**
+ * Begin loading the src data.
+ */
+ ;
+
+ _proto.load = function load() {
+ this.techCall_('load');
+ }
+ /**
+ * Reset the player. Loads the first tech in the techOrder,
+ * removes all the text tracks in the existing `tech`,
+ * and calls `reset` on the `tech`.
+ */
+ ;
+
+ _proto.reset = function reset() {
+ var _this17 = this;
+
+ var PromiseClass = this.options_.Promise || window__default['default'].Promise;
+
+ if (this.paused() || !PromiseClass) {
+ this.doReset_();
+ } else {
+ var playPromise = this.play();
+ silencePromise(playPromise.then(function () {
+ return _this17.doReset_();
+ }));
+ }
+ };
+
+ _proto.doReset_ = function doReset_() {
+ if (this.tech_) {
+ this.tech_.clearTracks('text');
+ }
+
+ this.resetCache_();
+ this.poster('');
+ this.loadTech_(this.options_.techOrder[0], null);
+ this.techCall_('reset');
+ this.resetControlBarUI_();
+
+ if (isEvented(this)) {
+ this.trigger('playerreset');
+ }
+ }
+ /**
+ * Reset Control Bar's UI by calling sub-methods that reset
+ * all of Control Bar's components
+ */
+ ;
+
+ _proto.resetControlBarUI_ = function resetControlBarUI_() {
+ this.resetProgressBar_();
+ this.resetPlaybackRate_();
+ this.resetVolumeBar_();
+ }
+ /**
+ * Reset tech's progress so progress bar is reset in the UI
+ */
+ ;
+
+ _proto.resetProgressBar_ = function resetProgressBar_() {
+ this.currentTime(0);
+
+ var _ref3 = this.controlBar || {},
+ durationDisplay = _ref3.durationDisplay,
+ remainingTimeDisplay = _ref3.remainingTimeDisplay;
+
+ if (durationDisplay) {
+ durationDisplay.updateContent();
+ }
+
+ if (remainingTimeDisplay) {
+ remainingTimeDisplay.updateContent();
+ }
+ }
+ /**
+ * Reset Playback ratio
+ */
+ ;
+
+ _proto.resetPlaybackRate_ = function resetPlaybackRate_() {
+ this.playbackRate(this.defaultPlaybackRate());
+ this.handleTechRateChange_();
+ }
+ /**
+ * Reset Volume bar
+ */
+ ;
+
+ _proto.resetVolumeBar_ = function resetVolumeBar_() {
+ this.volume(1.0);
+ this.trigger('volumechange');
+ }
+ /**
+ * Returns all of the current source objects.
+ *
+ * @return {Tech~SourceObject[]}
+ * The current source objects
+ */
+ ;
+
+ _proto.currentSources = function currentSources() {
+ var source = this.currentSource();
+ var sources = []; // assume `{}` or `{ src }`
+
+ if (Object.keys(source).length !== 0) {
+ sources.push(source);
+ }
+
+ return this.cache_.sources || sources;
+ }
+ /**
+ * Returns the current source object.
+ *
+ * @return {Tech~SourceObject}
+ * The current source object
+ */
+ ;
+
+ _proto.currentSource = function currentSource() {
+ return this.cache_.source || {};
+ }
+ /**
+ * Returns the fully qualified URL of the current source value e.g. http://mysite.com/video.mp4
+ * Can be used in conjunction with `currentType` to assist in rebuilding the current source object.
+ *
+ * @return {string}
+ * The current source
+ */
+ ;
+
+ _proto.currentSrc = function currentSrc() {
+ return this.currentSource() && this.currentSource().src || '';
+ }
+ /**
+ * Get the current source type e.g. video/mp4
+ * This can allow you to rebuild the current source object so that you could load the same
+ * source and tech later
+ *
+ * @return {string}
+ * The source MIME type
+ */
+ ;
+
+ _proto.currentType = function currentType() {
+ return this.currentSource() && this.currentSource().type || '';
+ }
+ /**
+ * Get or set the preload attribute
+ *
+ * @param {string} [value]
+ * The preload attribute value: 'none', 'metadata' or 'auto'
+ *
+ * @return {string}
+ * The preload attribute value when getting
+ */
+ ;
+
+ _proto.preload = function preload(value) {
+ if (value !== undefined) {
+ this.techCall_('setPreload', value);
+ this.options_.preload = value;
+ return;
+ }
+
+ return this.techGet_('preload');
+ }
+ /**
+ * Get or set the autoplay option. When this is a boolean it will
+ * modify the attribute on the tech. When this is a string the attribute on
+ * the tech will be removed and `Player` will handle autoplay on loadstarts.
+ *
+ * @param {boolean|string} [value]
+ * - true: autoplay using the browser behavior
+ * - false: do not autoplay
+ * - 'play': call play() on every loadstart
+ * - 'muted': call muted() then play() on every loadstart
+ * - 'any': call play() on every loadstart. if that fails call muted() then play().
+ * - *: values other than those listed here will set `autoplay` to true
+ *
+ * @return {boolean|string}
+ * The current value of autoplay when getting
+ */
+ ;
+
+ _proto.autoplay = function autoplay(value) {
+ // getter usage
+ if (value === undefined) {
+ return this.options_.autoplay || false;
+ }
+
+ var techAutoplay; // if the value is a valid string set it to that, or normalize `true` to 'play', if need be
+
+ if (typeof value === 'string' && /(any|play|muted)/.test(value) || value === true && this.options_.normalizeAutoplay) {
+ this.options_.autoplay = value;
+ this.manualAutoplay_(typeof value === 'string' ? value : 'play');
+ techAutoplay = false; // any falsy value sets autoplay to false in the browser,
+ // lets do the same
+ } else if (!value) {
+ this.options_.autoplay = false; // any other value (ie truthy) sets autoplay to true
+ } else {
+ this.options_.autoplay = true;
+ }
+
+ techAutoplay = typeof techAutoplay === 'undefined' ? this.options_.autoplay : techAutoplay; // if we don't have a tech then we do not queue up
+ // a setAutoplay call on tech ready. We do this because the
+ // autoplay option will be passed in the constructor and we
+ // do not need to set it twice
+
+ if (this.tech_) {
+ this.techCall_('setAutoplay', techAutoplay);
+ }
+ }
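+ // Illustrative usage sketch: using the string forms of `autoplay`, which the
+ // player applies on every loadstart instead of relying on the browser
+ // attribute. The element id is an assumption.
+ //
+ //   var player = videojs('my-video', { autoplay: 'muted' });
+ //   // or switch strategies later:
+ //   player.autoplay('any'); // try play(); if it fails, mute and retry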
+ /**
+ * Set or unset the playsinline attribute.
+ * Playsinline tells the browser that non-fullscreen playback is preferred.
+ *
+ * @param {boolean} [value]
+ * - true means that we should try to play inline by default
+ * - false means that we should use the browser's default playback mode,
+ * which in most cases is inline. iOS Safari is a notable exception
+ * and plays fullscreen by default.
+ *
+ * @return {string|Player}
+ * - the current value of playsinline
+ * - the player when setting
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/#attr-video-playsinline}
+ */
+ ;
+
+ _proto.playsinline = function playsinline(value) {
+ if (value !== undefined) {
+ this.techCall_('setPlaysinline', value);
+ this.options_.playsinline = value;
+ return this;
+ }
+
+ return this.techGet_('playsinline');
+ }
+ /**
+ * Get or set the loop attribute on the video element.
+ *
+ * @param {boolean} [value]
+ * - true means that we should loop the video
+ * - false means that we should not loop the video
+ *
+ * @return {boolean}
+ * The current value of loop when getting
+ */
+ ;
+
+ _proto.loop = function loop(value) {
+ if (value !== undefined) {
+ this.techCall_('setLoop', value);
+ this.options_.loop = value;
+ return;
+ }
+
+ return this.techGet_('loop');
+ }
+ /**
+ * Get or set the poster image source url
+ *
+ * @fires Player#posterchange
+ *
+ * @param {string} [src]
+ * Poster image source URL
+ *
+ * @return {string}
+ * The current value of poster when getting
+ */
+ ;
+
+ _proto.poster = function poster(src) {
+ if (src === undefined) {
+ return this.poster_;
+ } // The correct way to remove a poster is to set as an empty string
+ // other falsey values will throw errors
+
+
+ if (!src) {
+ src = '';
+ }
+
+ if (src === this.poster_) {
+ return;
+ } // update the internal poster variable
+
+
+ this.poster_ = src; // update the tech's poster
+
+ this.techCall_('setPoster', src);
+ this.isPosterFromTech_ = false; // alert components that the poster has been set
+
+ /**
+ * This event fires when the poster image is changed on the player.
+ *
+ * @event Player#posterchange
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('posterchange');
+ }
+ /**
+ * Some techs (e.g. YouTube) can provide a poster source in an
+ * asynchronous way. We want the poster component to use this
+ * poster source so that it covers up the tech's controls.
+ * (YouTube's play button). However we only want to use this
+ * source if the player user hasn't set a poster through
+ * the normal APIs.
+ *
+ * @fires Player#posterchange
+ * @listens Tech#posterchange
+ * @private
+ */
+ ;
+
+ _proto.handleTechPosterChange_ = function handleTechPosterChange_() {
+ if ((!this.poster_ || this.options_.techCanOverridePoster) && this.tech_ && this.tech_.poster) {
+ var newPoster = this.tech_.poster() || '';
+
+ if (newPoster !== this.poster_) {
+ this.poster_ = newPoster;
+ this.isPosterFromTech_ = true; // Let components know the poster has changed
+
+ this.trigger('posterchange');
+ }
+ }
+ }
+ /**
+ * Get or set whether or not the controls are showing.
+ *
+ * @fires Player#controlsenabled
+ *
+ * @param {boolean} [bool]
+ * - true to turn controls on
+ * - false to turn controls off
+ *
+ * @return {boolean}
+ * The current value of controls when getting
+ */
+ ;
+
+ _proto.controls = function controls(bool) {
+ if (bool === undefined) {
+ return !!this.controls_;
+ }
+
+ bool = !!bool; // Don't trigger a change event unless it actually changed
+
+ if (this.controls_ === bool) {
+ return;
+ }
+
+ this.controls_ = bool;
+
+ if (this.usingNativeControls()) {
+ this.techCall_('setControls', bool);
+ }
+
+ if (this.controls_) {
+ this.removeClass('vjs-controls-disabled');
+ this.addClass('vjs-controls-enabled');
+ /**
+ * @event Player#controlsenabled
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('controlsenabled');
+
+ if (!this.usingNativeControls()) {
+ this.addTechControlsListeners_();
+ }
+ } else {
+ this.removeClass('vjs-controls-enabled');
+ this.addClass('vjs-controls-disabled');
+ /**
+ * @event Player#controlsdisabled
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('controlsdisabled');
+
+ if (!this.usingNativeControls()) {
+ this.removeTechControlsListeners_();
+ }
+ }
+ }
+ /**
+ * Toggle native controls on/off. Native controls are the controls built into
+ * devices (e.g. default iPhone controls) or other techs
+ * (e.g. Vimeo Controls)
+ * **This should only be set by the current tech, because only the tech knows
+ * if it can support native controls**
+ *
+ * @fires Player#usingnativecontrols
+ * @fires Player#usingcustomcontrols
+ *
+ * @param {boolean} [bool]
+ * - true to turn native controls on
+ * - false to turn native controls off
+ *
+ * @return {boolean}
+ * The current value of native controls when getting
+ */
+ ;
+
+ _proto.usingNativeControls = function usingNativeControls(bool) {
+ if (bool === undefined) {
+ return !!this.usingNativeControls_;
+ }
+
+ bool = !!bool; // Don't trigger a change event unless it actually changed
+
+ if (this.usingNativeControls_ === bool) {
+ return;
+ }
+
+ this.usingNativeControls_ = bool;
+
+ if (this.usingNativeControls_) {
+ this.addClass('vjs-using-native-controls');
+ /**
+ * player is using the native device controls
+ *
+ * @event Player#usingnativecontrols
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('usingnativecontrols');
+ } else {
+ this.removeClass('vjs-using-native-controls');
+ /**
+ * player is using the custom HTML controls
+ *
+ * @event Player#usingcustomcontrols
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('usingcustomcontrols');
+ }
+ }
+ /**
+ * Set or get the current MediaError
+ *
+ * @fires Player#error
+ *
+ * @param {MediaError|string|number} [err]
+ * A MediaError or a string/number to be turned
+ * into a MediaError
+ *
+ * @return {MediaError|null}
+ * The current MediaError when getting (or null)
+ */
+ ;
+
+ _proto.error = function error(err) {
+ var _this18 = this;
+
+ if (err === undefined) {
+ return this.error_ || null;
+ } // allow hooks to modify error object
+
+
+ hooks('beforeerror').forEach(function (hookFunction) {
+ var newErr = hookFunction(_this18, err);
+
+ if (!(isObject(newErr) && !Array.isArray(newErr) || typeof newErr === 'string' || typeof newErr === 'number' || newErr === null)) {
+ _this18.log.error('please return a value that MediaError expects in beforeerror hooks');
+
+ return;
+ }
+
+ err = newErr;
+ }); // Suppress the first error message for no compatible source until
+ // user interaction
+
+ if (this.options_.suppressNotSupportedError && err && err.code === 4) {
+ var triggerSuppressedError = function triggerSuppressedError() {
+ this.error(err);
+ };
+
+ this.options_.suppressNotSupportedError = false;
+ this.any(['click', 'touchstart'], triggerSuppressedError);
+ this.one('loadstart', function () {
+ this.off(['click', 'touchstart'], triggerSuppressedError);
+ });
+ return;
+ } // restoring to default
+
+
+ if (err === null) {
+ this.error_ = err;
+ this.removeClass('vjs-error');
+
+ if (this.errorDisplay) {
+ this.errorDisplay.close();
+ }
+
+ return;
+ }
+
+ this.error_ = new MediaError(err); // add the vjs-error classname to the player
+
+ this.addClass('vjs-error'); // log the name of the error type and any message
+ // IE11 logs "[object object]" and required you to expand message to see error object
+
+ log$1.error("(CODE:" + this.error_.code + " " + MediaError.errorTypes[this.error_.code] + ")", this.error_.message, this.error_);
+ /**
+ * @event Player#error
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('error'); // notify hooks of the per player error
+
+ hooks('error').forEach(function (hookFunction) {
+ return hookFunction(_this18, _this18.error_);
+ });
+ return;
+ }
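+ // Illustrative usage sketch: the 'beforeerror' hooks consumed above can be
+ // registered via videojs.hook() to rewrite an error before it is displayed.
+ // The custom message is an assumption.
+ //
+ //   videojs.hook('beforeerror', function (player, err) {
+ //     if (err && err.code === 4) {
+ //       return { code: 4, message: 'This format is not available right now.' };
+ //     }
+ //     return err;
+ //   });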
+ /**
+ * Report user activity
+ *
+ * @param {Object} event
+ * Event object
+ */
+ ;
+
+ _proto.reportUserActivity = function reportUserActivity(event) {
+ this.userActivity_ = true;
+ }
+ /**
+ * Get/set if user is active
+ *
+ * @fires Player#useractive
+ * @fires Player#userinactive
+ *
+ * @param {boolean} [bool]
+ * - true if the user is active
+ * - false if the user is inactive
+ *
+ * @return {boolean}
+ * The current value of userActive when getting
+ */
+ ;
+
+ _proto.userActive = function userActive(bool) {
+ if (bool === undefined) {
+ return this.userActive_;
+ }
+
+ bool = !!bool;
+
+ if (bool === this.userActive_) {
+ return;
+ }
+
+ this.userActive_ = bool;
+
+ if (this.userActive_) {
+ this.userActivity_ = true;
+ this.removeClass('vjs-user-inactive');
+ this.addClass('vjs-user-active');
+ /**
+ * @event Player#useractive
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('useractive');
+ return;
+ } // Chrome/Safari/IE have bugs where when you change the cursor it can
+ // trigger a mousemove event. This causes an issue when you're hiding
+ // the cursor when the user is inactive, and a mousemove signals user
+ // activity. Making it impossible to go into inactive mode. Specifically
+ // this happens in fullscreen when we really need to hide the cursor.
+ //
+ // When this gets resolved in ALL browsers it can be removed
+ // https://code.google.com/p/chromium/issues/detail?id=103041
+
+
+ if (this.tech_) {
+ this.tech_.one('mousemove', function (e) {
+ e.stopPropagation();
+ e.preventDefault();
+ });
+ }
+
+ this.userActivity_ = false;
+ this.removeClass('vjs-user-active');
+ this.addClass('vjs-user-inactive');
+ /**
+ * @event Player#userinactive
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('userinactive');
+ }
+ /**
+ * Listen for user activity based on timeout value
+ *
+ * @private
+ */
+ ;
+
+ _proto.listenForUserActivity_ = function listenForUserActivity_() {
+ var mouseInProgress;
+ var lastMoveX;
+ var lastMoveY;
+ var handleActivity = bind(this, this.reportUserActivity);
+
+ var handleMouseMove = function handleMouseMove(e) {
+ // #1068 - Prevent mousemove spamming
+ // Chrome Bug: https://code.google.com/p/chromium/issues/detail?id=366970
+ if (e.screenX !== lastMoveX || e.screenY !== lastMoveY) {
+ lastMoveX = e.screenX;
+ lastMoveY = e.screenY;
+ handleActivity();
+ }
+ };
+
+ var handleMouseDown = function handleMouseDown() {
+ handleActivity(); // For as long as the they are touching the device or have their mouse down,
+ // we consider them active even if they're not moving their finger or mouse.
+ // So we want to continue to update that they are active
+
+ this.clearInterval(mouseInProgress); // Setting userActivity=true now and setting the interval to the same time
+ // as the activityCheck interval (250) should ensure we never miss the
+ // next activityCheck
+
+ mouseInProgress = this.setInterval(handleActivity, 250);
+ };
+
+ var handleMouseUpAndMouseLeave = function handleMouseUpAndMouseLeave(event) {
+ handleActivity(); // Stop the interval that maintains activity if the mouse/touch is down
+
+ this.clearInterval(mouseInProgress);
+ }; // Any mouse movement will be considered user activity
+
+
+ this.on('mousedown', handleMouseDown);
+ this.on('mousemove', handleMouseMove);
+ this.on('mouseup', handleMouseUpAndMouseLeave);
+ this.on('mouseleave', handleMouseUpAndMouseLeave);
+ var controlBar = this.getChild('controlBar'); // Fixes bug on Android & iOS where when tapping progressBar (when control bar is displayed)
+ // controlBar would no longer be hidden by default timeout.
+
+ if (controlBar && !IS_IOS && !IS_ANDROID) {
+ controlBar.on('mouseenter', function (event) {
+ if (this.player().options_.inactivityTimeout !== 0) {
+ this.player().cache_.inactivityTimeout = this.player().options_.inactivityTimeout;
+ }
+
+ this.player().options_.inactivityTimeout = 0;
+ });
+ controlBar.on('mouseleave', function (event) {
+ this.player().options_.inactivityTimeout = this.player().cache_.inactivityTimeout;
+ });
+ } // Listen for keyboard navigation
+ // Shouldn't need to use inProgress interval because of key repeat
+
+
+ this.on('keydown', handleActivity);
+ this.on('keyup', handleActivity); // Run an interval every 250 milliseconds instead of stuffing everything into
+ // the mousemove/touchmove function itself, to prevent performance degradation.
+ // `this.reportUserActivity` simply sets this.userActivity_ to true, which
+ // then gets picked up by this loop
+ // http://ejohn.org/blog/learning-from-twitter/
+
+ var inactivityTimeout;
+ this.setInterval(function () {
+ // Check to see if mouse/touch activity has happened
+ if (!this.userActivity_) {
+ return;
+ } // Reset the activity tracker
+
+
+ this.userActivity_ = false; // If the user state was inactive, set the state to active
+
+ this.userActive(true); // Clear any existing inactivity timeout to start the timer over
+
+ this.clearTimeout(inactivityTimeout);
+ var timeout = this.options_.inactivityTimeout;
+
+ if (timeout <= 0) {
+ return;
+ } // In milliseconds, if no more activity has occurred the
+ // user will be considered inactive
+
+
+ inactivityTimeout = this.setTimeout(function () {
+ // Protect against the case where the inactivityTimeout can trigger just
+ // before the next user activity is picked up by the activity check loop
+ // causing a flicker
+ if (!this.userActivity_) {
+ this.userActive(false);
+ }
+ }, timeout);
+ }, 250);
+ }
+ /**
+ * Gets or sets the current playback rate. A playback rate of
+ * 1.0 represents normal speed and 0.5 would indicate half-speed
+ * playback, for instance.
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-playbackrate
+ *
+ * @param {number} [rate]
+ * New playback rate to set.
+ *
+ * @return {number}
+ * The current playback rate when getting or 1.0
+ */
+ ;
+
+ _proto.playbackRate = function playbackRate(rate) {
+ if (rate !== undefined) {
+ // NOTE: this.cache_.lastPlaybackRate is set from the tech handler
+ // that is registered above
+ this.techCall_('setPlaybackRate', rate);
+ return;
+ }
+
+ if (this.tech_ && this.tech_.featuresPlaybackRate) {
+ return this.cache_.lastPlaybackRate || this.techGet_('playbackRate');
+ }
+
+ return 1.0;
+ }
+ /**
+ * Gets or sets the current default playback rate. A default playback rate of
+ * 1.0 represents normal speed and 0.5 would indicate half-speed playback, for instance.
+ * defaultPlaybackRate will only represent what the initial playbackRate of a video was, not
+ * the current playbackRate.
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-defaultplaybackrate
+ *
+ * @param {number} [rate]
+ * New default playback rate to set.
+ *
+ * @return {number|Player}
+ * - The default playback rate when getting or 1.0
+ * - the player when setting
+ */
+ ;
+
+ _proto.defaultPlaybackRate = function defaultPlaybackRate(rate) {
+ if (rate !== undefined) {
+ return this.techCall_('setDefaultPlaybackRate', rate);
+ }
+
+ if (this.tech_ && this.tech_.featuresPlaybackRate) {
+ return this.techGet_('defaultPlaybackRate');
+ }
+
+ return 1.0;
+ }
+ /**
+ * Gets or sets the audio flag
+ *
+ * @param {boolean} bool
+ * - true signals that this is an audio player
+ * - false signals that this is not an audio player
+ *
+ * @return {boolean}
+ * The current value of isAudio when getting
+ */
+ ;
+
+ _proto.isAudio = function isAudio(bool) {
+ if (bool !== undefined) {
+ this.isAudio_ = !!bool;
+ return;
+ }
+
+ return !!this.isAudio_;
+ };
+
+ _proto.enableAudioOnlyUI_ = function enableAudioOnlyUI_() {
+ var _this19 = this;
+
+ // Update styling immediately to show the control bar so we can get its height
+ this.addClass('vjs-audio-only-mode');
+ var playerChildren = this.children();
+ var controlBar = this.getChild('ControlBar');
+ var controlBarHeight = controlBar && controlBar.currentHeight(); // Hide all player components except the control bar. Control bar components
+ // needed only for video are hidden with CSS
+
+ playerChildren.forEach(function (child) {
+ if (child === controlBar) {
+ return;
+ }
+
+ if (child.el_ && !child.hasClass('vjs-hidden')) {
+ child.hide();
+
+ _this19.audioOnlyCache_.hiddenChildren.push(child);
+ }
+ });
+ this.audioOnlyCache_.playerHeight = this.currentHeight(); // Set the player height the same as the control bar
+
+ this.height(controlBarHeight);
+ this.trigger('audioonlymodechange');
+ };
+
+ _proto.disableAudioOnlyUI_ = function disableAudioOnlyUI_() {
+ this.removeClass('vjs-audio-only-mode'); // Show player components that were previously hidden
+
+ this.audioOnlyCache_.hiddenChildren.forEach(function (child) {
+ return child.show();
+ }); // Reset player height
+
+ this.height(this.audioOnlyCache_.playerHeight);
+ this.trigger('audioonlymodechange');
+ }
+ /**
+ * Get the current audioOnlyMode state or set audioOnlyMode to true or false.
+ *
+ * Setting this to `true` will hide all player components except the control bar,
+ * as well as control bar components needed only for video.
+ *
+ * @param {boolean} [value]
+ * The value to set audioOnlyMode to.
+ *
+ * @return {Promise|boolean}
+ * A Promise is returned when setting the state, and a boolean when getting
+ * the present state
+ */
+ ;
+
+ _proto.audioOnlyMode = function audioOnlyMode(value) {
+ var _this20 = this;
+
+ if (typeof value !== 'boolean' || value === this.audioOnlyMode_) {
+ return this.audioOnlyMode_;
+ }
+
+ this.audioOnlyMode_ = value;
+ var PromiseClass = this.options_.Promise || window__default['default'].Promise;
+
+ if (PromiseClass) {
+ // Enable Audio Only Mode
+ if (value) {
+ var exitPromises = []; // Fullscreen and PiP are not supported in audioOnlyMode, so exit if we need to.
+
+ if (this.isInPictureInPicture()) {
+ exitPromises.push(this.exitPictureInPicture());
+ }
+
+ if (this.isFullscreen()) {
+ exitPromises.push(this.exitFullscreen());
+ }
+
+ if (this.audioPosterMode()) {
+ exitPromises.push(this.audioPosterMode(false));
+ }
+
+ return PromiseClass.all(exitPromises).then(function () {
+ return _this20.enableAudioOnlyUI_();
+ });
+ } // Disable Audio Only Mode
+
+
+ return PromiseClass.resolve().then(function () {
+ return _this20.disableAudioOnlyUI_();
+ });
+ }
+
+ if (value) {
+ if (this.isInPictureInPicture()) {
+ this.exitPictureInPicture();
+ }
+
+ if (this.isFullscreen()) {
+ this.exitFullscreen();
+ }
+
+ this.enableAudioOnlyUI_();
+ } else {
+ this.disableAudioOnlyUI_();
+ }
+ };
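+ // Illustrative usage sketch: switching to the audio-only UI where a Promise
+ // implementation is available; the setter resolves after fullscreen/PiP have
+ // been exited and the layout has been collapsed to the control bar.
+ //
+ //   player.audioOnlyMode(true).then(function () {
+ //     console.log('audio-only UI active:', player.audioOnlyMode());
+ //   });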
+
+ _proto.enablePosterModeUI_ = function enablePosterModeUI_() {
+ // Hide the video element and show the poster image to enable posterModeUI
+ var tech = this.tech_ && this.tech_;
+ tech.hide();
+ this.addClass('vjs-audio-poster-mode');
+ this.trigger('audiopostermodechange');
+ };
+
+ _proto.disablePosterModeUI_ = function disablePosterModeUI_() {
+ // Show the video element and hide the poster image to disable posterModeUI
+ var tech = this.tech_ && this.tech_;
+ tech.show();
+ this.removeClass('vjs-audio-poster-mode');
+ this.trigger('audiopostermodechange');
+ }
+ /**
+ * Get the current audioPosterMode state or set audioPosterMode to true or false
+ *
+ * @param {boolean} [value]
+ * The value to set audioPosterMode to.
+ *
+ * @return {Promise|boolean}
+ * A Promise is returned when setting the state, and a boolean when getting
+ * the present state
+ */
+ ;
+
+ _proto.audioPosterMode = function audioPosterMode(value) {
+ var _this21 = this;
+
+ if (typeof value !== 'boolean' || value === this.audioPosterMode_) {
+ return this.audioPosterMode_;
+ }
+
+ this.audioPosterMode_ = value;
+ var PromiseClass = this.options_.Promise || window__default['default'].Promise;
+
+ if (PromiseClass) {
+ if (value) {
+ if (this.audioOnlyMode()) {
+ var audioOnlyModePromise = this.audioOnlyMode(false);
+ return audioOnlyModePromise.then(function () {
+ // enable audio poster mode after audio only mode is disabled
+ _this21.enablePosterModeUI_();
+ });
+ }
+
+ return PromiseClass.resolve().then(function () {
+ // enable audio poster mode
+ _this21.enablePosterModeUI_();
+ });
+ }
+
+ return PromiseClass.resolve().then(function () {
+ // disable audio poster mode
+ _this21.disablePosterModeUI_();
+ });
+ }
+
+ if (value) {
+ if (this.audioOnlyMode()) {
+ this.audioOnlyMode(false);
+ }
+
+ this.enablePosterModeUI_();
+ return;
+ }
+
+ this.disablePosterModeUI_();
+ }
+ /**
+ * A helper method for adding a {@link TextTrack} to our
+ * {@link TextTrackList}.
+ *
+ * In addition to the W3C settings we allow adding additional info through options.
+ *
+ * @see http://www.w3.org/html/wg/drafts/html/master/embedded-content-0.html#dom-media-addtexttrack
+ *
+ * @param {string} [kind]
+ * the kind of TextTrack you are adding
+ *
+ * @param {string} [label]
+ * the label to give the TextTrack label
+ *
+ * @param {string} [language]
+ * the language to set on the TextTrack
+ *
+ * @return {TextTrack|undefined}
+ * the TextTrack that was added or undefined
+ * if there is no tech
+ */
+ ;
+
+ _proto.addTextTrack = function addTextTrack(kind, label, language) {
+ if (this.tech_) {
+ return this.tech_.addTextTrack(kind, label, language);
+ }
+ }
+ /**
+ * Create a remote {@link TextTrack} and an {@link HTMLTrackElement}.
+ * When manualCleanup is set to false, the track will be automatically removed
+ * on source changes.
+ *
+ * @param {Object} options
+ * Options to pass to {@link HTMLTrackElement} during creation. See
+ * {@link HTMLTrackElement} for object properties that you should use.
+ *
+ * @param {boolean} [manualCleanup=true] if set to false, the TextTrack will be
+ * removed on a source change
+ *
+ * @return {HtmlTrackElement}
+ * the HTMLTrackElement that was created and added
+ * to the HtmlTrackElementList and the remote
+ * TextTrackList
+ *
+ * @deprecated The default value of the "manualCleanup" parameter will change
+ * to "false" in upcoming versions of Video.js
+ */
+ ;
+
+ _proto.addRemoteTextTrack = function addRemoteTextTrack(options, manualCleanup) {
+ if (this.tech_) {
+ return this.tech_.addRemoteTextTrack(options, manualCleanup);
+ }
+ }
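+ // Illustrative usage sketch: adding an English subtitle track that is removed
+ // automatically on source changes (manualCleanup: false). The track URL is an
+ // assumption.
+ //
+ //   player.addRemoteTextTrack({
+ //     kind: 'subtitles',
+ //     src: '/subtitles/en.vtt',
+ //     srclang: 'en',
+ //     label: 'English'
+ //   }, false);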
+ /**
+ * Remove a remote {@link TextTrack} from the respective
+ * {@link TextTrackList} and {@link HtmlTrackElementList}.
+ *
+ * @param {Object} track
+ * Remote {@link TextTrack} to remove
+ *
+ * @return {undefined}
+ * does not return anything
+ */
+ ;
+
+ _proto.removeRemoteTextTrack = function removeRemoteTextTrack(obj) {
+ if (obj === void 0) {
+ obj = {};
+ }
+
+ var _obj = obj,
+ track = _obj.track;
+
+ if (!track) {
+ track = obj;
+ } // destructure the input into an object with a track argument, defaulting to arguments[0]
+ // default the whole argument to an empty object if nothing was passed in
+
+
+ if (this.tech_) {
+ return this.tech_.removeRemoteTextTrack(track);
+ }
+ }
+ /**
+ * Gets available media playback quality metrics as specified by the W3C's Media
+ * Playback Quality API.
+ *
+ * @see [Spec]{@link https://wicg.github.io/media-playback-quality}
+ *
+ * @return {Object|undefined}
+ * An object with supported media playback quality metrics or undefined if there
+ * is no tech or the tech does not support it.
+ */
+ ;
+
+ _proto.getVideoPlaybackQuality = function getVideoPlaybackQuality() {
+ return this.techGet_('getVideoPlaybackQuality');
+ }
+ /**
+ * Get video width
+ *
+ * @return {number}
+ * current video width
+ */
+ ;
+
+ _proto.videoWidth = function videoWidth() {
+ return this.tech_ && this.tech_.videoWidth && this.tech_.videoWidth() || 0;
+ }
+ /**
+ * Get video height
+ *
+ * @return {number}
+ * current video height
+ */
+ ;
+
+ _proto.videoHeight = function videoHeight() {
+ return this.tech_ && this.tech_.videoHeight && this.tech_.videoHeight() || 0;
+ }
+ /**
+ * The player's language code.
+ *
+ * Changing the language will trigger
+ * [languagechange]{@link Player#event:languagechange}
+ * which Components can use to update control text.
+ * ClickableComponent will update its control text by default on
+ * [languagechange]{@link Player#event:languagechange}.
+ *
+ * @fires Player#languagechange
+ *
+ * @param {string} [code]
+ * the language code to set the player to
+ *
+ * @return {string}
+ * The current language code when getting
+ */
+ ;
+
+ _proto.language = function language(code) {
+ if (code === undefined) {
+ return this.language_;
+ }
+
+ if (this.language_ !== String(code).toLowerCase()) {
+ this.language_ = String(code).toLowerCase(); // during first init, it's possible some things won't be evented
+
+ if (isEvented(this)) {
+ /**
+ * fires when the player language change
+ *
+ * @event Player#languagechange
+ * @type {EventTarget~Event}
+ */
+ this.trigger('languagechange');
+ }
+ }
+ }
+ /**
+ * Get the player's language dictionary
+ * Merge every time, because a newly added plugin might call videojs.addLanguage() at any time
+ * Languages specified directly in the player options have precedence
+ *
+ * @return {Array}
+ * An array of supported languages
+ */
+ ;
+
+ _proto.languages = function languages() {
+ return mergeOptions$3(Player.prototype.options_.languages, this.languages_);
+ }
+ /**
+ * returns a JavaScript object representing the current track
+ * information. **DOES not return it as JSON**
+ *
+ * @return {Object}
+ * Object representing the current track info
+ */
+ ;
+
+ _proto.toJSON = function toJSON() {
+ var options = mergeOptions$3(this.options_);
+ var tracks = options.tracks;
+ options.tracks = [];
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i]; // deep merge tracks and null out player so no circular references
+
+ track = mergeOptions$3(track);
+ track.player = undefined;
+ options.tracks[i] = track;
+ }
+
+ return options;
+ }
+ /**
+ * Creates a simple modal dialog (an instance of the {@link ModalDialog}
+ * component) that immediately overlays the player with arbitrary
+ * content and removes itself when closed.
+ *
+ * @param {string|Function|Element|Array|null} content
+ * Same as {@link ModalDialog#content}'s param of the same name.
+ * The most straight-forward usage is to provide a string or DOM
+ * element.
+ *
+ * @param {Object} [options]
+ * Extra options which will be passed on to the {@link ModalDialog}.
+ *
+ * @return {ModalDialog}
+ * the {@link ModalDialog} that was created
+ */
+ ;
+
+ _proto.createModal = function createModal(content, options) {
+ var _this22 = this;
+
+ options = options || {};
+ options.content = content || '';
+ var modal = new ModalDialog(this, options);
+ this.addChild(modal);
+ modal.on('dispose', function () {
+ _this22.removeChild(modal);
+ });
+ modal.open();
+ return modal;
+ }
+ /**
+ * Change breakpoint classes when the player resizes.
+ *
+ * @private
+ */
+ ;
+
+ _proto.updateCurrentBreakpoint_ = function updateCurrentBreakpoint_() {
+ if (!this.responsive()) {
+ return;
+ }
+
+ var currentBreakpoint = this.currentBreakpoint();
+ var currentWidth = this.currentWidth();
+
+ for (var i = 0; i < BREAKPOINT_ORDER.length; i++) {
+ var candidateBreakpoint = BREAKPOINT_ORDER[i];
+ var maxWidth = this.breakpoints_[candidateBreakpoint];
+
+ if (currentWidth <= maxWidth) {
+ // The current breakpoint did not change, nothing to do.
+ if (currentBreakpoint === candidateBreakpoint) {
+ return;
+ } // Only remove a class if there is a current breakpoint.
+
+
+ if (currentBreakpoint) {
+ this.removeClass(BREAKPOINT_CLASSES[currentBreakpoint]);
+ }
+
+ this.addClass(BREAKPOINT_CLASSES[candidateBreakpoint]);
+ this.breakpoint_ = candidateBreakpoint;
+ break;
+ }
+ }
+ }
+ /**
+ * Removes the current breakpoint.
+ *
+ * @private
+ */
+ ;
+
+ _proto.removeCurrentBreakpoint_ = function removeCurrentBreakpoint_() {
+ var className = this.currentBreakpointClass();
+ this.breakpoint_ = '';
+
+ if (className) {
+ this.removeClass(className);
+ }
+ }
+ /**
+ * Get or set breakpoints on the player.
+ *
+ * Calling this method with an object or `true` will remove any previous
+ * custom breakpoints and start from the defaults again.
+ *
+ * @param {Object|boolean} [breakpoints]
+ * If an object is given, it can be used to provide custom
+ * breakpoints. If `true` is given, will set default breakpoints.
+ * If this argument is not given, will simply return the current
+ * breakpoints.
+ *
+ * @param {number} [breakpoints.tiny]
+ * The maximum width for the "vjs-layout-tiny" class.
+ *
+ * @param {number} [breakpoints.xsmall]
+ * The maximum width for the "vjs-layout-x-small" class.
+ *
+ * @param {number} [breakpoints.small]
+ * The maximum width for the "vjs-layout-small" class.
+ *
+ * @param {number} [breakpoints.medium]
+ * The maximum width for the "vjs-layout-medium" class.
+ *
+ * @param {number} [breakpoints.large]
+ * The maximum width for the "vjs-layout-large" class.
+ *
+ * @param {number} [breakpoints.xlarge]
+ * The maximum width for the "vjs-layout-x-large" class.
+ *
+ * @param {number} [breakpoints.huge]
+ * The maximum width for the "vjs-layout-huge" class.
+ *
+ * @return {Object}
+ * An object mapping breakpoint names to maximum width values.
+ */
+ ;
+
+ _proto.breakpoints = function breakpoints(_breakpoints) {
+ // Used as a getter.
+ if (_breakpoints === undefined) {
+ return assign(this.breakpoints_);
+ }
+
+ this.breakpoint_ = '';
+ this.breakpoints_ = assign({}, DEFAULT_BREAKPOINTS, _breakpoints); // When breakpoint definitions change, we need to update the currently
+ // selected breakpoint.
+
+ this.updateCurrentBreakpoint_(); // Clone the breakpoints before returning.
+
+ return assign(this.breakpoints_);
+ }
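+ // Illustrative usage sketch: enabling responsive layout classes and
+ // overriding two default breakpoint widths. The element id and pixel values
+ // are assumptions.
+ //
+ //   var player = videojs('my-video', { responsive: true });
+ //   player.breakpoints({ tiny: 300, xsmall: 400 });
+ //   player.currentBreakpointClass(); // e.g. 'vjs-layout-x-small'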
+ /**
+ * Get or set a flag indicating whether or not this player should adjust
+ * its UI based on its dimensions.
+ *
+ * @param {boolean} value
+ * Should be `true` if the player should adjust its UI based on its
+ * dimensions; otherwise, should be `false`.
+ *
+ * @return {boolean}
+ * Will be `true` if this player should adjust its UI based on its
+ * dimensions; otherwise, will be `false`.
+ */
+ ;
+
+ _proto.responsive = function responsive(value) {
+ // Used as a getter.
+ if (value === undefined) {
+ return this.responsive_;
+ }
+
+ value = Boolean(value);
+ var current = this.responsive_; // Nothing changed.
+
+ if (value === current) {
+ return;
+ } // The value actually changed, set it.
+
+
+ this.responsive_ = value; // Start listening for breakpoints and set the initial breakpoint if the
+ // player is now responsive.
+
+ if (value) {
+ this.on('playerresize', this.boundUpdateCurrentBreakpoint_);
+ this.updateCurrentBreakpoint_(); // Stop listening for breakpoints if the player is no longer responsive.
+ } else {
+ this.off('playerresize', this.boundUpdateCurrentBreakpoint_);
+ this.removeCurrentBreakpoint_();
+ }
+
+ return value;
+ }
+ /**
+ * Get current breakpoint name, if any.
+ *
+ * @return {string}
+ * If there is currently a breakpoint set, returns the key from the
+ * breakpoints object matching it. Otherwise, returns an empty string.
+ */
+ ;
+
+ _proto.currentBreakpoint = function currentBreakpoint() {
+ return this.breakpoint_;
+ }
+ /**
+ * Get the current breakpoint class name.
+ *
+ * @return {string}
+ * The matching class name (e.g. `"vjs-layout-tiny"` or
+ * `"vjs-layout-large"`) for the current breakpoint. Empty string if
+ * there is no current breakpoint.
+ */
+ ;
+
+ _proto.currentBreakpointClass = function currentBreakpointClass() {
+ return BREAKPOINT_CLASSES[this.breakpoint_] || '';
+ }
+ /**
+ * An object that describes a single piece of media.
+ *
+ * Properties that are not part of this type description will be retained; so,
+ * this can be viewed as a generic metadata storage mechanism as well.
+ *
+ * @see {@link https://wicg.github.io/mediasession/#the-mediametadata-interface}
+ * @typedef {Object} Player~MediaObject
+ *
+ * @property {string} [album]
+ * Unused, except if this object is passed to the `MediaSession`
+ * API.
+ *
+ * @property {string} [artist]
+ * Unused, except if this object is passed to the `MediaSession`
+ * API.
+ *
+ * @property {Object[]} [artwork]
+ * Unused, except if this object is passed to the `MediaSession`
+ * API. If not specified, will be populated via the `poster`, if
+ * available.
+ *
+ * @property {string} [poster]
+ * URL to an image that will display before playback.
+ *
+ * @property {Tech~SourceObject|Tech~SourceObject[]|string} [src]
+ * A single source object, an array of source objects, or a string
+ * referencing a URL to a media source. It is _highly recommended_
+ * that an object or array of objects is used here, so that source
+ * selection algorithms can take the `type` into account.
+ *
+ * @property {string} [title]
+ * Unused, except if this object is passed to the `MediaSession`
+ * API.
+ *
+ * @property {Object[]} [textTracks]
+ * An array of objects to be used to create text tracks, following
+ * the {@link https://www.w3.org/TR/html50/embedded-content-0.html#the-track-element|native track element format}.
+ * For ease of removal, these will be created as "remote" text
+ * tracks and set to automatically clean up on source changes.
+ *
+ * These objects may have properties like `src`, `kind`, `label`,
+ * and `language`, see {@link Tech#createRemoteTextTrack}.
+ */
+
+ /**
+ * Populate the player using a {@link Player~MediaObject|MediaObject}.
+ *
+ * @param {Player~MediaObject} media
+ * A media object.
+ *
+ * @param {Function} ready
+ * A callback to be called when the player is ready.
+ */
+ ;
+
+ _proto.loadMedia = function loadMedia(media, ready) {
+ var _this23 = this;
+
+ if (!media || typeof media !== 'object') {
+ return;
+ }
+
+ this.reset(); // Clone the media object so it cannot be mutated from outside.
+
+ this.cache_.media = mergeOptions$3(media);
+ var _this$cache_$media = this.cache_.media,
+ artwork = _this$cache_$media.artwork,
+ poster = _this$cache_$media.poster,
+ src = _this$cache_$media.src,
+ textTracks = _this$cache_$media.textTracks; // If `artwork` is not given, create it using `poster`.
+
+ if (!artwork && poster) {
+ this.cache_.media.artwork = [{
+ src: poster,
+ type: getMimetype(poster)
+ }];
+ }
+
+ if (src) {
+ this.src(src);
+ }
+
+ if (poster) {
+ this.poster(poster);
+ }
+
+ if (Array.isArray(textTracks)) {
+ textTracks.forEach(function (tt) {
+ return _this23.addRemoteTextTrack(tt, false);
+ });
+ }
+
+ this.ready(ready);
+ }
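+ // Illustrative usage sketch: populating the player from a MediaObject as
+ // described above. The URLs, title and poster are assumptions.
+ //
+ //   player.loadMedia({
+ //     title: 'Example clip',
+ //     poster: '/media/poster.jpg',
+ //     src: [{ src: '/media/clip.mp4', type: 'video/mp4' }],
+ //     textTracks: [{ kind: 'captions', src: '/media/en.vtt', srclang: 'en', label: 'English' }]
+ //   }, function () {
+ //     console.log(player.getMedia());
+ //   });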
+ /**
+ * Get a clone of the current {@link Player~MediaObject} for this player.
+ *
+ * If the `loadMedia` method has not been used, will attempt to return a
+ * {@link Player~MediaObject} based on the current state of the player.
+ *
+ * @return {Player~MediaObject}
+ */
+ ;
+
+ _proto.getMedia = function getMedia() {
+ if (!this.cache_.media) {
+ var poster = this.poster();
+ var src = this.currentSources();
+ var textTracks = Array.prototype.map.call(this.remoteTextTracks(), function (tt) {
+ return {
+ kind: tt.kind,
+ label: tt.label,
+ language: tt.language,
+ src: tt.src
+ };
+ });
+ var media = {
+ src: src,
+ textTracks: textTracks
+ };
+
+ if (poster) {
+ media.poster = poster;
+ media.artwork = [{
+ src: media.poster,
+ type: getMimetype(media.poster)
+ }];
+ }
+
+ return media;
+ }
+
+ return mergeOptions$3(this.cache_.media);
+ }
+ /**
+ * Gets tag settings
+ *
+ * @param {Element} tag
+ * The player tag
+ *
+ * @return {Object}
+ * An object containing all of the settings
+ * for a player tag
+ */
+ ;
+
+ Player.getTagSettings = function getTagSettings(tag) {
+ var baseOptions = {
+ sources: [],
+ tracks: []
+ };
+ var tagOptions = getAttributes(tag);
+ var dataSetup = tagOptions['data-setup'];
+
+ if (hasClass(tag, 'vjs-fill')) {
+ tagOptions.fill = true;
+ }
+
+ if (hasClass(tag, 'vjs-fluid')) {
+ tagOptions.fluid = true;
+ } // Check if data-setup attr exists.
+
+
+ if (dataSetup !== null) {
+ // Parse options JSON
+ // If empty string, make it a parsable json object.
+ var _safeParseTuple = safeParseTuple__default['default'](dataSetup || '{}'),
+ err = _safeParseTuple[0],
+ data = _safeParseTuple[1];
+
+ if (err) {
+ log$1.error(err);
+ }
+
+ assign(tagOptions, data);
+ }
+
+ assign(baseOptions, tagOptions); // Get tag children settings
+
+ if (tag.hasChildNodes()) {
+ var children = tag.childNodes;
+
+ for (var i = 0, j = children.length; i < j; i++) {
+ var child = children[i]; // Change case needed: http://ejohn.org/blog/nodename-case-sensitivity/
+
+ var childName = child.nodeName.toLowerCase();
+
+ if (childName === 'source') {
+ baseOptions.sources.push(getAttributes(child));
+ } else if (childName === 'track') {
+ baseOptions.tracks.push(getAttributes(child));
+ }
+ }
+ }
+
+ return baseOptions;
+ }
+ /**
+ * Determine whether or not flexbox is supported
+ *
+ * @return {boolean}
+ * - true if flexbox is supported
+ * - false if flexbox is not supported
+ */
+ ;
+
+ _proto.flexNotSupported_ = function flexNotSupported_() {
+ var elem = document__default['default'].createElement('i'); // Note: We don't actually use flexBasis (or flexOrder), but it's one of the more
+ // common flex features that we can rely on when checking for flex support.
+
+ return !('flexBasis' in elem.style || 'webkitFlexBasis' in elem.style || 'mozFlexBasis' in elem.style || 'msFlexBasis' in elem.style || // IE10-specific (2012 flex spec), available for completeness
+ 'msFlexOrder' in elem.style);
+ }
+ /**
+ * Set debug mode to enable/disable logs at info level.
+ *
+ * @param {boolean} enabled
+ * @fires Player#debugon
+ * @fires Player#debugoff
+ */
+ ;
+
+ _proto.debug = function debug(enabled) {
+ if (enabled === undefined) {
+ return this.debugEnabled_;
+ }
+
+ if (enabled) {
+ this.trigger('debugon');
+ this.previousLogLevel_ = this.log.level;
+ this.log.level('debug');
+ this.debugEnabled_ = true;
+ } else {
+ this.trigger('debugoff');
+ this.log.level(this.previousLogLevel_);
+ this.previousLogLevel_ = undefined;
+ this.debugEnabled_ = false;
+ }
+ }
+ /**
+ * Set or get current playback rates.
+ * Takes an array and updates the playback rates menu with the new items.
+ * Pass in an empty array to hide the menu.
+ * Values other than arrays are ignored.
+ *
+ * @fires Player#playbackrateschange
+ * @param {number[]} newRates
+ * The new rates that the playback rates menu should update to.
+ * An empty array will hide the menu
+ * @return {number[]} When used as a getter will return the current playback rates
+ */
+ ;
+
+ _proto.playbackRates = function playbackRates(newRates) {
+ if (newRates === undefined) {
+ return this.cache_.playbackRates;
+ } // ignore any value that isn't an array
+
+
+ if (!Array.isArray(newRates)) {
+ return;
+ } // ignore any arrays that don't only contain numbers
+
+
+ if (!newRates.every(function (rate) {
+ return typeof rate === 'number';
+ })) {
+ return;
+ }
+
+ this.cache_.playbackRates = newRates;
+ /**
+ * fires when the playback rates in a player are changed
+ *
+ * @event Player#playbackrateschange
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('playbackrateschange');
+ };
+
+ return Player;
+}(Component$1);
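+
+// A minimal usage sketch (not part of the video.js source): the URLs and metadata below
+// are hypothetical. It shows `loadMedia()` populating a MediaObject and `getMedia()`
+// reading it (or a best-effort equivalent) back, as documented above.
+function exampleLoadAndInspectMedia(player) {
+  player.loadMedia({
+    src: { src: 'https://example.com/clip.mp4', type: 'video/mp4' },
+    poster: 'https://example.com/poster.jpg',
+    title: 'Example Title',
+    artist: 'Example Artist',
+    textTracks: [
+      { kind: 'captions', srclang: 'en', label: 'English', src: 'https://example.com/captions.vtt' }
+    ]
+  }, function () {
+    // Once ready, getMedia() returns a clone of the cached MediaObject.
+    player.log(player.getMedia());
+  });
+}
+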
+/**
+ * Get the {@link VideoTrackList}
+ * @link https://html.spec.whatwg.org/multipage/embedded-content.html#videotracklist
+ *
+ * @return {VideoTrackList}
+ * the current video track list
+ *
+ * @method Player.prototype.videoTracks
+ */
+
+/**
+ * Get the {@link AudioTrackList}
+ * @link https://html.spec.whatwg.org/multipage/embedded-content.html#audiotracklist
+ *
+ * @return {AudioTrackList}
+ * the current audio track list
+ *
+ * @method Player.prototype.audioTracks
+ */
+
+/**
+ * Get the {@link TextTrackList}
+ *
+ * @link http://www.w3.org/html/wg/drafts/html/master/embedded-content-0.html#dom-media-texttracks
+ *
+ * @return {TextTrackList}
+ * the current text track list
+ *
+ * @method Player.prototype.textTracks
+ */
+
+/**
+ * Get the remote {@link TextTrackList}
+ *
+ * @return {TextTrackList}
+ * The current remote text track list
+ *
+ * @method Player.prototype.remoteTextTracks
+ */
+
+/**
+ * Get the remote {@link HtmlTrackElementList} tracks.
+ *
+ * @return {HtmlTrackElementList}
+ * The current remote text track element list
+ *
+ * @method Player.prototype.remoteTextTrackEls
+ */
+
+
+ALL.names.forEach(function (name) {
+ var props = ALL[name];
+
+ Player.prototype[props.getterName] = function () {
+ if (this.tech_) {
+ return this.tech_[props.getterName]();
+    } // if we have not yet called loadTech_, we create {video,audio,text}Tracks_
+ // these will be passed to the tech during loading
+
+
+ this[props.privateName] = this[props.privateName] || new props.ListClass();
+ return this[props.privateName];
+ };
+});
+/**
+ * Get or set the `Player`'s crossorigin option. For the HTML5 player, this
+ * sets the `crossOrigin` property on the `<video>` tag to control the CORS
+ * behavior.
+ *
+ * @see [Video Element Attributes]{@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video#attr-crossorigin}
+ *
+ * @param {string} [value]
+ * The value to set the `Player`'s crossorigin to. If an argument is
+ * given, must be one of `anonymous` or `use-credentials`.
+ *
+ * @return {string|undefined}
+ * - The current crossorigin value of the `Player` when getting.
+ * - undefined when setting
+ */
+
+Player.prototype.crossorigin = Player.prototype.crossOrigin;
+/**
+ * Global enumeration of players.
+ *
+ * The keys are the player IDs and the values are either the {@link Player}
+ * instance or `null` for disposed players.
+ *
+ * @type {Object}
+ */
+
+Player.players = {};
+var navigator = window__default['default'].navigator;
+/*
+ * Player instance options, surfaced using options
+ * options = Player.prototype.options_
+ * Make changes in options, not here.
+ *
+ * @type {Object}
+ * @private
+ */
+
+Player.prototype.options_ = {
+ // Default order of fallback technology
+ techOrder: Tech.defaultTechOrder_,
+ html5: {},
+ // default inactivity timeout
+ inactivityTimeout: 2000,
+ // default playback rates
+ playbackRates: [],
+ // Add playback rate selection by adding rates
+ // 'playbackRates': [0.5, 1, 1.5, 2],
+ liveui: false,
+ // Included control sets
+ children: ['mediaLoader', 'posterImage', 'textTrackDisplay', 'loadingSpinner', 'bigPlayButton', 'liveTracker', 'controlBar', 'errorDisplay', 'textTrackSettings', 'resizeManager'],
+ language: navigator && (navigator.languages && navigator.languages[0] || navigator.userLanguage || navigator.language) || 'en',
+ // locales and their language translations
+ languages: {},
+ // Default message to show when a video cannot be played.
+ notSupportedMessage: 'No compatible source was found for this media.',
+ normalizeAutoplay: false,
+ fullscreen: {
+ options: {
+ navigationUI: 'hide'
+ }
+ },
+ breakpoints: {},
+ responsive: false,
+ audioOnlyMode: false,
+ audioPosterMode: false
+};
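+
+// A minimal sketch (the element id is hypothetical): any of the defaults above can be
+// overridden per player at creation time; keys that are not supplied fall back to these values.
+function exampleOverrideDefaultOptions() {
+  return videojs('my-video', {
+    playbackRates: [0.5, 1, 1.5, 2], // populate the playback rate menu
+    inactivityTimeout: 4000,         // hide controls after 4s without user activity
+    liveui: true,                    // opt in to the newer live UI
+    notSupportedMessage: 'This media cannot be played in your browser.'
+  });
+}
+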
+[
+/**
+ * Returns whether or not the player is in the "ended" state.
+ *
+ * @return {Boolean} True if the player is in the ended state, false if not.
+ * @method Player#ended
+ */
+'ended',
+/**
+ * Returns whether or not the player is in the "seeking" state.
+ *
+ * @return {Boolean} True if the player is in the seeking state, false if not.
+ * @method Player#seeking
+ */
+'seeking',
+/**
+ * Returns the TimeRanges of the media that are currently available
+ * for seeking to.
+ *
+ * @return {TimeRanges} the seekable intervals of the media timeline
+ * @method Player#seekable
+ */
+'seekable',
+/**
+ * Returns the current state of network activity for the element, from
+ * the codes in the list below.
+ * - NETWORK_EMPTY (numeric value 0)
+ * The element has not yet been initialised. All attributes are in
+ * their initial states.
+ * - NETWORK_IDLE (numeric value 1)
+ * The element's resource selection algorithm is active and has
+ * selected a resource, but it is not actually using the network at
+ * this time.
+ * - NETWORK_LOADING (numeric value 2)
+ * The user agent is actively trying to download data.
+ * - NETWORK_NO_SOURCE (numeric value 3)
+ * The element's resource selection algorithm is active, but it has
+ * not yet found a resource to use.
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#network-states
+ * @return {number} the current network activity state
+ * @method Player#networkState
+ */
+'networkState',
+/**
+ * Returns a value that expresses the current state of the element
+ * with respect to rendering the current playback position, from the
+ * codes in the list below.
+ * - HAVE_NOTHING (numeric value 0)
+ * No information regarding the media resource is available.
+ * - HAVE_METADATA (numeric value 1)
+ * Enough of the resource has been obtained that the duration of the
+ * resource is available.
+ * - HAVE_CURRENT_DATA (numeric value 2)
+ * Data for the immediate current playback position is available.
+ * - HAVE_FUTURE_DATA (numeric value 3)
+ * Data for the immediate current playback position is available, as
+ * well as enough data for the user agent to advance the current
+ * playback position in the direction of playback.
+ * - HAVE_ENOUGH_DATA (numeric value 4)
+ * The user agent estimates that enough data is available for
+ * playback to proceed uninterrupted.
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-readystate
+ * @return {number} the current playback rendering state
+ * @method Player#readyState
+ */
+'readyState'].forEach(function (fn) {
+ Player.prototype[fn] = function () {
+ return this.techGet_(fn);
+ };
+});
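+
+// A minimal sketch: the passthrough getters above proxy to the tech, so readyState() can
+// gate a seek until at least HAVE_FUTURE_DATA (3). Waiting on 'canplay' as the fallback is
+// an assumption made for this example, not something the code above prescribes.
+function exampleSeekWhenReady(player, seconds) {
+  if (player.readyState() >= 3) {
+    player.currentTime(seconds);
+    return;
+  }
+
+  player.one('canplay', function () {
+    player.currentTime(seconds);
+  });
+}
+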
+TECH_EVENTS_RETRIGGER.forEach(function (event) {
+ Player.prototype["handleTech" + toTitleCase$1(event) + "_"] = function () {
+ return this.trigger(event);
+ };
+});
+/**
+ * Fired when the player has initial duration and dimension information
+ *
+ * @event Player#loadedmetadata
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Fired when the player has downloaded data at the current playback position
+ *
+ * @event Player#loadeddata
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Fired when the current playback position has changed.
+ * During playback this is fired every 15-250 milliseconds, depending on the
+ * playback technology in use.
+ *
+ * @event Player#timeupdate
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Fired when the volume changes
+ *
+ * @event Player#volumechange
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Reports whether or not a player has a plugin available.
+ *
+ * This does not report whether or not the plugin has ever been initialized
+ * on this player. For that, [usingPlugin]{@link Player#usingPlugin}.
+ *
+ * @method Player#hasPlugin
+ * @param {string} name
+ * The name of a plugin.
+ *
+ * @return {boolean}
+ * Whether or not this player has the requested plugin available.
+ */
+
+/**
+ * Reports whether or not a player is using a plugin by name.
+ *
+ * For basic plugins, this only reports whether the plugin has _ever_ been
+ * initialized on this player.
+ *
+ * @method Player#usingPlugin
+ * @param {string} name
+ * The name of a plugin.
+ *
+ * @return {boolean}
+ * Whether or not this player is using the requested plugin.
+ */
+
+Component$1.registerComponent('Player', Player);
+
+/**
+ * The base plugin name.
+ *
+ * @private
+ * @constant
+ * @type {string}
+ */
+
+var BASE_PLUGIN_NAME = 'plugin';
+/**
+ * The key on which a player's active plugins cache is stored.
+ *
+ * @private
+ * @constant
+ * @type {string}
+ */
+
+var PLUGIN_CACHE_KEY = 'activePlugins_';
+/**
+ * Stores registered plugins in a private space.
+ *
+ * @private
+ * @type {Object}
+ */
+
+var pluginStorage = {};
+/**
+ * Reports whether or not a plugin has been registered.
+ *
+ * @private
+ * @param {string} name
+ * The name of a plugin.
+ *
+ * @return {boolean}
+ * Whether or not the plugin has been registered.
+ */
+
+var pluginExists = function pluginExists(name) {
+ return pluginStorage.hasOwnProperty(name);
+};
+/**
+ * Get a single registered plugin by name.
+ *
+ * @private
+ * @param {string} name
+ * The name of a plugin.
+ *
+ * @return {Function|undefined}
+ * The plugin (or undefined).
+ */
+
+
+var getPlugin = function getPlugin(name) {
+ return pluginExists(name) ? pluginStorage[name] : undefined;
+};
+/**
+ * Marks a plugin as "active" on a player.
+ *
+ * Also, ensures that the player has an object for tracking active plugins.
+ *
+ * @private
+ * @param {Player} player
+ * A Video.js player instance.
+ *
+ * @param {string} name
+ * The name of a plugin.
+ */
+
+
+var markPluginAsActive = function markPluginAsActive(player, name) {
+ player[PLUGIN_CACHE_KEY] = player[PLUGIN_CACHE_KEY] || {};
+ player[PLUGIN_CACHE_KEY][name] = true;
+};
+/**
+ * Triggers a pair of plugin setup events.
+ *
+ * @private
+ * @param {Player} player
+ * A Video.js player instance.
+ *
+ * @param {Plugin~PluginEventHash} hash
+ * A plugin event hash.
+ *
+ * @param {boolean} [before]
+ * If true, prefixes the event name with "before". In other words,
+ * use this to trigger "beforepluginsetup" instead of "pluginsetup".
+ */
+
+
+var triggerSetupEvent = function triggerSetupEvent(player, hash, before) {
+ var eventName = (before ? 'before' : '') + 'pluginsetup';
+ player.trigger(eventName, hash);
+ player.trigger(eventName + ':' + hash.name, hash);
+};
+/**
+ * Takes a basic plugin function and returns a wrapper function which marks
+ * on the player that the plugin has been activated.
+ *
+ * @private
+ * @param {string} name
+ * The name of the plugin.
+ *
+ * @param {Function} plugin
+ * The basic plugin.
+ *
+ * @return {Function}
+ * A wrapper function for the given plugin.
+ */
+
+
+var createBasicPlugin = function createBasicPlugin(name, plugin) {
+ var basicPluginWrapper = function basicPluginWrapper() {
+ // We trigger the "beforepluginsetup" and "pluginsetup" events on the player
+ // regardless, but we want the hash to be consistent with the hash provided
+ // for advanced plugins.
+ //
+ // The only potentially counter-intuitive thing here is the `instance` in
+ // the "pluginsetup" event is the value returned by the `plugin` function.
+ triggerSetupEvent(this, {
+ name: name,
+ plugin: plugin,
+ instance: null
+ }, true);
+ var instance = plugin.apply(this, arguments);
+ markPluginAsActive(this, name);
+ triggerSetupEvent(this, {
+ name: name,
+ plugin: plugin,
+ instance: instance
+ });
+ return instance;
+ };
+
+ Object.keys(plugin).forEach(function (prop) {
+ basicPluginWrapper[prop] = plugin[prop];
+ });
+ return basicPluginWrapper;
+};
+/**
+ * Takes a plugin sub-class and returns a factory function for generating
+ * instances of it.
+ *
+ * This factory function will replace itself with an instance of the requested
+ * sub-class of Plugin.
+ *
+ * @private
+ * @param {string} name
+ * The name of the plugin.
+ *
+ * @param {Plugin} PluginSubClass
+ * The advanced plugin.
+ *
+ * @return {Function}
+ */
+
+
+var createPluginFactory = function createPluginFactory(name, PluginSubClass) {
+ // Add a `name` property to the plugin prototype so that each plugin can
+ // refer to itself by name.
+ PluginSubClass.prototype.name = name;
+ return function () {
+ triggerSetupEvent(this, {
+ name: name,
+ plugin: PluginSubClass,
+ instance: null
+ }, true);
+
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ var instance = _construct__default['default'](PluginSubClass, [this].concat(args)); // The plugin is replaced by a function that returns the current instance.
+
+
+ this[name] = function () {
+ return instance;
+ };
+
+ triggerSetupEvent(this, instance.getEventHash());
+ return instance;
+ };
+};
+/**
+ * Parent class for all advanced plugins.
+ *
+ * @mixes module:evented~EventedMixin
+ * @mixes module:stateful~StatefulMixin
+ * @fires Player#beforepluginsetup
+ * @fires Player#beforepluginsetup:$name
+ * @fires Player#pluginsetup
+ * @fires Player#pluginsetup:$name
+ * @listens Player#dispose
+ * @throws {Error}
+ * If attempting to instantiate the base {@link Plugin} class
+ * directly instead of via a sub-class.
+ */
+
+
+var Plugin = /*#__PURE__*/function () {
+ /**
+ * Creates an instance of this class.
+ *
+ * Sub-classes should call `super` to ensure plugins are properly initialized.
+ *
+ * @param {Player} player
+ * A Video.js player instance.
+ */
+ function Plugin(player) {
+ if (this.constructor === Plugin) {
+ throw new Error('Plugin must be sub-classed; not directly instantiated.');
+ }
+
+ this.player = player;
+
+ if (!this.log) {
+ this.log = this.player.log.createLogger(this.name);
+ } // Make this object evented, but remove the added `trigger` method so we
+ // use the prototype version instead.
+
+
+ evented(this);
+ delete this.trigger;
+ stateful(this, this.constructor.defaultState);
+ markPluginAsActive(player, this.name); // Auto-bind the dispose method so we can use it as a listener and unbind
+ // it later easily.
+
+ this.dispose = this.dispose.bind(this); // If the player is disposed, dispose the plugin.
+
+ player.on('dispose', this.dispose);
+ }
+ /**
+ * Get the version of the plugin that was set on .VERSION
+ */
+
+
+ var _proto = Plugin.prototype;
+
+ _proto.version = function version() {
+ return this.constructor.VERSION;
+ }
+ /**
+ * Each event triggered by plugins includes a hash of additional data with
+ * conventional properties.
+ *
+ * This returns that object or mutates an existing hash.
+ *
+ * @param {Object} [hash={}]
+   *        An object to be used as an event hash.
+ *
+ * @return {Plugin~PluginEventHash}
+ * An event hash object with provided properties mixed-in.
+ */
+ ;
+
+ _proto.getEventHash = function getEventHash(hash) {
+ if (hash === void 0) {
+ hash = {};
+ }
+
+ hash.name = this.name;
+ hash.plugin = this.constructor;
+ hash.instance = this;
+ return hash;
+ }
+ /**
+ * Triggers an event on the plugin object and overrides
+ * {@link module:evented~EventedMixin.trigger|EventedMixin.trigger}.
+ *
+ * @param {string|Object} event
+ * An event type or an object with a type property.
+ *
+ * @param {Object} [hash={}]
+ * Additional data hash to merge with a
+ * {@link Plugin~PluginEventHash|PluginEventHash}.
+ *
+ * @return {boolean}
+ * Whether or not default was prevented.
+ */
+ ;
+
+ _proto.trigger = function trigger$1(event, hash) {
+ if (hash === void 0) {
+ hash = {};
+ }
+
+ return trigger(this.eventBusEl_, event, this.getEventHash(hash));
+ }
+ /**
+ * Handles "statechanged" events on the plugin. No-op by default, override by
+ * subclassing.
+ *
+ * @abstract
+ * @param {Event} e
+ * An event object provided by a "statechanged" event.
+ *
+ * @param {Object} e.changes
+ * An object describing changes that occurred with the "statechanged"
+ * event.
+ */
+ ;
+
+ _proto.handleStateChanged = function handleStateChanged(e) {}
+ /**
+ * Disposes a plugin.
+ *
+ * Subclasses can override this if they want, but for the sake of safety,
+   * it's probably best to subscribe to the "dispose" event.
+ *
+ * @fires Plugin#dispose
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ var name = this.name,
+ player = this.player;
+ /**
+     * Signals that an advanced plugin is about to be disposed.
+ *
+ * @event Plugin#dispose
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('dispose');
+ this.off();
+ player.off('dispose', this.dispose); // Eliminate any possible sources of leaking memory by clearing up
+ // references between the player and the plugin instance and nulling out
+ // the plugin's state and replacing methods with a function that throws.
+
+ player[PLUGIN_CACHE_KEY][name] = false;
+ this.player = this.state = null; // Finally, replace the plugin name on the player with a new factory
+ // function, so that the plugin is ready to be set up again.
+
+ player[name] = createPluginFactory(name, pluginStorage[name]);
+ }
+ /**
+ * Determines if a plugin is a basic plugin (i.e. not a sub-class of `Plugin`).
+ *
+ * @param {string|Function} plugin
+ * If a string, matches the name of a plugin. If a function, will be
+ * tested directly.
+ *
+ * @return {boolean}
+ * Whether or not a plugin is a basic plugin.
+ */
+ ;
+
+ Plugin.isBasic = function isBasic(plugin) {
+ var p = typeof plugin === 'string' ? getPlugin(plugin) : plugin;
+ return typeof p === 'function' && !Plugin.prototype.isPrototypeOf(p.prototype);
+ }
+ /**
+ * Register a Video.js plugin.
+ *
+ * @param {string} name
+ * The name of the plugin to be registered. Must be a string and
+ * must not match an existing plugin or a method on the `Player`
+ * prototype.
+ *
+ * @param {Function} plugin
+ * A sub-class of `Plugin` or a function for basic plugins.
+ *
+ * @return {Function}
+ * For advanced plugins, a factory function for that plugin. For
+ * basic plugins, a wrapper function that initializes the plugin.
+ */
+ ;
+
+ Plugin.registerPlugin = function registerPlugin(name, plugin) {
+ if (typeof name !== 'string') {
+ throw new Error("Illegal plugin name, \"" + name + "\", must be a string, was " + typeof name + ".");
+ }
+
+ if (pluginExists(name)) {
+ log$1.warn("A plugin named \"" + name + "\" already exists. You may want to avoid re-registering plugins!");
+ } else if (Player.prototype.hasOwnProperty(name)) {
+ throw new Error("Illegal plugin name, \"" + name + "\", cannot share a name with an existing player method!");
+ }
+
+ if (typeof plugin !== 'function') {
+ throw new Error("Illegal plugin for \"" + name + "\", must be a function, was " + typeof plugin + ".");
+ }
+
+ pluginStorage[name] = plugin; // Add a player prototype method for all sub-classed plugins (but not for
+ // the base Plugin class).
+
+ if (name !== BASE_PLUGIN_NAME) {
+ if (Plugin.isBasic(plugin)) {
+ Player.prototype[name] = createBasicPlugin(name, plugin);
+ } else {
+ Player.prototype[name] = createPluginFactory(name, plugin);
+ }
+ }
+
+ return plugin;
+ }
+ /**
+ * De-register a Video.js plugin.
+ *
+ * @param {string} name
+ * The name of the plugin to be de-registered. Must be a string that
+ * matches an existing plugin.
+ *
+ * @throws {Error}
+ * If an attempt is made to de-register the base plugin.
+ */
+ ;
+
+ Plugin.deregisterPlugin = function deregisterPlugin(name) {
+ if (name === BASE_PLUGIN_NAME) {
+ throw new Error('Cannot de-register base plugin.');
+ }
+
+ if (pluginExists(name)) {
+ delete pluginStorage[name];
+ delete Player.prototype[name];
+ }
+ }
+ /**
+ * Gets an object containing multiple Video.js plugins.
+ *
+ * @param {Array} [names]
+ * If provided, should be an array of plugin names. Defaults to _all_
+ * plugin names.
+ *
+ * @return {Object|undefined}
+ * An object containing plugin(s) associated with their name(s) or
+   *          `undefined` if no matching plugins exist.
+ */
+ ;
+
+ Plugin.getPlugins = function getPlugins(names) {
+ if (names === void 0) {
+ names = Object.keys(pluginStorage);
+ }
+
+ var result;
+ names.forEach(function (name) {
+ var plugin = getPlugin(name);
+
+ if (plugin) {
+ result = result || {};
+ result[name] = plugin;
+ }
+ });
+ return result;
+ }
+ /**
+ * Gets a plugin's version, if available
+ *
+ * @param {string} name
+ * The name of a plugin.
+ *
+ * @return {string}
+ * The plugin's version or an empty string.
+ */
+ ;
+
+ Plugin.getPluginVersion = function getPluginVersion(name) {
+ var plugin = getPlugin(name);
+ return plugin && plugin.VERSION || '';
+ };
+
+ return Plugin;
+}();
+/**
+ * Gets a plugin by name if it exists.
+ *
+ * @static
+ * @method getPlugin
+ * @memberOf Plugin
+ * @param {string} name
+ * The name of a plugin.
+ *
+ * @returns {Function|undefined}
+ * The plugin (or `undefined`).
+ */
+
+
+Plugin.getPlugin = getPlugin;
+/**
+ * The name of the base plugin class as it is registered.
+ *
+ * @type {string}
+ */
+
+Plugin.BASE_PLUGIN_NAME = BASE_PLUGIN_NAME;
+Plugin.registerPlugin(BASE_PLUGIN_NAME, Plugin);
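+
+// A minimal sketch (the plugin name and element id are hypothetical): registering a basic
+// plugin -- a plain function called with the player as `this` -- and checking it with the
+// hasPlugin()/usingPlugin() helpers defined below.
+function exampleRegisterBasicPlugin() {
+  videojs.registerPlugin('loopToggle', function (options) {
+    // Basic plugins run once per invocation; this one just flips the loop setting.
+    this.loop(options && typeof options.loop === 'boolean' ? options.loop : true);
+  });
+
+  var player = videojs.getPlayer('my-video');
+
+  if (player && player.hasPlugin('loopToggle') && !player.usingPlugin('loopToggle')) {
+    player.loopToggle({ loop: true });
+  }
+}
+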
+/**
+ * Documented in player.js
+ *
+ * @ignore
+ */
+
+Player.prototype.usingPlugin = function (name) {
+ return !!this[PLUGIN_CACHE_KEY] && this[PLUGIN_CACHE_KEY][name] === true;
+};
+/**
+ * Documented in player.js
+ *
+ * @ignore
+ */
+
+
+Player.prototype.hasPlugin = function (name) {
+ return !!pluginExists(name);
+};
+/**
+ * Signals that a plugin is about to be set up on a player.
+ *
+ * @event Player#beforepluginsetup
+ * @type {Plugin~PluginEventHash}
+ */
+
+/**
+ * Signals that a plugin is about to be set up on a player - by name. The name
+ * is the name of the plugin.
+ *
+ * @event Player#beforepluginsetup:$name
+ * @type {Plugin~PluginEventHash}
+ */
+
+/**
+ * Signals that a plugin has just been set up on a player.
+ *
+ * @event Player#pluginsetup
+ * @type {Plugin~PluginEventHash}
+ */
+
+/**
+ * Signals that a plugin has just been set up on a player - by name. The name
+ * is the name of the plugin.
+ *
+ * @event Player#pluginsetup:$name
+ * @type {Plugin~PluginEventHash}
+ */
+
+/**
+ * @typedef {Object} Plugin~PluginEventHash
+ *
+ * @property {string} instance
+ * For basic plugins, the return value of the plugin function. For
+ * advanced plugins, the plugin instance on which the event is fired.
+ *
+ * @property {string} name
+ * The name of the plugin.
+ *
+ * @property {string} plugin
+ * For basic plugins, the plugin function. For advanced plugins, the
+ * plugin class/constructor.
+ */
+
+/**
+ * @file extend.js
+ * @module extend
+ */
+/**
+ * Used to subclass an existing class by emulating ES subclassing using the
+ * `extends` keyword.
+ *
+ * @function
+ * @example
+ * var MyComponent = videojs.extend(videojs.getComponent('Component'), {
+ * myCustomMethod: function() {
+ * // Do things in my method.
+ * }
+ * });
+ *
+ * @param {Function} superClass
+ * The class to inherit from
+ *
+ * @param {Object} [subClassMethods={}]
+ * Methods of the new class
+ *
+ * @return {Function}
+ *         The new class, extended with subClassMethods, that inherits from superClass.
+ */
+
+var extend = function extend(superClass, subClassMethods) {
+ if (subClassMethods === void 0) {
+ subClassMethods = {};
+ }
+
+ var subClass = function subClass() {
+ superClass.apply(this, arguments);
+ };
+
+ var methods = {};
+
+ if (typeof subClassMethods === 'object') {
+ if (subClassMethods.constructor !== Object.prototype.constructor) {
+ subClass = subClassMethods.constructor;
+ }
+
+ methods = subClassMethods;
+ } else if (typeof subClassMethods === 'function') {
+ subClass = subClassMethods;
+ }
+
+ _inherits__default['default'](subClass, superClass); // this is needed for backward-compatibility and node compatibility.
+
+
+ if (superClass) {
+ subClass.super_ = superClass;
+ } // Extend subObj's prototype with functions and other properties from props
+
+
+ for (var name in methods) {
+ if (methods.hasOwnProperty(name)) {
+ subClass.prototype[name] = methods[name];
+ }
+ }
+
+ return subClass;
+};
+
+/**
+ * @file video.js
+ * @module videojs
+ */
+/**
+ * Normalize an `id` value by trimming off a leading `#`
+ *
+ * @private
+ * @param {string} id
+ * A string, maybe with a leading `#`.
+ *
+ * @return {string}
+ * The string, without any leading `#`.
+ */
+
+var normalizeId = function normalizeId(id) {
+ return id.indexOf('#') === 0 ? id.slice(1) : id;
+};
+/**
+ * The `videojs()` function doubles as the main function for users to create a
+ * {@link Player} instance as well as the main library namespace.
+ *
+ * It can also be used as a getter for a pre-existing {@link Player} instance.
+ * However, we _strongly_ recommend using `videojs.getPlayer()` for this
+ * purpose because it avoids any potential for unintended initialization.
+ *
+ * Due to [limitations](https://github.com/jsdoc3/jsdoc/issues/955#issuecomment-313829149)
+ * of our JSDoc template, we cannot properly document this as both a function
+ * and a namespace, so its function signature is documented here.
+ *
+ * #### Arguments
+ * ##### id
+ * string|Element, **required**
+ *
+ * Video element or video element ID.
+ *
+ * ##### options
+ * Object, optional
+ *
+ * Options object for providing settings.
+ * See: [Options Guide](https://docs.videojs.com/tutorial-options.html).
+ *
+ * ##### ready
+ * {@link Component~ReadyCallback}, optional
+ *
+ * A function to be called when the {@link Player} and {@link Tech} are ready.
+ *
+ * #### Return Value
+ *
+ * The `videojs()` function returns a {@link Player} instance.
+ *
+ * @namespace
+ *
+ * @borrows AudioTrack as AudioTrack
+ * @borrows Component.getComponent as getComponent
+ * @borrows module:computed-style~computedStyle as computedStyle
+ * @borrows module:events.on as on
+ * @borrows module:events.one as one
+ * @borrows module:events.off as off
+ * @borrows module:events.trigger as trigger
+ * @borrows EventTarget as EventTarget
+ * @borrows module:extend~extend as extend
+ * @borrows module:fn.bind as bind
+ * @borrows module:format-time.formatTime as formatTime
+ * @borrows module:format-time.resetFormatTime as resetFormatTime
+ * @borrows module:format-time.setFormatTime as setFormatTime
+ * @borrows module:merge-options.mergeOptions as mergeOptions
+ * @borrows module:middleware.use as use
+ * @borrows Player.players as players
+ * @borrows Plugin.registerPlugin as registerPlugin
+ * @borrows Plugin.deregisterPlugin as deregisterPlugin
+ * @borrows Plugin.getPlugins as getPlugins
+ * @borrows Plugin.getPlugin as getPlugin
+ * @borrows Plugin.getPluginVersion as getPluginVersion
+ * @borrows Tech.getTech as getTech
+ * @borrows Tech.registerTech as registerTech
+ * @borrows TextTrack as TextTrack
+ * @borrows module:time-ranges.createTimeRanges as createTimeRange
+ * @borrows module:time-ranges.createTimeRanges as createTimeRanges
+ * @borrows module:url.isCrossOrigin as isCrossOrigin
+ * @borrows module:url.parseUrl as parseUrl
+ * @borrows VideoTrack as VideoTrack
+ *
+ * @param {string|Element} id
+ * Video element or video element ID.
+ *
+ * @param {Object} [options]
+ * Options object for providing settings.
+ * See: [Options Guide](https://docs.videojs.com/tutorial-options.html).
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * A function to be called when the {@link Player} and {@link Tech} are
+ * ready.
+ *
+ * @return {Player}
+ * The `videojs()` function returns a {@link Player|Player} instance.
+ */
+
+
+function videojs(id, options, ready) {
+ var player = videojs.getPlayer(id);
+
+ if (player) {
+ if (options) {
+ log$1.warn("Player \"" + id + "\" is already initialised. Options will not be applied.");
+ }
+
+ if (ready) {
+ player.ready(ready);
+ }
+
+ return player;
+ }
+
+ var el = typeof id === 'string' ? $('#' + normalizeId(id)) : id;
+
+ if (!isEl(el)) {
+ throw new TypeError('The element or ID supplied is not valid. (videojs)');
+ } // document.body.contains(el) will only check if el is contained within that one document.
+ // This causes problems for elements in iframes.
+ // Instead, use the element's ownerDocument instead of the global document.
+ // This will make sure that the element is indeed in the dom of that document.
+ // Additionally, check that the document in question has a default view.
+ // If the document is no longer attached to the dom, the defaultView of the document will be null.
+
+
+ if (!el.ownerDocument.defaultView || !el.ownerDocument.body.contains(el)) {
+ log$1.warn('The element supplied is not included in the DOM');
+ }
+
+ options = options || {}; // Store a copy of the el before modification, if it is to be restored in destroy()
+ // If div ingest, store the parent div
+
+ if (options.restoreEl === true) {
+ options.restoreEl = (el.parentNode && el.parentNode.hasAttribute('data-vjs-player') ? el.parentNode : el).cloneNode(true);
+ }
+
+ hooks('beforesetup').forEach(function (hookFunction) {
+ var opts = hookFunction(el, mergeOptions$3(options));
+
+ if (!isObject(opts) || Array.isArray(opts)) {
+ log$1.error('please return an object in beforesetup hooks');
+ return;
+ }
+
+ options = mergeOptions$3(options, opts);
+ }); // We get the current "Player" component here in case an integration has
+ // replaced it with a custom player.
+
+ var PlayerComponent = Component$1.getComponent('Player');
+ player = new PlayerComponent(el, options, ready);
+ hooks('setup').forEach(function (hookFunction) {
+ return hookFunction(player);
+ });
+ return player;
+}
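+
+// A minimal sketch (hypothetical element id and options): create a player only if one does
+// not already exist, checking videojs.getPlayer() first as recommended in the docs above.
+function exampleCreateOrReusePlayer() {
+  var existing = videojs.getPlayer('my-video');
+
+  if (existing) {
+    return existing;
+  }
+
+  return videojs('my-video', { controls: true, preload: 'auto' }, function () {
+    // `this` is the Player once both the Player and the Tech are ready.
+    this.play();
+  });
+}
+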
+
+videojs.hooks_ = hooks_;
+videojs.hooks = hooks;
+videojs.hook = hook;
+videojs.hookOnce = hookOnce;
+videojs.removeHook = removeHook; // Add default styles
+
+if (window__default['default'].VIDEOJS_NO_DYNAMIC_STYLE !== true && isReal()) {
+ var style = $('.vjs-styles-defaults');
+
+ if (!style) {
+ style = createStyleElement('vjs-styles-defaults');
+ var head = $('head');
+
+ if (head) {
+ head.insertBefore(style, head.firstChild);
+ }
+
+ setTextContent(style, "\n .video-js {\n width: 300px;\n height: 150px;\n }\n\n .vjs-fluid:not(.vjs-audio-only-mode) {\n padding-top: 56.25%\n }\n ");
+ }
+} // Run Auto-load players
+// You have to wait at least once in case this script is loaded after your
+// video in the DOM (weird behavior only with minified version)
+
+
+autoSetupTimeout(1, videojs);
+/**
+ * Current Video.js version. Follows [semantic versioning](https://semver.org/).
+ *
+ * @type {string}
+ */
+
+videojs.VERSION = version$5;
+/**
+ * The global options object. These are the settings that take effect
+ * if no overrides are specified when the player is created.
+ *
+ * @type {Object}
+ */
+
+videojs.options = Player.prototype.options_;
+/**
+ * Get an object with the currently created players, keyed by player ID
+ *
+ * @return {Object}
+ * The created players
+ */
+
+videojs.getPlayers = function () {
+ return Player.players;
+};
+/**
+ * Get a single player based on an ID or DOM element.
+ *
+ * This is useful if you want to check if an element or ID has an associated
+ * Video.js player, but not create one if it doesn't.
+ *
+ * @param {string|Element} id
+ *        An HTML element - `<video>`, `<audio>`, or `<video-js>` -
+ * or a string matching the `id` of such an element.
+ *
+ * @return {Player|undefined}
+ * A player instance or `undefined` if there is no player instance
+ * matching the argument.
+ */
+
+
+videojs.getPlayer = function (id) {
+ var players = Player.players;
+ var tag;
+
+ if (typeof id === 'string') {
+ var nId = normalizeId(id);
+ var player = players[nId];
+
+ if (player) {
+ return player;
+ }
+
+ tag = $('#' + nId);
+ } else {
+ tag = id;
+ }
+
+ if (isEl(tag)) {
+ var _tag = tag,
+ _player = _tag.player,
+ playerId = _tag.playerId; // Element may have a `player` property referring to an already created
+ // player instance. If so, return that.
+
+ if (_player || players[playerId]) {
+ return _player || players[playerId];
+ }
+ }
+};
+/**
+ * Returns an array of all current players.
+ *
+ * @return {Array}
+ * An array of all players. The array will be in the order that
+ * `Object.keys` provides, which could potentially vary between
+ * JavaScript engines.
+ *
+ */
+
+
+videojs.getAllPlayers = function () {
+ return (// Disposed players leave a key with a `null` value, so we need to make sure
+ // we filter those out.
+ Object.keys(Player.players).map(function (k) {
+ return Player.players[k];
+ }).filter(Boolean)
+ );
+};
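+
+// A minimal sketch: tearing down every live player (for example before a single-page-app
+// view swap). getAllPlayers() already filters out disposed (null) entries.
+function exampleDisposeAllPlayers() {
+  videojs.getAllPlayers().forEach(function (player) {
+    player.dispose();
+  });
+}
+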
+
+videojs.players = Player.players;
+videojs.getComponent = Component$1.getComponent;
+/**
+ * Register a component so it can be referred to by name. Used when adding to other
+ * components, either through addChild `component.addChild('myComponent')` or through
+ * default children options `{ children: ['myComponent'] }`.
+ *
+ * > NOTE: You could also just initialize the component before adding.
+ * `component.addChild(new MyComponent());`
+ *
+ * @param {string} name
+ * The class name of the component
+ *
+ * @param {Component} comp
+ * The component class
+ *
+ * @return {Component}
+ * The newly registered component
+ */
+
+videojs.registerComponent = function (name, comp) {
+ if (Tech.isTech(comp)) {
+ log$1.warn("The " + name + " tech was registered as a component. It should instead be registered using videojs.registerTech(name, tech)");
+ }
+
+ Component$1.registerComponent.call(Component$1, name, comp);
+};
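+
+// A minimal sketch (the component name is hypothetical): registering a Component subclass
+// so it can be referred to by name in `children` options or via addChild(), per the docs above.
+function exampleRegisterSimpleOverlay() {
+  var Component = videojs.getComponent('Component');
+
+  var SimpleOverlay = videojs.extend(Component, {
+    createEl: function () {
+      return videojs.dom.createEl('div', { className: 'vjs-simple-overlay' });
+    }
+  });
+
+  videojs.registerComponent('SimpleOverlay', SimpleOverlay);
+
+  var player = videojs.getPlayer('my-video');
+
+  if (player) {
+    player.addChild('simpleOverlay');
+  }
+}
+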
+
+videojs.getTech = Tech.getTech;
+videojs.registerTech = Tech.registerTech;
+videojs.use = use;
+/**
+ * An object that can be returned by a middleware to signify
+ * that the middleware is being terminated.
+ *
+ * @type {object}
+ * @property {object} middleware.TERMINATOR
+ */
+
+Object.defineProperty(videojs, 'middleware', {
+ value: {},
+  writable: false,
+ enumerable: true
+});
+Object.defineProperty(videojs.middleware, 'TERMINATOR', {
+ value: TERMINATOR,
+  writable: false,
+ enumerable: true
+});
+/**
+ * A reference to the {@link module:browser|browser utility module} as an object.
+ *
+ * @type {Object}
+ * @see {@link module:browser|browser}
+ */
+
+videojs.browser = browser;
+/**
+ * Use {@link module:browser.TOUCH_ENABLED|browser.TOUCH_ENABLED} instead; only
+ * included for backward-compatibility with 4.x.
+ *
+ * @deprecated Since version 5.0, use {@link module:browser.TOUCH_ENABLED|browser.TOUCH_ENABLED} instead.
+ * @type {boolean}
+ */
+
+videojs.TOUCH_ENABLED = TOUCH_ENABLED;
+videojs.extend = extend;
+videojs.mergeOptions = mergeOptions$3;
+videojs.bind = bind;
+videojs.registerPlugin = Plugin.registerPlugin;
+videojs.deregisterPlugin = Plugin.deregisterPlugin;
+/**
+ * Deprecated method to register a plugin with Video.js
+ *
+ * @deprecated videojs.plugin() is deprecated; use videojs.registerPlugin() instead
+ *
+ * @param {string} name
+ * The plugin name
+ *
+ * @param {Plugin|Function} plugin
+ * The plugin sub-class or function
+ */
+
+videojs.plugin = function (name, plugin) {
+ log$1.warn('videojs.plugin() is deprecated; use videojs.registerPlugin() instead');
+ return Plugin.registerPlugin(name, plugin);
+};
+
+videojs.getPlugins = Plugin.getPlugins;
+videojs.getPlugin = Plugin.getPlugin;
+videojs.getPluginVersion = Plugin.getPluginVersion;
+/**
+ * Adding languages so that they're available to all players.
+ * Example: `videojs.addLanguage('es', { 'Hello': 'Hola' });`
+ *
+ * @param {string} code
+ * The language code or dictionary property
+ *
+ * @param {Object} data
+ * The data values to be translated
+ *
+ * @return {Object}
+ * The resulting language dictionary object
+ */
+
+videojs.addLanguage = function (code, data) {
+ var _mergeOptions;
+
+ code = ('' + code).toLowerCase();
+ videojs.options.languages = mergeOptions$3(videojs.options.languages, (_mergeOptions = {}, _mergeOptions[code] = data, _mergeOptions));
+ return videojs.options.languages[code];
+};
+/**
+ * A reference to the {@link module:log|log utility module} as an object.
+ *
+ * @type {Function}
+ * @see {@link module:log|log}
+ */
+
+
+videojs.log = log$1;
+videojs.createLogger = createLogger;
+videojs.createTimeRange = videojs.createTimeRanges = createTimeRanges;
+videojs.formatTime = formatTime;
+videojs.setFormatTime = setFormatTime;
+videojs.resetFormatTime = resetFormatTime;
+videojs.parseUrl = parseUrl;
+videojs.isCrossOrigin = isCrossOrigin;
+videojs.EventTarget = EventTarget$2;
+videojs.on = on;
+videojs.one = one;
+videojs.off = off;
+videojs.trigger = trigger;
+/**
+ * A cross-browser XMLHttpRequest wrapper.
+ *
+ * @function
+ * @param {Object} options
+ * Settings for the request.
+ *
+ * @return {XMLHttpRequest|XDomainRequest}
+ * The request object.
+ *
+ * @see https://github.com/Raynos/xhr
+ */
+
+videojs.xhr = XHR__default['default'];
+videojs.TextTrack = TextTrack;
+videojs.AudioTrack = AudioTrack;
+videojs.VideoTrack = VideoTrack;
+['isEl', 'isTextNode', 'createEl', 'hasClass', 'addClass', 'removeClass', 'toggleClass', 'setAttributes', 'getAttributes', 'emptyEl', 'appendContent', 'insertContent'].forEach(function (k) {
+ videojs[k] = function () {
+ log$1.warn("videojs." + k + "() is deprecated; use videojs.dom." + k + "() instead");
+ return Dom[k].apply(null, arguments);
+ };
+});
+videojs.computedStyle = computedStyle;
+/**
+ * A reference to the {@link module:dom|DOM utility module} as an object.
+ *
+ * @type {Object}
+ * @see {@link module:dom|dom}
+ */
+
+videojs.dom = Dom;
+/**
+ * A reference to the {@link module:url|URL utility module} as an object.
+ *
+ * @type {Object}
+ * @see {@link module:url|url}
+ */
+
+videojs.url = Url;
+videojs.defineLazyProperty = defineLazyProperty; // Adding less ambiguous text for fullscreen button.
+// In a major update this could become the default text and key.
+
+videojs.addLanguage('en', {
+ 'Non-Fullscreen': 'Exit Fullscreen'
+});
+
+/*! @name @videojs/http-streaming @version 2.14.2 @license Apache-2.0 */
+/**
+ * @file resolve-url.js - Handling how URLs are resolved and manipulated
+ */
+
+var resolveUrl = _resolveUrl__default['default'];
+/**
+ * Checks whether xhr request was redirected and returns correct url depending
+ * on `handleManifestRedirects` option
+ *
+ * @api private
+ *
+ * @param {string} url - a URL being requested
+ * @param {XMLHttpRequest} req - xhr request result
+ *
+ * @return {string}
+ */
+
+var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
+ // To understand how the responseURL below is set and generated:
+ // - https://fetch.spec.whatwg.org/#concept-response-url
+ // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
+ if (handleManifestRedirect && req && req.responseURL && url !== req.responseURL) {
+ return req.responseURL;
+ }
+
+ return url;
+};
+
+var logger = function logger(source) {
+ if (videojs.log.debug) {
+ return videojs.log.debug.bind(videojs, 'VHS:', source + " >");
+ }
+
+ return function () {};
+};
+/**
+ * ranges
+ *
+ * Utilities for working with TimeRanges.
+ *
+ */
+
+
+var TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
+// can be misleading because of precision differences or when the current media has poorly
+// aligned audio and video, which can cause values to be slightly off from what you would
+// expect. This value is what we consider to be safe to use in such comparisons to account
+// for these scenarios.
+
+var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
+
+var filterRanges = function filterRanges(timeRanges, predicate) {
+ var results = [];
+ var i;
+
+ if (timeRanges && timeRanges.length) {
+ // Search for ranges that match the predicate
+ for (i = 0; i < timeRanges.length; i++) {
+ if (predicate(timeRanges.start(i), timeRanges.end(i))) {
+ results.push([timeRanges.start(i), timeRanges.end(i)]);
+ }
+ }
+ }
+
+ return videojs.createTimeRanges(results);
+};
+/**
+ * Attempts to find the buffered TimeRange that contains the specified
+ * time.
+ *
+ * @param {TimeRanges} buffered - the TimeRanges object to query
+ * @param {number} time - the time to filter on.
+ * @return {TimeRanges} a new TimeRanges object
+ */
+
+
+var findRange = function findRange(buffered, time) {
+ return filterRanges(buffered, function (start, end) {
+ return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
+ });
+};
+/**
+ * Returns the TimeRanges that begin later than the specified time.
+ *
+ * @param {TimeRanges} timeRanges - the TimeRanges object to query
+ * @param {number} time - the time to filter on.
+ * @return {TimeRanges} a new TimeRanges object.
+ */
+
+
+var findNextRange = function findNextRange(timeRanges, time) {
+ return filterRanges(timeRanges, function (start) {
+ return start - TIME_FUDGE_FACTOR >= time;
+ });
+};
+/**
+ * Returns gaps within a list of TimeRanges
+ *
+ * @param {TimeRanges} buffered - the TimeRanges object
+ * @return {TimeRanges} a TimeRanges object of gaps
+ */
+
+
+var findGaps = function findGaps(buffered) {
+ if (buffered.length < 2) {
+ return videojs.createTimeRanges();
+ }
+
+ var ranges = [];
+
+ for (var i = 1; i < buffered.length; i++) {
+ var start = buffered.end(i - 1);
+ var end = buffered.start(i);
+ ranges.push([start, end]);
+ }
+
+ return videojs.createTimeRanges(ranges);
+};
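+
+// A minimal sketch exercising the helpers above against a synthetic TimeRanges built with
+// videojs.createTimeRanges; the numbers are arbitrary.
+function exampleRangeHelpers() {
+  var buffered = videojs.createTimeRanges([[0, 10], [12, 20]]);
+
+  findRange(buffered, 5);     // one range: [0, 10] (5 falls inside it)
+  findNextRange(buffered, 5); // one range: [12, 20] (starts after 5)
+  return findGaps(buffered);  // one gap: [10, 12]
+}
+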
+/**
+ * Calculate the intersection of two TimeRanges
+ *
+ * @param {TimeRanges} bufferA
+ * @param {TimeRanges} bufferB
+ * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
+ */
+
+
+var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
+ var start = null;
+ var end = null;
+ var arity = 0;
+ var extents = [];
+ var ranges = [];
+
+ if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
+ return videojs.createTimeRange();
+ } // Handle the case where we have both buffers and create an
+ // intersection of the two
+
+
+ var count = bufferA.length; // A) Gather up all start and end times
+
+ while (count--) {
+ extents.push({
+ time: bufferA.start(count),
+ type: 'start'
+ });
+ extents.push({
+ time: bufferA.end(count),
+ type: 'end'
+ });
+ }
+
+ count = bufferB.length;
+
+ while (count--) {
+ extents.push({
+ time: bufferB.start(count),
+ type: 'start'
+ });
+ extents.push({
+ time: bufferB.end(count),
+ type: 'end'
+ });
+ } // B) Sort them by time
+
+
+ extents.sort(function (a, b) {
+ return a.time - b.time;
+ }); // C) Go along one by one incrementing arity for start and decrementing
+ // arity for ends
+
+ for (count = 0; count < extents.length; count++) {
+ if (extents[count].type === 'start') {
+ arity++; // D) If arity is ever incremented to 2 we are entering an
+ // overlapping range
+
+ if (arity === 2) {
+ start = extents[count].time;
+ }
+ } else if (extents[count].type === 'end') {
+      arity--; // E) If arity is ever decremented to 1 we are leaving an
+ // overlapping range
+
+ if (arity === 1) {
+ end = extents[count].time;
+ }
+ } // F) Record overlapping ranges
+
+
+ if (start !== null && end !== null) {
+ ranges.push([start, end]);
+ start = null;
+ end = null;
+ }
+ }
+
+ return videojs.createTimeRanges(ranges);
+};
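+
+// A small worked case of the sweep above (arbitrary numbers): arity reaches 2 only while
+// both buffers overlap, so exactly those sub-ranges are recorded.
+function exampleBufferIntersection() {
+  var a = videojs.createTimeRanges([[0, 10], [20, 30]]);
+  var b = videojs.createTimeRanges([[5, 25]]);
+
+  return bufferIntersection(a, b); // [[5, 10], [20, 25]]
+}
+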
+/**
+ * Gets a human readable string for a TimeRange
+ *
+ * @param {TimeRange} range
+ * @return {string} a human readable string
+ */
+
+
+var printableRange = function printableRange(range) {
+ var strArr = [];
+
+ if (!range || !range.length) {
+ return '';
+ }
+
+ for (var i = 0; i < range.length; i++) {
+ strArr.push(range.start(i) + ' => ' + range.end(i));
+ }
+
+ return strArr.join(', ');
+};
+/**
+ * Calculates the amount of time left in seconds until the player hits the end of the
+ * buffer and causes a rebuffer
+ *
+ * @param {TimeRange} buffered
+ * The state of the buffer
+ * @param {number} currentTime
+ * The current time of the player
+ * @param {number} playbackRate
+ * The current playback rate of the player. Defaults to 1.
+ * @return {number}
+ * Time until the player has to start rebuffering in seconds.
+ * @function timeUntilRebuffer
+ */
+
+
+var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
+ if (playbackRate === void 0) {
+ playbackRate = 1;
+ }
+
+ var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
+ return (bufferedEnd - currentTime) / playbackRate;
+};
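+
+// A small worked case (arbitrary numbers): with 30s buffered, the playhead at 22s and a 1x
+// playback rate, rebuffering would start in (30 - 22) / 1 = 8 seconds.
+function exampleTimeUntilRebuffer() {
+  var buffered = videojs.createTimeRanges([[0, 30]]);
+
+  return timeUntilRebuffer(buffered, 22, 1); // 8
+}
+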
+/**
+ * Converts a TimeRanges object into an array representation
+ *
+ * @param {TimeRanges} timeRanges
+ * @return {Array}
+ */
+
+
+var timeRangesToArray = function timeRangesToArray(timeRanges) {
+ var timeRangesList = [];
+
+ for (var i = 0; i < timeRanges.length; i++) {
+ timeRangesList.push({
+ start: timeRanges.start(i),
+ end: timeRanges.end(i)
+ });
+ }
+
+ return timeRangesList;
+};
+/**
+ * Determines if two time range objects are different.
+ *
+ * @param {TimeRange} a
+ * the first time range object to check
+ *
+ * @param {TimeRange} b
+ * the second time range object to check
+ *
+ * @return {Boolean}
+ * Whether the time range objects differ
+ */
+
+
+var isRangeDifferent = function isRangeDifferent(a, b) {
+ // same object
+ if (a === b) {
+ return false;
+ } // one or the other is undefined
+
+
+ if (!a && b || !b && a) {
+ return true;
+ } // length is different
+
+
+ if (a.length !== b.length) {
+ return true;
+ } // see if any start/end pair is different
+
+
+ for (var i = 0; i < a.length; i++) {
+ if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
+ return true;
+ }
+ } // if the length and every pair is the same
+ // this is the same time range
+
+
+ return false;
+};
+
+var lastBufferedEnd = function lastBufferedEnd(a) {
+ if (!a || !a.length || !a.end) {
+ return;
+ }
+
+ return a.end(a.length - 1);
+};
+/**
+ * A utility function to add up the amount of time in a timeRange
+ * after a specified startTime.
+ * e.g. [[0, 10], [20, 40], [50, 60]] with a startTime of 0
+ * would return 40, as there are 40 seconds after 0 in the timeRange
+ *
+ * @param {TimeRange} range
+ * The range to check against
+ * @param {number} startTime
+ * The time in the time range that you should start counting from
+ *
+ * @return {number}
+ *         The number of seconds in the buffer past the specified time.
+ */
+
+
+var timeAheadOf = function timeAheadOf(range, startTime) {
+ var time = 0;
+
+ if (!range || !range.length) {
+ return time;
+ }
+
+ for (var i = 0; i < range.length; i++) {
+ var start = range.start(i);
+ var end = range.end(i); // startTime is after this range entirely
+
+ if (startTime > end) {
+ continue;
+ } // startTime is within this range
+
+
+ if (startTime > start && startTime <= end) {
+ time += end - startTime;
+ continue;
+ } // startTime is before this range.
+
+
+ time += end - start;
+ }
+
+ return time;
+};
+/**
+ * @file playlist.js
+ *
+ * Playlist related utilities.
+ */
+
+
+var createTimeRange = videojs.createTimeRange;
+/**
+ * Get the duration of a segment, with special cases for
+ * llhls segments that do not have a duration yet.
+ *
+ * @param {Object} playlist
+ * the playlist that the segment belongs to.
+ * @param {Object} segment
+ * the segment to get a duration for.
+ *
+ * @return {number}
+ * the segment duration
+ */
+
+var segmentDurationWithParts = function segmentDurationWithParts(playlist, segment) {
+ // if this isn't a preload segment
+ // then we will have a segment duration that is accurate.
+ if (!segment.preload) {
+ return segment.duration;
+ } // otherwise we have to add up parts and preload hints
+ // to get an up to date duration.
+
+
+ var result = 0;
+ (segment.parts || []).forEach(function (p) {
+ result += p.duration;
+ }); // for preload hints we have to use partTargetDuration
+ // as they won't even have a duration yet.
+
+ (segment.preloadHints || []).forEach(function (p) {
+ if (p.type === 'PART') {
+ result += playlist.partTargetDuration;
+ }
+ });
+ return result;
+};
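+
+// A minimal sketch (hypothetical playlist/segment shapes): for a preload (LL-HLS) segment,
+// known parts contribute their real durations and each PART preload hint contributes the
+// playlist's partTargetDuration.
+function exampleSegmentDurationWithParts() {
+  var playlist = { partTargetDuration: 1 };
+  var segment = {
+    preload: true,
+    parts: [{ duration: 1 }, { duration: 0.8 }],
+    preloadHints: [{ type: 'PART' }]
+  };
+
+  return segmentDurationWithParts(playlist, segment); // 1 + 0.8 + 1 = 2.8
+}
+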
+/**
+ * A function to get a combined list of parts and segments with durations
+ * and indexes.
+ *
+ * @param {Playlist} playlist the playlist to get the list for.
+ *
+ * @return {Array} The part/segment list.
+ */
+
+
+var getPartsAndSegments = function getPartsAndSegments(playlist) {
+ return (playlist.segments || []).reduce(function (acc, segment, si) {
+ if (segment.parts) {
+ segment.parts.forEach(function (part, pi) {
+ acc.push({
+ duration: part.duration,
+ segmentIndex: si,
+ partIndex: pi,
+ part: part,
+ segment: segment
+ });
+ });
+ } else {
+ acc.push({
+ duration: segment.duration,
+ segmentIndex: si,
+ partIndex: null,
+ segment: segment,
+ part: null
+ });
+ }
+
+ return acc;
+ }, []);
+};
+
+var getLastParts = function getLastParts(media) {
+ var lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
+ return lastSegment && lastSegment.parts || [];
+};
+
+var getKnownPartCount = function getKnownPartCount(_ref) {
+ var preloadSegment = _ref.preloadSegment;
+
+ if (!preloadSegment) {
+ return;
+ }
+
+ var parts = preloadSegment.parts,
+ preloadHints = preloadSegment.preloadHints;
+ var partCount = (preloadHints || []).reduce(function (count, hint) {
+ return count + (hint.type === 'PART' ? 1 : 0);
+ }, 0);
+ partCount += parts && parts.length ? parts.length : 0;
+ return partCount;
+};
+/**
+ * Get the number of seconds to delay from the end of a
+ * live playlist.
+ *
+ * @param {Playlist} master the master playlist
+ * @param {Playlist} media the media playlist
+ * @return {number} the hold back in seconds.
+ */
+
+
+var liveEdgeDelay = function liveEdgeDelay(master, media) {
+ if (media.endList) {
+ return 0;
+ } // dash suggestedPresentationDelay trumps everything
+
+
+ if (master && master.suggestedPresentationDelay) {
+ return master.suggestedPresentationDelay;
+ }
+
+ var hasParts = getLastParts(media).length > 0; // look for "part" delays from ll-hls first
+
+ if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
+ return media.serverControl.partHoldBack;
+ } else if (hasParts && media.partTargetDuration) {
+ return media.partTargetDuration * 3; // finally look for full segment delays
+ } else if (media.serverControl && media.serverControl.holdBack) {
+ return media.serverControl.holdBack;
+ } else if (media.targetDuration) {
+ return media.targetDuration * 3;
+ }
+
+ return 0;
+};
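+
+// A minimal sketch (hypothetical playlist shapes) of the precedence above: a DASH
+// suggestedPresentationDelay wins, then LL-HLS part hold-back values, then serverControl
+// holdBack, and finally targetDuration * 3 as the fallback.
+function exampleLiveEdgeDelay() {
+  var media = { endList: false, targetDuration: 6, segments: [] };
+
+  liveEdgeDelay({ suggestedPresentationDelay: 18 }, media); // 18
+  liveEdgeDelay(null, media);                               // 6 * 3 = 18
+
+  media.serverControl = { holdBack: 12 };
+  return liveEdgeDelay(null, media);                        // 12
+}
+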
+/**
+ * walk backward until we find a duration we can use
+ * or return a failure
+ *
+ * @param {Playlist} playlist the playlist to walk through
+ * @param {Number} endSequence the mediaSequence to stop walking on
+ */
+
+
+var backwardDuration = function backwardDuration(playlist, endSequence) {
+ var result = 0;
+  var i = endSequence - playlist.mediaSequence; // if a start time is available for the segment immediately following
+ // the interval, use it
+
+ var segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
+ // information that is earlier than endSequence
+
+ if (segment) {
+ if (typeof segment.start !== 'undefined') {
+ return {
+ result: segment.start,
+ precise: true
+ };
+ }
+
+ if (typeof segment.end !== 'undefined') {
+ return {
+ result: segment.end - segment.duration,
+ precise: true
+ };
+ }
+ }
+
+ while (i--) {
+ segment = playlist.segments[i];
+
+ if (typeof segment.end !== 'undefined') {
+ return {
+ result: result + segment.end,
+ precise: true
+ };
+ }
+
+ result += segmentDurationWithParts(playlist, segment);
+
+ if (typeof segment.start !== 'undefined') {
+ return {
+ result: result + segment.start,
+ precise: true
+ };
+ }
+ }
+
+ return {
+ result: result,
+ precise: false
+ };
+};
+/**
+ * walk forward until we find a duration we can use
+ * or return a failure
+ *
+ * @param {Playlist} playlist the playlist to walk through
+ * @param {number} endSequence the mediaSequence to stop walking on
+ */
+
+
+var forwardDuration = function forwardDuration(playlist, endSequence) {
+ var result = 0;
+ var segment;
+ var i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
+ // information
+
+ for (; i < playlist.segments.length; i++) {
+ segment = playlist.segments[i];
+
+ if (typeof segment.start !== 'undefined') {
+ return {
+ result: segment.start - result,
+ precise: true
+ };
+ }
+
+ result += segmentDurationWithParts(playlist, segment);
+
+ if (typeof segment.end !== 'undefined') {
+ return {
+ result: segment.end - result,
+ precise: true
+ };
+ }
+ } // indicate we didn't find a useful duration estimate
+
+
+ return {
+ result: -1,
+ precise: false
+ };
+};
+/**
+ * Calculate the media duration from the segments associated with a
+ * playlist. The duration of a subinterval of the available segments
+ * may be calculated by specifying an end index.
+ *
+ * @param {Object} playlist a media playlist object
+ * @param {number=} endSequence an exclusive upper boundary
+ * for the playlist. Defaults to playlist length.
+ * @param {number} expired the amount of time that has dropped
+ * off the front of the playlist in a live scenario
+ * @return {number} the duration between the first available segment
+ * and end index.
+ */
+
+
+var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
+ if (typeof endSequence === 'undefined') {
+ endSequence = playlist.mediaSequence + playlist.segments.length;
+ }
+
+ if (endSequence < playlist.mediaSequence) {
+ return 0;
+ } // do a backward walk to estimate the duration
+
+
+ var backward = backwardDuration(playlist, endSequence);
+
+ if (backward.precise) {
+ // if we were able to base our duration estimate on timing
+ // information provided directly from the Media Source, return
+ // it
+ return backward.result;
+ } // walk forward to see if a precise duration estimate can be made
+ // that way
+
+
+ var forward = forwardDuration(playlist, endSequence);
+
+ if (forward.precise) {
+    // we found a segment that has been buffered and so its
+ // position is known precisely
+ return forward.result;
+ } // return the less-precise, playlist-based duration estimate
+
+
+ return backward.result + expired;
+};
+/**
+ * Calculates the duration of a playlist. If a start and end index
+ * are specified, the duration will be for the subset of the media
+ * timeline between those two indices. The total duration for live
+ * playlists is always Infinity.
+ *
+ * @param {Object} playlist a media playlist object
+ * @param {number=} endSequence an exclusive upper
+ * boundary for the playlist. Defaults to the playlist media
+ * sequence number plus its length.
+ * @param {number=} expired the amount of time that has
+ * dropped off the front of the playlist in a live scenario
+ * @return {number} the duration between the start index and end
+ * index.
+ */
+
+
+var duration = function duration(playlist, endSequence, expired) {
+ if (!playlist) {
+ return 0;
+ }
+
+ if (typeof expired !== 'number') {
+ expired = 0;
+ } // if a slice of the total duration is not requested, use
+ // playlist-level duration indicators when they're present
+
+
+ if (typeof endSequence === 'undefined') {
+ // if present, use the duration specified in the playlist
+ if (playlist.totalDuration) {
+ return playlist.totalDuration;
+ } // duration should be Infinity for live playlists
+
+
+ if (!playlist.endList) {
+ return window__default['default'].Infinity;
+ }
+ } // calculate the total duration based on the segment durations
+
+
+ return intervalDuration(playlist, endSequence, expired);
+};
+/**
+ * Calculate the time between two indexes in the current playlist.
+ * Neither the start index nor the end index needs to be within the
+ * current playlist, in which case the targetDuration of the playlist
+ * is used to approximate the durations of the segments.
+ *
+ * @param {Array} options.durationList list to iterate over for durations.
+ * @param {number} options.defaultDuration duration to use for elements before or after the durationList
+ * @param {number} options.startIndex partsAndSegments index to start
+ * @param {number} options.endIndex partsAndSegments index to end.
+ * @return {number} the number of seconds between startIndex and endIndex
+ */
+
+
+var sumDurations = function sumDurations(_ref2) {
+ var defaultDuration = _ref2.defaultDuration,
+ durationList = _ref2.durationList,
+ startIndex = _ref2.startIndex,
+ endIndex = _ref2.endIndex;
+ var durations = 0;
+
+ if (startIndex > endIndex) {
+ var _ref3 = [endIndex, startIndex];
+ startIndex = _ref3[0];
+ endIndex = _ref3[1];
+ }
+
+ if (startIndex < 0) {
+ for (var i = startIndex; i < Math.min(0, endIndex); i++) {
+ durations += defaultDuration;
+ }
+
+ startIndex = 0;
+ }
+
+ for (var _i = startIndex; _i < endIndex; _i++) {
+ durations += durationList[_i].duration;
+ }
+
+ return durations;
+};
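+// As a rough illustration of how sumDurations treats indices that fall before
+// the known duration list (hypothetical values, not taken from a real playlist):
+//
+//   sumDurations({
+//     defaultDuration: 10,
+//     durationList: [{ duration: 4 }, { duration: 4 }, { duration: 4 }],
+//     startIndex: -1,
+//     endIndex: 2
+//   });
+//   // => 18: one defaultDuration (10) for index -1, plus the 4s entries at indices 0 and 1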
+/**
+ * Calculates the playlist end time
+ *
+ * @param {Object} playlist a media playlist object
+ * @param {number=} expired the amount of time that has
+ * dropped off the front of the playlist in a live scenario
+ * @param {boolean} useSafeLiveEnd a boolean value indicating whether or not the
+ * playlist end calculation should consider the safe live end
+ * (truncate the playlist end by three segments). This is normally
+ * used for calculating the end of the playlist's seekable range.
+ * This takes into account the value of liveEdgePadding.
+ * Setting liveEdgePadding to 0 is equivalent to setting this to false.
+ * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
+ * If this is provided, it is used in the safe live end calculation.
+ * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
+ * Corresponds to suggestedPresentationDelay in DASH manifests.
+ * @return {number} the end time of playlist
+ * @function playlistEnd
+ */
+
+
+var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
+ if (!playlist || !playlist.segments) {
+ return null;
+ }
+
+ if (playlist.endList) {
+ return duration(playlist);
+ }
+
+ if (expired === null) {
+ return null;
+ }
+
+ expired = expired || 0;
+ var lastSegmentEndTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);
+
+ if (useSafeLiveEnd) {
+ liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
+ lastSegmentEndTime -= liveEdgePadding;
+ } // don't return a time less than zero
+
+
+ return Math.max(0, lastSegmentEndTime);
+};
+/**
+ * Calculates the interval of time that is currently seekable in a
+ * playlist. The returned time ranges are relative to the earliest
+ * moment in the specified playlist that is still available. A full
+ * seekable implementation for live streams would need to offset
+ * these values by the duration of content that has expired from the
+ * stream.
+ *
+ * @param {Object} playlist a media playlist object
+ * @param {number=} expired the amount of time that has
+ * dropped off the front of the playlist in a live scenario
+ * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
+ * Corresponds to suggestedPresentationDelay in DASH manifests.
+ * @return {TimeRanges} the periods of time that are valid targets
+ * for seeking
+ */
+
+
+var seekable = function seekable(playlist, expired, liveEdgePadding) {
+ var useSafeLiveEnd = true;
+ var seekableStart = expired || 0;
+ var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
+
+ if (seekableEnd === null) {
+ return createTimeRange();
+ }
+
+ return createTimeRange(seekableStart, seekableEnd);
+};
+/**
+ * Determine the index and estimated starting time of the segment that
+ * contains a specified playback position in a media playlist.
+ *
+ * @param {Object} options.playlist the media playlist to query
+ * @param {number} options.currentTime The number of seconds since the earliest
+ * possible position to determine the containing segment for
+ * @param {number} options.startTime the time when the segment/part starts
+ * @param {number} options.startingSegmentIndex the segment index to start looking at.
+ * @param {number=} [options.startingPartIndex] the part index to look at within the segment.
+ * @param {boolean=} [options.experimentalExactManifestTimings] whether to match
+ *        segment boundaries exactly rather than allowing the TIME_FUDGE_FACTOR tolerance
+ *
+ * @return {Object} an object with partIndex, segmentIndex, and startTime.
+ */
+
+
+var getMediaInfoForTime = function getMediaInfoForTime(_ref4) {
+ var playlist = _ref4.playlist,
+ currentTime = _ref4.currentTime,
+ startingSegmentIndex = _ref4.startingSegmentIndex,
+ startingPartIndex = _ref4.startingPartIndex,
+ startTime = _ref4.startTime,
+ experimentalExactManifestTimings = _ref4.experimentalExactManifestTimings;
+ var time = currentTime - startTime;
+ var partsAndSegments = getPartsAndSegments(playlist);
+ var startIndex = 0;
+
+ for (var i = 0; i < partsAndSegments.length; i++) {
+ var partAndSegment = partsAndSegments[i];
+
+ if (startingSegmentIndex !== partAndSegment.segmentIndex) {
+ continue;
+ } // skip this if part index does not match.
+
+
+ if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
+ continue;
+ }
+
+ startIndex = i;
+ break;
+ }
+
+ if (time < 0) {
+ // Walk backward from startIndex in the playlist, adding durations
+ // until we find a segment that contains `time` and return it
+ if (startIndex > 0) {
+ for (var _i2 = startIndex - 1; _i2 >= 0; _i2--) {
+ var _partAndSegment = partsAndSegments[_i2];
+ time += _partAndSegment.duration;
+
+ if (experimentalExactManifestTimings) {
+ if (time < 0) {
+ continue;
+ }
+ } else if (time + TIME_FUDGE_FACTOR <= 0) {
+ continue;
+ }
+
+ return {
+ partIndex: _partAndSegment.partIndex,
+ segmentIndex: _partAndSegment.segmentIndex,
+ startTime: startTime - sumDurations({
+ defaultDuration: playlist.targetDuration,
+ durationList: partsAndSegments,
+ startIndex: startIndex,
+ endIndex: _i2
+ })
+ };
+ }
+ } // We were unable to find a good segment within the playlist
+ // so select the first segment
+
+
+ return {
+ partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
+ segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
+ startTime: currentTime
+ };
+  } // When startIndex is negative, we first walk forward to the first segment,
+  // subtracting a target duration from `time` for each step. If we "run out of
+  // time" before getting to the first segment, return the first segment
+
+
+ if (startIndex < 0) {
+ for (var _i3 = startIndex; _i3 < 0; _i3++) {
+ time -= playlist.targetDuration;
+
+ if (time < 0) {
+ return {
+ partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
+ segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
+ startTime: currentTime
+ };
+ }
+ }
+
+ startIndex = 0;
+ } // Walk forward from startIndex in the playlist, subtracting durations
+ // until we find a segment that contains `time` and return it
+
+
+ for (var _i4 = startIndex; _i4 < partsAndSegments.length; _i4++) {
+ var _partAndSegment2 = partsAndSegments[_i4];
+ time -= _partAndSegment2.duration;
+
+ if (experimentalExactManifestTimings) {
+ if (time > 0) {
+ continue;
+ }
+ } else if (time - TIME_FUDGE_FACTOR >= 0) {
+ continue;
+ }
+
+ return {
+ partIndex: _partAndSegment2.partIndex,
+ segmentIndex: _partAndSegment2.segmentIndex,
+ startTime: startTime + sumDurations({
+ defaultDuration: playlist.targetDuration,
+ durationList: partsAndSegments,
+ startIndex: startIndex,
+ endIndex: _i4
+ })
+ };
+ } // We are out of possible candidates so load the last one...
+
+
+ return {
+ segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
+ partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
+ startTime: currentTime
+ };
+};
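+// A sketch of the forward walk above, assuming a playlist of three 4-second
+// segments with no parts (hypothetical values):
+//
+//   getMediaInfoForTime({
+//     playlist: { targetDuration: 4, segments: [{ duration: 4 }, { duration: 4 }, { duration: 4 }] },
+//     currentTime: 5,
+//     startTime: 0,
+//     startingSegmentIndex: 0
+//   });
+//   // => roughly { segmentIndex: 1, partIndex: null, startTime: 4 }, because 5s of
+//   // playback lands inside the second 4-second segment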
+/**
+ * Check whether the playlist is blacklisted or not.
+ *
+ * @param {Object} playlist the media playlist object
+ * @return {boolean} whether the playlist is blacklisted or not
+ * @function isBlacklisted
+ */
+
+
+var isBlacklisted = function isBlacklisted(playlist) {
+ return playlist.excludeUntil && playlist.excludeUntil > Date.now();
+};
+/**
+ * Check whether the playlist is compatible with current playback configuration or has
+ * been blacklisted permanently for being incompatible.
+ *
+ * @param {Object} playlist the media playlist object
+ * @return {boolean} whether the playlist is incompatible or not
+ * @function isIncompatible
+ */
+
+
+var isIncompatible = function isIncompatible(playlist) {
+ return playlist.excludeUntil && playlist.excludeUntil === Infinity;
+};
+/**
+ * Check whether the playlist is enabled or not.
+ *
+ * @param {Object} playlist the media playlist object
+ * @return {boolean} whether the playlist is enabled or not
+ * @function isEnabled
+ */
+
+
+var isEnabled = function isEnabled(playlist) {
+ var blacklisted = isBlacklisted(playlist);
+ return !playlist.disabled && !blacklisted;
+};
+/**
+ * Check whether the playlist has been manually disabled through the representations api.
+ *
+ * @param {Object} playlist the media playlist object
+ * @return {boolean} whether the playlist is disabled manually or not
+ * @function isDisabled
+ */
+
+
+var isDisabled = function isDisabled(playlist) {
+ return playlist.disabled;
+};
+/**
+ * Returns whether the current playlist is an AES encrypted HLS stream
+ *
+ * @param {Object} media the media playlist object
+ * @return {boolean} true if it's an AES encrypted HLS stream
+ */
+
+
+var isAes = function isAes(media) {
+ for (var i = 0; i < media.segments.length; i++) {
+ if (media.segments[i].key) {
+ return true;
+ }
+ }
+
+ return false;
+};
+/**
+ * Checks if the playlist has a value for the specified attribute
+ *
+ * @param {string} attr
+ * Attribute to check for
+ * @param {Object} playlist
+ * The media playlist object
+ * @return {boolean}
+ * Whether the playlist contains a value for the attribute or not
+ * @function hasAttribute
+ */
+
+
+var hasAttribute = function hasAttribute(attr, playlist) {
+ return playlist.attributes && playlist.attributes[attr];
+};
+/**
+ * Estimates the time required to complete a segment download from the specified playlist
+ *
+ * @param {number} segmentDuration
+ * Duration of requested segment
+ * @param {number} bandwidth
+ * Current measured bandwidth of the player
+ * @param {Object} playlist
+ * The media playlist object
+ * @param {number=} bytesReceived
+ * Number of bytes already received for the request. Defaults to 0
+ * @return {number|NaN}
+ * The estimated time to request the segment. NaN if bandwidth information for
+ * the given playlist is unavailable
+ * @function estimateSegmentRequestTime
+ */
+
+
+var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
+ if (bytesReceived === void 0) {
+ bytesReceived = 0;
+ }
+
+ if (!hasAttribute('BANDWIDTH', playlist)) {
+ return NaN;
+ }
+
+ var size = segmentDuration * playlist.attributes.BANDWIDTH;
+ return (size - bytesReceived * 8) / bandwidth;
+};
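+// For illustration, with hypothetical numbers: a 4-second segment from a 5 Mbps
+// rendition downloaded over a 10 Mbps measured connection.
+//
+//   estimateSegmentRequestTime(4, 10000000, { attributes: { BANDWIDTH: 5000000 } });
+//   // => 2 (seconds): size = 4 * 5,000,000 = 20,000,000 bits, divided by 10,000,000 bits/s
+//
+//   estimateSegmentRequestTime(4, 10000000, { attributes: { BANDWIDTH: 5000000 } }, 1250000);
+//   // => 1 (second): 1,250,000 bytes already received = 10,000,000 bits off the total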
+/*
+ * Returns whether the current playlist is the lowest rendition
+ *
+ * @return {Boolean} true if on lowest rendition
+ */
+
+
+var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
+ if (master.playlists.length === 1) {
+ return true;
+ }
+
+ var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
+ return master.playlists.filter(function (playlist) {
+ if (!isEnabled(playlist)) {
+ return false;
+ }
+
+ return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
+ }).length === 0;
+};
+
+var playlistMatch = function playlistMatch(a, b) {
+  // both playlists are null
+ // or only one playlist is non-null
+ // no match
+ if (!a && !b || !a && b || a && !b) {
+ return false;
+ } // playlist objects are the same, match
+
+
+ if (a === b) {
+ return true;
+ } // first try to use id as it should be the most
+ // accurate
+
+
+ if (a.id && b.id && a.id === b.id) {
+ return true;
+  } // next try to use resolvedUri as it should be the
+ // second most accurate.
+
+
+ if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
+ return true;
+ } // finally try to use uri as it should be accurate
+ // but might miss a few cases for relative uris
+
+
+ if (a.uri && b.uri && a.uri === b.uri) {
+ return true;
+ }
+
+ return false;
+};
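+// A quick sketch of the matching precedence with hypothetical playlist objects:
+//
+//   playlistMatch(null, { id: '0-a.m3u8' });                    // => false (one side missing)
+//   playlistMatch({ id: '0-a.m3u8', uri: 'a.m3u8' },
+//                 { id: '0-a.m3u8', uri: 'b.m3u8' });           // => true (id wins)
+//   playlistMatch({ uri: 'media/a.m3u8' }, { uri: 'a.m3u8' });  // => false (relative uris can miss)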
+
+var someAudioVariant = function someAudioVariant(master, callback) {
+ var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};
+ var found = false;
+
+ for (var groupName in AUDIO) {
+ for (var label in AUDIO[groupName]) {
+ found = callback(AUDIO[groupName][label]);
+
+ if (found) {
+ break;
+ }
+ }
+
+ if (found) {
+ break;
+ }
+ }
+
+ return !!found;
+};
+
+var isAudioOnly = function isAudioOnly(master) {
+ // we are audio only if we have no main playlists but do
+ // have media group playlists.
+ if (!master || !master.playlists || !master.playlists.length) {
+ // without audio variants or playlists this
+ // is not an audio only master.
+ var found = someAudioVariant(master, function (variant) {
+ return variant.playlists && variant.playlists.length || variant.uri;
+ });
+ return found;
+ } // if every playlist has only an audio codec it is audio only
+
+
+ var _loop = function _loop(i) {
+ var playlist = master.playlists[i];
+ var CODECS = playlist.attributes && playlist.attributes.CODECS; // all codecs are audio, this is an audio playlist.
+
+ if (CODECS && CODECS.split(',').every(function (c) {
+ return codecs_js.isAudioCodec(c);
+ })) {
+ return "continue";
+    } // if the playlist is in an audio group, it is audio only
+
+
+ var found = someAudioVariant(master, function (variant) {
+ return playlistMatch(playlist, variant);
+ });
+
+ if (found) {
+ return "continue";
+ } // if we make it here this playlist isn't audio and we
+ // are not audio only
+
+
+ return {
+ v: false
+ };
+ };
+
+ for (var i = 0; i < master.playlists.length; i++) {
+ var _ret = _loop(i);
+
+ if (_ret === "continue") continue;
+ if (typeof _ret === "object") return _ret.v;
+ } // if we make it past every playlist without returning, then
+ // this is an audio only playlist.
+
+
+ return true;
+}; // exports
+
+
+var Playlist = {
+ liveEdgeDelay: liveEdgeDelay,
+ duration: duration,
+ seekable: seekable,
+ getMediaInfoForTime: getMediaInfoForTime,
+ isEnabled: isEnabled,
+ isDisabled: isDisabled,
+ isBlacklisted: isBlacklisted,
+ isIncompatible: isIncompatible,
+ playlistEnd: playlistEnd,
+ isAes: isAes,
+ hasAttribute: hasAttribute,
+ estimateSegmentRequestTime: estimateSegmentRequestTime,
+ isLowestEnabledRendition: isLowestEnabledRendition,
+ isAudioOnly: isAudioOnly,
+ playlistMatch: playlistMatch,
+ segmentDurationWithParts: segmentDurationWithParts
+};
+var log = videojs.log;
+
+var createPlaylistID = function createPlaylistID(index, uri) {
+ return index + "-" + uri;
+};
+/**
+ * Parses a given m3u8 playlist
+ *
+ * @param {Function} [onwarn]
+ * a function to call when the parser triggers a warning event.
+ * @param {Function} [oninfo]
+ * a function to call when the parser triggers an info event.
+ * @param {string} manifestString
+ * The downloaded manifest string
+ * @param {Object[]} [customTagParsers]
+ * An array of custom tag parsers for the m3u8-parser instance
+ * @param {Object[]} [customTagMappers]
+ * An array of custom tag mappers for the m3u8-parser instance
+ * @param {boolean} [experimentalLLHLS=false]
+ * Whether to keep ll-hls features in the manifest after parsing.
+ * @return {Object}
+ * The manifest object
+ */
+
+
+var parseManifest = function parseManifest(_ref) {
+ var onwarn = _ref.onwarn,
+ oninfo = _ref.oninfo,
+ manifestString = _ref.manifestString,
+ _ref$customTagParsers = _ref.customTagParsers,
+ customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
+ _ref$customTagMappers = _ref.customTagMappers,
+ customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
+ experimentalLLHLS = _ref.experimentalLLHLS;
+ var parser = new m3u8Parser.Parser();
+
+ if (onwarn) {
+ parser.on('warn', onwarn);
+ }
+
+ if (oninfo) {
+ parser.on('info', oninfo);
+ }
+
+ customTagParsers.forEach(function (customParser) {
+ return parser.addParser(customParser);
+ });
+ customTagMappers.forEach(function (mapper) {
+ return parser.addTagMapper(mapper);
+ });
+ parser.push(manifestString);
+ parser.end();
+ var manifest = parser.manifest; // remove llhls features from the parsed manifest
+ // if we don't want llhls support.
+
+ if (!experimentalLLHLS) {
+ ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
+ if (manifest.hasOwnProperty(k)) {
+ delete manifest[k];
+ }
+ });
+
+ if (manifest.segments) {
+ manifest.segments.forEach(function (segment) {
+ ['parts', 'preloadHints'].forEach(function (k) {
+ if (segment.hasOwnProperty(k)) {
+ delete segment[k];
+ }
+ });
+ });
+ }
+ }
+
+ if (!manifest.targetDuration) {
+ var targetDuration = 10;
+
+ if (manifest.segments && manifest.segments.length) {
+ targetDuration = manifest.segments.reduce(function (acc, s) {
+ return Math.max(acc, s.duration);
+ }, 0);
+ }
+
+ if (onwarn) {
+ onwarn("manifest has no targetDuration defaulting to " + targetDuration);
+ }
+
+ manifest.targetDuration = targetDuration;
+ }
+
+ var parts = getLastParts(manifest);
+
+ if (parts.length && !manifest.partTargetDuration) {
+ var partTargetDuration = parts.reduce(function (acc, p) {
+ return Math.max(acc, p.duration);
+ }, 0);
+
+ if (onwarn) {
+ onwarn("manifest has no partTargetDuration defaulting to " + partTargetDuration);
+ log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
+ }
+
+ manifest.partTargetDuration = partTargetDuration;
+ }
+
+ return manifest;
+};
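+// A minimal sketch of parsing a simple VOD manifest string; the exact shape of
+// the returned object comes from m3u8-parser, so the values here are illustrative:
+//
+//   parseManifest({
+//     manifestString: '#EXTM3U\n#EXT-X-TARGETDURATION:6\n#EXTINF:6,\nsegment0.ts\n#EXT-X-ENDLIST\n'
+//   });
+//   // => roughly { targetDuration: 6, endList: true,
+//   //              segments: [{ duration: 6, uri: 'segment0.ts', ... }], ... }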
+/**
+ * Loops through all supported media groups in master and calls the provided
+ * callback for each group
+ *
+ * @param {Object} master
+ * The parsed master manifest object
+ * @param {Function} callback
+ * Callback to call for each media group
+ */
+
+
+var forEachMediaGroup = function forEachMediaGroup(master, callback) {
+ if (!master.mediaGroups) {
+ return;
+ }
+
+ ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
+ if (!master.mediaGroups[mediaType]) {
+ return;
+ }
+
+ for (var groupKey in master.mediaGroups[mediaType]) {
+ for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
+ var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
+ callback(mediaProperties, mediaType, groupKey, labelKey);
+ }
+ }
+ });
+};
+/**
+ * Adds properties and attributes to the playlist to keep consistent functionality for
+ * playlists throughout VHS.
+ *
+ * @param {Object} config
+ * Arguments object
+ * @param {Object} config.playlist
+ * The media playlist
+ * @param {string} [config.uri]
+ * The uri to the media playlist (if media playlist is not from within a master
+ * playlist)
+ * @param {string} config.id
+ * ID to use for the playlist
+ */
+
+
+var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
+ var playlist = _ref2.playlist,
+ uri = _ref2.uri,
+ id = _ref2.id;
+ playlist.id = id;
+ playlist.playlistErrors_ = 0;
+
+ if (uri) {
+ // For media playlists, m3u8-parser does not have access to a URI, as HLS media
+ // playlists do not contain their own source URI, but one is needed for consistency in
+ // VHS.
+ playlist.uri = uri;
+ } // For HLS master playlists, even though certain attributes MUST be defined, the
+ // stream may still be played without them.
+ // For HLS media playlists, m3u8-parser does not attach an attributes object to the
+ // manifest.
+ //
+ // To avoid undefined reference errors through the project, and make the code easier
+ // to write/read, add an empty attributes object for these cases.
+
+
+ playlist.attributes = playlist.attributes || {};
+};
+/**
+ * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
+ * necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
+ * playlist references to the playlists array.
+ *
+ * @param {Object} master
+ * The master playlist
+ */
+
+
+var setupMediaPlaylists = function setupMediaPlaylists(master) {
+ var i = master.playlists.length;
+
+ while (i--) {
+ var playlist = master.playlists[i];
+ setupMediaPlaylist({
+ playlist: playlist,
+ id: createPlaylistID(i, playlist.uri)
+ });
+ playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
+ master.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility
+
+ master.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
+ // the stream can be played without it. Although an attributes property may have been
+ // added to the playlist to prevent undefined references, issue a warning to fix the
+ // manifest.
+
+ if (!playlist.attributes.BANDWIDTH) {
+ log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
+ }
+ }
+};
+/**
+ * Adds resolvedUri properties to each media group.
+ *
+ * @param {Object} master
+ * The master playlist
+ */
+
+
+var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
+ forEachMediaGroup(master, function (properties) {
+ if (properties.uri) {
+ properties.resolvedUri = resolveUrl(master.uri, properties.uri);
+ }
+ });
+};
+/**
+ * Creates a master playlist wrapper to insert a sole media playlist into.
+ *
+ * @param {Object} media
+ * Media playlist
+ * @param {string} uri
+ * The media URI
+ *
+ * @return {Object}
+ * Master playlist
+ */
+
+
+var masterForMedia = function masterForMedia(media, uri) {
+ var id = createPlaylistID(0, uri);
+ var master = {
+ mediaGroups: {
+ 'AUDIO': {},
+ 'VIDEO': {},
+ 'CLOSED-CAPTIONS': {},
+ 'SUBTITLES': {}
+ },
+ uri: window__default['default'].location.href,
+ resolvedUri: window__default['default'].location.href,
+ playlists: [{
+ uri: uri,
+ id: id,
+ resolvedUri: uri,
+ // m3u8-parser does not attach an attributes property to media playlists so make
+ // sure that the property is attached to avoid undefined reference errors
+ attributes: {}
+ }]
+ }; // set up ID reference
+
+ master.playlists[id] = master.playlists[0]; // URI reference added for backwards compatibility
+
+ master.playlists[uri] = master.playlists[0];
+ return master;
+};
+/**
+ * Does an in-place update of the master manifest to add updated playlist URI references
+ * as well as other properties needed by VHS that aren't included by the parser.
+ *
+ * @param {Object} master
+ * Master manifest object
+ * @param {string} uri
+ * The source URI
+ */
+
+
+var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
+ master.uri = uri;
+
+ for (var i = 0; i < master.playlists.length; i++) {
+ if (!master.playlists[i].uri) {
+ // Set up phony URIs for the playlists since playlists are referenced by their URIs
+ // throughout VHS, but some formats (e.g., DASH) don't have external URIs
+ // TODO: consider adding dummy URIs in mpd-parser
+ var phonyUri = "placeholder-uri-" + i;
+ master.playlists[i].uri = phonyUri;
+ }
+ }
+
+ var audioOnlyMaster = isAudioOnly(master);
+ forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
+ var groupId = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey; // add a playlist array under properties
+
+ if (!properties.playlists || !properties.playlists.length) {
+ // If the manifest is audio only and this media group does not have a uri, check
+ // if the media group is located in the main list of playlists. If it is, don't add
+ // placeholder properties as it shouldn't be considered an alternate audio track.
+ if (audioOnlyMaster && mediaType === 'AUDIO' && !properties.uri) {
+ for (var _i = 0; _i < master.playlists.length; _i++) {
+ var p = master.playlists[_i];
+
+ if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
+ return;
+ }
+ }
+ }
+
+ properties.playlists = [_extends__default['default']({}, properties)];
+ }
+
+ properties.playlists.forEach(function (p, i) {
+ var id = createPlaylistID(i, groupId);
+
+ if (p.uri) {
+ p.resolvedUri = p.resolvedUri || resolveUrl(master.uri, p.uri);
+ } else {
+ // DEPRECATED, this has been added to prevent a breaking change.
+ // previously we only ever had a single media group playlist, so
+ // we mark the first playlist uri without prepending the index as we used to
+ // ideally we would do all of the playlists the same way.
+ p.uri = i === 0 ? groupId : id; // don't resolve a placeholder uri to an absolute url, just use
+ // the placeholder again
+
+ p.resolvedUri = p.uri;
+ }
+
+ p.id = p.id || id; // add an empty attributes object, all playlists are
+ // expected to have this.
+
+ p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)
+
+ master.playlists[p.id] = p;
+ master.playlists[p.uri] = p;
+ });
+ });
+ setupMediaPlaylists(master);
+ resolveMediaGroupUris(master);
+};
+
+var mergeOptions$2 = videojs.mergeOptions,
+ EventTarget$1 = videojs.EventTarget;
+
+var addLLHLSQueryDirectives = function addLLHLSQueryDirectives(uri, media) {
+ if (media.endList || !media.serverControl) {
+ return uri;
+ }
+
+ var parameters = {};
+
+ if (media.serverControl.canBlockReload) {
+ var preloadSegment = media.preloadSegment; // next msn is a zero based value, length is not.
+
+ var nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
+ // that we are going to request a part of that preload segment.
+ // the logic below is used to determine that.
+
+ if (preloadSegment) {
+ var parts = preloadSegment.parts || []; // _HLS_part is a zero based index
+
+ var nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to just the
+ // length of parts, then we know we had part preload hints
+ // and we need to add the _HLS_part= query
+
+ if (nextPart > -1 && nextPart !== parts.length - 1) {
+ // add existing parts to our preload hints
+ // eslint-disable-next-line
+ parameters._HLS_part = nextPart;
+ } // this if statement makes sure that we request the msn
+ // of the preload segment if:
+ // 1. the preload segment had parts (and was not yet a full segment)
+ // but was added to our segments array
+ // 2. the preload segment had preload hints for parts that are not in
+ // the manifest yet.
+ // in all other cases we want the segment after the preload segment
+ // which will be given by using media.segments.length because it is 1 based
+ // rather than 0 based.
+
+
+ if (nextPart > -1 || parts.length) {
+ nextMSN--;
+ }
+ } // add _HLS_msn= in front of any _HLS_part query
+ // eslint-disable-next-line
+
+
+ parameters._HLS_msn = nextMSN;
+ }
+
+ if (media.serverControl && media.serverControl.canSkipUntil) {
+    // add _HLS_skip= in front of all other queries.
+ // eslint-disable-next-line
+ parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
+ }
+
+ if (Object.keys(parameters).length) {
+ var parsedUri = new window__default['default'].URL(uri);
+ ['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
+ if (!parameters.hasOwnProperty(name)) {
+ return;
+ }
+
+ parsedUri.searchParams.set(name, parameters[name]);
+ });
+ uri = parsedUri.toString();
+ }
+
+ return uri;
+};
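+// A sketch of the directives added for a blocking-reload LL-HLS playlist
+// (hypothetical media object, no preload segment):
+//
+//   addLLHLSQueryDirectives('https://example.com/media.m3u8', {
+//     mediaSequence: 100,
+//     segments: [{}, {}, {}, {}, {}], // five segments already in the playlist
+//     serverControl: { canBlockReload: true, canSkipUntil: 12 }
+//   });
+//   // => 'https://example.com/media.m3u8?_HLS_skip=YES&_HLS_msn=105' (or the same
+//   // parameters in whichever order the URL API appends them)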
+/**
+ * Returns a new segment object with properties and
+ * the parts array merged.
+ *
+ * @param {Object} a the old segment
+ * @param {Object} b the new segment
+ *
+ * @return {Object} the merged segment
+ */
+
+
+var updateSegment = function updateSegment(a, b) {
+ if (!a) {
+ return b;
+ }
+
+ var result = mergeOptions$2(a, b); // if only the old segment has preload hints
+ // and the new one does not, remove preload hints.
+
+ if (a.preloadHints && !b.preloadHints) {
+ delete result.preloadHints;
+ } // if only the old segment has parts
+ // then the parts are no longer valid
+
+
+ if (a.parts && !b.parts) {
+ delete result.parts; // if both segments have parts
+    // copy part properties from the old segment
+ // to the new one.
+ } else if (a.parts && b.parts) {
+ for (var i = 0; i < b.parts.length; i++) {
+ if (a.parts && a.parts[i]) {
+ result.parts[i] = mergeOptions$2(a.parts[i], b.parts[i]);
+ }
+ }
+  } // set skipped to false for segments that have
+  // had information merged from the old segment.
+
+
+ if (!a.skipped && b.skipped) {
+ result.skipped = false;
+ } // set preload to false for segments that have
+ // had information added in the new segment.
+
+
+ if (a.preload && !b.preload) {
+ result.preload = false;
+ }
+
+ return result;
+};
+/**
+ * Returns a new array of segments that is the result of merging
+ * properties from an older list of segments onto an updated
+ * list. No properties on the updated playlist will be overwritten.
+ *
+ * @param {Array} original the outdated list of segments
+ * @param {Array} update the updated list of segments
+ * @param {number=} offset the index of the first update
+ * segment in the original segment list. For non-live playlists,
+ * this should always be zero and does not need to be
+ * specified. For live playlists, it should be the difference
+ * between the media sequence numbers in the original and updated
+ * playlists.
+ * @return {Array} a list of merged segment objects
+ */
+
+
+var updateSegments = function updateSegments(original, update, offset) {
+ var oldSegments = original.slice();
+ var newSegments = update.slice();
+ offset = offset || 0;
+ var result = [];
+ var currentMap;
+
+ for (var newIndex = 0; newIndex < newSegments.length; newIndex++) {
+ var oldSegment = oldSegments[newIndex + offset];
+ var newSegment = newSegments[newIndex];
+
+ if (oldSegment) {
+ currentMap = oldSegment.map || currentMap;
+ result.push(updateSegment(oldSegment, newSegment));
+ } else {
+ // carry over map to new segment if it is missing
+ if (currentMap && !newSegment.map) {
+ newSegment.map = currentMap;
+ }
+
+ result.push(newSegment);
+ }
+ }
+
+ return result;
+};
+
+var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
+ // preloadSegment will not have a uri at all
+ // as the segment isn't actually in the manifest yet, only parts
+ if (!segment.resolvedUri && segment.uri) {
+ segment.resolvedUri = resolveUrl(baseUri, segment.uri);
+ }
+
+ if (segment.key && !segment.key.resolvedUri) {
+ segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
+ }
+
+ if (segment.map && !segment.map.resolvedUri) {
+ segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
+ }
+
+ if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
+ segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
+ }
+
+ if (segment.parts && segment.parts.length) {
+ segment.parts.forEach(function (p) {
+ if (p.resolvedUri) {
+ return;
+ }
+
+ p.resolvedUri = resolveUrl(baseUri, p.uri);
+ });
+ }
+
+ if (segment.preloadHints && segment.preloadHints.length) {
+ segment.preloadHints.forEach(function (p) {
+ if (p.resolvedUri) {
+ return;
+ }
+
+ p.resolvedUri = resolveUrl(baseUri, p.uri);
+ });
+ }
+};
+
+var getAllSegments = function getAllSegments(media) {
+ var segments = media.segments || [];
+ var preloadSegment = media.preloadSegment; // a preloadSegment with only preloadHints is not currently
+ // a usable segment, only include a preloadSegment that has
+ // parts.
+
+ if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
+ // if preloadHints has a MAP that means that the
+ // init segment is going to change. We cannot use any of the parts
+ // from this preload segment.
+ if (preloadSegment.preloadHints) {
+ for (var i = 0; i < preloadSegment.preloadHints.length; i++) {
+ if (preloadSegment.preloadHints[i].type === 'MAP') {
+ return segments;
+ }
+ }
+ } // set the duration for our preload segment to target duration.
+
+
+ preloadSegment.duration = media.targetDuration;
+ preloadSegment.preload = true;
+ segments.push(preloadSegment);
+ }
+
+ return segments;
+}; // consider the playlist unchanged if the playlist object is the same or
+// the number of segments is equal, the media sequence number is unchanged,
+// and this playlist hasn't become the end of the playlist
+
+
+var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
+ return a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence && a.preloadSegment === b.preloadSegment;
+};
+/**
+ * Returns a new master playlist that is the result of merging an
+ * updated media playlist into the original version. If the
+ * updated media playlist does not match any of the playlist
+ * entries in the original master playlist, null is returned.
+ *
+ * @param {Object} master a parsed master M3U8 object
+ * @param {Object} media a parsed media M3U8 object
+ * @return {Object} a new object that represents the original
+ * master playlist with the updated media playlist merged in, or
+ * null if the merge produced no change.
+ */
+
+
+var updateMaster$1 = function updateMaster(master, newMedia, unchangedCheck) {
+ if (unchangedCheck === void 0) {
+ unchangedCheck = isPlaylistUnchanged;
+ }
+
+ var result = mergeOptions$2(master, {});
+ var oldMedia = result.playlists[newMedia.id];
+
+ if (!oldMedia) {
+ return null;
+ }
+
+ if (unchangedCheck(oldMedia, newMedia)) {
+ return null;
+ }
+
+ newMedia.segments = getAllSegments(newMedia);
+ var mergedPlaylist = mergeOptions$2(oldMedia, newMedia); // always use the new media's preload segment
+
+ if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
+ delete mergedPlaylist.preloadSegment;
+ } // if the update could overlap existing segment information, merge the two segment lists
+
+
+ if (oldMedia.segments) {
+ if (newMedia.skip) {
+ newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
+ // old properties into the new segments
+
+ for (var i = 0; i < newMedia.skip.skippedSegments; i++) {
+ newMedia.segments.unshift({
+ skipped: true
+ });
+ }
+ }
+
+ mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
+ } // resolve any segment URIs to prevent us from having to do it later
+
+
+ mergedPlaylist.segments.forEach(function (segment) {
+ resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
+ }); // TODO Right now in the playlists array there are two references to each playlist, one
+ // that is referenced by index, and one by URI. The index reference may no longer be
+ // necessary.
+
+ for (var _i = 0; _i < result.playlists.length; _i++) {
+ if (result.playlists[_i].id === newMedia.id) {
+ result.playlists[_i] = mergedPlaylist;
+ }
+ }
+
+ result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility
+
+ result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.
+
+ forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
+ if (!properties.playlists) {
+ return;
+ }
+
+ for (var _i2 = 0; _i2 < properties.playlists.length; _i2++) {
+ if (newMedia.id === properties.playlists[_i2].id) {
+ properties.playlists[_i2] = mergedPlaylist;
+ }
+ }
+ });
+ return result;
+};
+/**
+ * Calculates the time to wait before refreshing a live playlist
+ *
+ * @param {Object} media
+ * The current media
+ * @param {boolean} update
+ * True if there were any updates from the last refresh, false otherwise
+ * @return {number}
+ * The time in ms to wait before refreshing the live playlist
+ */
+
+
+var refreshDelay = function refreshDelay(media, update) {
+ var segments = media.segments || [];
+ var lastSegment = segments[segments.length - 1];
+ var lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
+ var lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
+
+ if (update && lastDuration) {
+ return lastDuration * 1000;
+ } // if the playlist is unchanged since the last reload or last segment duration
+ // cannot be determined, try again after half the target duration
+
+
+ return (media.partTargetDuration || media.targetDuration || 10) * 500;
+};
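+// Illustrating the refresh timing with hypothetical values:
+//
+//   refreshDelay({ targetDuration: 6, segments: [{ duration: 6 }, { duration: 5 }] }, true);
+//   // => 5000 (ms): the last segment's duration
+//
+//   refreshDelay({ targetDuration: 6, segments: [{ duration: 6 }, { duration: 5 }] }, false);
+//   // => 3000 (ms): half the target duration when nothing changed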
+/**
+ * Load a playlist from a remote location
+ *
+ * @class PlaylistLoader
+ * @extends Stream
+ * @param {string|Object} src url or object of manifest
+ * @param {Object} vhs the VHS instance that provides the xhr wrapper and options
+ * @param {Object=} options loader options such as withCredentials and
+ *        handleManifestRedirects
+ */
+
+
+var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
+ _inheritsLoose__default['default'](PlaylistLoader, _EventTarget);
+
+ function PlaylistLoader(src, vhs, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _EventTarget.call(this) || this;
+
+ if (!src) {
+ throw new Error('A non-empty playlist URL or object is required');
+ }
+
+ _this.logger_ = logger('PlaylistLoader');
+ var _options = options,
+ _options$withCredenti = _options.withCredentials,
+ withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
+ _options$handleManife = _options.handleManifestRedirects,
+ handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
+ _this.src = src;
+ _this.vhs_ = vhs;
+ _this.withCredentials = withCredentials;
+ _this.handleManifestRedirects = handleManifestRedirects;
+ var vhsOptions = vhs.options_;
+ _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
+ _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
+ _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // force experimentalLLHLS for IE 11
+
+ if (videojs.browser.IE_VERSION) {
+ _this.experimentalLLHLS = false;
+ } // initialize the loader state
+
+
+ _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout
+
+ _this.handleMediaupdatetimeout_ = _this.handleMediaupdatetimeout_.bind(_assertThisInitialized__default['default'](_this));
+
+ _this.on('mediaupdatetimeout', _this.handleMediaupdatetimeout_);
+
+ return _this;
+ }
+
+ var _proto = PlaylistLoader.prototype;
+
+ _proto.handleMediaupdatetimeout_ = function handleMediaupdatetimeout_() {
+ var _this2 = this;
+
+ if (this.state !== 'HAVE_METADATA') {
+ // only refresh the media playlist if no other activity is going on
+ return;
+ }
+
+ var media = this.media();
+ var uri = resolveUrl(this.master.uri, media.uri);
+
+ if (this.experimentalLLHLS) {
+ uri = addLLHLSQueryDirectives(uri, media);
+ }
+
+ this.state = 'HAVE_CURRENT_METADATA';
+ this.request = this.vhs_.xhr({
+ uri: uri,
+ withCredentials: this.withCredentials
+ }, function (error, req) {
+ // disposed
+ if (!_this2.request) {
+ return;
+ }
+
+ if (error) {
+ return _this2.playlistRequestError(_this2.request, _this2.media(), 'HAVE_METADATA');
+ }
+
+ _this2.haveMetadata({
+ playlistString: _this2.request.responseText,
+ url: _this2.media().uri,
+ id: _this2.media().id
+ });
+ });
+ };
+
+ _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
+ var uri = playlist.uri,
+ id = playlist.id; // any in-flight request is now finished
+
+ this.request = null;
+
+ if (startingState) {
+ this.state = startingState;
+ }
+
+ this.error = {
+ playlist: this.master.playlists[id],
+ status: xhr.status,
+ message: "HLS playlist request error at URL: " + uri + ".",
+ responseText: xhr.responseText,
+ code: xhr.status >= 500 ? 4 : 2
+ };
+ this.trigger('error');
+ };
+
+ _proto.parseManifest_ = function parseManifest_(_ref) {
+ var _this3 = this;
+
+ var url = _ref.url,
+ manifestString = _ref.manifestString;
+ return parseManifest({
+ onwarn: function onwarn(_ref2) {
+ var message = _ref2.message;
+ return _this3.logger_("m3u8-parser warn for " + url + ": " + message);
+ },
+ oninfo: function oninfo(_ref3) {
+ var message = _ref3.message;
+ return _this3.logger_("m3u8-parser info for " + url + ": " + message);
+ },
+ manifestString: manifestString,
+ customTagParsers: this.customTagParsers,
+ customTagMappers: this.customTagMappers,
+ experimentalLLHLS: this.experimentalLLHLS
+ });
+ }
+ /**
+ * Update the playlist loader's state in response to a new or updated playlist.
+ *
+ * @param {string} [playlistString]
+ * Playlist string (if playlistObject is not provided)
+ * @param {Object} [playlistObject]
+ * Playlist object (if playlistString is not provided)
+ * @param {string} url
+ * URL of playlist
+ * @param {string} id
+ * ID to use for playlist
+ */
+ ;
+
+ _proto.haveMetadata = function haveMetadata(_ref4) {
+ var playlistString = _ref4.playlistString,
+ playlistObject = _ref4.playlistObject,
+ url = _ref4.url,
+ id = _ref4.id; // any in-flight request is now finished
+
+ this.request = null;
+ this.state = 'HAVE_METADATA';
+ var playlist = playlistObject || this.parseManifest_({
+ url: url,
+ manifestString: playlistString
+ });
+ playlist.lastRequest = Date.now();
+ setupMediaPlaylist({
+ playlist: playlist,
+ uri: url,
+ id: id
+ }); // merge this playlist into the master
+
+ var update = updateMaster$1(this.master, playlist);
+ this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
+ this.pendingMedia_ = null;
+
+ if (update) {
+ this.master = update;
+ this.media_ = this.master.playlists[id];
+ } else {
+ this.trigger('playlistunchanged');
+ }
+
+ this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
+ this.trigger('loadedplaylist');
+ }
+ /**
+ * Abort any outstanding work and clean up.
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ this.trigger('dispose');
+ this.stopRequest();
+ window__default['default'].clearTimeout(this.mediaUpdateTimeout);
+ window__default['default'].clearTimeout(this.finalRenditionTimeout);
+ this.off();
+ };
+
+ _proto.stopRequest = function stopRequest() {
+ if (this.request) {
+ var oldRequest = this.request;
+ this.request = null;
+ oldRequest.onreadystatechange = null;
+ oldRequest.abort();
+ }
+ }
+ /**
+ * When called without any arguments, returns the currently
+ * active media playlist. When called with a single argument,
+ * triggers the playlist loader to asynchronously switch to the
+ * specified media playlist. Calling this method while the
+   * loader is in the HAVE_NOTHING state causes an error to be emitted
+ * but otherwise has no effect.
+ *
+ * @param {Object=} playlist the parsed media playlist
+ * object to switch to
+ * @param {boolean=} shouldDelay whether we should delay the request by half target duration
+ *
+ * @return {Playlist} the current loaded media
+ */
+ ;
+
+ _proto.media = function media(playlist, shouldDelay) {
+ var _this4 = this; // getter
+
+
+ if (!playlist) {
+ return this.media_;
+ } // setter
+
+
+ if (this.state === 'HAVE_NOTHING') {
+ throw new Error('Cannot switch media playlist from ' + this.state);
+ } // find the playlist object if the target playlist has been
+ // specified by URI
+
+
+ if (typeof playlist === 'string') {
+ if (!this.master.playlists[playlist]) {
+ throw new Error('Unknown playlist URI: ' + playlist);
+ }
+
+ playlist = this.master.playlists[playlist];
+ }
+
+ window__default['default'].clearTimeout(this.finalRenditionTimeout);
+
+ if (shouldDelay) {
+ var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
+ this.finalRenditionTimeout = window__default['default'].setTimeout(this.media.bind(this, playlist, false), delay);
+ return;
+ }
+
+ var startingState = this.state;
+ var mediaChange = !this.media_ || playlist.id !== this.media_.id;
+ var masterPlaylistRef = this.master.playlists[playlist.id]; // switch to fully loaded playlists immediately
+
+ if (masterPlaylistRef && masterPlaylistRef.endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
+ // media playlist or, for the case of demuxed audio, a resolved audio media group)
+ playlist.endList && playlist.segments.length) {
+ // abort outstanding playlist requests
+ if (this.request) {
+ this.request.onreadystatechange = null;
+ this.request.abort();
+ this.request = null;
+ }
+
+ this.state = 'HAVE_METADATA';
+ this.media_ = playlist; // trigger media change if the active media has been updated
+
+ if (mediaChange) {
+ this.trigger('mediachanging');
+
+ if (startingState === 'HAVE_MASTER') {
+ // The initial playlist was a master manifest, and the first media selected was
+ // also provided (in the form of a resolved playlist object) as part of the
+ // source object (rather than just a URL). Therefore, since the media playlist
+ // doesn't need to be requested, loadedmetadata won't trigger as part of the
+ // normal flow, and needs an explicit trigger here.
+ this.trigger('loadedmetadata');
+ } else {
+ this.trigger('mediachange');
+ }
+ }
+
+ return;
+ } // We update/set the timeout here so that live playlists
+ // that are not a media change will "start" the loader as expected.
+ // We expect that this function will start the media update timeout
+ // cycle again. This also prevents a playlist switch failure from
+ // causing us to stall during live.
+
+
+ this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op
+
+ if (!mediaChange) {
+ return;
+ }
+
+ this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request
+
+ if (this.request) {
+ if (playlist.resolvedUri === this.request.url) {
+ // requesting to switch to the same playlist multiple times
+ // has no effect after the first
+ return;
+ }
+
+ this.request.onreadystatechange = null;
+ this.request.abort();
+ this.request = null;
+ } // request the new playlist
+
+
+ if (this.media_) {
+ this.trigger('mediachanging');
+ }
+
+ this.pendingMedia_ = playlist;
+ this.request = this.vhs_.xhr({
+ uri: playlist.resolvedUri,
+ withCredentials: this.withCredentials
+ }, function (error, req) {
+ // disposed
+ if (!_this4.request) {
+ return;
+ }
+
+ playlist.lastRequest = Date.now();
+ playlist.resolvedUri = resolveManifestRedirect(_this4.handleManifestRedirects, playlist.resolvedUri, req);
+
+ if (error) {
+ return _this4.playlistRequestError(_this4.request, playlist, startingState);
+ }
+
+ _this4.haveMetadata({
+ playlistString: req.responseText,
+ url: playlist.uri,
+ id: playlist.id
+ }); // fire loadedmetadata the first time a media playlist is loaded
+
+
+ if (startingState === 'HAVE_MASTER') {
+ _this4.trigger('loadedmetadata');
+ } else {
+ _this4.trigger('mediachange');
+ }
+ });
+ }
+ /**
+ * pause loading of the playlist
+ */
+ ;
+
+ _proto.pause = function pause() {
+ if (this.mediaUpdateTimeout) {
+ window__default['default'].clearTimeout(this.mediaUpdateTimeout);
+ this.mediaUpdateTimeout = null;
+ }
+
+ this.stopRequest();
+
+ if (this.state === 'HAVE_NOTHING') {
+      // If we pause the loader before any data has been retrieved, it's as if we never
+ // started, so reset to an unstarted state.
+ this.started = false;
+ } // Need to restore state now that no activity is happening
+
+
+ if (this.state === 'SWITCHING_MEDIA') {
+ // if the loader was in the process of switching media, it should either return to
+ // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
+ // playlist yet. This is determined by the existence of loader.media_
+ if (this.media_) {
+ this.state = 'HAVE_METADATA';
+ } else {
+ this.state = 'HAVE_MASTER';
+ }
+ } else if (this.state === 'HAVE_CURRENT_METADATA') {
+ this.state = 'HAVE_METADATA';
+ }
+ }
+ /**
+ * start loading of the playlist
+ */
+ ;
+
+ _proto.load = function load(shouldDelay) {
+ var _this5 = this;
+
+ if (this.mediaUpdateTimeout) {
+ window__default['default'].clearTimeout(this.mediaUpdateTimeout);
+ this.mediaUpdateTimeout = null;
+ }
+
+ var media = this.media();
+
+ if (shouldDelay) {
+ var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
+ this.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
+ _this5.mediaUpdateTimeout = null;
+
+ _this5.load();
+ }, delay);
+ return;
+ }
+
+ if (!this.started) {
+ this.start();
+ return;
+ }
+
+ if (media && !media.endList) {
+ this.trigger('mediaupdatetimeout');
+ } else {
+ this.trigger('loadedplaylist');
+ }
+ };
+
+ _proto.updateMediaUpdateTimeout_ = function updateMediaUpdateTimeout_(delay) {
+ var _this6 = this;
+
+ if (this.mediaUpdateTimeout) {
+ window__default['default'].clearTimeout(this.mediaUpdateTimeout);
+ this.mediaUpdateTimeout = null;
+    } // we only use mediaupdatetimeout for live playlists.
+
+
+ if (!this.media() || this.media().endList) {
+ return;
+ }
+
+ this.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
+ _this6.mediaUpdateTimeout = null;
+
+ _this6.trigger('mediaupdatetimeout');
+
+ _this6.updateMediaUpdateTimeout_(delay);
+ }, delay);
+ }
+ /**
+ * start loading of the playlist
+ */
+ ;
+
+ _proto.start = function start() {
+ var _this7 = this;
+
+ this.started = true;
+
+ if (typeof this.src === 'object') {
+ // in the case of an entirely constructed manifest object (meaning there's no actual
+ // manifest on a server), default the uri to the page's href
+ if (!this.src.uri) {
+ this.src.uri = window__default['default'].location.href;
+ } // resolvedUri is added on internally after the initial request. Since there's no
+ // request for pre-resolved manifests, add on resolvedUri here.
+
+
+ this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
+ // request can be skipped (since the top level of the manifest, at a minimum, is
+ // already available as a parsed manifest object). However, if the manifest object
+ // represents a master playlist, some media playlists may need to be resolved before
+ // the starting segment list is available. Therefore, go directly to setup of the
+ // initial playlist, and let the normal flow continue from there.
+ //
+ // Note that the call to setup is asynchronous, as other sections of VHS may assume
+ // that the first request is asynchronous.
+
+ setTimeout(function () {
+ _this7.setupInitialPlaylist(_this7.src);
+ }, 0);
+ return;
+ } // request the specified URL
+
+
+ this.request = this.vhs_.xhr({
+ uri: this.src,
+ withCredentials: this.withCredentials
+ }, function (error, req) {
+ // disposed
+ if (!_this7.request) {
+ return;
+ } // clear the loader's request reference
+
+
+ _this7.request = null;
+
+ if (error) {
+ _this7.error = {
+ status: req.status,
+ message: "HLS playlist request error at URL: " + _this7.src + ".",
+ responseText: req.responseText,
+ // MEDIA_ERR_NETWORK
+ code: 2
+ };
+
+ if (_this7.state === 'HAVE_NOTHING') {
+ _this7.started = false;
+ }
+
+ return _this7.trigger('error');
+ }
+
+ _this7.src = resolveManifestRedirect(_this7.handleManifestRedirects, _this7.src, req);
+
+ var manifest = _this7.parseManifest_({
+ manifestString: req.responseText,
+ url: _this7.src
+ });
+
+ _this7.setupInitialPlaylist(manifest);
+ });
+ };
+
+ _proto.srcUri = function srcUri() {
+ return typeof this.src === 'string' ? this.src : this.src.uri;
+ }
+ /**
+ * Given a manifest object that's either a master or media playlist, trigger the proper
+ * events and set the state of the playlist loader.
+ *
+ * If the manifest object represents a master playlist, `loadedplaylist` will be
+ * triggered to allow listeners to select a playlist. If none is selected, the loader
+ * will default to the first one in the playlists array.
+ *
+ * If the manifest object represents a media playlist, `loadedplaylist` will be
+ * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
+ *
+ * In the case of a media playlist, a master playlist object wrapper with one playlist
+ * will be created so that all logic can handle playlists in the same fashion (as an
+ * assumed manifest object schema).
+ *
+ * @param {Object} manifest
+ * The parsed manifest object
+ */
+ ;
+
+ _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
+ this.state = 'HAVE_MASTER';
+
+ if (manifest.playlists) {
+ this.master = manifest;
+      addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists with segments already resolved,
+ // then resolve URIs in advance, as they are usually done after a playlist request,
+ // which may not happen if the playlist is resolved.
+
+ manifest.playlists.forEach(function (playlist) {
+ playlist.segments = getAllSegments(playlist);
+ playlist.segments.forEach(function (segment) {
+ resolveSegmentUris(segment, playlist.resolvedUri);
+ });
+ });
+ this.trigger('loadedplaylist');
+
+ if (!this.request) {
+ // no media playlist was specifically selected so start
+ // from the first listed one
+ this.media(this.master.playlists[0]);
+ }
+
+ return;
+ } // In order to support media playlists passed in as vhs-json, the case where the uri
+ // is not provided as part of the manifest should be considered, and an appropriate
+ // default used.
+
+
+ var uri = this.srcUri() || window__default['default'].location.href;
+ this.master = masterForMedia(manifest, uri);
+ this.haveMetadata({
+ playlistObject: manifest,
+ url: uri,
+ id: this.master.playlists[0].id
+ });
+ this.trigger('loadedmetadata');
+ };
+
+ return PlaylistLoader;
+}(EventTarget$1);
+/**
+ * @file xhr.js
+ */
+
+
+var videojsXHR = videojs.xhr,
+ mergeOptions$1 = videojs.mergeOptions;
+
+var callbackWrapper = function callbackWrapper(request, error, response, callback) {
+ var reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
+
+ if (!error && reqResponse) {
+ request.responseTime = Date.now();
+ request.roundTripTime = request.responseTime - request.requestTime;
+ request.bytesReceived = reqResponse.byteLength || reqResponse.length;
+
+ if (!request.bandwidth) {
+ request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
+ }
+ }
+
+ if (response.headers) {
+ request.responseHeaders = response.headers;
+ } // videojs.xhr now uses a specific code on the error
+ // object to signal that a request has timed out instead
+ // of setting a boolean on the request object
+
+
+ if (error && error.code === 'ETIMEDOUT') {
+ request.timedout = true;
+ } // videojs.xhr no longer considers status codes outside of 200 and 0
+ // (for file uris) to be errors, but the old XHR did, so emulate that
+ // behavior. Status 206 may be used in response to byterange requests.
+
+
+ if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
+ error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
+ }
+
+ callback(error, request);
+};
+
+var xhrFactory = function xhrFactory() {
+ var xhr = function XhrFunction(options, callback) {
+ // Add a default timeout
+ options = mergeOptions$1({
+ timeout: 45e3
+ }, options); // Allow an optional user-specified function to modify the option
+ // object before we construct the xhr request
+
+ var beforeRequest = XhrFunction.beforeRequest || videojs.Vhs.xhr.beforeRequest;
+
+ if (beforeRequest && typeof beforeRequest === 'function') {
+ var newOptions = beforeRequest(options);
+
+ if (newOptions) {
+ options = newOptions;
+ }
+    } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
+ // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
+
+
+ var xhrMethod = videojs.Vhs.xhr.original === true ? videojsXHR : videojs.Vhs.xhr;
+ var request = xhrMethod(options, function (error, response) {
+ return callbackWrapper(request, error, response, callback);
+ });
+ var originalAbort = request.abort;
+
+ request.abort = function () {
+ request.aborted = true;
+ return originalAbort.apply(request, arguments);
+ };
+
+ request.uri = options.uri;
+ request.requestTime = Date.now();
+ return request;
+ };
+
+ xhr.original = true;
+ return xhr;
+};
+/**
+ * Turns segment byterange into a string suitable for use in
+ * HTTP Range requests
+ *
+ * @param {Object} byterange - an object with two values defining the start and end
+ * of a byte-range
+ */
+
+
+var byterangeStr = function byterangeStr(byterange) {
+ // `byterangeEnd` is one less than `offset + length` because the HTTP range
+ // header uses inclusive ranges
+ var byterangeEnd;
+ var byterangeStart = byterange.offset;
+
+ if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
+ byterangeEnd = window__default['default'].BigInt(byterange.offset) + window__default['default'].BigInt(byterange.length) - window__default['default'].BigInt(1);
+ } else {
+ byterangeEnd = byterange.offset + byterange.length - 1;
+ }
+
+ return 'bytes=' + byterangeStart + '-' + byterangeEnd;
+};
+/**
+ * Defines headers for use in the xhr request for a particular segment.
+ *
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ */
+
+
+var segmentXhrHeaders = function segmentXhrHeaders(segment) {
+ var headers = {};
+
+ if (segment.byterange) {
+ headers.Range = byterangeStr(segment.byterange);
+ }
+
+ return headers;
+};
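+// A sketch of the Range header produced for a byterange segment (hypothetical
+// offsets); the end value is inclusive, so it is offset + length - 1:
+//
+//   byterangeStr({ offset: 600, length: 400 });                      // => 'bytes=600-999'
+//   segmentXhrHeaders({ byterange: { offset: 600, length: 400 } });  // => { Range: 'bytes=600-999' }
+//   segmentXhrHeaders({});                                           // => {}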
+/**
+ * @file bin-utils.js
+ */
+
+/**
+ * convert a TimeRange to text
+ *
+ * @param {TimeRange} range the timerange to use for conversion
+ * @param {number} i the iterator on the range to convert
+ * @return {string} the range in string format
+ */
+
+
+var textRange = function textRange(range, i) {
+ return range.start(i) + '-' + range.end(i);
+};
+/**
+ * format a number as hex string
+ *
+ * @param {number} e The number
+ * @param {number} i the iterator
+ * @return {string} the hex formatted number as a string
+ */
+
+
+var formatHexString = function formatHexString(e, i) {
+ var value = e.toString(16);
+ return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
+};
+
+var formatAsciiString = function formatAsciiString(e) {
+ if (e >= 0x20 && e < 0x7e) {
+ return String.fromCharCode(e);
+ }
+
+ return '.';
+};
+/**
+ * Creates an object for sending to a web worker modifying properties that are TypedArrays
+ * into a new object with separated properties for the buffer, byteOffset, and byteLength.
+ *
+ * @param {Object} message
+ * Object of properties and values to send to the web worker
+ * @return {Object}
+ * Modified message with TypedArray values expanded
+ * @function createTransferableMessage
+ */
+
+
+var createTransferableMessage = function createTransferableMessage(message) {
+ var transferable = {};
+ Object.keys(message).forEach(function (key) {
+ var value = message[key];
+
+ if (byteHelpers.isArrayBufferView(value)) {
+ transferable[key] = {
+ bytes: value.buffer,
+ byteOffset: value.byteOffset,
+ byteLength: value.byteLength
+ };
+ } else {
+ transferable[key] = value;
+ }
+ });
+ return transferable;
+};
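+// For example, { data: new Uint8Array([1, 2, 3]) } becomes
+// { data: { bytes: <the underlying ArrayBuffer>, byteOffset: 0, byteLength: 3 } },
+// so the underlying buffers can be listed as transferables when posting to the worker.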
+/**
+ * Returns a unique string identifier for a media initialization
+ * segment.
+ *
+ * @param {Object} initSegment
+ * the init segment object.
+ *
+ * @return {string} the generated init segment id
+ */
+
+
+var initSegmentId = function initSegmentId(initSegment) {
+ var byterange = initSegment.byterange || {
+ length: Infinity,
+ offset: 0
+ };
+ return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
+};
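+// For example, an init segment with byterange { length: 720, offset: 0 } and a
+// resolvedUri of 'init.mp4' (illustrative value) gets the id '720,0,init.mp4';
+// init segments without a byterange fall back to 'Infinity,0,<resolvedUri>'.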
+/**
+ * Returns a unique string identifier for a media segment key.
+ *
+ * @param {Object} key the encryption key
+ * @return {string} the unique id for the media segment key.
+ */
+
+
+var segmentKeyId = function segmentKeyId(key) {
+ return key.resolvedUri;
+};
+/**
+ * utils to help dump binary data to the console
+ *
+ * @param {Array|TypedArray} data
+ * data to dump to a string
+ *
+ * @return {string} the data as a hex string.
+ */
+
+
+var hexDump = function hexDump(data) {
+ var bytes = Array.prototype.slice.call(data);
+ var step = 16;
+ var result = '';
+ var hex;
+ var ascii;
+
+ for (var j = 0; j < bytes.length / step; j++) {
+ hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
+ ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
+ result += hex + ' ' + ascii + '\n';
+ }
+
+ return result;
+};
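+// For example, hexDump(new Uint8Array([0x54, 0x53])) yields a row containing the hex
+// pair '5453' followed by its ASCII rendering 'TS'; rows hold up to 16 bytes and
+// non-printable bytes are rendered as '.'.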
+
+var tagDump = function tagDump(_ref) {
+ var bytes = _ref.bytes;
+ return hexDump(bytes);
+};
+
+var textRanges = function textRanges(ranges) {
+ var result = '';
+ var i;
+
+ for (i = 0; i < ranges.length; i++) {
+ result += textRange(ranges, i) + ' ';
+ }
+
+ return result;
+};
+
+var utils = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ createTransferableMessage: createTransferableMessage,
+ initSegmentId: initSegmentId,
+ segmentKeyId: segmentKeyId,
+ hexDump: hexDump,
+ tagDump: tagDump,
+ textRanges: textRanges
+}); // TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
+// 25% was arbitrarily chosen, and may need to be refined over time.
+
+var SEGMENT_END_FUDGE_PERCENT = 0.25;
+/**
+ * Converts a player time (any time that can be gotten/set from player.currentTime(),
+ * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
+ * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
+ *
+ * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
+ * point" (a point where we have a mapping from program time to player time, with player
+ * time being the post transmux start of the segment).
+ *
+ * For more details, see [this doc](../../docs/program-time-from-player-time.md).
+ *
+ * @param {number} playerTime the player time
+ * @param {Object} segment the segment which contains the player time
+ * @return {Date} program time
+ */
+
+var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
+ if (!segment.dateTimeObject) {
+ // Can't convert without an "anchor point" for the program time (i.e., a time that can
+ // be used to map the start of a segment with a real world time).
+ return null;
+ }
+
+ var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
+ var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prepended
+
+ var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
+ var offsetFromSegmentStart = playerTime - startOfSegment;
+ return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
+};
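+// For example (illustrative values): with transmuxedPresentationStart = 8,
+// transmuxerPrependedSeconds = 0.5 and playerTime = 10, the offset from the segment
+// start is 10 - (8 + 0.5) = 1.5s, so the returned Date is dateTimeObject + 1500ms.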
+
+var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
+ return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
+};
+/**
+ * Finds a segment that contains the time requested given as an ISO-8601 string. The
+ * returned segment might be an estimate or an accurate match.
+ *
+ * @param {string} programTime The ISO-8601 programTime to find a match for
+ * @param {Object} playlist A playlist object to search within
+ */
+
+
+var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
+ // Assumptions:
+ // - verifyProgramDateTimeTags has already been run
+ // - live streams have been started
+ var dateTimeObject;
+
+ try {
+ dateTimeObject = new Date(programTime);
+ } catch (e) {
+ return null;
+ }
+
+ if (!playlist || !playlist.segments || playlist.segments.length === 0) {
+ return null;
+ }
+
+ var segment = playlist.segments[0];
+
+ if (dateTimeObject < segment.dateTimeObject) {
+ // Requested time is before stream start.
+ return null;
+ }
+
+ for (var i = 0; i < playlist.segments.length - 1; i++) {
+ segment = playlist.segments[i];
+ var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
+
+ if (dateTimeObject < nextSegmentStart) {
+ break;
+ }
+ }
+
+ var lastSegment = playlist.segments[playlist.segments.length - 1];
+ var lastSegmentStart = lastSegment.dateTimeObject;
+ var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
+ var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
+
+ if (dateTimeObject > lastSegmentEnd) {
+ // Beyond the end of the stream, or our best guess of the end of the stream.
+ return null;
+ }
+
+ if (dateTimeObject > lastSegmentStart) {
+ segment = lastSegment;
+ }
+
+ return {
+ segment: segment,
+ estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
+    // Although all segments have accurate date time objects, so the selected segment
+    // should be the correct one, the segment's "player time" (its start time in the
+    // player) can't be considered accurate unless the video has been transmuxed at
+    // some point (determined by the presence of the videoTimingInfo object).
+ type: segment.videoTimingInfo ? 'accurate' : 'estimate'
+ };
+};
+/**
+ * Finds a segment that contains the given player time(in seconds).
+ *
+ * @param {number} time The player time to find a match for
+ * @param {Object} playlist A playlist object to search within
+ */
+
+
+var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
+ // Assumptions:
+ // - there will always be a segment.duration
+ // - we can start from zero
+ // - segments are in time order
+ if (!playlist || !playlist.segments || playlist.segments.length === 0) {
+ return null;
+ }
+
+ var segmentEnd = 0;
+ var segment;
+
+ for (var i = 0; i < playlist.segments.length; i++) {
+ segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
+ // should contain the most accurate values we have for the segment's player times.
+ //
+ // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
+ // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
+ // calculate an end value.
+
+ segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
+
+ if (time <= segmentEnd) {
+ break;
+ }
+ }
+
+ var lastSegment = playlist.segments[playlist.segments.length - 1];
+
+ if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
+ // The time requested is beyond the stream end.
+ return null;
+ }
+
+ if (time > segmentEnd) {
+ // The time is within or beyond the last segment.
+ //
+ // Check to see if the time is beyond a reasonable guess of the end of the stream.
+ if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
+ // Technically, because the duration value is only an estimate, the time may still
+ // exist in the last segment, however, there isn't enough information to make even
+ // a reasonable estimate.
+ return null;
+ }
+
+ segment = lastSegment;
+ }
+
+ return {
+ segment: segment,
+ estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
+ // Because videoTimingInfo is only set after transmux, it is the only way to get
+ // accurate timing values.
+ type: segment.videoTimingInfo ? 'accurate' : 'estimate'
+ };
+};
+/**
+ * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
+ * If the offset returned is positive, the programTime occurs after the
+ * comparisonTimestamp.
+ * If the offset is negative, the programTime occurs before the comparisonTimestamp.
+ *
+ * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
+ * @param {string} programTime The programTime as an ISO-8601 string
+ * @return {number} offset
+ */
+
+
+var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
+ var segmentDateTime;
+ var programDateTime;
+
+ try {
+ segmentDateTime = new Date(comparisonTimeStamp);
+ programDateTime = new Date(programTime);
+ } catch (e) {// TODO handle error
+ }
+
+ var segmentTimeEpoch = segmentDateTime.getTime();
+ var programTimeEpoch = programDateTime.getTime();
+ return (programTimeEpoch - segmentTimeEpoch) / 1000;
+};
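+// For example, getOffsetFromTimestamp('2022-01-01T00:00:00Z', '2022-01-01T00:00:10Z')
+// returns 10, since the programTime occurs 10 seconds after the comparison timestamp.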
+/**
+ * Checks that all segments in this playlist have programDateTime tags.
+ *
+ * @param {Object} playlist A playlist object
+ */
+
+
+var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
+ if (!playlist.segments || playlist.segments.length === 0) {
+ return false;
+ }
+
+ for (var i = 0; i < playlist.segments.length; i++) {
+ var segment = playlist.segments[i];
+
+ if (!segment.dateTimeObject) {
+ return false;
+ }
+ }
+
+ return true;
+};
+/**
+ * Returns the programTime of the media given a playlist and a playerTime.
+ * The playlist must have programDateTime tags for a programDateTime tag to be returned.
+ * If the segments containing the time requested have not been buffered yet, an estimate
+ * may be returned to the callback.
+ *
+ * @param {Object} args
+ * @param {Object} args.playlist A playlist object to search within
+ * @param {number} time A playerTime in seconds
+ * @param {Function} callback(err, programTime)
+ * @return {string} err.message A detailed error message
+ * @return {Object} programTime
+ * @return {number} programTime.mediaSeconds The streamTime in seconds
+ * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
+ */
+
+
+var getProgramTime = function getProgramTime(_ref) {
+ var playlist = _ref.playlist,
+ _ref$time = _ref.time,
+ time = _ref$time === void 0 ? undefined : _ref$time,
+ callback = _ref.callback;
+
+ if (!callback) {
+ throw new Error('getProgramTime: callback must be provided');
+ }
+
+ if (!playlist || time === undefined) {
+ return callback({
+ message: 'getProgramTime: playlist and time must be provided'
+ });
+ }
+
+ var matchedSegment = findSegmentForPlayerTime(time, playlist);
+
+ if (!matchedSegment) {
+ return callback({
+ message: 'valid programTime was not found'
+ });
+ }
+
+ if (matchedSegment.type === 'estimate') {
+ return callback({
+ message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
+ seekTime: matchedSegment.estimatedStart
+ });
+ }
+
+ var programTimeObject = {
+ mediaSeconds: time
+ };
+ var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
+
+ if (programTime) {
+ programTimeObject.programDateTime = programTime.toISOString();
+ }
+
+ return callback(null, programTimeObject);
+};
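+// Illustrative usage: getProgramTime({ playlist: somePlaylist, time: 42, callback: cb });
+// if only an estimate is available, cb receives an error whose seekTime property tells
+// the caller where to seek before retrying.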
+/**
+ * Seeks in the player to a time that matches the given programTime ISO-8601 string.
+ *
+ * @param {Object} args
+ * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
+ * @param {Object} args.playlist A playlist to look within
+ * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
+ * @param {Function} args.seekTo A method to perform a seek
+ * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
+ * @param {Object} args.tech The tech to seek on
+ * @param {Function} args.callback(err, newTime) A callback to return the new time to
+ * @return {string} err.message A detailed error message
+ * @return {number} newTime The exact time that was seeked to in seconds
+ */
+
+
+var seekToProgramTime = function seekToProgramTime(_ref2) {
+ var programTime = _ref2.programTime,
+ playlist = _ref2.playlist,
+ _ref2$retryCount = _ref2.retryCount,
+ retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
+ seekTo = _ref2.seekTo,
+ _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
+ pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
+ tech = _ref2.tech,
+ callback = _ref2.callback;
+
+ if (!callback) {
+ throw new Error('seekToProgramTime: callback must be provided');
+ }
+
+ if (typeof programTime === 'undefined' || !playlist || !seekTo) {
+ return callback({
+ message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
+ });
+ }
+
+ if (!playlist.endList && !tech.hasStarted_) {
+ return callback({
+ message: 'player must be playing a live stream to start buffering'
+ });
+ }
+
+ if (!verifyProgramDateTimeTags(playlist)) {
+ return callback({
+ message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
+ });
+ }
+
+ var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match
+
+ if (!matchedSegment) {
+ return callback({
+ message: programTime + " was not found in the stream"
+ });
+ }
+
+ var segment = matchedSegment.segment;
+ var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
+
+ if (matchedSegment.type === 'estimate') {
+ // we've run out of retries
+ if (retryCount === 0) {
+ return callback({
+ message: programTime + " is not buffered yet. Try again"
+ });
+ }
+
+ seekTo(matchedSegment.estimatedStart + mediaOffset);
+ tech.one('seeked', function () {
+ seekToProgramTime({
+ programTime: programTime,
+ playlist: playlist,
+ retryCount: retryCount - 1,
+ seekTo: seekTo,
+ pauseAfterSeek: pauseAfterSeek,
+ tech: tech,
+ callback: callback
+ });
+ });
+ return;
+ } // Since the segment.start value is determined from the buffered end or ending time
+ // of the prior segment, the seekToTime doesn't need to account for any transmuxer
+ // modifications.
+
+
+ var seekToTime = segment.start + mediaOffset;
+
+ var seekedCallback = function seekedCallback() {
+ return callback(null, tech.currentTime());
+ }; // listen for seeked event
+
+
+ tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state
+
+ if (pauseAfterSeek) {
+ tech.pause();
+ }
+
+ seekTo(seekToTime);
+};
+
+// Calls the callback only once the request readyState is DONE (4),
+// which will only happen if the request is complete.
+var callbackOnCompleted = function callbackOnCompleted(request, cb) {
+ if (request.readyState === 4) {
+ return cb();
+ }
+
+ return;
+};
+
+var containerRequest = function containerRequest(uri, xhr, cb) {
+ var bytes = [];
+ var id3Offset;
+ var finished = false;
+
+ var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
+ req.abort();
+ finished = true;
+ return cb(err, req, type, _bytes);
+ };
+
+ var progressListener = function progressListener(error, request) {
+ if (finished) {
+ return;
+ }
+
+ if (error) {
+ return endRequestAndCallback(error, request, '', bytes);
+    } // grab the new part of content that was just downloaded
+
+
+ var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes
+
+ bytes = byteHelpers.concatTypedArrays(bytes, byteHelpers.stringToBytes(newPart, true));
+ id3Offset = id3Offset || id3Helpers.getId3Offset(bytes); // we need at least 10 bytes to determine a type
+ // or we need at least two bytes after an id3Offset
+
+ if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
+ return callbackOnCompleted(request, function () {
+ return endRequestAndCallback(error, request, '', bytes);
+ });
+ }
+
+ var type = containers.detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
+ // to see the second sync byte, wait until we have enough data
+ // before declaring it ts
+
+ if (type === 'ts' && bytes.length < 188) {
+ return callbackOnCompleted(request, function () {
+ return endRequestAndCallback(error, request, '', bytes);
+ });
+ } // this may be an unsynced ts segment
+ // wait for 376 bytes before detecting no container
+
+
+ if (!type && bytes.length < 376) {
+ return callbackOnCompleted(request, function () {
+ return endRequestAndCallback(error, request, '', bytes);
+ });
+ }
+
+ return endRequestAndCallback(null, request, type, bytes);
+ };
+
+ var options = {
+ uri: uri,
+ beforeSend: function beforeSend(request) {
+ // this forces the browser to pass the bytes to us unprocessed
+ request.overrideMimeType('text/plain; charset=x-user-defined');
+ request.addEventListener('progress', function (_ref) {
+ _ref.total;
+ _ref.loaded;
+ return callbackWrapper(request, null, {
+ statusCode: request.status
+ }, progressListener);
+ });
+ }
+ };
+ var request = xhr(options, function (error, response) {
+ return callbackWrapper(request, error, response, progressListener);
+ });
+ return request;
+};
+
+var EventTarget = videojs.EventTarget,
+ mergeOptions = videojs.mergeOptions;
+
+var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
+ if (!isPlaylistUnchanged(a, b)) {
+ return false;
+  } // For DASH the above check will often return true in scenarios where the playlist
+  // actually has changed, because mediaSequence isn't a DASH concept and we often set
+  // it to 1, so playlists with the same number of segments pass that check.
+  // For DASH we therefore need to make sure that the underlying segments are different.
+  // If the sidx changed then the playlists are different.
+
+
+ if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
+ return false;
+ } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
+ return false;
+ } // one or the other does not have segments
+ // there was a change.
+
+
+ if (a.segments && !b.segments || !a.segments && b.segments) {
+ return false;
+ } // neither has segments nothing changed
+
+
+ if (!a.segments && !b.segments) {
+ return true;
+ } // check segments themselves
+
+
+ for (var i = 0; i < a.segments.length; i++) {
+ var aSegment = a.segments[i];
+ var bSegment = b.segments[i]; // if uris are different between segments there was a change
+
+ if (aSegment.uri !== bSegment.uri) {
+ return false;
+ } // neither segment has a byterange, there will be no byterange change.
+
+
+ if (!aSegment.byterange && !bSegment.byterange) {
+ continue;
+ }
+
+ var aByterange = aSegment.byterange;
+ var bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.
+
+ if (aByterange && !bByterange || !aByterange && bByterange) {
+ return false;
+ } // if both segments have byterange with different offsets, there was a change.
+
+
+ if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
+ return false;
+ }
+ } // if everything was the same with segments, this is the same playlist.
+
+
+ return true;
+};
+/**
+ * Parses the master XML string and updates playlist URI references.
+ *
+ * @param {Object} config
+ * Object of arguments
+ * @param {string} config.masterXml
+ * The mpd XML
+ * @param {string} config.srcUrl
+ * The mpd URL
+ * @param {Date} config.clientOffset
+ * A time difference between server and client
+ * @param {Object} config.sidxMapping
+ * SIDX mappings for moof/mdat URIs and byte ranges
+ * @return {Object}
+ * The parsed mpd manifest object
+ */
+
+
+var parseMasterXml = function parseMasterXml(_ref) {
+ var masterXml = _ref.masterXml,
+ srcUrl = _ref.srcUrl,
+ clientOffset = _ref.clientOffset,
+ sidxMapping = _ref.sidxMapping,
+ previousManifest = _ref.previousManifest;
+ var manifest = mpdParser.parse(masterXml, {
+ manifestUri: srcUrl,
+ clientOffset: clientOffset,
+ sidxMapping: sidxMapping,
+ previousManifest: previousManifest
+ });
+ addPropertiesToMaster(manifest, srcUrl);
+ return manifest;
+};
+/**
+ * Returns a new master manifest that is the result of merging an updated master manifest
+ * into the original version.
+ *
+ * @param {Object} oldMaster
+ * The old parsed mpd object
+ * @param {Object} newMaster
+ * The updated parsed mpd object
+ * @return {Object}
+ * A new object representing the original master manifest with the updated media
+ * playlists merged in
+ */
+
+
+var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
+ var noChanges = true;
+ var update = mergeOptions(oldMaster, {
+ // These are top level properties that can be updated
+ duration: newMaster.duration,
+ minimumUpdatePeriod: newMaster.minimumUpdatePeriod,
+ timelineStarts: newMaster.timelineStarts
+ }); // First update the playlists in playlist list
+
+ for (var i = 0; i < newMaster.playlists.length; i++) {
+ var playlist = newMaster.playlists[i];
+
+ if (playlist.sidx) {
+ var sidxKey = mpdParser.generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info already
+
+ if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
+ mpdParser.addSidxSegmentsToPlaylist(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
+ }
+ }
+
+ var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);
+
+ if (playlistUpdate) {
+ update = playlistUpdate;
+ noChanges = false;
+ }
+ } // Then update media group playlists
+
+
+ forEachMediaGroup(newMaster, function (properties, type, group, label) {
+ if (properties.playlists && properties.playlists.length) {
+ var id = properties.playlists[0].id;
+
+ var _playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);
+
+ if (_playlistUpdate) {
+ update = _playlistUpdate; // update the playlist reference within media groups
+
+ update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
+ noChanges = false;
+ }
+ }
+ });
+
+ if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
+ noChanges = false;
+ }
+
+ if (noChanges) {
+ return null;
+ }
+
+ return update;
+}; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
+// If the SIDXs have maps, the two maps should match,
+// both `a` and `b` missing SIDXs is considered matching.
+// If `a` or `b` but not both have a map, they aren't matching.
+
+
+var equivalentSidx = function equivalentSidx(a, b) {
+ var neitherMap = Boolean(!a.map && !b.map);
+ var equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
+ return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
+}; // exported for testing
+
+
+var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
+ var newSidxMapping = {};
+
+ for (var id in playlists) {
+ var playlist = playlists[id];
+ var currentSidxInfo = playlist.sidx;
+
+ if (currentSidxInfo) {
+ var key = mpdParser.generateSidxKey(currentSidxInfo);
+
+ if (!oldSidxMapping[key]) {
+ break;
+ }
+
+ var savedSidxInfo = oldSidxMapping[key].sidxInfo;
+
+ if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
+ newSidxMapping[key] = oldSidxMapping[key];
+ }
+ }
+ }
+
+ return newSidxMapping;
+};
+/**
+ * A function that filters out changed items as they need to be requested separately.
+ *
+ * The method is exported for testing
+ *
+ * @param {Object} master the parsed mpd XML returned via mpd-parser
+ * @param {Object} oldSidxMapping the SIDX to compare against
+ */
+
+
+var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
+ var videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
+ var mediaGroupSidx = videoSidx;
+ forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
+ if (properties.playlists && properties.playlists.length) {
+ var playlists = properties.playlists;
+ mediaGroupSidx = mergeOptions(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
+ }
+ });
+ return mediaGroupSidx;
+};
+
+var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
+ _inheritsLoose__default['default'](DashPlaylistLoader, _EventTarget); // DashPlaylistLoader must accept either a src url or a playlist because subsequent
+ // playlist loader setups from media groups will expect to be able to pass a playlist
+ // (since there aren't external URLs to media playlists with DASH)
+
+
+ function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _EventTarget.call(this) || this;
+ _this.masterPlaylistLoader_ = masterPlaylistLoader || _assertThisInitialized__default['default'](_this);
+
+ if (!masterPlaylistLoader) {
+ _this.isMaster_ = true;
+ }
+
+ var _options = options,
+ _options$withCredenti = _options.withCredentials,
+ withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
+ _options$handleManife = _options.handleManifestRedirects,
+ handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
+ _this.vhs_ = vhs;
+ _this.withCredentials = withCredentials;
+ _this.handleManifestRedirects = handleManifestRedirects;
+
+ if (!srcUrlOrPlaylist) {
+ throw new Error('A non-empty playlist URL or object is required');
+ } // event naming?
+
+
+ _this.on('minimumUpdatePeriod', function () {
+ _this.refreshXml_();
+ }); // live playlist staleness timeout
+
+
+ _this.on('mediaupdatetimeout', function () {
+ _this.refreshMedia_(_this.media().id);
+ });
+
+ _this.state = 'HAVE_NOTHING';
+ _this.loadedPlaylists_ = {};
+ _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
+ // The masterPlaylistLoader will be created with a string
+
+ if (_this.isMaster_) {
+ _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
+ // once multi-period is refactored
+
+ _this.masterPlaylistLoader_.sidxMapping_ = {};
+ } else {
+ _this.childPlaylist_ = srcUrlOrPlaylist;
+ }
+
+ return _this;
+ }
+
+ var _proto = DashPlaylistLoader.prototype;
+
+ _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
+ // disposed
+ if (!this.request) {
+ return true;
+ } // pending request is cleared
+
+
+ this.request = null;
+
+ if (err) {
+ // use the provided error object or create one
+ // based on the request/response
+ this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
+ status: request.status,
+ message: 'DASH request error at URL: ' + request.uri,
+ response: request.response,
+ // MEDIA_ERR_NETWORK
+ code: 2
+ };
+
+ if (startingState) {
+ this.state = startingState;
+ }
+
+ this.trigger('error');
+ return true;
+ }
+ }
+ /**
+ * Verify that the container of the sidx segment can be parsed
+ * and if it can, get and parse that segment.
+ */
+ ;
+
+ _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
+ var _this2 = this;
+
+ var sidxKey = playlist.sidx && mpdParser.generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.
+
+ if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
+ // keep this function async
+ this.mediaRequest_ = window__default['default'].setTimeout(function () {
+ return cb(false);
+ }, 0);
+ return;
+ } // resolve the segment URL relative to the playlist
+
+
+ var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);
+
+ var fin = function fin(err, request) {
+ if (_this2.requestErrored_(err, request, startingState)) {
+ return;
+ }
+
+ var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
+ var sidx;
+
+ try {
+ sidx = parseSidx__default['default'](byteHelpers.toUint8(request.response).subarray(8));
+ } catch (e) {
+ // sidx parsing failed.
+ _this2.requestErrored_(e, request, startingState);
+
+ return;
+ }
+
+ sidxMapping[sidxKey] = {
+ sidxInfo: playlist.sidx,
+ sidx: sidx
+ };
+ mpdParser.addSidxSegmentsToPlaylist(playlist, sidx, playlist.sidx.resolvedUri);
+ return cb(true);
+ };
+
+ this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
+ if (err) {
+ return fin(err, request);
+ }
+
+ if (!container || container !== 'mp4') {
+ return fin({
+ status: request.status,
+ message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
+ // response is just bytes in this case
+ // but we really don't want to return that.
+ response: '',
+ playlist: playlist,
+ internal: true,
+ blacklistDuration: Infinity,
+ // MEDIA_ERR_NETWORK
+ code: 2
+ }, request);
+ } // if we already downloaded the sidx bytes in the container request, use them
+
+
+ var _playlist$sidx$bytera = playlist.sidx.byterange,
+ offset = _playlist$sidx$bytera.offset,
+ length = _playlist$sidx$bytera.length;
+
+ if (bytes.length >= length + offset) {
+ return fin(err, {
+ response: bytes.subarray(offset, offset + length),
+ status: request.status,
+ uri: request.uri
+ });
+ } // otherwise request sidx bytes
+
+
+ _this2.request = _this2.vhs_.xhr({
+ uri: uri,
+ responseType: 'arraybuffer',
+ headers: segmentXhrHeaders({
+ byterange: playlist.sidx.byterange
+ })
+ }, fin);
+ });
+ };
+
+ _proto.dispose = function dispose() {
+ this.trigger('dispose');
+ this.stopRequest();
+ this.loadedPlaylists_ = {};
+ window__default['default'].clearTimeout(this.minimumUpdatePeriodTimeout_);
+ window__default['default'].clearTimeout(this.mediaRequest_);
+ window__default['default'].clearTimeout(this.mediaUpdateTimeout);
+ this.mediaUpdateTimeout = null;
+ this.mediaRequest_ = null;
+ this.minimumUpdatePeriodTimeout_ = null;
+
+ if (this.masterPlaylistLoader_.createMupOnMedia_) {
+ this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
+ this.masterPlaylistLoader_.createMupOnMedia_ = null;
+ }
+
+ this.off();
+ };
+
+ _proto.hasPendingRequest = function hasPendingRequest() {
+ return this.request || this.mediaRequest_;
+ };
+
+ _proto.stopRequest = function stopRequest() {
+ if (this.request) {
+ var oldRequest = this.request;
+ this.request = null;
+ oldRequest.onreadystatechange = null;
+ oldRequest.abort();
+ }
+ };
+
+ _proto.media = function media(playlist) {
+ var _this3 = this; // getter
+
+
+ if (!playlist) {
+ return this.media_;
+ } // setter
+
+
+ if (this.state === 'HAVE_NOTHING') {
+ throw new Error('Cannot switch media playlist from ' + this.state);
+ }
+
+ var startingState = this.state; // find the playlist object if the target playlist has been specified by URI
+
+ if (typeof playlist === 'string') {
+ if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
+ throw new Error('Unknown playlist URI: ' + playlist);
+ }
+
+ playlist = this.masterPlaylistLoader_.master.playlists[playlist];
+ }
+
+ var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately
+
+ if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
+ this.state = 'HAVE_METADATA';
+ this.media_ = playlist; // trigger media change if the active media has been updated
+
+ if (mediaChange) {
+ this.trigger('mediachanging');
+ this.trigger('mediachange');
+ }
+
+ return;
+ } // switching to the active playlist is a no-op
+
+
+ if (!mediaChange) {
+ return;
+ } // switching from an already loaded playlist
+
+
+ if (this.media_) {
+ this.trigger('mediachanging');
+ }
+
+ this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
+ // everything is ready just continue to haveMetadata
+ _this3.haveMetadata({
+ startingState: startingState,
+ playlist: playlist
+ });
+ });
+ };
+
+ _proto.haveMetadata = function haveMetadata(_ref2) {
+ var startingState = _ref2.startingState,
+ playlist = _ref2.playlist;
+ this.state = 'HAVE_METADATA';
+ this.loadedPlaylists_[playlist.id] = playlist;
+ this.mediaRequest_ = null; // This will trigger loadedplaylist
+
+ this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
+ // to resolve setup of media groups
+
+ if (startingState === 'HAVE_MASTER') {
+ this.trigger('loadedmetadata');
+ } else {
+ // trigger media change if the active media has been updated
+ this.trigger('mediachange');
+ }
+ };
+
+ _proto.pause = function pause() {
+ if (this.masterPlaylistLoader_.createMupOnMedia_) {
+ this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
+ this.masterPlaylistLoader_.createMupOnMedia_ = null;
+ }
+
+ this.stopRequest();
+ window__default['default'].clearTimeout(this.mediaUpdateTimeout);
+ this.mediaUpdateTimeout = null;
+
+ if (this.isMaster_) {
+ window__default['default'].clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
+ this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
+ }
+
+ if (this.state === 'HAVE_NOTHING') {
+      // If we pause the loader before any data has been retrieved, it's as if we never
+ // started, so reset to an unstarted state.
+ this.started = false;
+ }
+ };
+
+ _proto.load = function load(isFinalRendition) {
+ var _this4 = this;
+
+ window__default['default'].clearTimeout(this.mediaUpdateTimeout);
+ this.mediaUpdateTimeout = null;
+ var media = this.media();
+
+ if (isFinalRendition) {
+ var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
+ this.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
+ return _this4.load();
+ }, delay);
+ return;
+ } // because the playlists are internal to the manifest, load should either load the
+ // main manifest, or do nothing but trigger an event
+
+
+ if (!this.started) {
+ this.start();
+ return;
+ }
+
+ if (media && !media.endList) {
+ // Check to see if this is the master loader and the MUP was cleared (this happens
+ // when the loader was paused). `media` should be set at this point since one is always
+ // set during `start()`.
+ if (this.isMaster_ && !this.minimumUpdatePeriodTimeout_) {
+ // Trigger minimumUpdatePeriod to refresh the master manifest
+ this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreated
+
+ this.updateMinimumUpdatePeriodTimeout_();
+ }
+
+ this.trigger('mediaupdatetimeout');
+ } else {
+ this.trigger('loadedplaylist');
+ }
+ };
+
+ _proto.start = function start() {
+ var _this5 = this;
+
+ this.started = true; // We don't need to request the master manifest again
+ // Call this asynchronously to match the xhr request behavior below
+
+ if (!this.isMaster_) {
+ this.mediaRequest_ = window__default['default'].setTimeout(function () {
+ return _this5.haveMaster_();
+ }, 0);
+ return;
+ }
+
+ this.requestMaster_(function (req, masterChanged) {
+ _this5.haveMaster_();
+
+ if (!_this5.hasPendingRequest() && !_this5.media_) {
+ _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
+ }
+ });
+ };
+
+ _proto.requestMaster_ = function requestMaster_(cb) {
+ var _this6 = this;
+
+ this.request = this.vhs_.xhr({
+ uri: this.masterPlaylistLoader_.srcUrl,
+ withCredentials: this.withCredentials
+ }, function (error, req) {
+ if (_this6.requestErrored_(error, req)) {
+ if (_this6.state === 'HAVE_NOTHING') {
+ _this6.started = false;
+ }
+
+ return;
+ }
+
+ var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
+ _this6.masterPlaylistLoader_.masterXml_ = req.responseText;
+
+ if (req.responseHeaders && req.responseHeaders.date) {
+ _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
+ } else {
+ _this6.masterLoaded_ = Date.now();
+ }
+
+ _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);
+
+ if (masterChanged) {
+ _this6.handleMaster_();
+
+ _this6.syncClientServerClock_(function () {
+ return cb(req, masterChanged);
+ });
+
+ return;
+ }
+
+ return cb(req, masterChanged);
+ });
+ }
+ /**
+ * Parses the master xml for UTCTiming node to sync the client clock to the server
+ * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
+ *
+ * @param {Function} done
+ * Function to call when clock sync has completed
+ */
+ ;
+
+ _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
+ var _this7 = this;
+
+ var utcTiming = mpdParser.parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
+ // server clock
+
+ if (utcTiming === null) {
+ this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
+ return done();
+ }
+
+ if (utcTiming.method === 'DIRECT') {
+ this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
+ return done();
+ }
+
+ this.request = this.vhs_.xhr({
+ uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
+ method: utcTiming.method,
+ withCredentials: this.withCredentials
+ }, function (error, req) {
+ // disposed
+ if (!_this7.request) {
+ return;
+ }
+
+ if (error) {
+ // sync request failed, fall back to using date header from mpd
+ // TODO: log warning
+ _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
+ return done();
+ }
+
+ var serverTime;
+
+ if (utcTiming.method === 'HEAD') {
+ if (!req.responseHeaders || !req.responseHeaders.date) {
+          // expected date header not present, fall back to using date header from mpd
+ // TODO: log warning
+ serverTime = _this7.masterLoaded_;
+ } else {
+ serverTime = Date.parse(req.responseHeaders.date);
+ }
+ } else {
+ serverTime = Date.parse(req.responseText);
+ }
+
+ _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
+ done();
+ });
+ };
+
+ _proto.haveMaster_ = function haveMaster_() {
+ this.state = 'HAVE_MASTER';
+
+ if (this.isMaster_) {
+ // We have the master playlist at this point, so
+ // trigger this to allow MasterPlaylistController
+ // to make an initial playlist selection
+ this.trigger('loadedplaylist');
+ } else if (!this.media_) {
+ // no media playlist was specifically selected so select
+ // the one the child playlist loader was created with
+ this.media(this.childPlaylist_);
+ }
+ };
+
+ _proto.handleMaster_ = function handleMaster_() {
+ // clear media request
+ this.mediaRequest_ = null;
+ var oldMaster = this.masterPlaylistLoader_.master;
+ var newMaster = parseMasterXml({
+ masterXml: this.masterPlaylistLoader_.masterXml_,
+ srcUrl: this.masterPlaylistLoader_.srcUrl,
+ clientOffset: this.masterPlaylistLoader_.clientOffset_,
+ sidxMapping: this.masterPlaylistLoader_.sidxMapping_,
+ previousManifest: oldMaster
+ }); // if we have an old master to compare the new master against
+
+ if (oldMaster) {
+ newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
+ } // only update master if we have a new master
+
+
+ this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
+ var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];
+
+ if (location && location !== this.masterPlaylistLoader_.srcUrl) {
+ this.masterPlaylistLoader_.srcUrl = location;
+ }
+
+ if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
+ this.updateMinimumUpdatePeriodTimeout_();
+ }
+
+ return Boolean(newMaster);
+ };
+
+ _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
+ var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
+ // a new one will be added if needed.
+
+ if (mpl.createMupOnMedia_) {
+ mpl.off('loadedmetadata', mpl.createMupOnMedia_);
+ mpl.createMupOnMedia_ = null;
+ } // clear any pending timeouts
+
+
+ if (mpl.minimumUpdatePeriodTimeout_) {
+ window__default['default'].clearTimeout(mpl.minimumUpdatePeriodTimeout_);
+ mpl.minimumUpdatePeriodTimeout_ = null;
+ }
+
+ var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
+ // MPD has no future validity, so a new one will need to be acquired when new
+ // media segments are to be made available. Thus, we use the target duration
+ // in this case
+
+ if (mup === 0) {
+ if (mpl.media()) {
+ mup = mpl.media().targetDuration * 1000;
+ } else {
+ mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
+ mpl.one('loadedmetadata', mpl.createMupOnMedia_);
+ }
+    } // If minimumUpdatePeriod is invalid or <= zero, which can happen when a live
+    // video becomes VOD, skip timeout creation.
+
+
+ if (typeof mup !== 'number' || mup <= 0) {
+ if (mup < 0) {
+ this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
+ }
+
+ return;
+ }
+
+ this.createMUPTimeout_(mup);
+ };
+
+ _proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
+ var mpl = this.masterPlaylistLoader_;
+ mpl.minimumUpdatePeriodTimeout_ = window__default['default'].setTimeout(function () {
+ mpl.minimumUpdatePeriodTimeout_ = null;
+ mpl.trigger('minimumUpdatePeriod');
+ mpl.createMUPTimeout_(mup);
+ }, mup);
+ }
+ /**
+ * Sends request to refresh the master xml and updates the parsed master manifest
+ */
+ ;
+
+ _proto.refreshXml_ = function refreshXml_() {
+ var _this8 = this;
+
+ this.requestMaster_(function (req, masterChanged) {
+ if (!masterChanged) {
+ return;
+ }
+
+ if (_this8.media_) {
+ _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
+ } // This will filter out updated sidx info from the mapping
+
+
+ _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);
+
+ _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
+ // TODO: do we need to reload the current playlist?
+ _this8.refreshMedia_(_this8.media().id);
+ });
+ });
+ }
+ /**
+ * Refreshes the media playlist by re-parsing the master xml and updating playlist
+ * references. If this is an alternate loader, the updated parsed manifest is retrieved
+ * from the master loader.
+ */
+ ;
+
+ _proto.refreshMedia_ = function refreshMedia_(mediaID) {
+ var _this9 = this;
+
+ if (!mediaID) {
+ throw new Error('refreshMedia_ must take a media id');
+ } // for master we have to reparse the master xml
+ // to re-create segments based on current timing values
+ // which may change media. We only skip updating master
+ // if this is the first time this.media_ is being set.
+ // as master was just parsed in that case.
+
+
+ if (this.media_ && this.isMaster_) {
+ this.handleMaster_();
+ }
+
+ var playlists = this.masterPlaylistLoader_.master.playlists;
+ var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
+
+ if (mediaChanged) {
+ this.media_ = playlists[mediaID];
+ } else {
+ this.trigger('playlistunchanged');
+ }
+
+ if (!this.mediaUpdateTimeout) {
+ var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
+ if (_this9.media().endList) {
+ return;
+ }
+
+ _this9.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
+ _this9.trigger('mediaupdatetimeout');
+
+ createMediaUpdateTimeout();
+ }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
+ };
+
+ createMediaUpdateTimeout();
+ }
+
+ this.trigger('loadedplaylist');
+ };
+
+ return DashPlaylistLoader;
+}(EventTarget);
+
+var Config = {
+ GOAL_BUFFER_LENGTH: 30,
+ MAX_GOAL_BUFFER_LENGTH: 60,
+ BACK_BUFFER_LENGTH: 30,
+ GOAL_BUFFER_LENGTH_RATE: 1,
+ // 0.5 MB/s
+ INITIAL_BANDWIDTH: 4194304,
+ // A fudge factor to apply to advertised playlist bitrates to account for
+  // temporary fluctuations in client bandwidth
+ BANDWIDTH_VARIANCE: 1.2,
+ // How much of the buffer must be filled before we consider upswitching
+ BUFFER_LOW_WATER_LINE: 0,
+ MAX_BUFFER_LOW_WATER_LINE: 30,
+ // TODO: Remove this when experimentalBufferBasedABR is removed
+ EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
+ BUFFER_LOW_WATER_LINE_RATE: 1,
+ // If the buffer is greater than the high water line, we won't switch down
+ BUFFER_HIGH_WATER_LINE: 30
+};
+
+var stringToArrayBuffer = function stringToArrayBuffer(string) {
+ var view = new Uint8Array(new ArrayBuffer(string.length));
+
+ for (var i = 0; i < string.length; i++) {
+ view[i] = string.charCodeAt(i);
+ }
+
+ return view.buffer;
+};
+/* global Blob, BlobBuilder, Worker */
+// unify worker interface
+
+
+var browserWorkerPolyFill = function browserWorkerPolyFill(workerObj) {
+ // node only supports on/off
+ workerObj.on = workerObj.addEventListener;
+ workerObj.off = workerObj.removeEventListener;
+ return workerObj;
+};
+
+var createObjectURL = function createObjectURL(str) {
+ try {
+ return URL.createObjectURL(new Blob([str], {
+ type: 'application/javascript'
+ }));
+ } catch (e) {
+ var blob = new BlobBuilder();
+ blob.append(str);
+ return URL.createObjectURL(blob.getBlob());
+ }
+};
+
+var factory = function factory(code) {
+ return function () {
+ var objectUrl = createObjectURL(code);
+ var worker = browserWorkerPolyFill(new Worker(objectUrl));
+ worker.objURL = objectUrl;
+ var terminate = worker.terminate;
+ worker.on = worker.addEventListener;
+ worker.off = worker.removeEventListener;
+
+ worker.terminate = function () {
+ URL.revokeObjectURL(objectUrl);
+ return terminate.call(this);
+ };
+
+ return worker;
+ };
+};
+
+var transform = function transform(code) {
+ return "var browserWorkerPolyFill = " + browserWorkerPolyFill.toString() + ";\n" + 'browserWorkerPolyFill(self);\n' + code;
+};
+
+var getWorkerString = function getWorkerString(fn) {
+ return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
+};
+/* rollup-plugin-worker-factory start for worker!/Users/bclifford/Code/vhs-release-test/src/transmuxer-worker.js */
+
+
+var workerCode$1 = transform(getWorkerString(function () {
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ *
+   * A lightweight readable stream implementation that handles event dispatching.
+ * Objects that inherit from streams should call init in their constructors.
+ */
+ var Stream = function Stream() {
+ this.init = function () {
+ var listeners = {};
+ /**
+ * Add a listener for a specified event type.
+ * @param type {string} the event name
+ * @param listener {function} the callback to be invoked when an event of
+ * the specified type occurs
+ */
+
+ this.on = function (type, listener) {
+ if (!listeners[type]) {
+ listeners[type] = [];
+ }
+
+ listeners[type] = listeners[type].concat(listener);
+ };
+ /**
+ * Remove a listener for a specified event type.
+ * @param type {string} the event name
+ * @param listener {function} a function previously registered for this
+ * type of event through `on`
+ */
+
+
+ this.off = function (type, listener) {
+ var index;
+
+ if (!listeners[type]) {
+ return false;
+ }
+
+ index = listeners[type].indexOf(listener);
+ listeners[type] = listeners[type].slice();
+ listeners[type].splice(index, 1);
+ return index > -1;
+ };
+ /**
+ * Trigger an event of the specified type on this stream. Any additional
+ * arguments to this function are passed as parameters to event listeners.
+ * @param type {string} the event name
+ */
+
+
+ this.trigger = function (type) {
+ var callbacks, i, length, args;
+ callbacks = listeners[type];
+
+ if (!callbacks) {
+ return;
+ } // Slicing the arguments on every invocation of this method
+ // can add a significant amount of overhead. Avoid the
+ // intermediate object creation for the common case of a
+ // single callback argument
+
+
+ if (arguments.length === 2) {
+ length = callbacks.length;
+
+ for (i = 0; i < length; ++i) {
+ callbacks[i].call(this, arguments[1]);
+ }
+ } else {
+ args = [];
+ i = arguments.length;
+
+ for (i = 1; i < arguments.length; ++i) {
+ args.push(arguments[i]);
+ }
+
+ length = callbacks.length;
+
+ for (i = 0; i < length; ++i) {
+ callbacks[i].apply(this, args);
+ }
+ }
+ };
+ /**
+ * Destroys the stream and cleans up.
+ */
+
+
+ this.dispose = function () {
+ listeners = {};
+ };
+ };
+ };
+ /**
+ * Forwards all `data` events on this stream to the destination stream. The
+ * destination stream should provide a method `push` to receive the data
+ * events as they arrive.
+ * @param destination {stream} the stream that will receive all `data` events
+ * @param autoFlush {boolean} if false, we will not call `flush` on the destination
+ * when the current stream emits a 'done' event
+ * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
+ */
+
+
+ Stream.prototype.pipe = function (destination) {
+ this.on('data', function (data) {
+ destination.push(data);
+ });
+ this.on('done', function (flushSource) {
+ destination.flush(flushSource);
+ });
+ this.on('partialdone', function (flushSource) {
+ destination.partialFlush(flushSource);
+ });
+ this.on('endedtimeline', function (flushSource) {
+ destination.endTimeline(flushSource);
+ });
+ this.on('reset', function (flushSource) {
+ destination.reset(flushSource);
+ });
+ return destination;
+ }; // Default stream functions that are expected to be overridden to perform
+ // actual work. These are provided by the prototype as a sort of no-op
+ // implementation so that we don't have to check for their existence in the
+ // `pipe` function above.
+
+
+ Stream.prototype.push = function (data) {
+ this.trigger('data', data);
+ };
+
+ Stream.prototype.flush = function (flushSource) {
+ this.trigger('done', flushSource);
+ };
+
+ Stream.prototype.partialFlush = function (flushSource) {
+ this.trigger('partialdone', flushSource);
+ };
+
+ Stream.prototype.endTimeline = function (flushSource) {
+ this.trigger('endedtimeline', flushSource);
+ };
+
+ Stream.prototype.reset = function (flushSource) {
+ this.trigger('reset', flushSource);
+ };
+
+ var stream = Stream;
+ var MAX_UINT32$1 = Math.pow(2, 32);
+
+ var getUint64$2 = function getUint64(uint8) {
+ var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
+ var value;
+
+ if (dv.getBigUint64) {
+ value = dv.getBigUint64(0);
+
+ if (value < Number.MAX_SAFE_INTEGER) {
+ return Number(value);
+ }
+
+ return value;
+ }
+
+ return dv.getUint32(0) * MAX_UINT32$1 + dv.getUint32(4);
+ };
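+  // For example, the 8 bytes [0, 0, 0, 0, 0, 0, 0x01, 0x00] decode to 256; values at or
+  // above Number.MAX_SAFE_INTEGER are returned as BigInt when DataView.getBigUint64 exists.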
+
+ var numbers = {
+ getUint64: getUint64$2,
+ MAX_UINT32: MAX_UINT32$1
+ };
+ var MAX_UINT32 = numbers.MAX_UINT32;
+ var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
+
+ (function () {
+ var i;
+ types = {
+ avc1: [],
+ // codingname
+ avcC: [],
+ btrt: [],
+ dinf: [],
+ dref: [],
+ esds: [],
+ ftyp: [],
+ hdlr: [],
+ mdat: [],
+ mdhd: [],
+ mdia: [],
+ mfhd: [],
+ minf: [],
+ moof: [],
+ moov: [],
+ mp4a: [],
+ // codingname
+ mvex: [],
+ mvhd: [],
+ pasp: [],
+ sdtp: [],
+ smhd: [],
+ stbl: [],
+ stco: [],
+ stsc: [],
+ stsd: [],
+ stsz: [],
+ stts: [],
+ styp: [],
+ tfdt: [],
+ tfhd: [],
+ traf: [],
+ trak: [],
+ trun: [],
+ trex: [],
+ tkhd: [],
+ vmhd: []
+    }; // In environments where Uint8Array is undefined (e.g., IE8), skip setup so that we
+    // don't throw an error
+
+ if (typeof Uint8Array === 'undefined') {
+ return;
+ }
+
+ for (i in types) {
+ if (types.hasOwnProperty(i)) {
+ types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
+ }
+ }
+
+ MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
+ AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
+ MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
+ VIDEO_HDLR = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x00, // pre_defined
+ 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
+ ]);
+ AUDIO_HDLR = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x00, // pre_defined
+ 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
+ ]);
+ HDLR_TYPES = {
+ video: VIDEO_HDLR,
+ audio: AUDIO_HDLR
+ };
+ DREF = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x01, // entry_count
+ 0x00, 0x00, 0x00, 0x0c, // entry_size
+ 0x75, 0x72, 0x6c, 0x20, // 'url' type
+ 0x00, // version 0
+ 0x00, 0x00, 0x01 // entry_flags
+ ]);
+ SMHD = new Uint8Array([0x00, // version
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, // balance, 0 means centered
+ 0x00, 0x00 // reserved
+ ]);
+ STCO = new Uint8Array([0x00, // version
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x00 // entry_count
+ ]);
+ STSC = STCO;
+ STSZ = new Uint8Array([0x00, // version
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x00, // sample_size
+ 0x00, 0x00, 0x00, 0x00 // sample_count
+ ]);
+ STTS = STCO;
+ VMHD = new Uint8Array([0x00, // version
+ 0x00, 0x00, 0x01, // flags
+ 0x00, 0x00, // graphicsmode
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
+ ]);
+ })();
+
+ box = function box(type) {
+ var payload = [],
+ size = 0,
+ i,
+ result,
+ view;
+
+ for (i = 1; i < arguments.length; i++) {
+ payload.push(arguments[i]);
+ }
+
+ i = payload.length; // calculate the total size we need to allocate
+
+ while (i--) {
+ size += payload[i].byteLength;
+ }
+
+ result = new Uint8Array(size + 8);
+ view = new DataView(result.buffer, result.byteOffset, result.byteLength);
+ view.setUint32(0, result.byteLength);
+ result.set(type, 4); // copy the payload into the result
+
+ for (i = 0, size = 8; i < payload.length; i++) {
+ result.set(payload[i], size);
+ size += payload[i].byteLength;
+ }
+
+ return result;
+ };
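+  // For example, box(types.mdat, new Uint8Array([1, 2, 3])) returns an 11-byte
+  // Uint8Array: a 4-byte big-endian size (11), the 4-byte type 'mdat', then the payload.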
+
+ dinf = function dinf() {
+ return box(types.dinf, box(types.dref, DREF));
+ };
+
+ esds = function esds(track) {
+ return box(types.esds, new Uint8Array([0x00, // version
+ 0x00, 0x00, 0x00, // flags
+ // ES_Descriptor
+ 0x03, // tag, ES_DescrTag
+ 0x19, // length
+ 0x00, 0x00, // ES_ID
+ 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
+ // DecoderConfigDescriptor
+ 0x04, // tag, DecoderConfigDescrTag
+ 0x11, // length
+ 0x40, // object type
+ 0x15, // streamType
+ 0x00, 0x06, 0x00, // bufferSizeDB
+ 0x00, 0x00, 0xda, 0xc0, // maxBitrate
+ 0x00, 0x00, 0xda, 0xc0, // avgBitrate
+ // DecoderSpecificInfo
+ 0x05, // tag, DecoderSpecificInfoTag
+ 0x02, // length
+ // ISO/IEC 14496-3, AudioSpecificConfig
+ // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
+ track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
+ ]));
+ };
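+
+ // Worked example of the two AudioSpecificConfig bytes computed above, assuming
+ // AAC-LC stereo at 44.1 kHz (audioobjecttype 2, samplingfrequencyindex 4,
+ // channelcount 2):
+ //
+ //   byte 1: 2 << 3 | 4 >>> 1         === 0x12
+ //   byte 2: (4 << 7 | 2 << 3) & 0xFF === 0x10  (the Uint8Array truncates to 8 bits)
+ //
+ // 0x12 0x10 is the familiar AudioSpecificConfig for AAC-LC, 44.1 kHz, stereo.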
+
+ ftyp = function ftyp() {
+ return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
+ };
+
+ hdlr = function hdlr(type) {
+ return box(types.hdlr, HDLR_TYPES[type]);
+ };
+
+ mdat = function mdat(data) {
+ return box(types.mdat, data);
+ };
+
+ mdhd = function mdhd(track) {
+ var result = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x02, // creation_time
+ 0x00, 0x00, 0x00, 0x03, // modification_time
+ 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
+ track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
+ 0x55, 0xc4, // 'und' language (undetermined)
+ 0x00, 0x00]); // Use the sample rate from the track metadata, when it is
+ // defined. The sample rate can be parsed out of an ADTS header, for
+ // instance.
+
+ if (track.samplerate) {
+ result[12] = track.samplerate >>> 24 & 0xFF;
+ result[13] = track.samplerate >>> 16 & 0xFF;
+ result[14] = track.samplerate >>> 8 & 0xFF;
+ result[15] = track.samplerate & 0xFF;
+ }
+
+ return box(types.mdhd, result);
+ };
+
+ mdia = function mdia(track) {
+ return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
+ };
+
+ mfhd = function mfhd(sequenceNumber) {
+ return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
+ (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
+ ]));
+ };
+
+ minf = function minf(track) {
+ return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
+ };
+
+ moof = function moof(sequenceNumber, tracks) {
+ var trackFragments = [],
+ i = tracks.length; // build traf boxes for each track fragment
+
+ while (i--) {
+ trackFragments[i] = traf(tracks[i]);
+ }
+
+ return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
+ };
+ /**
+ * Returns a movie box.
+ * @param tracks {array} the tracks associated with this movie
+ * @see ISO/IEC 14496-12:2012(E), section 8.2.1
+ */
+
+
+ moov = function moov(tracks) {
+ var i = tracks.length,
+ boxes = [];
+
+ while (i--) {
+ boxes[i] = trak(tracks[i]);
+ }
+
+ return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
+ };
+
+ mvex = function mvex(tracks) {
+ var i = tracks.length,
+ boxes = [];
+
+ while (i--) {
+ boxes[i] = trex(tracks[i]);
+ }
+
+ return box.apply(null, [types.mvex].concat(boxes));
+ };
+
+ mvhd = function mvhd(duration) {
+ var bytes = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x01, // creation_time
+ 0x00, 0x00, 0x00, 0x02, // modification_time
+ 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
+ (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
+ 0x00, 0x01, 0x00, 0x00, // 1.0 rate
+ 0x01, 0x00, // 1.0 volume
+ 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
+ 0xff, 0xff, 0xff, 0xff // next_track_ID
+ ]);
+ return box(types.mvhd, bytes);
+ };
+
+ sdtp = function sdtp(track) {
+ var samples = track.samples || [],
+ bytes = new Uint8Array(4 + samples.length),
+ flags,
+ i; // leave the full box header (4 bytes) all zero
+ // write the sample table
+
+ for (i = 0; i < samples.length; i++) {
+ flags = samples[i].flags;
+ bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
+ }
+
+ return box(types.sdtp, bytes);
+ };
+
+ stbl = function stbl(track) {
+ return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
+ };
+
+ (function () {
+ var videoSample, audioSample;
+
+ stsd = function stsd(track) {
+ return box(types.stsd, new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
+ };
+
+ videoSample = function videoSample(track) {
+ var sps = track.sps || [],
+ pps = track.pps || [],
+ sequenceParameterSets = [],
+ pictureParameterSets = [],
+ i,
+ avc1Box; // assemble the SPSs
+
+ for (i = 0; i < sps.length; i++) {
+ sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
+ sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
+
+ sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
+ } // assemble the PPSs
+
+
+ for (i = 0; i < pps.length; i++) {
+ pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
+ pictureParameterSets.push(pps[i].byteLength & 0xFF);
+ pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
+ }
+
+ avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x01, // data_reference_index
+ 0x00, 0x00, // pre_defined
+ 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
+ (track.width & 0xff00) >> 8, track.width & 0xff, // width
+ (track.height & 0xff00) >> 8, track.height & 0xff, // height
+ 0x00, 0x48, 0x00, 0x00, // horizresolution
+ 0x00, 0x48, 0x00, 0x00, // vertresolution
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x01, // frame_count
+ 0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
+ 0x00, 0x18, // depth = 24
+ 0x11, 0x11 // pre_defined = -1
+ ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
+ track.profileIdc, // AVCProfileIndication
+ track.profileCompatibility, // profile_compatibility
+ track.levelIdc, // AVCLevelIndication
+ 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
+ ].concat([sps.length], // numOfSequenceParameterSets
+ sequenceParameterSets, // "SPS"
+ [pps.length], // numOfPictureParameterSets
+ pictureParameterSets // "PPS"
+ ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
+ 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
+ 0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
+ ]))];
+
+ if (track.sarRatio) {
+ var hSpacing = track.sarRatio[0],
+ vSpacing = track.sarRatio[1];
+ avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
+ }
+
+ return box.apply(null, avc1Box);
+ };
+
+ audioSample = function audioSample(track) {
+ return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x01, // data_reference_index
+ // AudioSampleEntry, ISO/IEC 14496-12
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
+ (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
+ 0x00, 0x00, // pre_defined
+ 0x00, 0x00, // reserved
+ (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
+ // MP4AudioSampleEntry, ISO/IEC 14496-14
+ ]), esds(track));
+ };
+ })();
+
+ tkhd = function tkhd(track) {
+ var result = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x07, // flags
+ 0x00, 0x00, 0x00, 0x00, // creation_time
+ 0x00, 0x00, 0x00, 0x00, // modification_time
+ (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, // layer
+ 0x00, 0x00, // alternate_group
+ 0x01, 0x00, // non-audio track volume
+ 0x00, 0x00, // reserved
+ 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
+ (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
+ (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
+ ]);
+ return box(types.tkhd, result);
+ };
+ /**
+ * Generate a track fragment (traf) box. A traf box collects metadata
+ * about tracks in a movie fragment (moof) box.
+ */
+
+
+ traf = function traf(track) {
+ var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
+ trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x3a, // flags
+ (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
+ 0x00, 0x00, 0x00, 0x01, // sample_description_index
+ 0x00, 0x00, 0x00, 0x00, // default_sample_duration
+ 0x00, 0x00, 0x00, 0x00, // default_sample_size
+ 0x00, 0x00, 0x00, 0x00 // default_sample_flags
+ ]));
+ upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / MAX_UINT32);
+ lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % MAX_UINT32);
+ trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
+ 0x00, 0x00, 0x00, // flags
+ // baseMediaDecodeTime
+ upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
+ // the containing moof to the first payload byte of the associated
+ // mdat
+
+ dataOffset = 32 + // tfhd
+ 20 + // tfdt
+ 8 + // traf header
+ 16 + // mfhd
+ 8 + // moof header
+ 8; // mdat header
+ // audio tracks require less metadata
+
+ if (track.type === 'audio') {
+ trackFragmentRun = trun$1(track, dataOffset);
+ return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
+ } // video tracks should contain an independent and disposable samples
+ // box (sdtp)
+ // generate one and adjust offsets to match
+
+
+ sampleDependencyTable = sdtp(track);
+ trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
+ return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
+ };
+ /**
+ * Generate a track box.
+ * @param track {object} a track definition
+ * @return {Uint8Array} the track box
+ */
+
+
+ trak = function trak(track) {
+ track.duration = track.duration || 0xffffffff;
+ return box(types.trak, tkhd(track), mdia(track));
+ };
+
+ trex = function trex(track) {
+ var result = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
+ 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
+ 0x00, 0x00, 0x00, 0x00, // default_sample_duration
+ 0x00, 0x00, 0x00, 0x00, // default_sample_size
+ 0x00, 0x01, 0x00, 0x01 // default_sample_flags
+ ]); // the last two bytes of default_sample_flags are the sample
+ // degradation priority, a hint about the importance of this sample
+ // relative to others. Lower the degradation priority for all sample
+ // types other than video.
+
+ if (track.type !== 'video') {
+ result[result.length - 1] = 0x00;
+ }
+
+ return box(types.trex, result);
+ };
+
+ (function () {
+ var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
+ // duration is present for the first sample, it will be present for
+ // all subsequent samples.
+ // see ISO/IEC 14496-12:2012, Section 8.8.8.1
+
+ trunHeader = function trunHeader(samples, offset) {
+ var durationPresent = 0,
+ sizePresent = 0,
+ flagsPresent = 0,
+ compositionTimeOffset = 0; // trun flag constants
+
+ if (samples.length) {
+ if (samples[0].duration !== undefined) {
+ durationPresent = 0x1;
+ }
+
+ if (samples[0].size !== undefined) {
+ sizePresent = 0x2;
+ }
+
+ if (samples[0].flags !== undefined) {
+ flagsPresent = 0x4;
+ }
+
+ if (samples[0].compositionTimeOffset !== undefined) {
+ compositionTimeOffset = 0x8;
+ }
+ }
+
+ return [0x00, // version 0
+ 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
+ (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
+ (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
+ ];
+ };
+
+ videoTrun = function videoTrun(track, offset) {
+ var bytesOffest, bytes, header, samples, sample, i;
+ samples = track.samples || [];
+ offset += 8 + 12 + 16 * samples.length;
+ header = trunHeader(samples, offset);
+ bytes = new Uint8Array(header.length + samples.length * 16);
+ bytes.set(header);
+ bytesOffest = header.length;
+
+ for (i = 0; i < samples.length; i++) {
+ sample = samples[i];
+ bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
+ bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
+ bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
+ bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration
+
+ bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
+ bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
+ bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
+ bytes[bytesOffest++] = sample.size & 0xFF; // sample_size
+
+ bytes[bytesOffest++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
+ bytes[bytesOffest++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
+ bytes[bytesOffest++] = sample.flags.degradationPriority & 0xF0 << 8;
+ bytes[bytesOffest++] = sample.flags.degradationPriority & 0x0F; // sample_flags
+
+ bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
+ bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
+ bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
+ bytes[bytesOffest++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
+ }
+
+ return box(types.trun, bytes);
+ };
+
+ audioTrun = function audioTrun(track, offset) {
+ var bytes, bytesOffest, header, samples, sample, i;
+ samples = track.samples || [];
+ offset += 8 + 12 + 8 * samples.length;
+ header = trunHeader(samples, offset);
+ bytes = new Uint8Array(header.length + samples.length * 8);
+ bytes.set(header);
+ bytesOffest = header.length;
+
+ for (i = 0; i < samples.length; i++) {
+ sample = samples[i];
+ bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
+ bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
+ bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
+ bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration
+
+ bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
+ bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
+ bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
+ bytes[bytesOffest++] = sample.size & 0xFF; // sample_size
+ }
+
+ return box(types.trun, bytes);
+ };
+
+ trun$1 = function trun(track, offset) {
+ if (track.type === 'audio') {
+ return audioTrun(track, offset);
+ }
+
+ return videoTrun(track, offset);
+ };
+ })();
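+
+ // Illustrative sketch of the audio trun produced above (hypothetical sample
+ // values, not executed): each audio sample contributes 8 bytes (sample_duration
+ // and sample_size) after the 12-byte trun header, and the incoming data offset
+ // is bumped by the full trun box size so it points at the first mdat payload byte.
+ //
+ //   var audioTrack = {
+ //     type: 'audio',
+ //     samples: [{ duration: 1024, size: 371 }, { duration: 1024, size: 372 }]
+ //   };
+ //   var trunBox = trun$1(audioTrack, 0);
+ //   // trunBox.byteLength === 8 (box header) + 12 (trun header) + 2 * 8 === 36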
+
+ var mp4Generator = {
+ ftyp: ftyp,
+ mdat: mdat,
+ moof: moof,
+ moov: moov,
+ initSegment: function initSegment(tracks) {
+ var fileType = ftyp(),
+ movie = moov(tracks),
+ result;
+ result = new Uint8Array(fileType.byteLength + movie.byteLength);
+ result.set(fileType);
+ result.set(movie, fileType.byteLength);
+ return result;
+ }
+ };
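+
+ // Sketch of how these generators are typically combined downstream (names such
+ // as videoTrack, sequenceNumber and sampleData are placeholders): an init
+ // segment is ftyp + moov, and each media segment is a moof box immediately
+ // followed by an mdat box holding the raw sample bytes it describes.
+ //
+ //   var init = mp4Generator.initSegment([videoTrack]);       // ftyp + moov
+ //   var moofBox = mp4Generator.moof(sequenceNumber, [videoTrack]);
+ //   var mdatBox = mp4Generator.mdat(sampleData);             // sampleData: Uint8Array
+ //   // the appendable fragment is moofBox concatenated with mdatBox
+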
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+ // Convert an array of nal units into an array of frames with each frame being
+ // composed of the nal units that make up that frame
+ // Also keep track of cumulative data about the frame from the nal units such
+ // as the frame duration, starting pts, etc.
+
+ var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
+ var i,
+ currentNal,
+ currentFrame = [],
+ frames = []; // TODO added for LHLS, make sure this is OK
+
+ frames.byteLength = 0;
+ frames.nalCount = 0;
+ frames.duration = 0;
+ currentFrame.byteLength = 0;
+
+ for (i = 0; i < nalUnits.length; i++) {
+ currentNal = nalUnits[i]; // Split on 'aud'-type nal units
+
+ if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
+ // Since the very first nal unit is expected to be an AUD
+ // only push to the frames array when currentFrame is not empty
+ if (currentFrame.length) {
+ currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
+
+ frames.byteLength += currentFrame.byteLength;
+ frames.nalCount += currentFrame.length;
+ frames.duration += currentFrame.duration;
+ frames.push(currentFrame);
+ }
+
+ currentFrame = [currentNal];
+ currentFrame.byteLength = currentNal.data.byteLength;
+ currentFrame.pts = currentNal.pts;
+ currentFrame.dts = currentNal.dts;
+ } else {
+ // Specifically flag key frames for ease of use later
+ if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
+ currentFrame.keyFrame = true;
+ }
+
+ currentFrame.duration = currentNal.dts - currentFrame.dts;
+ currentFrame.byteLength += currentNal.data.byteLength;
+ currentFrame.push(currentNal);
+ }
+ } // For the last frame, use the duration of the previous frame if we
+ // have nothing better to go on
+
+
+ if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
+ currentFrame.duration = frames[frames.length - 1].duration;
+ } // Push the final frame
+ // TODO added for LHLS, make sure this is OK
+
+
+ frames.byteLength += currentFrame.byteLength;
+ frames.nalCount += currentFrame.length;
+ frames.duration += currentFrame.duration;
+ frames.push(currentFrame);
+ return frames;
+ }; // Convert an array of frames into an array of Gop with each Gop being composed
+ // of the frames that make up that Gop
+ // Also keep track of cumulative data about the Gop from the frames such as the
+ // Gop duration, starting pts, etc.
+
+
+ var groupFramesIntoGops = function groupFramesIntoGops(frames) {
+ var i,
+ currentFrame,
+ currentGop = [],
+ gops = []; // We must pre-set some of the values on the Gop since we
+ // keep running totals of these values
+
+ currentGop.byteLength = 0;
+ currentGop.nalCount = 0;
+ currentGop.duration = 0;
+ currentGop.pts = frames[0].pts;
+ currentGop.dts = frames[0].dts; // store some metadata about all the Gops
+
+ gops.byteLength = 0;
+ gops.nalCount = 0;
+ gops.duration = 0;
+ gops.pts = frames[0].pts;
+ gops.dts = frames[0].dts;
+
+ for (i = 0; i < frames.length; i++) {
+ currentFrame = frames[i];
+
+ if (currentFrame.keyFrame) {
+ // Since the very first frame is expected to be a keyframe,
+ // only push to the gops array when currentGop is not empty
+ if (currentGop.length) {
+ gops.push(currentGop);
+ gops.byteLength += currentGop.byteLength;
+ gops.nalCount += currentGop.nalCount;
+ gops.duration += currentGop.duration;
+ }
+
+ currentGop = [currentFrame];
+ currentGop.nalCount = currentFrame.length;
+ currentGop.byteLength = currentFrame.byteLength;
+ currentGop.pts = currentFrame.pts;
+ currentGop.dts = currentFrame.dts;
+ currentGop.duration = currentFrame.duration;
+ } else {
+ currentGop.duration += currentFrame.duration;
+ currentGop.nalCount += currentFrame.length;
+ currentGop.byteLength += currentFrame.byteLength;
+ currentGop.push(currentFrame);
+ }
+ }
+
+ if (gops.length && currentGop.duration <= 0) {
+ currentGop.duration = gops[gops.length - 1].duration;
+ }
+
+ gops.byteLength += currentGop.byteLength;
+ gops.nalCount += currentGop.nalCount;
+ gops.duration += currentGop.duration; // push the final Gop
+
+ gops.push(currentGop);
+ return gops;
+ };
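+
+ // Illustrative pipeline for the two grouping helpers above (annotatedNalUnits
+ // stands for the NAL unit objects produced by the H.264 parsing elsewhere in
+ // this bundle): NAL units are split into frames at access unit delimiters, and
+ // frames are split into GOPs at keyframes, with running byteLength, nalCount
+ // and duration totals kept on each resulting array.
+ //
+ //   var frames = groupNalsIntoFrames(annotatedNalUnits);
+ //   var gops = groupFramesIntoGops(frames);
+ //   // gops.byteLength, gops.nalCount and gops.duration summarize the whole group
+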
+ /*
+ * Search for the first keyframe in the GOPs and throw away all frames
+ * until that keyframe. Then extend the duration of the pulled keyframe
+ * and pull the PTS and DTS of the keyframe so that it covers the time
+ * range of the frames that were disposed.
+ *
+ * @param {Array} gops video GOPs
+ * @returns {Array} modified video GOPs
+ */
+
+
+ var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
+ var currentGop;
+
+ if (!gops[0][0].keyFrame && gops.length > 1) {
+ // Remove the first GOP
+ currentGop = gops.shift();
+ gops.byteLength -= currentGop.byteLength;
+ gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
+ // first gop to cover the time period of the
+ // frames we just removed
+
+ gops[0][0].dts = currentGop.dts;
+ gops[0][0].pts = currentGop.pts;
+ gops[0][0].duration += currentGop.duration;
+ }
+
+ return gops;
+ };
+ /**
+ * Default sample object
+ * see ISO/IEC 14496-12:2012, section 8.6.4.3
+ */
+
+
+ var createDefaultSample = function createDefaultSample() {
+ return {
+ size: 0,
+ flags: {
+ isLeading: 0,
+ dependsOn: 1,
+ isDependedOn: 0,
+ hasRedundancy: 0,
+ degradationPriority: 0,
+ isNonSyncSample: 1
+ }
+ };
+ };
+ /*
+ * Collates information from a video frame into an object for eventual
+ * entry into an MP4 sample table.
+ *
+ * @param {Object} frame the video frame
+ * @param {Number} dataOffset the byte offset to position the sample
+ * @return {Object} object containing sample table info for a frame
+ */
+
+
+ var sampleForFrame = function sampleForFrame(frame, dataOffset) {
+ var sample = createDefaultSample();
+ sample.dataOffset = dataOffset;
+ sample.compositionTimeOffset = frame.pts - frame.dts;
+ sample.duration = frame.duration;
+ sample.size = 4 * frame.length; // Space for nal unit size
+
+ sample.size += frame.byteLength;
+
+ if (frame.keyFrame) {
+ sample.flags.dependsOn = 2;
+ sample.flags.isNonSyncSample = 0;
+ }
+
+ return sample;
+ }; // generate the track's sample table from an array of gops
+
+
+ var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
+ var h,
+ i,
+ sample,
+ currentGop,
+ currentFrame,
+ dataOffset = baseDataOffset || 0,
+ samples = [];
+
+ for (h = 0; h < gops.length; h++) {
+ currentGop = gops[h];
+
+ for (i = 0; i < currentGop.length; i++) {
+ currentFrame = currentGop[i];
+ sample = sampleForFrame(currentFrame, dataOffset);
+ dataOffset += sample.size;
+ samples.push(sample);
+ }
+ }
+
+ return samples;
+ }; // generate the track's raw mdat data from an array of gops
+
+
+ var concatenateNalData = function concatenateNalData(gops) {
+ var h,
+ i,
+ j,
+ currentGop,
+ currentFrame,
+ currentNal,
+ dataOffset = 0,
+ nalsByteLength = gops.byteLength,
+ numberOfNals = gops.nalCount,
+ totalByteLength = nalsByteLength + 4 * numberOfNals,
+ data = new Uint8Array(totalByteLength),
+ view = new DataView(data.buffer); // For each Gop..
+
+ for (h = 0; h < gops.length; h++) {
+ currentGop = gops[h]; // For each Frame..
+
+ for (i = 0; i < currentGop.length; i++) {
+ currentFrame = currentGop[i]; // For each NAL..
+
+ for (j = 0; j < currentFrame.length; j++) {
+ currentNal = currentFrame[j];
+ view.setUint32(dataOffset, currentNal.data.byteLength);
+ dataOffset += 4;
+ data.set(currentNal.data, dataOffset);
+ dataOffset += currentNal.data.byteLength;
+ }
+ }
+ }
+
+ return data;
+ }; // generate the track's sample table from a frame
+
+
+ var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
+ var sample,
+ dataOffset = baseDataOffset || 0,
+ samples = [];
+ sample = sampleForFrame(frame, dataOffset);
+ samples.push(sample);
+ return samples;
+ }; // generate the track's raw mdat data from a frame
+
+
+ var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
+ var i,
+ currentNal,
+ dataOffset = 0,
+ nalsByteLength = frame.byteLength,
+ numberOfNals = frame.length,
+ totalByteLength = nalsByteLength + 4 * numberOfNals,
+ data = new Uint8Array(totalByteLength),
+ view = new DataView(data.buffer); // For each NAL..
+
+ for (i = 0; i < frame.length; i++) {
+ currentNal = frame[i];
+ view.setUint32(dataOffset, currentNal.data.byteLength);
+ dataOffset += 4;
+ data.set(currentNal.data, dataOffset);
+ dataOffset += currentNal.data.byteLength;
+ }
+
+ return data;
+ };
+
+ var frameUtils = {
+ groupNalsIntoFrames: groupNalsIntoFrames,
+ groupFramesIntoGops: groupFramesIntoGops,
+ extendFirstKeyFrame: extendFirstKeyFrame,
+ generateSampleTable: generateSampleTable$1,
+ concatenateNalData: concatenateNalData,
+ generateSampleTableForFrame: generateSampleTableForFrame,
+ concatenateNalDataForFrame: concatenateNalDataForFrame
+ };
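+
+ // Illustrative follow-on to the grouping sketch above (gops is a placeholder
+ // for the GOP array built there): the sample table feeds the video trun box,
+ // and the concatenated NAL data becomes the mdat payload with every NAL unit
+ // prefixed by its 4-byte length.
+ //
+ //   var samples = frameUtils.generateSampleTable(gops);
+ //   var nalData = frameUtils.concatenateNalData(gops);
+ //   // nalData.byteLength === gops.byteLength + 4 * gops.nalCount
+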
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+ var highPrefix = [33, 16, 5, 32, 164, 27];
+ var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
+
+ var zeroFill = function zeroFill(count) {
+ var a = [];
+
+ while (count--) {
+ a.push(0);
+ }
+
+ return a;
+ };
+
+ var makeTable = function makeTable(metaTable) {
+ return Object.keys(metaTable).reduce(function (obj, key) {
+ obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
+ return arr.concat(part);
+ }, []));
+ return obj;
+ }, {});
+ };
+
+ var silence;
+
+ var silence_1 = function silence_1() {
+ if (!silence) {
+ // Frames-of-silence to use for filling in missing AAC frames
+ var coneOfSilence = {
+ 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
+ 88200: [highPrefix, [231], zeroFill(170), [56]],
+ 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
+ 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
+ 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
+ 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
+ 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
+ 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
+ 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
+ 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
+ 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
+ };
+ silence = makeTable(coneOfSilence);
+ }
+
+ return silence;
+ };
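+
+ // Usage sketch (illustrative): the table is keyed by sample rate, so a
+ // pre-rolled silent AAC frame for 44.1 kHz content is simply
+ //
+ //   var silentFrame = silence_1()[44100]; // Uint8Array holding one silent frame
+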
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+
+ var ONE_SECOND_IN_TS$4 = 90000,
+ // 90kHz clock
+ secondsToVideoTs,
+ secondsToAudioTs,
+ videoTsToSeconds,
+ audioTsToSeconds,
+ audioTsToVideoTs,
+ videoTsToAudioTs,
+ metadataTsToSeconds;
+
+ secondsToVideoTs = function secondsToVideoTs(seconds) {
+ return seconds * ONE_SECOND_IN_TS$4;
+ };
+
+ secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
+ return seconds * sampleRate;
+ };
+
+ videoTsToSeconds = function videoTsToSeconds(timestamp) {
+ return timestamp / ONE_SECOND_IN_TS$4;
+ };
+
+ audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
+ return timestamp / sampleRate;
+ };
+
+ audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
+ return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
+ };
+
+ videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
+ return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
+ };
+ /**
+ * Adjust ID3 tag or caption timing information by the timeline pts values
+ * (if keepOriginalTimestamps is false) and convert to seconds
+ */
+
+
+ metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
+ return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
+ };
+
+ var clock = {
+ ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
+ secondsToVideoTs: secondsToVideoTs,
+ secondsToAudioTs: secondsToAudioTs,
+ videoTsToSeconds: videoTsToSeconds,
+ audioTsToSeconds: audioTsToSeconds,
+ audioTsToVideoTs: audioTsToVideoTs,
+ videoTsToAudioTs: videoTsToAudioTs,
+ metadataTsToSeconds: metadataTsToSeconds
+ };
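+
+ // Worked examples for the clock helpers above (exact values): with the 90 kHz
+ // MPEG-TS clock, two seconds of video is 180000 ticks, and 90000 video ticks
+ // equal one second, i.e. 44100 samples on a 44.1 kHz audio clock.
+ //
+ //   clock.secondsToVideoTs(2);                       // 180000
+ //   clock.videoTsToAudioTs(90000, 44100);            // 44100
+ //   clock.metadataTsToSeconds(270000, 90000, false); // 2
+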
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+ /**
+ * Sum the `byteLength` properties of the data in each AAC frame
+ */
+
+ var sumFrameByteLengths = function sumFrameByteLengths(array) {
+ var i,
+ currentObj,
+ sum = 0; // sum the byteLength of each frame's data
+
+ for (i = 0; i < array.length; i++) {
+ currentObj = array[i];
+ sum += currentObj.data.byteLength;
+ }
+
+ return sum;
+ }; // Possibly pad (prefix) the audio track with silence if appending this track
+ // would lead to the introduction of a gap in the audio buffer
+
+
+ var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
+ var baseMediaDecodeTimeTs,
+ frameDuration = 0,
+ audioGapDuration = 0,
+ audioFillFrameCount = 0,
+ audioFillDuration = 0,
+ silentFrame,
+ i,
+ firstFrame;
+
+ if (!frames.length) {
+ return;
+ }
+
+ baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills
+
+ frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));
+
+ if (audioAppendStartTs && videoBaseMediaDecodeTime) {
+ // insert the shortest possible amount (audio gap or audio to video gap)
+ audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap
+
+ audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
+ audioFillDuration = audioFillFrameCount * frameDuration;
+ } // don't attempt to fill gaps smaller than a single frame or larger
+ // than a half second
+
+
+ if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
+ return;
+ }
+
+ silentFrame = silence_1()[track.samplerate];
+
+ if (!silentFrame) {
+ // we don't have a silent frame pregenerated for the sample rate, so use a frame
+ // from the content instead
+ silentFrame = frames[0].data;
+ }
+
+ for (i = 0; i < audioFillFrameCount; i++) {
+ firstFrame = frames[0];
+ frames.splice(0, 0, {
+ data: silentFrame,
+ dts: firstFrame.dts - frameDuration,
+ pts: firstFrame.pts - frameDuration
+ });
+ }
+
+ track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
+ return audioFillDuration;
+ }; // If the audio segment extends before the earliest allowed dts
+ // value, remove AAC frames until the segment starts at or after the
+ // earliest allowed DTS so that we don't end up with a negative
+ // baseMediaDecodeTime for the audio track
+
+
+ var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
+ if (track.minSegmentDts >= earliestAllowedDts) {
+ return adtsFrames;
+ } // We will need to recalculate the earliest segment Dts
+
+
+ track.minSegmentDts = Infinity;
+ return adtsFrames.filter(function (currentFrame) {
+ // If this is an allowed frame, keep it and record its DTS
+ if (currentFrame.dts >= earliestAllowedDts) {
+ track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
+ track.minSegmentPts = track.minSegmentDts;
+ return true;
+ } // Otherwise, discard it
+
+
+ return false;
+ });
+ }; // generate the track's sample table from an array of frames
+
+
+ var generateSampleTable = function generateSampleTable(frames) {
+ var i,
+ currentFrame,
+ samples = [];
+
+ for (i = 0; i < frames.length; i++) {
+ currentFrame = frames[i];
+ samples.push({
+ size: currentFrame.data.byteLength,
+ duration: 1024 // For AAC audio, each frame contains 1024 audio samples
+
+ });
+ }
+
+ return samples;
+ }; // generate the track's raw mdat data from an array of frames
+
+
+ var concatenateFrameData = function concatenateFrameData(frames) {
+ var i,
+ currentFrame,
+ dataOffset = 0,
+ data = new Uint8Array(sumFrameByteLengths(frames));
+
+ for (i = 0; i < frames.length; i++) {
+ currentFrame = frames[i];
+ data.set(currentFrame.data, dataOffset);
+ dataOffset += currentFrame.data.byteLength;
+ }
+
+ return data;
+ };
+
+ var audioFrameUtils = {
+ prefixWithSilence: prefixWithSilence,
+ trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
+ generateSampleTable: generateSampleTable,
+ concatenateFrameData: concatenateFrameData
+ };
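+
+ // Illustrative sketch of how these helpers fit together for an AAC track
+ // (frames, track, earliestDts, audioAppendStartTs and videoBaseMediaDecodeTime
+ // are placeholders): frames are trimmed to the earliest allowed DTS, optionally
+ // prefixed with silence to close a gap, then flattened into a sample table and
+ // the raw mdat payload.
+ //
+ //   frames = audioFrameUtils.trimAdtsFramesByEarliestDts(frames, track, earliestDts);
+ //   audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime);
+ //   var samples = audioFrameUtils.generateSampleTable(frames);
+ //   var data = audioFrameUtils.concatenateFrameData(frames);
+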
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+ var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;
+ /**
+ * Store information about the start and end of the track and the
+ * duration for each frame/sample we process in order to calculate
+ * the baseMediaDecodeTime
+ */
+
+ var collectDtsInfo = function collectDtsInfo(track, data) {
+ if (typeof data.pts === 'number') {
+ if (track.timelineStartInfo.pts === undefined) {
+ track.timelineStartInfo.pts = data.pts;
+ }
+
+ if (track.minSegmentPts === undefined) {
+ track.minSegmentPts = data.pts;
+ } else {
+ track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
+ }
+
+ if (track.maxSegmentPts === undefined) {
+ track.maxSegmentPts = data.pts;
+ } else {
+ track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
+ }
+ }
+
+ if (typeof data.dts === 'number') {
+ if (track.timelineStartInfo.dts === undefined) {
+ track.timelineStartInfo.dts = data.dts;
+ }
+
+ if (track.minSegmentDts === undefined) {
+ track.minSegmentDts = data.dts;
+ } else {
+ track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
+ }
+
+ if (track.maxSegmentDts === undefined) {
+ track.maxSegmentDts = data.dts;
+ } else {
+ track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
+ }
+ }
+ };
+ /**
+ * Clear values used to calculate the baseMediaDecodeTime between
+ * tracks
+ */
+
+
+ var clearDtsInfo = function clearDtsInfo(track) {
+ delete track.minSegmentDts;
+ delete track.maxSegmentDts;
+ delete track.minSegmentPts;
+ delete track.maxSegmentPts;
+ };
+ /**
+ * Calculate the track's baseMediaDecodeTime based on the earliest
+ * DTS the transmuxer has ever seen and the minimum DTS for the
+ * current track
+ * @param track {object} track metadata configuration
+ * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
+ * in the source; false to adjust the first segment to start at 0.
+ */
+
+
+ var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
+ var baseMediaDecodeTime,
+ scale,
+ minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
+
+ if (!keepOriginalTimestamps) {
+ minSegmentDts -= track.timelineStartInfo.dts;
+ } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
+ // we want the start of the first segment to be placed
+
+
+ baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
+
+ baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
+
+ baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
+
+ if (track.type === 'audio') {
+ // Audio has a different clock equal to the sampling_rate so we need to
+ // scale the PTS values into the clock rate of the track
+ scale = track.samplerate / ONE_SECOND_IN_TS$3;
+ baseMediaDecodeTime *= scale;
+ baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
+ }
+
+ return baseMediaDecodeTime;
+ };
+
+ var trackDecodeInfo = {
+ clearDtsInfo: clearDtsInfo,
+ calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
+ collectDtsInfo: collectDtsInfo
+ };
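+
+ // Worked example for calculateTrackBaseMediaDecodeTime with hypothetical audio
+ // track values: timelineStartInfo.dts = 90000, timelineStartInfo.baseMediaDecodeTime = 0,
+ // minSegmentDts = 180000, samplerate = 48000 and keepOriginalTimestamps = false.
+ //
+ //   180000 - 90000          === 90000  // one second past the timeline start
+ //   90000 * (48000 / 90000) === 48000  // rescaled into the audio clock
+ //   // so calculateTrackBaseMediaDecodeTime(track, false) returns 48000
+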
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ *
+ * Reads in-band caption information from a video elementary
+ * stream. Captions must follow the CEA-708 standard for injection
+ * into an MPEG-2 transport streams.
+ * @see https://en.wikipedia.org/wiki/CEA-708
+ * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
+ */
+ // Supplemental enhancement information (SEI) NAL units have a
+ // payload type field to indicate how they are to be
+ // interpreted. CEA-708 caption content is always transmitted with
+ // payload type 0x04.
+
+ var USER_DATA_REGISTERED_ITU_T_T35 = 4,
+ RBSP_TRAILING_BITS = 128;
+ /**
+ * Parse a supplemental enhancement information (SEI) NAL unit.
+ * Stops parsing once a message of type ITU T T35 has been found.
+ *
+ * @param bytes {Uint8Array} the bytes of a SEI NAL unit
+ * @return {object} the parsed SEI payload
+ * @see Rec. ITU-T H.264, 7.3.2.3.1
+ */
+
+ var parseSei = function parseSei(bytes) {
+ var i = 0,
+ result = {
+ payloadType: -1,
+ payloadSize: 0
+ },
+ payloadType = 0,
+ payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
+
+ while (i < bytes.byteLength) {
+ // stop once we have hit the end of the sei_rbsp
+ if (bytes[i] === RBSP_TRAILING_BITS) {
+ break;
+ } // Parse payload type
+
+
+ while (bytes[i] === 0xFF) {
+ payloadType += 255;
+ i++;
+ }
+
+ payloadType += bytes[i++]; // Parse payload size
+
+ while (bytes[i] === 0xFF) {
+ payloadSize += 255;
+ i++;
+ }
+
+ payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
+ // there can only ever be one caption message in a frame's sei
+
+ if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
+ var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
+
+ if (userIdentifier === 'GA94') {
+ result.payloadType = payloadType;
+ result.payloadSize = payloadSize;
+ result.payload = bytes.subarray(i, i + payloadSize);
+ break;
+ } else {
+ result.payload = void 0;
+ }
+ } // skip the payload and parse the next message
+
+
+ i += payloadSize;
+ payloadType = 0;
+ payloadSize = 0;
+ }
+
+ return result;
+ }; // see ANSI/SCTE 128-1 (2013), section 8.1
+
+
+ var parseUserData = function parseUserData(sei) {
+ // itu_t_t35_country_code must be 181 (United States) for
+ // captions
+ if (sei.payload[0] !== 181) {
+ return null;
+ } // itu_t_t35_provider_code should be 49 (ATSC) for captions
+
+
+ if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
+ return null;
+ } // the user_identifier should be "GA94" to indicate ATSC1 data
+
+
+ if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
+ return null;
+ } // finally, user_data_type_code should be 0x03 for caption data
+
+
+ if (sei.payload[7] !== 0x03) {
+ return null;
+ } // return the user_data_type_structure and strip the trailing
+ // marker bits
+
+
+ return sei.payload.subarray(8, sei.payload.length - 1);
+ }; // see CEA-708-D, section 4.4
+
+
+ var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
+ var results = [],
+ i,
+ count,
+ offset,
+ data; // if this is just filler, return immediately
+
+ if (!(userData[0] & 0x40)) {
+ return results;
+ } // parse out the cc_data_1 and cc_data_2 fields
+
+
+ count = userData[0] & 0x1f;
+
+ for (i = 0; i < count; i++) {
+ offset = i * 3;
+ data = {
+ type: userData[offset + 2] & 0x03,
+ pts: pts
+ }; // capture cc data when cc_valid is 1
+
+ if (userData[offset + 2] & 0x04) {
+ data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
+ results.push(data);
+ }
+ }
+
+ return results;
+ };
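+
+ // Sketch of the SEI-to-caption-packet pipeline defined above (seiNal stands
+ // for an annotated SEI NAL unit from the H.264 stream):
+ //
+ //   var sei = parseSei(seiNal.escapedRBSP);
+ //   var userData = sei.payload && parseUserData(sei);
+ //   var packets = userData ? parseCaptionPackets(seiNal.pts, userData) : [];
+ //   // each packet is { type, pts, ccData } where ccData is a CEA-608/708 byte pair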
+
+ var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
+ var length = data.byteLength,
+ emulationPreventionBytesPositions = [],
+ i = 1,
+ newLength,
+ newData; // Find all `Emulation Prevention Bytes`
+
+ while (i < length - 2) {
+ if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
+ emulationPreventionBytesPositions.push(i + 2);
+ i += 2;
+ } else {
+ i++;
+ }
+ } // If no Emulation Prevention Bytes were found just return the original
+ // array
+
+
+ if (emulationPreventionBytesPositions.length === 0) {
+ return data;
+ } // Create a new array to hold the NAL unit data
+
+
+ newLength = length - emulationPreventionBytesPositions.length;
+ newData = new Uint8Array(newLength);
+ var sourceIndex = 0;
+
+ for (i = 0; i < newLength; sourceIndex++, i++) {
+ if (sourceIndex === emulationPreventionBytesPositions[0]) {
+ // Skip this byte
+ sourceIndex++; // Remove this position index
+
+ emulationPreventionBytesPositions.shift();
+ }
+
+ newData[i] = data[sourceIndex];
+ }
+
+ return newData;
+ }; // exports
+
+
+ var captionPacketParser = {
+ parseSei: parseSei,
+ parseUserData: parseUserData,
+ parseCaptionPackets: parseCaptionPackets,
+ discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
+ USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
+ }; // Link To Transport
+ // -----------------
+
+ var CaptionStream$1 = function CaptionStream(options) {
+ options = options || {};
+ CaptionStream.prototype.init.call(this); // parse708captions flag, default to true
+
+ this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
+ this.captionPackets_ = [];
+ this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
+ new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
+ new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
+ new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
+ ];
+
+ if (this.parse708captions_) {
+ this.cc708Stream_ = new Cea708Stream({
+ captionServices: options.captionServices
+ }); // eslint-disable-line no-use-before-define
+ }
+
+ this.reset(); // forward data and done events from CCs to this CaptionStream
+
+ this.ccStreams_.forEach(function (cc) {
+ cc.on('data', this.trigger.bind(this, 'data'));
+ cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
+ cc.on('done', this.trigger.bind(this, 'done'));
+ }, this);
+
+ if (this.parse708captions_) {
+ this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
+ this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
+ this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
+ }
+ };
+
+ CaptionStream$1.prototype = new stream();
+
+ CaptionStream$1.prototype.push = function (event) {
+ var sei, userData, newCaptionPackets; // only examine SEI NALs
+
+ if (event.nalUnitType !== 'sei_rbsp') {
+ return;
+ } // parse the sei
+
+
+ sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip
+
+ if (!sei.payload) {
+ return;
+ } // ignore everything but user_data_registered_itu_t_t35
+
+
+ if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
+ return;
+ } // parse out the user data payload
+
+
+ userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData
+
+ if (!userData) {
+ return;
+ } // Sometimes, the same segment # will be downloaded twice. To stop the
+ // caption data from being processed twice, we track the latest dts we've
+ // received and ignore everything with a dts before that. However, since
+ // data for a specific dts can be split across packets on either side of
+ // a segment boundary, we need to make sure we *don't* ignore the packets
+ // from the *next* segment that have dts === this.latestDts_. By constantly
+ // tracking the number of packets received with dts === this.latestDts_, we
+ // know how many should be ignored once we start receiving duplicates.
+
+
+ if (event.dts < this.latestDts_) {
+ // We've started getting older data, so set the flag.
+ this.ignoreNextEqualDts_ = true;
+ return;
+ } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
+ this.numSameDts_--;
+
+ if (!this.numSameDts_) {
+ // We've received the last duplicate packet, time to start processing again
+ this.ignoreNextEqualDts_ = false;
+ }
+
+ return;
+ } // parse out CC data packets and save them for later
+
+
+ newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
+ this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
+
+ if (this.latestDts_ !== event.dts) {
+ this.numSameDts_ = 0;
+ }
+
+ this.numSameDts_++;
+ this.latestDts_ = event.dts;
+ };
+
+ CaptionStream$1.prototype.flushCCStreams = function (flushType) {
+ this.ccStreams_.forEach(function (cc) {
+ return flushType === 'flush' ? cc.flush() : cc.partialFlush();
+ }, this);
+ };
+
+ CaptionStream$1.prototype.flushStream = function (flushType) {
+ // make sure we actually parsed captions before proceeding
+ if (!this.captionPackets_.length) {
+ this.flushCCStreams(flushType);
+ return;
+ } // In Chrome, the Array#sort function is not stable so add a
+ // presortIndex that we can use to ensure we get a stable-sort
+
+
+ this.captionPackets_.forEach(function (elem, idx) {
+ elem.presortIndex = idx;
+ }); // sort caption byte-pairs based on their PTS values
+
+ this.captionPackets_.sort(function (a, b) {
+ if (a.pts === b.pts) {
+ return a.presortIndex - b.presortIndex;
+ }
+
+ return a.pts - b.pts;
+ });
+ this.captionPackets_.forEach(function (packet) {
+ if (packet.type < 2) {
+ // Dispatch packet to the right Cea608Stream
+ this.dispatchCea608Packet(packet);
+ } else {
+ // Dispatch packet to the Cea708Stream
+ this.dispatchCea708Packet(packet);
+ }
+ }, this);
+ this.captionPackets_.length = 0;
+ this.flushCCStreams(flushType);
+ };
+
+ CaptionStream$1.prototype.flush = function () {
+ return this.flushStream('flush');
+ }; // Only called if handling partial data
+
+
+ CaptionStream$1.prototype.partialFlush = function () {
+ return this.flushStream('partialFlush');
+ };
+
+ CaptionStream$1.prototype.reset = function () {
+ this.latestDts_ = null;
+ this.ignoreNextEqualDts_ = false;
+ this.numSameDts_ = 0;
+ this.activeCea608Channel_ = [null, null];
+ this.ccStreams_.forEach(function (ccStream) {
+ ccStream.reset();
+ });
+ }; // From the CEA-608 spec:
+
+ /*
+ * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
+ * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
+ * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
+ * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
+ * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
+ * to switch to captioning or Text.
+ */
+ // With that in mind, we ignore any data between an XDS control code and a
+ // subsequent closed-captioning control code.
+
+
+ CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
+ // NOTE: packet.type is the CEA608 field
+ if (this.setsTextOrXDSActive(packet)) {
+ this.activeCea608Channel_[packet.type] = null;
+ } else if (this.setsChannel1Active(packet)) {
+ this.activeCea608Channel_[packet.type] = 0;
+ } else if (this.setsChannel2Active(packet)) {
+ this.activeCea608Channel_[packet.type] = 1;
+ }
+
+ if (this.activeCea608Channel_[packet.type] === null) {
+ // If we haven't received anything to set the active channel, or the
+ // packets are Text/XDS data, discard the data; we don't want jumbled
+ // captions
+ return;
+ }
+
+ this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
+ };
+
+ CaptionStream$1.prototype.setsChannel1Active = function (packet) {
+ return (packet.ccData & 0x7800) === 0x1000;
+ };
+
+ CaptionStream$1.prototype.setsChannel2Active = function (packet) {
+ return (packet.ccData & 0x7800) === 0x1800;
+ };
+
+ CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
+ return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
+ };
+
+ CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
+ if (this.parse708captions_) {
+ this.cc708Stream_.push(packet);
+ }
+ }; // ----------------------
+ // Session to Application
+ // ----------------------
+ // This hash maps special and extended character codes to their
+ // proper Unicode equivalent. The first one-byte key is just a
+ // non-standard character code. The two-byte keys that follow are
+ // the extended CEA708 character codes, along with the preceding
+ // 0x10 extended character byte to distinguish these codes from
+ // non-extended character codes. Every CEA708 character code that
+ // is not in this object maps directly to a standard unicode
+ // character code.
+ // The transparent space and non-breaking transparent space are
+ // technically not fully supported since there is no code to
+ // make them transparent, so they have normal non-transparent
+ // stand-ins.
+ // The special closed caption (CC) character isn't a standard
+ // unicode character, so a fairly similar unicode character was
+ // chosen in its place.
+
+
+ var CHARACTER_TRANSLATION_708 = {
+ 0x7f: 0x266a,
+ // ♪
+ 0x1020: 0x20,
+ // Transparent Space
+ 0x1021: 0xa0,
+ // Non-breaking Transparent Space
+ 0x1025: 0x2026,
+ // …
+ 0x102a: 0x0160,
+ // Š
+ 0x102c: 0x0152,
+ // Œ
+ 0x1030: 0x2588,
+ // █
+ 0x1031: 0x2018,
+ // ‘
+ 0x1032: 0x2019,
+ // ’
+ 0x1033: 0x201c,
+ // “
+ 0x1034: 0x201d,
+ // ”
+ 0x1035: 0x2022,
+ // •
+ 0x1039: 0x2122,
+ // ™
+ 0x103a: 0x0161,
+ // š
+ 0x103c: 0x0153,
+ // œ
+ 0x103d: 0x2120,
+ // ℠
+ 0x103f: 0x0178,
+ // Ÿ
+ 0x1076: 0x215b,
+ // ⅛
+ 0x1077: 0x215c,
+ // ⅜
+ 0x1078: 0x215d,
+ // ⅝
+ 0x1079: 0x215e,
+ // ⅞
+ 0x107a: 0x23d0,
+ // ⏐
+ 0x107b: 0x23a4,
+ // ⎤
+ 0x107c: 0x23a3,
+ // ⎣
+ 0x107d: 0x23af,
+ // ⎯
+ 0x107e: 0x23a6,
+ // ⎦
+ 0x107f: 0x23a1,
+ // ⎡
+ 0x10a0: 0x3138 // ㄸ (CC char)
+
+ };
+
+ var get708CharFromCode = function get708CharFromCode(code) {
+ var newCode = CHARACTER_TRANSLATION_708[code] || code;
+
+ if (code & 0x1000 && code === newCode) {
+ // Invalid extended code
+ return '';
+ }
+
+ return String.fromCharCode(newCode);
+ };
+
+ var within708TextBlock = function within708TextBlock(b) {
+ return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
+ };
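+
+ // Illustrative results of the two translation helpers above:
+ //
+ //   get708CharFromCode(0x7f);   // '♪' (mapped through the table)
+ //   get708CharFromCode(0x41);   // 'A' (standard code, passes through)
+ //   get708CharFromCode(0x1041); // ''  (extended code with no mapping is dropped)
+ //   within708TextBlock(0x41);   // true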
+
+ var Cea708Window = function Cea708Window(windowNum) {
+ this.windowNum = windowNum;
+ this.reset();
+ };
+
+ Cea708Window.prototype.reset = function () {
+ this.clearText();
+ this.pendingNewLine = false;
+ this.winAttr = {};
+ this.penAttr = {};
+ this.penLoc = {};
+ this.penColor = {}; // These default values are arbitrary,
+ // defineWindow will usually override them
+
+ this.visible = 0;
+ this.rowLock = 0;
+ this.columnLock = 0;
+ this.priority = 0;
+ this.relativePositioning = 0;
+ this.anchorVertical = 0;
+ this.anchorHorizontal = 0;
+ this.anchorPoint = 0;
+ this.rowCount = 1;
+ this.virtualRowCount = this.rowCount + 1;
+ this.columnCount = 41;
+ this.windowStyle = 0;
+ this.penStyle = 0;
+ };
+
+ Cea708Window.prototype.getText = function () {
+ return this.rows.join('\n');
+ };
+
+ Cea708Window.prototype.clearText = function () {
+ this.rows = [''];
+ this.rowIdx = 0;
+ };
+
+ Cea708Window.prototype.newLine = function (pts) {
+ if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
+ this.beforeRowOverflow(pts);
+ }
+
+ if (this.rows.length > 0) {
+ this.rows.push('');
+ this.rowIdx++;
+ } // Show all virtual rows since there's no visible scrolling
+
+
+ while (this.rows.length > this.virtualRowCount) {
+ this.rows.shift();
+ this.rowIdx--;
+ }
+ };
+
+ Cea708Window.prototype.isEmpty = function () {
+ if (this.rows.length === 0) {
+ return true;
+ } else if (this.rows.length === 1) {
+ return this.rows[0] === '';
+ }
+
+ return false;
+ };
+
+ Cea708Window.prototype.addText = function (text) {
+ this.rows[this.rowIdx] += text;
+ };
+
+ Cea708Window.prototype.backspace = function () {
+ if (!this.isEmpty()) {
+ var row = this.rows[this.rowIdx];
+ this.rows[this.rowIdx] = row.substr(0, row.length - 1);
+ }
+ };
+
+ var Cea708Service = function Cea708Service(serviceNum, encoding, stream) {
+ this.serviceNum = serviceNum;
+ this.text = '';
+ this.currentWindow = new Cea708Window(-1);
+ this.windows = [];
+ this.stream = stream; // Try to setup a TextDecoder if an `encoding` value was provided
+
+ if (typeof encoding === 'string') {
+ this.createTextDecoder(encoding);
+ }
+ };
+ /**
+ * Initialize service windows
+ * Must be run before service use
+ *
+ * @param {Integer} pts PTS value
+ * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
+ */
+
+
+ Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
+ this.startPts = pts;
+
+ for (var win = 0; win < 8; win++) {
+ this.windows[win] = new Cea708Window(win);
+
+ if (typeof beforeRowOverflow === 'function') {
+ this.windows[win].beforeRowOverflow = beforeRowOverflow;
+ }
+ }
+ };
+ /**
+ * Set current window of service to be affected by commands
+ *
+ * @param {Integer} windowNum Window number
+ */
+
+
+ Cea708Service.prototype.setCurrentWindow = function (windowNum) {
+ this.currentWindow = this.windows[windowNum];
+ };
+ /**
+ * Try to create a TextDecoder if it is natively supported
+ */
+
+
+ Cea708Service.prototype.createTextDecoder = function (encoding) {
+ if (typeof TextDecoder === 'undefined') {
+ this.stream.trigger('log', {
+ level: 'warn',
+ message: 'The `encoding` option is unsupported without TextDecoder support'
+ });
+ } else {
+ try {
+ this.textDecoder_ = new TextDecoder(encoding);
+ } catch (error) {
+ this.stream.trigger('log', {
+ level: 'warn',
+ message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
+ });
+ }
+ }
+ };
+
+ var Cea708Stream = function Cea708Stream(options) {
+ options = options || {};
+ Cea708Stream.prototype.init.call(this);
+ var self = this;
+ var captionServices = options.captionServices || {};
+ var captionServiceEncodings = {};
+ var serviceProps; // Get service encodings from captionServices option block
+
+ Object.keys(captionServices).forEach(function (serviceName) {
+ serviceProps = captionServices[serviceName];
+
+ if (/^SERVICE/.test(serviceName)) {
+ captionServiceEncodings[serviceName] = serviceProps.encoding;
+ }
+ });
+ this.serviceEncodings = captionServiceEncodings;
+ this.current708Packet = null;
+ this.services = {};
+
+ this.push = function (packet) {
+ if (packet.type === 3) {
+ // 708 packet start
+ self.new708Packet();
+ self.add708Bytes(packet);
+ } else {
+ if (self.current708Packet === null) {
+ // This should only happen at the start of a file if there's no packet start.
+ self.new708Packet();
+ }
+
+ self.add708Bytes(packet);
+ }
+ };
+ };
+
+ Cea708Stream.prototype = new stream();
+ /**
+ * Push current 708 packet, create new 708 packet.
+ */
+
+ Cea708Stream.prototype.new708Packet = function () {
+ if (this.current708Packet !== null) {
+ this.push708Packet();
+ }
+
+ this.current708Packet = {
+ data: [],
+ ptsVals: []
+ };
+ };
+ /**
+ * Add pts and both bytes from packet into current 708 packet.
+ */
+
+
+ Cea708Stream.prototype.add708Bytes = function (packet) {
+ var data = packet.ccData;
+ var byte0 = data >>> 8;
+ var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
+ // that service blocks will always line up with byte pairs.
+
+ this.current708Packet.ptsVals.push(packet.pts);
+ this.current708Packet.data.push(byte0);
+ this.current708Packet.data.push(byte1);
+ };
+ /**
+ * Parse completed 708 packet into service blocks and push each service block.
+ */
+
+
+ Cea708Stream.prototype.push708Packet = function () {
+ var packet708 = this.current708Packet;
+ var packetData = packet708.data;
+ var serviceNum = null;
+ var blockSize = null;
+ var i = 0;
+ var b = packetData[i++];
+ packet708.seq = b >> 6;
+ packet708.sizeCode = b & 0x3f; // 0b00111111;
+
+ for (; i < packetData.length; i++) {
+ b = packetData[i++];
+ serviceNum = b >> 5;
+ blockSize = b & 0x1f; // 0b00011111
+
+ if (serviceNum === 7 && blockSize > 0) {
+ // Extended service num
+ b = packetData[i++];
+ serviceNum = b;
+ }
+
+ this.pushServiceBlock(serviceNum, i, blockSize);
+
+ if (blockSize > 0) {
+ i += blockSize - 1;
+ }
+ }
+ };
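+ // Worked example for the header math above (illustrative bytes, not from a
+ // real stream): a first packet byte of 0xC2 gives seq = 0xC2 >> 6 = 3 and
+ // sizeCode = 0xC2 & 0x3f = 2; a service block header byte of 0x23 gives
+ // serviceNum = 0x23 >> 5 = 1 and blockSize = 0x23 & 0x1f = 3, so the next
+ // 3 bytes belong to service 1. A serviceNum of 7 means the real (extended)
+ // service number is carried in the following byte.
+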
+ /**
+ * Parse service block, execute commands, read text.
+ *
+ * Note: While many of these commands serve important purposes,
+ * many others just parse out the parameters or attributes, but
+ * nothing is done with them because this is not a full and complete
+ * implementation of the entire 708 spec.
+ *
+ * @param {Integer} serviceNum Service number
+ * @param {Integer} start Start index of the 708 packet data
+ * @param {Integer} size Block size
+ */
+
+
+ Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
+ var b;
+ var i = start;
+ var packetData = this.current708Packet.data;
+ var service = this.services[serviceNum];
+
+ if (!service) {
+ service = this.initService(serviceNum, i);
+ }
+
+ for (; i < start + size && i < packetData.length; i++) {
+ b = packetData[i];
+
+ if (within708TextBlock(b)) {
+ i = this.handleText(i, service);
+ } else if (b === 0x18) {
+ i = this.multiByteCharacter(i, service);
+ } else if (b === 0x10) {
+ i = this.extendedCommands(i, service);
+ } else if (0x80 <= b && b <= 0x87) {
+ i = this.setCurrentWindow(i, service);
+ } else if (0x98 <= b && b <= 0x9f) {
+ i = this.defineWindow(i, service);
+ } else if (b === 0x88) {
+ i = this.clearWindows(i, service);
+ } else if (b === 0x8c) {
+ i = this.deleteWindows(i, service);
+ } else if (b === 0x89) {
+ i = this.displayWindows(i, service);
+ } else if (b === 0x8a) {
+ i = this.hideWindows(i, service);
+ } else if (b === 0x8b) {
+ i = this.toggleWindows(i, service);
+ } else if (b === 0x97) {
+ i = this.setWindowAttributes(i, service);
+ } else if (b === 0x90) {
+ i = this.setPenAttributes(i, service);
+ } else if (b === 0x91) {
+ i = this.setPenColor(i, service);
+ } else if (b === 0x92) {
+ i = this.setPenLocation(i, service);
+ } else if (b === 0x8f) {
+ service = this.reset(i, service);
+ } else if (b === 0x08) {
+ // BS: Backspace
+ service.currentWindow.backspace();
+ } else if (b === 0x0c) {
+ // FF: Form feed
+ service.currentWindow.clearText();
+ } else if (b === 0x0d) {
+ // CR: Carriage return
+ service.currentWindow.pendingNewLine = true;
+ } else if (b === 0x0e) {
+ // HCR: Horizontal carriage return
+ service.currentWindow.clearText();
+ } else if (b === 0x8d) {
+ // DLY: Delay, nothing to do
+ i++;
+ } // any other command byte is unrecognized here and simply skipped
+ }
+ };
+ /**
+ * Execute an extended command
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.extendedCommands = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[++i];
+
+ if (within708TextBlock(b)) {
+ i = this.handleText(i, service, {
+ isExtended: true
+ });
+ }
+
+ return i;
+ };
+ /**
+ * Get PTS value of a given byte index
+ *
+ * @param {Integer} byteIndex Index of the byte
+ * @return {Integer} PTS
+ */
+
+
+ Cea708Stream.prototype.getPts = function (byteIndex) {
+ // There's 1 pts value per 2 bytes
+ return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
+ };
+ /**
+ * Initializes a service
+ *
+ * @param {Integer} serviceNum Service number
+ * @return {Service} Initialized service object
+ */
+
+
+ Cea708Stream.prototype.initService = function (serviceNum, i) {
+ var serviceName = 'SERVICE' + serviceNum;
+ var self = this;
+ var encoding;
+
+ if (serviceName in this.serviceEncodings) {
+ encoding = this.serviceEncodings[serviceName];
+ }
+
+ this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);
+ this.services[serviceNum].init(this.getPts(i), function (pts) {
+ self.flushDisplayed(pts, self.services[serviceNum]);
+ });
+ return this.services[serviceNum];
+ };
+ /**
+ * Execute text writing to current window
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.handleText = function (i, service, options) {
+ var isExtended = options && options.isExtended;
+ var isMultiByte = options && options.isMultiByte;
+ var packetData = this.current708Packet.data;
+ var extended = isExtended ? 0x1000 : 0x0000;
+ var currentByte = packetData[i];
+ var nextByte = packetData[i + 1];
+ var win = service.currentWindow;
+
+ var _char;
+
+ var charCodeArray; // Use the TextDecoder if one was created for this service
+
+ if (service.textDecoder_ && !isExtended) {
+ if (isMultiByte) {
+ charCodeArray = [currentByte, nextByte];
+ i++;
+ } else {
+ charCodeArray = [currentByte];
+ }
+
+ _char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
+ } else {
+ _char = get708CharFromCode(extended | currentByte);
+ }
+
+ if (win.pendingNewLine && !win.isEmpty()) {
+ win.newLine(this.getPts(i));
+ }
+
+ win.pendingNewLine = false;
+ win.addText(_char);
+ return i;
+ };
+ /**
+ * Handle decoding of multibyte character
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.multiByteCharacter = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var firstByte = packetData[i + 1];
+ var secondByte = packetData[i + 2];
+
+ if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {
+ i = this.handleText(++i, service, {
+ isMultiByte: true
+ });
+ }
+
+ return i;
+ };
+ /**
+ * Parse and execute the CW# command.
+ *
+ * Set the current window.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.setCurrentWindow = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[i];
+ var windowNum = b & 0x07;
+ service.setCurrentWindow(windowNum);
+ return i;
+ };
+ /**
+ * Parse and execute the DF# command.
+ *
+ * Define a window and set it as the current window.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.defineWindow = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[i];
+ var windowNum = b & 0x07;
+ service.setCurrentWindow(windowNum);
+ var win = service.currentWindow;
+ b = packetData[++i];
+ win.visible = (b & 0x20) >> 5; // v
+
+ win.rowLock = (b & 0x10) >> 4; // rl
+
+ win.columnLock = (b & 0x08) >> 3; // cl
+
+ win.priority = b & 0x07; // p
+
+ b = packetData[++i];
+ win.relativePositioning = (b & 0x80) >> 7; // rp
+
+ win.anchorVertical = b & 0x7f; // av
+
+ b = packetData[++i];
+ win.anchorHorizontal = b; // ah
+
+ b = packetData[++i];
+ win.anchorPoint = (b & 0xf0) >> 4; // ap
+
+ win.rowCount = b & 0x0f; // rc
+
+ b = packetData[++i];
+ win.columnCount = b & 0x3f; // cc
+
+ b = packetData[++i];
+ win.windowStyle = (b & 0x38) >> 3; // ws
+
+ win.penStyle = b & 0x07; // ps
+ // The spec says there are (rowCount+1) "virtual rows"
+
+ win.virtualRowCount = win.rowCount + 1;
+ return i;
+ };
+ /**
+ * Parse and execute the SWA command.
+ *
+ * Set attributes of the current window.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.setWindowAttributes = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[i];
+ var winAttr = service.currentWindow.winAttr;
+ b = packetData[++i];
+ winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
+
+ winAttr.fillRed = (b & 0x30) >> 4; // fr
+
+ winAttr.fillGreen = (b & 0x0c) >> 2; // fg
+
+ winAttr.fillBlue = b & 0x03; // fb
+
+ b = packetData[++i];
+ winAttr.borderType = (b & 0xc0) >> 6; // bt
+
+ winAttr.borderRed = (b & 0x30) >> 4; // br
+
+ winAttr.borderGreen = (b & 0x0c) >> 2; // bg
+
+ winAttr.borderBlue = b & 0x03; // bb
+
+ b = packetData[++i];
+ winAttr.borderType += (b & 0x80) >> 5; // bt
+
+ winAttr.wordWrap = (b & 0x40) >> 6; // ww
+
+ winAttr.printDirection = (b & 0x30) >> 4; // pd
+
+ winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
+
+ winAttr.justify = b & 0x03; // j
+
+ b = packetData[++i];
+ winAttr.effectSpeed = (b & 0xf0) >> 4; // es
+
+ winAttr.effectDirection = (b & 0x0c) >> 2; // ed
+
+ winAttr.displayEffect = b & 0x03; // de
+
+ return i;
+ };
+ /**
+ * Gather text from all displayed windows and push a caption to output.
+ *
+ * @param {Integer} pts Presentation timestamp of the flushed caption
+ * @param {Service} service The service object to be affected
+ */
+
+
+ Cea708Stream.prototype.flushDisplayed = function (pts, service) {
+ var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily
+ // display text in the correct order, but sample files so far have not shown any issue.
+
+ for (var winId = 0; winId < 8; winId++) {
+ if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
+ displayedText.push(service.windows[winId].getText());
+ }
+ }
+
+ service.endPts = pts;
+ service.text = displayedText.join('\n\n');
+ this.pushCaption(service);
+ service.startPts = pts;
+ };
+ /**
+ * Push a caption to output if the caption contains text.
+ *
+ * @param {Service} service The service object to be affected
+ */
+
+
+ Cea708Stream.prototype.pushCaption = function (service) {
+ if (service.text !== '') {
+ this.trigger('data', {
+ startPts: service.startPts,
+ endPts: service.endPts,
+ text: service.text,
+ stream: 'cc708_' + service.serviceNum
+ });
+ service.text = '';
+ service.startPts = service.endPts;
+ }
+ };
+ /**
+ * Parse and execute the DSW command.
+ *
+ * Set visible property of windows based on the parsed bitmask.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.displayWindows = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[++i];
+ var pts = this.getPts(i);
+ this.flushDisplayed(pts, service);
+
+ for (var winId = 0; winId < 8; winId++) {
+ if (b & 0x01 << winId) {
+ service.windows[winId].visible = 1;
+ }
+ }
+
+ return i;
+ };
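+ // Note on the bitmask: `<<` binds tighter than `&` in JavaScript, so
+ // `b & 0x01 << winId` is `b & (0x01 << winId)`. Illustrative value: b = 0x05
+ // (binary 00000101) selects windows 0 and 2; DSW marks them visible, and the
+ // HDW/TGW/CLW/DLW handlers below apply the same bitmask convention.
+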
+ /**
+ * Parse and execute the HDW command.
+ *
+ * Set visible property of windows based on the parsed bitmask.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.hideWindows = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[++i];
+ var pts = this.getPts(i);
+ this.flushDisplayed(pts, service);
+
+ for (var winId = 0; winId < 8; winId++) {
+ if (b & 0x01 << winId) {
+ service.windows[winId].visible = 0;
+ }
+ }
+
+ return i;
+ };
+ /**
+ * Parse and execute the TGW command.
+ *
+ * Toggle the visible property of windows based on the parsed bitmask.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.toggleWindows = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[++i];
+ var pts = this.getPts(i);
+ this.flushDisplayed(pts, service);
+
+ for (var winId = 0; winId < 8; winId++) {
+ if (b & 0x01 << winId) {
+ service.windows[winId].visible ^= 1;
+ }
+ }
+
+ return i;
+ };
+ /**
+ * Parse and execute the CLW command.
+ *
+ * Clear text of windows based on the parsed bitmask.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.clearWindows = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[++i];
+ var pts = this.getPts(i);
+ this.flushDisplayed(pts, service);
+
+ for (var winId = 0; winId < 8; winId++) {
+ if (b & 0x01 << winId) {
+ service.windows[winId].clearText();
+ }
+ }
+
+ return i;
+ };
+ /**
+ * Parse and execute the DLW command.
+ *
+ * Re-initialize windows based on the parsed bitmask.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.deleteWindows = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[++i];
+ var pts = this.getPts(i);
+ this.flushDisplayed(pts, service);
+
+ for (var winId = 0; winId < 8; winId++) {
+ if (b & 0x01 << winId) {
+ service.windows[winId].reset();
+ }
+ }
+
+ return i;
+ };
+ /**
+ * Parse and execute the SPA command.
+ *
+ * Set pen attributes of the current window.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.setPenAttributes = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[i];
+ var penAttr = service.currentWindow.penAttr;
+ b = packetData[++i];
+ penAttr.textTag = (b & 0xf0) >> 4; // tt
+
+ penAttr.offset = (b & 0x0c) >> 2; // o
+
+ penAttr.penSize = b & 0x03; // s
+
+ b = packetData[++i];
+ penAttr.italics = (b & 0x80) >> 7; // i
+
+ penAttr.underline = (b & 0x40) >> 6; // u
+
+ penAttr.edgeType = (b & 0x38) >> 3; // et
+
+ penAttr.fontStyle = b & 0x07; // fs
+
+ return i;
+ };
+ /**
+ * Parse and execute the SPC command.
+ *
+ * Set pen color of the current window.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.setPenColor = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[i];
+ var penColor = service.currentWindow.penColor;
+ b = packetData[++i];
+ penColor.fgOpacity = (b & 0xc0) >> 6; // fo
+
+ penColor.fgRed = (b & 0x30) >> 4; // fr
+
+ penColor.fgGreen = (b & 0x0c) >> 2; // fg
+
+ penColor.fgBlue = b & 0x03; // fb
+
+ b = packetData[++i];
+ penColor.bgOpacity = (b & 0xc0) >> 6; // bo
+
+ penColor.bgRed = (b & 0x30) >> 4; // br
+
+ penColor.bgGreen = (b & 0x0c) >> 2; // bg
+
+ penColor.bgBlue = b & 0x03; // bb
+
+ b = packetData[++i];
+ penColor.edgeRed = (b & 0x30) >> 4; // er
+
+ penColor.edgeGreen = (b & 0x0c) >> 2; // eg
+
+ penColor.edgeBlue = b & 0x03; // eb
+
+ return i;
+ };
+ /**
+ * Parse and execute the SPL command.
+ *
+ * Set pen location of the current window.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.setPenLocation = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[i];
+ var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak
+
+ service.currentWindow.pendingNewLine = true;
+ b = packetData[++i];
+ penLoc.row = b & 0x0f; // r
+
+ b = packetData[++i];
+ penLoc.column = b & 0x3f; // c
+
+ return i;
+ };
+ /**
+ * Execute the RST command.
+ *
+ * Reset service to a clean slate. Re-initialize.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Service} Re-initialized service
+ */
+
+
+ Cea708Stream.prototype.reset = function (i, service) {
+ var pts = this.getPts(i);
+ this.flushDisplayed(pts, service);
+ return this.initService(service.serviceNum, i);
+ }; // This hash maps non-ASCII, special, and extended character codes to their
+ // proper Unicode equivalent. The first keys that are only a single byte
+ // are the non-standard ASCII characters, which simply map the CEA608 byte
+ // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
+ // character codes, but have their MSB bitmasked with 0x03 so that a lookup
+ // can be performed regardless of the field and data channel on which the
+ // character code was received.
+
+
+ var CHARACTER_TRANSLATION = {
+ 0x2a: 0xe1,
+ // á
+ 0x5c: 0xe9,
+ // é
+ 0x5e: 0xed,
+ // í
+ 0x5f: 0xf3,
+ // ó
+ 0x60: 0xfa,
+ // ú
+ 0x7b: 0xe7,
+ // ç
+ 0x7c: 0xf7,
+ // ÷
+ 0x7d: 0xd1,
+ // Ñ
+ 0x7e: 0xf1,
+ // ñ
+ 0x7f: 0x2588,
+ // █
+ 0x0130: 0xae,
+ // ®
+ 0x0131: 0xb0,
+ // °
+ 0x0132: 0xbd,
+ // ½
+ 0x0133: 0xbf,
+ // ¿
+ 0x0134: 0x2122,
+ // ™
+ 0x0135: 0xa2,
+ // ¢
+ 0x0136: 0xa3,
+ // £
+ 0x0137: 0x266a,
+ // ♪
+ 0x0138: 0xe0,
+ // à
+ 0x0139: 0xa0,
+ // non-breaking space
+ 0x013a: 0xe8,
+ // è
+ 0x013b: 0xe2,
+ // â
+ 0x013c: 0xea,
+ // ê
+ 0x013d: 0xee,
+ // î
+ 0x013e: 0xf4,
+ // ô
+ 0x013f: 0xfb,
+ // û
+ 0x0220: 0xc1,
+ // Á
+ 0x0221: 0xc9,
+ // É
+ 0x0222: 0xd3,
+ // Ó
+ 0x0223: 0xda,
+ // Ú
+ 0x0224: 0xdc,
+ // Ü
+ 0x0225: 0xfc,
+ // ü
+ 0x0226: 0x2018,
+ // ‘
+ 0x0227: 0xa1,
+ // ¡
+ 0x0228: 0x2a,
+ // *
+ 0x0229: 0x27,
+ // '
+ 0x022a: 0x2014,
+ // —
+ 0x022b: 0xa9,
+ // ©
+ 0x022c: 0x2120,
+ // ℠
+ 0x022d: 0x2022,
+ // •
+ 0x022e: 0x201c,
+ // “
+ 0x022f: 0x201d,
+ // ”
+ 0x0230: 0xc0,
+ // À
+ 0x0231: 0xc2,
+ // Â
+ 0x0232: 0xc7,
+ // Ç
+ 0x0233: 0xc8,
+ // È
+ 0x0234: 0xca,
+ // Ê
+ 0x0235: 0xcb,
+ // Ë
+ 0x0236: 0xeb,
+ // ë
+ 0x0237: 0xce,
+ // Î
+ 0x0238: 0xcf,
+ // Ï
+ 0x0239: 0xef,
+ // ï
+ 0x023a: 0xd4,
+ // Ô
+ 0x023b: 0xd9,
+ // Ù
+ 0x023c: 0xf9,
+ // ù
+ 0x023d: 0xdb,
+ // Û
+ 0x023e: 0xab,
+ // «
+ 0x023f: 0xbb,
+ // »
+ 0x0320: 0xc3,
+ // Ã
+ 0x0321: 0xe3,
+ // ã
+ 0x0322: 0xcd,
+ // Í
+ 0x0323: 0xcc,
+ // Ì
+ 0x0324: 0xec,
+ // ì
+ 0x0325: 0xd2,
+ // Ò
+ 0x0326: 0xf2,
+ // ò
+ 0x0327: 0xd5,
+ // Õ
+ 0x0328: 0xf5,
+ // õ
+ 0x0329: 0x7b,
+ // {
+ 0x032a: 0x7d,
+ // }
+ 0x032b: 0x5c,
+ // \
+ 0x032c: 0x5e,
+ // ^
+ 0x032d: 0x5f,
+ // _
+ 0x032e: 0x7c,
+ // |
+ 0x032f: 0x7e,
+ // ~
+ 0x0330: 0xc4,
+ // Ä
+ 0x0331: 0xe4,
+ // ä
+ 0x0332: 0xd6,
+ // Ö
+ 0x0333: 0xf6,
+ // ö
+ 0x0334: 0xdf,
+ // ß
+ 0x0335: 0xa5,
+ // ¥
+ 0x0336: 0xa4,
+ // ¤
+ 0x0337: 0x2502,
+ // │
+ 0x0338: 0xc5,
+ // Å
+ 0x0339: 0xe5,
+ // å
+ 0x033a: 0xd8,
+ // Ø
+ 0x033b: 0xf8,
+ // ø
+ 0x033c: 0x250c,
+ // ┌
+ 0x033d: 0x2510,
+ // ┐
+ 0x033e: 0x2514,
+ // └
+ 0x033f: 0x2518 // ┘
+
+ };
+
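+ // Illustrative lookups (values taken from the table above): a single-byte key
+ // such as 0x7e resolves to 0xf1 ("ñ"); a two-byte key is built by the 608
+ // parser below as (char0 & 0x03) << 8 | char1, e.g. char0 = 0x12, char1 = 0x2b
+ // gives 0x022b, which maps to 0xa9 ("©"). Codes not in the table are passed to
+ // String.fromCharCode unchanged by getCharFromCode below.
+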
+ var getCharFromCode = function getCharFromCode(code) {
+ if (code === null) {
+ return '';
+ }
+
+ code = CHARACTER_TRANSLATION[code] || code;
+ return String.fromCharCode(code);
+ }; // the index of the last row in a CEA-608 display buffer
+
+
+ var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
+ // getting it through bit logic.
+
+ var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 34x15 matrix of character
+ // cells. The "bottom" row is the last element in the outer array.
+
+ var createDisplayBuffer = function createDisplayBuffer() {
+ var result = [],
+ i = BOTTOM_ROW + 1;
+
+ while (i--) {
+ result.push('');
+ }
+
+ return result;
+ };
+
+ var Cea608Stream = function Cea608Stream(field, dataChannel) {
+ Cea608Stream.prototype.init.call(this);
+ this.field_ = field || 0;
+ this.dataChannel_ = dataChannel || 0;
+ this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
+ this.setConstants();
+ this.reset();
+
+ this.push = function (packet) {
+ var data, swap, char0, char1, text; // remove the parity bits
+
+ data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice
+
+ if (data === this.lastControlCode_) {
+ this.lastControlCode_ = null;
+ return;
+ } // Store control codes
+
+
+ if ((data & 0xf000) === 0x1000) {
+ this.lastControlCode_ = data;
+ } else if (data !== this.PADDING_) {
+ this.lastControlCode_ = null;
+ }
+
+ char0 = data >>> 8;
+ char1 = data & 0xff;
+
+ if (data === this.PADDING_) {
+ return;
+ } else if (data === this.RESUME_CAPTION_LOADING_) {
+ this.mode_ = 'popOn';
+ } else if (data === this.END_OF_CAPTION_) {
+ // If an EOC is received while in paint-on mode, the displayed caption
+ // text should be swapped to non-displayed memory as if it was a pop-on
+ // caption. Because of that, we should explicitly switch back to pop-on
+ // mode
+ this.mode_ = 'popOn';
+ this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now
+
+ this.flushDisplayed(packet.pts); // flip memory
+
+ swap = this.displayed_;
+ this.displayed_ = this.nonDisplayed_;
+ this.nonDisplayed_ = swap; // start measuring the time to display the caption
+
+ this.startPts_ = packet.pts;
+ } else if (data === this.ROLL_UP_2_ROWS_) {
+ this.rollUpRows_ = 2;
+ this.setRollUp(packet.pts);
+ } else if (data === this.ROLL_UP_3_ROWS_) {
+ this.rollUpRows_ = 3;
+ this.setRollUp(packet.pts);
+ } else if (data === this.ROLL_UP_4_ROWS_) {
+ this.rollUpRows_ = 4;
+ this.setRollUp(packet.pts);
+ } else if (data === this.CARRIAGE_RETURN_) {
+ this.clearFormatting(packet.pts);
+ this.flushDisplayed(packet.pts);
+ this.shiftRowsUp_();
+ this.startPts_ = packet.pts;
+ } else if (data === this.BACKSPACE_) {
+ if (this.mode_ === 'popOn') {
+ this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
+ } else {
+ this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
+ }
+ } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
+ this.flushDisplayed(packet.pts);
+ this.displayed_ = createDisplayBuffer();
+ } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
+ this.nonDisplayed_ = createDisplayBuffer();
+ } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
+ if (this.mode_ !== 'paintOn') {
+ // NOTE: This should be removed when proper caption positioning is
+ // implemented
+ this.flushDisplayed(packet.pts);
+ this.displayed_ = createDisplayBuffer();
+ }
+
+ this.mode_ = 'paintOn';
+ this.startPts_ = packet.pts; // Append special characters to caption text
+ } else if (this.isSpecialCharacter(char0, char1)) {
+ // Bitmask char0 so that we can apply character transformations
+ // regardless of field and data channel.
+ // Then byte-shift to the left and OR with char1 so we can pass the
+ // entire character code to `getCharFromCode`.
+ char0 = (char0 & 0x03) << 8;
+ text = getCharFromCode(char0 | char1);
+ this[this.mode_](packet.pts, text);
+ this.column_++; // Append extended characters to caption text
+ } else if (this.isExtCharacter(char0, char1)) {
+ // Extended characters always follow their "non-extended" equivalents.
+ // i.e. if a "è" is desired, you'll always receive "eè"; non-compliant
+ // decoders are supposed to drop the "è", while compliant decoders
+ // backspace the "e" and insert "è".
+ // Delete the previous character
+ if (this.mode_ === 'popOn') {
+ this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
+ } else {
+ this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
+ } // Bitmask char0 so that we can apply character transformations
+ // regardless of field and data channel.
+ // Then byte-shift to the left and OR with char1 so we can pass the
+ // entire character code to `getCharFromCode`.
+
+
+ char0 = (char0 & 0x03) << 8;
+ text = getCharFromCode(char0 | char1);
+ this[this.mode_](packet.pts, text);
+ this.column_++; // Process mid-row codes
+ } else if (this.isMidRowCode(char0, char1)) {
+ // Attributes are not additive, so clear all formatting
+ this.clearFormatting(packet.pts); // According to the standard, mid-row codes
+ // should be replaced with spaces, so add one now
+
+ this[this.mode_](packet.pts, ' ');
+ this.column_++;
+
+ if ((char1 & 0xe) === 0xe) {
+ this.addFormatting(packet.pts, ['i']);
+ }
+
+ if ((char1 & 0x1) === 0x1) {
+ this.addFormatting(packet.pts, ['u']);
+ } // Detect offset control codes and adjust cursor
+
+ } else if (this.isOffsetControlCode(char0, char1)) {
+ // Cursor position is set by indent PAC (see below) in 4-column
+ // increments, with an additional offset code of 1-3 to reach any
+ // of the 32 columns specified by CEA-608. So all we need to do
+ // here is increment the column cursor by the given offset.
+ this.column_ += char1 & 0x03; // Detect PACs (Preamble Address Codes)
+ } else if (this.isPAC(char0, char1)) {
+ // There's no logic for PAC -> row mapping, so we have to just
+ // find the row code in an array and use its index :(
+ var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode
+
+ if (this.mode_ === 'rollUp') {
+ // This implies that the base row is incorrectly set.
+ // As per the recommendation in CEA-608(Base Row Implementation), defer to the number
+ // of roll-up rows set.
+ if (row - this.rollUpRows_ + 1 < 0) {
+ row = this.rollUpRows_ - 1;
+ }
+
+ this.setRollUp(packet.pts, row);
+ }
+
+ if (row !== this.row_) {
+ // formatting is only persistent for current row
+ this.clearFormatting(packet.pts);
+ this.row_ = row;
+ } // All PACs can apply underline, so detect and apply
+ // (All odd-numbered second bytes set underline)
+
+
+ if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
+ this.addFormatting(packet.pts, ['u']);
+ }
+
+ if ((data & 0x10) === 0x10) {
+ // We've got an indent level code. Each successive even number
+ // increments the column cursor by 4, so we can get the desired
+ // column position by bit-shifting to the right (to get n/2)
+ // and multiplying by 4.
+ this.column_ = ((data & 0xe) >> 1) * 4;
+ }
+
+ if (this.isColorPAC(char1)) {
+ // it's a color code, though we only support white, which
+ // can be either normal or italicized. white italics can be
+ // either 0x4e or 0x6e depending on the row, so we just
+ // bitwise-and with 0xe to see if italics should be turned on
+ if ((char1 & 0xe) === 0xe) {
+ this.addFormatting(packet.pts, ['i']);
+ }
+ } // We have a normal character in char0, and possibly one in char1
+
+ } else if (this.isNormalChar(char0)) {
+ if (char1 === 0x00) {
+ char1 = null;
+ }
+
+ text = getCharFromCode(char0);
+ text += getCharFromCode(char1);
+ this[this.mode_](packet.pts, text);
+ this.column_ += text.length;
+ } // finish data processing
+
+ };
+ };
+
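+ // Worked example of the column math in `push` above (illustrative bytes): a
+ // PAC with second byte 0x58 has the indent bit (0x10) set, so the column
+ // becomes ((0x58 & 0xe) >> 1) * 4 = 16; a following tab offset code with
+ // second byte 0x22 then adds 0x22 & 0x03 = 2, leaving the cursor at column 18.
+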
+ Cea608Stream.prototype = new stream(); // Trigger a cue point that captures the current state of the
+ // display buffer
+
+ Cea608Stream.prototype.flushDisplayed = function (pts) {
+ var content = this.displayed_ // remove spaces from the start and end of the string
+ .map(function (row, index) {
+ try {
+ return row.trim();
+ } catch (e) {
+ // Ordinarily, this shouldn't happen. However, caption
+ // parsing errors should not throw exceptions and
+ // break playback.
+ this.trigger('log', {
+ level: 'warn',
+ message: 'Skipping a malformed 608 caption at index ' + index + '.'
+ });
+ return '';
+ }
+ }, this) // combine all text rows to display in one cue
+ .join('\n') // and remove blank rows from the start and end, but not the middle
+ .replace(/^\n+|\n+$/g, '');
+
+ if (content.length) {
+ this.trigger('data', {
+ startPts: this.startPts_,
+ endPts: pts,
+ text: content,
+ stream: this.name_
+ });
+ }
+ };
+ /**
+ * Zero out the data, used for startup and on seek
+ */
+
+
+ Cea608Stream.prototype.reset = function () {
+ this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will
+ // actually display captions. If a caption is shifted to a row
+ // with a lower index than this, it is cleared from the display
+ // buffer
+
+ this.topRow_ = 0;
+ this.startPts_ = 0;
+ this.displayed_ = createDisplayBuffer();
+ this.nonDisplayed_ = createDisplayBuffer();
+ this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacing
+
+ this.column_ = 0;
+ this.row_ = BOTTOM_ROW;
+ this.rollUpRows_ = 2; // This variable holds currently-applied formatting
+
+ this.formatting_ = [];
+ };
+ /**
+ * Sets up control code and related constants for this instance
+ */
+
+
+ Cea608Stream.prototype.setConstants = function () {
+ // The following attributes have these uses:
+ // ext_ : char0 for mid-row codes, and the base for extended
+ // chars (ext_+0, ext_+1, and ext_+2 are char0s for
+ // extended codes)
+ // control_: char0 for control codes, except byte-shifted to the
+ // left so that we can do this.control_ | CONTROL_CODE
+ // offset_: char0 for tab offset codes
+ //
+ // It's also worth noting that control codes, and _only_ control codes,
+ // differ between field 1 and field 2. Field 2 control codes are always
+ // their field 1 value plus 1. That's why there's the "| field" on the
+ // control value.
+ if (this.dataChannel_ === 0) {
+ this.BASE_ = 0x10;
+ this.EXT_ = 0x11;
+ this.CONTROL_ = (0x14 | this.field_) << 8;
+ this.OFFSET_ = 0x17;
+ } else if (this.dataChannel_ === 1) {
+ this.BASE_ = 0x18;
+ this.EXT_ = 0x19;
+ this.CONTROL_ = (0x1c | this.field_) << 8;
+ this.OFFSET_ = 0x1f;
+ } // Constants for the LSByte command codes recognized by Cea608Stream. This
+ // list is not exhaustive. For a more comprehensive listing and semantics see
+ // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
+ // Padding
+
+
+ this.PADDING_ = 0x0000; // Pop-on Mode
+
+ this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
+ this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode
+
+ this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
+ this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
+ this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
+ this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode
+
+ this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure
+
+ this.BACKSPACE_ = this.CONTROL_ | 0x21;
+ this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
+ this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
+ };
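+ // Illustrative values (after the parity bits have been stripped in `push`):
+ // for field 0, data channel 0, CONTROL_ is (0x14 | 0) << 8 = 0x1400, so
+ // RESUME_CAPTION_LOADING_ = 0x1420, END_OF_CAPTION_ = 0x142f and
+ // CARRIAGE_RETURN_ = 0x142d; for field 1 the same commands become 0x1520,
+ // 0x152f and 0x152d because CONTROL_ is (0x14 | 1) << 8 = 0x1500.
+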
+ /**
+ * Detects if the 2-byte packet data is a special character
+ *
+ * Special characters have a second byte in the range 0x30 to 0x3f,
+ * with the first byte being 0x11 (for data channel 1) or 0x19 (for
+ * data channel 2).
+ *
+ * @param {Integer} char0 The first byte
+ * @param {Integer} char1 The second byte
+ * @return {Boolean} Whether the 2 bytes are a special character
+ */
+
+
+ Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
+ return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
+ };
+ /**
+ * Detects if the 2-byte packet data is an extended character
+ *
+ * Extended characters have a second byte in the range 0x20 to 0x3f,
+ * with the first byte being 0x12 or 0x13 (for data channel 1) or
+ * 0x1a or 0x1b (for data channel 2).
+ *
+ * @param {Integer} char0 The first byte
+ * @param {Integer} char1 The second byte
+ * @return {Boolean} Whether the 2 bytes are an extended character
+ */
+
+
+ Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
+ return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
+ };
+ /**
+ * Detects if the 2-byte packet is a mid-row code
+ *
+ * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
+ * the first byte being 0x11 (for data channel 1) or 0x19 (for data
+ * channel 2).
+ *
+ * @param {Integer} char0 The first byte
+ * @param {Integer} char1 The second byte
+ * @return {Boolean} Whether the 2 bytes are a mid-row code
+ */
+
+
+ Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
+ return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
+ };
+ /**
+ * Detects if the 2-byte packet is an offset control code
+ *
+ * Offset control codes have a second byte in the range 0x21 to 0x23,
+ * with the first byte being 0x17 (for data channel 1) or 0x1f (for
+ * data channel 2).
+ *
+ * @param {Integer} char0 The first byte
+ * @param {Integer} char1 The second byte
+ * @return {Boolean} Whether the 2 bytes are an offset control code
+ */
+
+
+ Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
+ return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
+ };
+ /**
+ * Detects if the 2-byte packet is a Preamble Address Code
+ *
+ * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
+ * or 0x18 to 0x1f (for data channel 2), with the second byte in the
+ * range 0x40 to 0x7f.
+ *
+ * @param {Integer} char0 The first byte
+ * @param {Integer} char1 The second byte
+ * @return {Boolean} Whether the 2 bytes are a PAC
+ */
+
+
+ Cea608Stream.prototype.isPAC = function (char0, char1) {
+ return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
+ };
+ /**
+ * Detects if a packet's second byte is in the range of a PAC color code
+ *
+ * PAC color codes have the second byte in the range 0x40 to 0x4f, or
+ * 0x60 to 0x7f.
+ *
+ * @param {Integer} char1 The second byte
+ * @return {Boolean} Whether the byte is a color PAC
+ */
+
+
+ Cea608Stream.prototype.isColorPAC = function (char1) {
+ return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
+ };
+ /**
+ * Detects if a single byte is in the range of a normal character
+ *
+ * Normal text bytes are in the range 0x20 to 0x7f.
+ *
+ * @param {Integer} char The byte
+ * @return {Boolean} Whether the byte is a normal character
+ */
+
+
+ Cea608Stream.prototype.isNormalChar = function (_char2) {
+ return _char2 >= 0x20 && _char2 <= 0x7f;
+ };
+ /**
+ * Configures roll-up
+ *
+ * @param {Integer} pts Current PTS
+ * @param {Integer} newBaseRow Used by PACs to slide the current window to
+ * a new position
+ */
+
+
+ Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
+ // Reset the base row to the bottom row when switching modes
+ if (this.mode_ !== 'rollUp') {
+ this.row_ = BOTTOM_ROW;
+ this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up
+
+ this.flushDisplayed(pts);
+ this.nonDisplayed_ = createDisplayBuffer();
+ this.displayed_ = createDisplayBuffer();
+ }
+
+ if (newBaseRow !== undefined && newBaseRow !== this.row_) {
+ // move currently displayed captions (up or down) to the new base row
+ for (var i = 0; i < this.rollUpRows_; i++) {
+ this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
+ this.displayed_[this.row_ - i] = '';
+ }
+ }
+
+ if (newBaseRow === undefined) {
+ newBaseRow = this.row_;
+ }
+
+ this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
+ }; // Adds the opening HTML tag for the passed character to the caption text,
+ // and keeps track of it for later closing
+
+
+ Cea608Stream.prototype.addFormatting = function (pts, format) {
+ this.formatting_ = this.formatting_.concat(format);
+ var text = format.reduce(function (text, format) {
+ return text + '<' + format + '>';
+ }, '');
+ this[this.mode_](pts, text);
+ }; // Adds HTML closing tags for current formatting to caption text and
+ // clears remembered formatting
+
+
+ Cea608Stream.prototype.clearFormatting = function (pts) {
+ if (!this.formatting_.length) {
+ return;
+ }
+
+ var text = this.formatting_.reverse().reduce(function (text, format) {
+ return text + '</' + format + '>';
+ }, '');
+ this.formatting_ = [];
+ this[this.mode_](pts, text);
+ }; // Mode Implementations
+
+
+ Cea608Stream.prototype.popOn = function (pts, text) {
+ var baseRow = this.nonDisplayed_[this.row_]; // buffer characters
+
+ baseRow += text;
+ this.nonDisplayed_[this.row_] = baseRow;
+ };
+
+ Cea608Stream.prototype.rollUp = function (pts, text) {
+ var baseRow = this.displayed_[this.row_];
+ baseRow += text;
+ this.displayed_[this.row_] = baseRow;
+ };
+
+ Cea608Stream.prototype.shiftRowsUp_ = function () {
+ var i; // clear out inactive rows
+
+ for (i = 0; i < this.topRow_; i++) {
+ this.displayed_[i] = '';
+ }
+
+ for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
+ this.displayed_[i] = '';
+ } // shift displayed rows up
+
+
+ for (i = this.topRow_; i < this.row_; i++) {
+ this.displayed_[i] = this.displayed_[i + 1];
+ } // clear out the bottom row
+
+
+ this.displayed_[this.row_] = '';
+ };
+
+ Cea608Stream.prototype.paintOn = function (pts, text) {
+ var baseRow = this.displayed_[this.row_];
+ baseRow += text;
+ this.displayed_[this.row_] = baseRow;
+ }; // exports
+
+
+ var captionStream = {
+ CaptionStream: CaptionStream$1,
+ Cea608Stream: Cea608Stream,
+ Cea708Stream: Cea708Stream
+ };
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+ var streamTypes = {
+ H264_STREAM_TYPE: 0x1B,
+ ADTS_STREAM_TYPE: 0x0F,
+ METADATA_STREAM_TYPE: 0x15
+ };
+ var MAX_TS = 8589934592;
+ var RO_THRESH = 4294967296;
+ var TYPE_SHARED = 'shared';
+
+ var handleRollover$1 = function handleRollover(value, reference) {
+ var direction = 1;
+
+ if (value > reference) {
+ // If the current timestamp value is greater than our reference timestamp and we detect a
+ // timestamp rollover, this means the roll over is happening in the opposite direction.
+ // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
+ // point will be set to a small number, e.g. 1. The user then seeks backwards over the
+ // rollover point. In loading this segment, the timestamp values will be very large,
+ // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
+ // the time stamp to be `value - 2^33`.
+ direction = -1;
+ } // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
+ // cause an incorrect adjustment.
+
+
+ while (Math.abs(reference - value) > RO_THRESH) {
+ value += direction * MAX_TS;
+ }
+
+ return value;
+ };
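+ // Worked example (illustrative 90kHz clock values): with reference =
+ // 8589930000 (just below the 2^33 wrap point) and an incoming value of 4500
+ // that has already wrapped, |reference - value| exceeds RO_THRESH, so one
+ // iteration adds MAX_TS and yields 8589939092, which sits just after the
+ // reference as intended. A backwards seek across a rollover takes the
+ // direction = -1 branch and subtracts MAX_TS instead.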
+
+ var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
+ var lastDTS, referenceDTS;
+ TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
+ // video and audio. We could use `undefined` here, but having a string
+ // makes debugging a little clearer.
+
+ this.type_ = type || TYPE_SHARED;
+
+ this.push = function (data) {
+ // Any "shared" rollover streams will accept _all_ data. Otherwise,
+ // streams will only accept data that matches their type.
+ if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
+ return;
+ }
+
+ if (referenceDTS === undefined) {
+ referenceDTS = data.dts;
+ }
+
+ data.dts = handleRollover$1(data.dts, referenceDTS);
+ data.pts = handleRollover$1(data.pts, referenceDTS);
+ lastDTS = data.dts;
+ this.trigger('data', data);
+ };
+
+ this.flush = function () {
+ referenceDTS = lastDTS;
+ this.trigger('done');
+ };
+
+ this.endTimeline = function () {
+ this.flush();
+ this.trigger('endedtimeline');
+ };
+
+ this.discontinuity = function () {
+ referenceDTS = void 0;
+ lastDTS = void 0;
+ };
+
+ this.reset = function () {
+ this.discontinuity();
+ this.trigger('reset');
+ };
+ };
+
+ TimestampRolloverStream$1.prototype = new stream();
+ var timestampRolloverStream = {
+ TimestampRolloverStream: TimestampRolloverStream$1,
+ handleRollover: handleRollover$1
+ };
+
+ var percentEncode$1 = function percentEncode(bytes, start, end) {
+ var i,
+ result = '';
+
+ for (i = start; i < end; i++) {
+ result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
+ }
+
+ return result;
+ },
+ // return the string representation of the specified byte range,
+ // interpreted as UTF-8.
+ parseUtf8 = function parseUtf8(bytes, start, end) {
+ return decodeURIComponent(percentEncode$1(bytes, start, end));
+ },
+ // return the string representation of the specified byte range,
+ // interpreted as ISO-8859-1.
+ parseIso88591$1 = function parseIso88591(bytes, start, end) {
+ return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
+ },
+ parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
+ return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
+ },
+ tagParsers = {
+ TXXX: function TXXX(tag) {
+ var i;
+
+ if (tag.data[0] !== 3) {
+ // ignore frames with unrecognized character encodings
+ return;
+ }
+
+ for (i = 1; i < tag.data.length; i++) {
+ if (tag.data[i] === 0) {
+ // parse the text fields
+ tag.description = parseUtf8(tag.data, 1, i); // do not include the null terminator in the tag value
+
+ tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
+ break;
+ }
+ }
+
+ tag.data = tag.value;
+ },
+ WXXX: function WXXX(tag) {
+ var i;
+
+ if (tag.data[0] !== 3) {
+ // ignore frames with unrecognized character encodings
+ return;
+ }
+
+ for (i = 1; i < tag.data.length; i++) {
+ if (tag.data[i] === 0) {
+ // parse the description and URL fields
+ tag.description = parseUtf8(tag.data, 1, i);
+ tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
+ break;
+ }
+ }
+ },
+ PRIV: function PRIV(tag) {
+ var i;
+
+ for (i = 0; i < tag.data.length; i++) {
+ if (tag.data[i] === 0) {
+ // parse the description and URL fields
+ tag.owner = parseIso88591$1(tag.data, 0, i);
+ break;
+ }
+ }
+
+ tag.privateData = tag.data.subarray(i + 1);
+ tag.data = tag.privateData;
+ }
+ },
+ _MetadataStream;
+
+ _MetadataStream = function MetadataStream(options) {
+ var settings = {
+ // the bytes of the program-level descriptor field in MP2T
+ // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
+ // program element descriptors"
+ descriptor: options && options.descriptor
+ },
+ // the total size in bytes of the ID3 tag being parsed
+ tagSize = 0,
+ // tag data that is not complete enough to be parsed
+ buffer = [],
+ // the total number of bytes currently in the buffer
+ bufferSize = 0,
+ i;
+
+ _MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
+ // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
+
+
+ this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
+
+ if (settings.descriptor) {
+ for (i = 0; i < settings.descriptor.length; i++) {
+ this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
+ }
+ }
+
+ this.push = function (chunk) {
+ var tag, frameStart, frameSize, frame, i, frameHeader;
+
+ if (chunk.type !== 'timed-metadata') {
+ return;
+ } // if data_alignment_indicator is set in the PES header,
+ // we must have the start of a new ID3 tag. Assume anything
+ // remaining in the buffer was malformed and throw it out
+
+
+ if (chunk.dataAlignmentIndicator) {
+ bufferSize = 0;
+ buffer.length = 0;
+ } // ignore events that don't look like ID3 data
+
+
+ if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
+ this.trigger('log', {
+ level: 'warn',
+ message: 'Skipping unrecognized metadata packet'
+ });
+ return;
+ } // add this chunk to the data we've collected so far
+
+
+ buffer.push(chunk);
+ bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header
+
+ if (buffer.length === 1) {
+ // the frame size is transmitted as a 28-bit integer in the
+ // last four bytes of the ID3 header.
+ // The most significant bit of each byte is dropped and the
+ // results concatenated to recover the actual value.
+ tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
+ // convenient for our comparisons to include it
+
+ tagSize += 10;
+ } // if the entire frame has not arrived, wait for more data
+
+
+ if (bufferSize < tagSize) {
+ return;
+ } // collect the entire frame so it can be parsed
+
+
+ tag = {
+ data: new Uint8Array(tagSize),
+ frames: [],
+ pts: buffer[0].pts,
+ dts: buffer[0].dts
+ };
+
+ for (i = 0; i < tagSize;) {
+ tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
+ i += buffer[0].data.byteLength;
+ bufferSize -= buffer[0].data.byteLength;
+ buffer.shift();
+ } // find the start of the first frame and the end of the tag
+
+
+ frameStart = 10;
+
+ if (tag.data[5] & 0x40) {
+ // advance the frame start past the extended header
+ frameStart += 4; // header size field
+
+ frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14)); // clip any padding off the end
+
+ tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
+ } // parse one or more ID3 frames
+ // http://id3.org/id3v2.3.0#ID3v2_frame_overview
+
+
+ do {
+ // determine the number of bytes in this frame
+ frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));
+
+ if (frameSize < 1) {
+ this.trigger('log', {
+ level: 'warn',
+ message: 'Malformed ID3 frame encountered. Skipping metadata parsing.'
+ });
+ return;
+ }
+
+ frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
+ frame = {
+ id: frameHeader,
+ data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
+ };
+ frame.key = frame.id;
+
+ if (tagParsers[frame.id]) {
+ tagParsers[frame.id](frame); // handle the special PRIV frame used to indicate the start
+ // time for raw AAC data
+
+ if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
+ var d = frame.data,
+ size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
+ size *= 4;
+ size += d[7] & 0x03;
+ frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
+ // on the value of this frame
+ // we couldn't have known the appropriate pts and dts before
+ // parsing this ID3 tag so set those values now
+
+ if (tag.pts === undefined && tag.dts === undefined) {
+ tag.pts = frame.timeStamp;
+ tag.dts = frame.timeStamp;
+ }
+
+ this.trigger('timestamp', frame);
+ }
+ }
+
+ tag.frames.push(frame);
+ frameStart += 10; // advance past the frame header
+
+ frameStart += frameSize; // advance past the frame body
+ } while (frameStart < tagSize);
+
+ this.trigger('data', tag);
+ };
+ };
+
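+ // Worked example of the syncsafe tag-size math used in `push` above
+ // (illustrative header bytes): size bytes [0x00, 0x00, 0x02, 0x01] decode to
+ // (0x02 << 7) | 0x01 = 257, and the 10-byte ID3 header is added on top, so
+ // the parser waits until 267 bytes have been buffered before parsing frames.
+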
+ _MetadataStream.prototype = new stream();
+ var metadataStream = _MetadataStream;
+ var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types
+
+ var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants
+
+
+ var MP2T_PACKET_LENGTH$1 = 188,
+ // bytes
+ SYNC_BYTE$1 = 0x47;
+ /**
+ * Splits an incoming stream of binary data into MPEG-2 Transport
+ * Stream packets.
+ */
+
+ _TransportPacketStream = function TransportPacketStream() {
+ var buffer = new Uint8Array(MP2T_PACKET_LENGTH$1),
+ bytesInBuffer = 0;
+
+ _TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.
+
+ /**
+ * Split a stream of data into M2TS packets
+ **/
+
+
+ this.push = function (bytes) {
+ var startIndex = 0,
+ endIndex = MP2T_PACKET_LENGTH$1,
+ everything; // If there are bytes remaining from the last segment, prepend them to the
+ // bytes that were pushed in
+
+ if (bytesInBuffer) {
+ everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
+ everything.set(buffer.subarray(0, bytesInBuffer));
+ everything.set(bytes, bytesInBuffer);
+ bytesInBuffer = 0;
+ } else {
+ everything = bytes;
+ } // While we have enough data for a packet
+
+
+ while (endIndex < everything.byteLength) {
+ // Look for a pair of start and end sync bytes in the data.
+ if (everything[startIndex] === SYNC_BYTE$1 && everything[endIndex] === SYNC_BYTE$1) {
+ // We found a packet so emit it and jump one whole packet forward in
+ // the stream
+ this.trigger('data', everything.subarray(startIndex, endIndex));
+ startIndex += MP2T_PACKET_LENGTH$1;
+ endIndex += MP2T_PACKET_LENGTH$1;
+ continue;
+ } // If we get here, we have somehow become de-synchronized and we need to step
+ // forward one byte at a time until we find a pair of sync bytes that denote
+ // a packet
+
+
+ startIndex++;
+ endIndex++;
+ } // If there was some data left over at the end of the segment that couldn't
+ // possibly be a whole packet, keep it because it might be the start of a packet
+ // that continues in the next segment
+
+
+ if (startIndex < everything.byteLength) {
+ buffer.set(everything.subarray(startIndex), 0);
+ bytesInBuffer = everything.byteLength - startIndex;
+ }
+ };
+ /**
+ * Passes identified M2TS packets to the TransportParseStream to be parsed
+ **/
+
+
+ this.flush = function () {
+ // If the buffer contains a whole packet when we are being flushed, emit it
+ // and empty the buffer. Otherwise hold onto the data because it may be
+ // important for decoding the next segment
+ if (bytesInBuffer === MP2T_PACKET_LENGTH$1 && buffer[0] === SYNC_BYTE$1) {
+ this.trigger('data', buffer);
+ bytesInBuffer = 0;
+ }
+
+ this.trigger('done');
+ };
+
+ this.endTimeline = function () {
+ this.flush();
+ this.trigger('endedtimeline');
+ };
+
+ this.reset = function () {
+ bytesInBuffer = 0;
+ this.trigger('reset');
+ };
+ };
+
+ _TransportPacketStream.prototype = new stream();
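+ // Usage sketch (illustrative; `tsBytes` is a hypothetical Uint8Array of raw
+ // MPEG-2 TS data). Each 'data' event carries one 188-byte packet delimited by
+ // 0x47 sync bytes:
+ //   var packetStream = new _TransportPacketStream();
+ //   packetStream.on('data', function (packet) { /* packet.byteLength === 188 */ });
+ //   packetStream.push(tsBytes);
+ //   packetStream.flush();
+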
+ /**
+ * Accepts an MP2T TransportPacketStream and emits data events with parsed
+ * forms of the individual transport stream packets.
+ */
+
+ _TransportParseStream = function TransportParseStream() {
+ var parsePsi, parsePat, parsePmt, self;
+
+ _TransportParseStream.prototype.init.call(this);
+
+ self = this;
+ this.packetsWaitingForPmt = [];
+ this.programMapTable = undefined;
+
+ parsePsi = function parsePsi(payload, psi) {
+ var offset = 0; // PSI packets may be split into multiple sections and those
+ // sections may be split into multiple packets. If a PSI
+ // section starts in this packet, the payload_unit_start_indicator
+ // will be true and the first byte of the payload will indicate
+ // the offset from the current position to the start of the
+ // section.
+
+ if (psi.payloadUnitStartIndicator) {
+ offset += payload[offset] + 1;
+ }
+
+ if (psi.type === 'pat') {
+ parsePat(payload.subarray(offset), psi);
+ } else {
+ parsePmt(payload.subarray(offset), psi);
+ }
+ };
+
+ parsePat = function parsePat(payload, pat) {
+ pat.section_number = payload[7]; // eslint-disable-line camelcase
+
+ pat.last_section_number = payload[8]; // eslint-disable-line camelcase
+ // skip the PSI header and parse the first PMT entry
+
+ self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
+ pat.pmtPid = self.pmtPid;
+ };
+ /**
+ * Parse out the relevant fields of a Program Map Table (PMT).
+ * @param payload {Uint8Array} the PMT-specific portion of an MP2T
+ * packet. The first byte in this array should be the table_id
+ * field.
+ * @param pmt {object} the object that should be decorated with
+ * fields parsed from the PMT.
+ */
+
+
+ parsePmt = function parsePmt(payload, pmt) {
+ var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
+ // take effect. We don't believe this should ever be the case
+ // for HLS but we'll ignore "forward" PMT declarations if we see
+ // them. Future PMT declarations have the current_next_indicator
+ // set to zero.
+
+ if (!(payload[5] & 0x01)) {
+ return;
+ } // overwrite any existing program map table
+
+
+ self.programMapTable = {
+ video: null,
+ audio: null,
+ 'timed-metadata': {}
+ }; // the mapping table ends at the end of the current section
+
+ sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
+ tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
+ // long the program info descriptors are
+
+ programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table
+
+ offset = 12 + programInfoLength;
+
+ while (offset < tableEnd) {
+ var streamType = payload[offset];
+ var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
+ // TODO: should this be done for metadata too? for now maintain behavior of
+ // multiple metadata streams
+
+ if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
+ self.programMapTable.video = pid;
+ } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
+ self.programMapTable.audio = pid;
+ } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
+ // map pid to stream type for metadata streams
+ self.programMapTable['timed-metadata'][pid] = streamType;
+ } // move to the next table entry
+ // skip past the elementary stream descriptors, if present
+
+
+ offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
+ } // record the map on the packet as well
+
+
+ pmt.programMapTable = self.programMapTable;
+ };
+ /**
+ * Deliver a new MP2T packet to the next stream in the pipeline.
+ */
+
+
+ this.push = function (packet) {
+ var result = {},
+ offset = 4;
+ result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]
+
+ result.pid = packet[1] & 0x1f;
+ result.pid <<= 8;
+ result.pid |= packet[2]; // if an adaptation field is present, its length is specified by the
+ // fifth byte of the TS packet header. The adaptation field is
+ // used to add stuffing to PES packets that don't fill a complete
+ // TS packet, and to specify some forms of timing and control data
+ // that we do not currently use.
+
+ if ((packet[3] & 0x30) >>> 4 > 0x01) {
+ offset += packet[offset] + 1;
+ } // parse the rest of the packet based on the type
+
+
+ if (result.pid === 0) {
+ result.type = 'pat';
+ parsePsi(packet.subarray(offset), result);
+ this.trigger('data', result);
+ } else if (result.pid === this.pmtPid) {
+ result.type = 'pmt';
+ parsePsi(packet.subarray(offset), result);
+ this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now
+
+ while (this.packetsWaitingForPmt.length) {
+ this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
+ }
+ } else if (this.programMapTable === undefined) {
+ // When we have not seen a PMT yet, defer further processing of
+ // PES packets until one has been parsed
+ this.packetsWaitingForPmt.push([packet, offset, result]);
+ } else {
+ this.processPes_(packet, offset, result);
+ }
+ };
+
+ this.processPes_ = function (packet, offset, result) {
+ // set the appropriate stream type
+ if (result.pid === this.programMapTable.video) {
+ result.streamType = streamTypes.H264_STREAM_TYPE;
+ } else if (result.pid === this.programMapTable.audio) {
+ result.streamType = streamTypes.ADTS_STREAM_TYPE;
+ } else {
+ // if not video or audio, it is timed-metadata or unknown
+ // if unknown, streamType will be undefined
+ result.streamType = this.programMapTable['timed-metadata'][result.pid];
+ }
+
+ result.type = 'pes';
+ result.data = packet.subarray(offset);
+ this.trigger('data', result);
+ };
+ };
+
+ _TransportParseStream.prototype = new stream();
+ _TransportParseStream.STREAM_TYPES = {
+ h264: 0x1b,
+ adts: 0x0f
+ };
+ /**
+ * Reconstitutes program elementary stream (PES) packets from parsed
+ * transport stream packets. That is, if you pipe an
+ * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
+ * events will be events which capture the bytes for individual PES
+ * packets plus relevant metadata that has been extracted from the
+ * container.
+ */
+
+ _ElementaryStream = function ElementaryStream() {
+ var self = this,
+ segmentHadPmt = false,
+ // PES packet fragments
+ video = {
+ data: [],
+ size: 0
+ },
+ audio = {
+ data: [],
+ size: 0
+ },
+ timedMetadata = {
+ data: [],
+ size: 0
+ },
+ programMapTable,
+ parsePes = function parsePes(payload, pes) {
+ var ptsDtsFlags;
+ var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array
+
+ pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has TS packets
+ // containing frame data that continues from the previous fragment. This
+ // check verifies that the PES data really is the start of a new PES payload
+
+ if (startPrefix !== 1) {
+ return;
+ } // get the packet length, this will be 0 for video
+
+
+ pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packet starts a new keyframe
+
+ pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
+ // and a DTS value. Determine what combination of values is
+ // available to work with.
+
+ ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
+ // performs all bitwise operations on 32-bit integers but javascript
+ // supports a much greater range (52-bits) of integer using standard
+ // mathematical operations.
+ // We construct a 31-bit value using bitwise operators over the 31
+ // most significant bits and then multiply by 4 (equal to a left-shift
+ // of 2) before we add the final 2 least significant bits of the
+ // timestamp (equal to an OR.)
+
+ if (ptsDtsFlags & 0xC0) {
+ // the PTS and DTS are not written out directly. For information
+ // on how they are encoded, see
+ // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
+ pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
+ pes.pts *= 4; // Left shift by 2
+
+ pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
+
+ pes.dts = pes.pts;
+
+ if (ptsDtsFlags & 0x40) {
+ pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
+ pes.dts *= 4; // Left shift by 2
+
+ pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
+ }
+ } // the data section starts immediately after the PES header.
+ // pes_header_data_length specifies the number of header bytes
+ // that follow the last byte of the field.
+
+
+ pes.data = payload.subarray(9 + payload[8]);
+ },
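+
+ /* Illustrative restatement (the helper name is ours, not part of mux.js) of the
+ * 33-bit timestamp handling in parsePes above: the top 31 bits are assembled with
+ * 32-bit bitwise ops, then ordinary arithmetic applies the final left-shift-by-2
+ * and the two low bits:
+ *
+ *   var readPts33 = function (payload) {
+ *     var pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 |
+ *       (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 |
+ *       (payload[13] & 0xFE) >>> 3;
+ *     return pts * 4 + ((payload[13] & 0x06) >>> 1);
+ *   };
+ */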
+
+ /**
+ * Pass completely parsed PES packets to the next stream in the pipeline
+ **/
+ flushStream = function flushStream(stream, type, forceFlush) {
+ var packetData = new Uint8Array(stream.size),
+ event = {
+ type: type
+ },
+ i = 0,
+ offset = 0,
+ packetFlushable = false,
+ fragment; // do nothing if there is not enough buffered data for a complete
+ // PES header
+
+ if (!stream.data.length || stream.size < 9) {
+ return;
+ }
+
+ event.trackId = stream.data[0].pid; // reassemble the packet
+
+ for (i = 0; i < stream.data.length; i++) {
+ fragment = stream.data[i];
+ packetData.set(fragment.data, offset);
+ offset += fragment.data.byteLength;
+ } // parse assembled packet's PES header
+
+
+ parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
+ // check that there is enough stream data to fill the packet
+
+ packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right
+
+ if (forceFlush || packetFlushable) {
+ stream.size = 0;
+ stream.data.length = 0;
+ } // only emit packets that are complete. this is to avoid assembling
+ // incomplete PES packets due to poor segmentation
+
+
+ if (packetFlushable) {
+ self.trigger('data', event);
+ }
+ };
+
+ _ElementaryStream.prototype.init.call(this);
+ /**
+ * Identifies M2TS packet types and parses PES packets using metadata
+ * parsed from the PMT
+ **/
+
+
+ this.push = function (data) {
+ ({
+ pat: function pat() {// we have to wait for the PMT to arrive as well before we
+ // have any meaningful metadata
+ },
+ pes: function pes() {
+ var stream, streamType;
+
+ switch (data.streamType) {
+ case streamTypes.H264_STREAM_TYPE:
+ stream = video;
+ streamType = 'video';
+ break;
+
+ case streamTypes.ADTS_STREAM_TYPE:
+ stream = audio;
+ streamType = 'audio';
+ break;
+
+ case streamTypes.METADATA_STREAM_TYPE:
+ stream = timedMetadata;
+ streamType = 'timed-metadata';
+ break;
+
+ default:
+ // ignore unknown stream types
+ return;
+ } // if a new packet is starting, we can flush the completed
+ // packet
+
+
+ if (data.payloadUnitStartIndicator) {
+ flushStream(stream, streamType, true);
+ } // buffer this fragment until we are sure we've received the
+ // complete payload
+
+
+ stream.data.push(data);
+ stream.size += data.data.byteLength;
+ },
+ pmt: function pmt() {
+ var event = {
+ type: 'metadata',
+ tracks: []
+ };
+ programMapTable = data.programMapTable; // translate audio and video streams to tracks
+
+ if (programMapTable.video !== null) {
+ event.tracks.push({
+ timelineStartInfo: {
+ baseMediaDecodeTime: 0
+ },
+ id: +programMapTable.video,
+ codec: 'avc',
+ type: 'video'
+ });
+ }
+
+ if (programMapTable.audio !== null) {
+ event.tracks.push({
+ timelineStartInfo: {
+ baseMediaDecodeTime: 0
+ },
+ id: +programMapTable.audio,
+ codec: 'adts',
+ type: 'audio'
+ });
+ }
+
+ segmentHadPmt = true;
+ self.trigger('data', event);
+ }
+ })[data.type]();
+ };
+
+ this.reset = function () {
+ video.size = 0;
+ video.data.length = 0;
+ audio.size = 0;
+ audio.data.length = 0;
+ this.trigger('reset');
+ };
+ /**
+ * Flush any remaining input. Video PES packets may be of variable
+ * length. Normally, the start of a new video packet can trigger the
+ * finalization of the previous packet. That is not possible if no
+ * more video is forthcoming, however. In that case, some other
+ * mechanism (like the end of the file) has to be employed. When it is
+ * clear that no additional data is forthcoming, calling this method
+ * will flush the buffered packets.
+ */
+
+
+ this.flushStreams_ = function () {
+ // !!THIS ORDER IS IMPORTANT!!
+ // video first then audio
+ flushStream(video, 'video');
+ flushStream(audio, 'audio');
+ flushStream(timedMetadata, 'timed-metadata');
+ };
+
+ this.flush = function () {
+ // if on flush we haven't had a pmt emitted
+ // and we have a pmt to emit. emit the pmt
+ // so that we trigger a trackinfo downstream.
+ if (!segmentHadPmt && programMapTable) {
+ var pmt = {
+ type: 'metadata',
+ tracks: []
+ }; // translate audio and video streams to tracks
+
+ if (programMapTable.video !== null) {
+ pmt.tracks.push({
+ timelineStartInfo: {
+ baseMediaDecodeTime: 0
+ },
+ id: +programMapTable.video,
+ codec: 'avc',
+ type: 'video'
+ });
+ }
+
+ if (programMapTable.audio !== null) {
+ pmt.tracks.push({
+ timelineStartInfo: {
+ baseMediaDecodeTime: 0
+ },
+ id: +programMapTable.audio,
+ codec: 'adts',
+ type: 'audio'
+ });
+ }
+
+ self.trigger('data', pmt);
+ }
+
+ segmentHadPmt = false;
+ this.flushStreams_();
+ this.trigger('done');
+ };
+ };
+
+ _ElementaryStream.prototype = new stream();
+ var m2ts = {
+ PAT_PID: 0x0000,
+ MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
+ TransportPacketStream: _TransportPacketStream,
+ TransportParseStream: _TransportParseStream,
+ ElementaryStream: _ElementaryStream,
+ TimestampRolloverStream: TimestampRolloverStream,
+ CaptionStream: captionStream.CaptionStream,
+ Cea608Stream: captionStream.Cea608Stream,
+ Cea708Stream: captionStream.Cea708Stream,
+ MetadataStream: metadataStream
+ };
+
+ for (var type in streamTypes) {
+ if (streamTypes.hasOwnProperty(type)) {
+ m2ts[type] = streamTypes[type];
+ }
+ }
+
+ var m2ts_1 = m2ts;
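+
+ /* A minimal sketch (variable names are ours) of how these exports are typically
+ * chained, assuming the Stream#pipe helper defined earlier in this bundle:
+ *
+ *   var packetizer = new m2ts.TransportPacketStream();
+ *   var parser = new m2ts.TransportParseStream();
+ *   var elementary = new m2ts.ElementaryStream();
+ *
+ *   packetizer.pipe(parser).pipe(elementary);
+ *   elementary.on('data', function (pesPacket) { ... });
+ *   packetizer.push(tsBytes); // a Uint8Array of 188-byte MPEG-TS packets
+ *   packetizer.flush();
+ */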
+ var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;
+
+ var _AdtsStream;
+
+ var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
+ /*
+ * Accepts an ElementaryStream and emits data events with parsed
+ * AAC Audio Frames of the individual packets. Input audio in ADTS
+ * format is unpacked and re-emitted as AAC frames.
+ *
+ * @see http://wiki.multimedia.cx/index.php?title=ADTS
+ * @see http://wiki.multimedia.cx/?title=Understanding_AAC
+ */
+
+ _AdtsStream = function AdtsStream(handlePartialSegments) {
+ var buffer,
+ frameNum = 0;
+
+ _AdtsStream.prototype.init.call(this);
+
+ this.skipWarn_ = function (start, end) {
+ this.trigger('log', {
+ level: 'warn',
+ message: "adts skiping bytes " + start + " to " + end + " in frame " + frameNum + " outside syncword"
+ });
+ };
+
+ this.push = function (packet) {
+ var i = 0,
+ frameLength,
+ protectionSkipBytes,
+ oldBuffer,
+ sampleCount,
+ adtsFrameDuration;
+
+ if (!handlePartialSegments) {
+ frameNum = 0;
+ }
+
+ if (packet.type !== 'audio') {
+ // ignore non-audio data
+ return;
+ } // Prepend any data in the buffer to the input data so that we can parse
+ // aac frames that cross a PES packet boundary
+
+
+ if (buffer && buffer.length) {
+ oldBuffer = buffer;
+ buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
+ buffer.set(oldBuffer);
+ buffer.set(packet.data, oldBuffer.byteLength);
+ } else {
+ buffer = packet.data;
+ } // unpack any ADTS frames which have been fully received
+ // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
+
+
+ var skip; // We use i + 7 here because we want to be able to parse the entire header.
+ // If we don't have enough bytes to do that, then we definitely won't have a full frame.
+
+ while (i + 7 < buffer.length) {
+ // Look for the start of an ADTS header..
+ if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
+ if (typeof skip !== 'number') {
+ skip = i;
+ } // If a valid header was not found, jump one forward and attempt to
+ // find a valid ADTS header starting at the next byte
+
+
+ i++;
+ continue;
+ }
+
+ if (typeof skip === 'number') {
+ this.skipWarn_(skip, i);
+ skip = null;
+ } // The protection skip bit tells us if we have 2 bytes of CRC data at the
+ // end of the ADTS header
+
+
+ protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
+ // end of the sync sequence
+ // NOTE: frame length includes the size of the header
+
+ frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
+ sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
+ adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,
+ // then we have to wait for more data
+
+ if (buffer.byteLength - i < frameLength) {
+ break;
+ } // Otherwise, deliver the complete AAC frame
+
+
+ this.trigger('data', {
+ pts: packet.pts + frameNum * adtsFrameDuration,
+ dts: packet.dts + frameNum * adtsFrameDuration,
+ sampleCount: sampleCount,
+ audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
+ channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
+ samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
+ samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
+ // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
+ samplesize: 16,
+ // data is the frame without its header
+ data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
+ });
+ frameNum++;
+ i += frameLength;
+ }
+
+ if (typeof skip === 'number') {
+ this.skipWarn_(skip, i);
+ skip = null;
+ } // remove processed bytes from the buffer.
+
+
+ buffer = buffer.subarray(i);
+ };
+
+ this.flush = function () {
+ frameNum = 0;
+ this.trigger('done');
+ };
+
+ this.reset = function () {
+ buffer = void 0;
+ this.trigger('reset');
+ };
+
+ this.endTimeline = function () {
+ buffer = void 0;
+ this.trigger('endedtimeline');
+ };
+ };
+
+ _AdtsStream.prototype = new stream();
+ var adts = _AdtsStream;
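+
+ /* For reference, the ADTS header fields read by AdtsStream.push above (offsets
+ * are bytes from the 0xFFFx syncword; see the multimedia.cx links above):
+ *
+ *   byte 1 & 0x01          protection_absent; when 0, two CRC bytes follow the header
+ *   (byte 2 & 0x3C) >> 2   sampling_frequency_index into ADTS_SAMPLING_FREQUENCIES$1
+ *   bytes 3-5, 13 bits     frame_length, which includes the header itself
+ *   (byte 6 & 0x03) + 1    number of 1024-sample raw data blocks in the frame
+ */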
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+ var ExpGolomb;
+ /**
+ * Parser for exponential Golomb codes, a variable-bitwidth number encoding
+ * scheme used by h264.
+ */
+
+ ExpGolomb = function ExpGolomb(workingData) {
+ var // the number of bytes left to examine in workingData
+ workingBytesAvailable = workingData.byteLength,
+ // the current word being examined
+ workingWord = 0,
+ // :uint
+ // the number of bits left to examine in the current word
+ workingBitsAvailable = 0; // :uint;
+ // ():uint
+
+ this.length = function () {
+ return 8 * workingBytesAvailable;
+ }; // ():uint
+
+
+ this.bitsAvailable = function () {
+ return 8 * workingBytesAvailable + workingBitsAvailable;
+ }; // ():void
+
+
+ this.loadWord = function () {
+ var position = workingData.byteLength - workingBytesAvailable,
+ workingBytes = new Uint8Array(4),
+ availableBytes = Math.min(4, workingBytesAvailable);
+
+ if (availableBytes === 0) {
+ throw new Error('no bytes available');
+ }
+
+ workingBytes.set(workingData.subarray(position, position + availableBytes));
+ workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
+
+ workingBitsAvailable = availableBytes * 8;
+ workingBytesAvailable -= availableBytes;
+ }; // (count:int):void
+
+
+ this.skipBits = function (count) {
+ var skipBytes; // :int
+
+ if (workingBitsAvailable > count) {
+ workingWord <<= count;
+ workingBitsAvailable -= count;
+ } else {
+ count -= workingBitsAvailable;
+ skipBytes = Math.floor(count / 8);
+ count -= skipBytes * 8;
+ workingBytesAvailable -= skipBytes;
+ this.loadWord();
+ workingWord <<= count;
+ workingBitsAvailable -= count;
+ }
+ }; // (size:int):uint
+
+
+ this.readBits = function (size) {
+ var bits = Math.min(workingBitsAvailable, size),
+ // :uint
+ valu = workingWord >>> 32 - bits; // :uint
+ // if size > 31, handle error
+
+ workingBitsAvailable -= bits;
+
+ if (workingBitsAvailable > 0) {
+ workingWord <<= bits;
+ } else if (workingBytesAvailable > 0) {
+ this.loadWord();
+ }
+
+ bits = size - bits;
+
+ if (bits > 0) {
+ return valu << bits | this.readBits(bits);
+ }
+
+ return valu;
+ }; // ():uint
+
+
+ this.skipLeadingZeros = function () {
+ var leadingZeroCount; // :uint
+
+ for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
+ if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
+ // the first bit of working word is 1
+ workingWord <<= leadingZeroCount;
+ workingBitsAvailable -= leadingZeroCount;
+ return leadingZeroCount;
+ }
+ } // we exhausted workingWord and still have not found a 1
+
+
+ this.loadWord();
+ return leadingZeroCount + this.skipLeadingZeros();
+ }; // ():void
+
+
+ this.skipUnsignedExpGolomb = function () {
+ this.skipBits(1 + this.skipLeadingZeros());
+ }; // ():void
+
+
+ this.skipExpGolomb = function () {
+ this.skipBits(1 + this.skipLeadingZeros());
+ }; // ():uint
+
+
+ this.readUnsignedExpGolomb = function () {
+ var clz = this.skipLeadingZeros(); // :uint
+
+ return this.readBits(clz + 1) - 1;
+ }; // ():int
+
+
+ this.readExpGolomb = function () {
+ var valu = this.readUnsignedExpGolomb(); // :int
+
+ if (0x01 & valu) {
+ // the number is odd if the low order bit is set
+ return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
+ }
+
+ return -1 * (valu >>> 1); // divide by two then make it negative
+ }; // Some convenience functions
+ // :Boolean
+
+
+ this.readBoolean = function () {
+ return this.readBits(1) === 1;
+ }; // ():int
+
+
+ this.readUnsignedByte = function () {
+ return this.readBits(8);
+ };
+
+ this.loadWord();
+ };
+
+ var expGolomb = ExpGolomb;
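+
+ /* A small worked example of the decoder above (values are illustrative): the
+ * byte 0x30 is the bit string 0 0 1 1 0 0 0 0. readUnsignedExpGolomb() counts
+ * 2 leading zeros, reads the next 3 bits (0b110 = 6) and returns 6 - 1 = 5.
+ * readExpGolomb() on the same bits maps the odd value 5 to (5 + 1) >>> 1 = 3;
+ * an even value such as 4 would map to -(4 >>> 1) = -2.
+ *
+ *   new ExpGolomb(new Uint8Array([0x30])).readUnsignedExpGolomb(); // 5
+ */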
+
+ var _H264Stream, _NalByteStream;
+
+ var PROFILES_WITH_OPTIONAL_SPS_DATA;
+ /**
+ * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
+ */
+
+ _NalByteStream = function NalByteStream() {
+ var syncPoint = 0,
+ i,
+ buffer;
+
+ _NalByteStream.prototype.init.call(this);
+ /*
+ * Scans a byte stream and triggers a data event with the NAL units found.
+ * @param {Object} data Event received from H264Stream
+ * @param {Uint8Array} data.data The h264 byte stream to be scanned
+ *
+ * @see H264Stream.push
+ */
+
+
+ this.push = function (data) {
+ var swapBuffer;
+
+ if (!buffer) {
+ buffer = data.data;
+ } else {
+ swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
+ swapBuffer.set(buffer);
+ swapBuffer.set(data.data, buffer.byteLength);
+ buffer = swapBuffer;
+ }
+
+ var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
+ // scan for NAL unit boundaries
+ // a match looks like this:
+ // 0 0 1 .. NAL .. 0 0 1
+ // ^ sync point ^ i
+ // or this:
+ // 0 0 1 .. NAL .. 0 0 0
+ // ^ sync point ^ i
+ // advance the sync point to a NAL start, if necessary
+
+ for (; syncPoint < len - 3; syncPoint++) {
+ if (buffer[syncPoint + 2] === 1) {
+ // the sync point is properly aligned
+ i = syncPoint + 5;
+ break;
+ }
+ }
+
+ while (i < len) {
+ // look at the current byte to determine if we've hit the end of
+ // a NAL unit boundary
+ switch (buffer[i]) {
+ case 0:
+ // skip past non-sync sequences
+ if (buffer[i - 1] !== 0) {
+ i += 2;
+ break;
+ } else if (buffer[i - 2] !== 0) {
+ i++;
+ break;
+ } // deliver the NAL unit if it isn't empty
+
+
+ if (syncPoint + 3 !== i - 2) {
+ this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
+ } // drop trailing zeroes
+
+
+ do {
+ i++;
+ } while (buffer[i] !== 1 && i < len);
+
+ syncPoint = i - 2;
+ i += 3;
+ break;
+
+ case 1:
+ // skip past non-sync sequences
+ if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
+ i += 3;
+ break;
+ } // deliver the NAL unit
+
+
+ this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
+ syncPoint = i - 2;
+ i += 3;
+ break;
+
+ default:
+ // the current byte isn't a one or zero, so it cannot be part
+ // of a sync sequence
+ i += 3;
+ break;
+ }
+ } // filter out the NAL units that were delivered
+
+
+ buffer = buffer.subarray(syncPoint);
+ i -= syncPoint;
+ syncPoint = 0;
+ };
+
+ this.reset = function () {
+ buffer = null;
+ syncPoint = 0;
+ this.trigger('reset');
+ };
+
+ this.flush = function () {
+ // deliver the last buffered NAL unit
+ if (buffer && buffer.byteLength > 3) {
+ this.trigger('data', buffer.subarray(syncPoint + 3));
+ } // reset the stream state
+
+
+ buffer = null;
+ syncPoint = 0;
+ this.trigger('done');
+ };
+
+ this.endTimeline = function () {
+ this.flush();
+ this.trigger('endedtimeline');
+ };
+ };
+
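+ /* For orientation, the byte layout the scan above is matching (the bytes below
+ * are an illustrative Annex B stream, not taken from a real segment):
+ *
+ *   00 00 00 01 67 .. SPS ..  00 00 01 68 .. PPS ..  00 00 01 65 .. IDR ..
+ *
+ * Each 'data' event carries one NAL unit (e.g. the bytes starting at 0x67) with
+ * its start code stripped; trailing zero bytes before the next start code are
+ * dropped as well.
+ */
+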
+ _NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
+ // see Recommendation ITU-T H.264 (4/2013),
+ // 7.3.2.1.1 Sequence parameter set data syntax
+
+ PROFILES_WITH_OPTIONAL_SPS_DATA = {
+ 100: true,
+ 110: true,
+ 122: true,
+ 244: true,
+ 44: true,
+ 83: true,
+ 86: true,
+ 118: true,
+ 128: true,
+ // TODO: the three profiles below don't
+ // appear to have sps data in the specification anymore?
+ 138: true,
+ 139: true,
+ 134: true
+ };
+ /**
+ * Accepts input from an ElementaryStream and produces H.264 NAL unit data
+ * events.
+ */
+
+ _H264Stream = function H264Stream() {
+ var nalByteStream = new _NalByteStream(),
+ self,
+ trackId,
+ currentPts,
+ currentDts,
+ discardEmulationPreventionBytes,
+ readSequenceParameterSet,
+ skipScalingList;
+
+ _H264Stream.prototype.init.call(this);
+
+ self = this;
+ /*
+ * Pushes a packet from a stream onto the NalByteStream
+ *
+ * @param {Object} packet - A packet received from a stream
+ * @param {Uint8Array} packet.data - The raw bytes of the packet
+ * @param {Number} packet.dts - Decode timestamp of the packet
+ * @param {Number} packet.pts - Presentation timestamp of the packet
+ * @param {Number} packet.trackId - The id of the h264 track this packet came from
+ * @param {('video'|'audio')} packet.type - The type of packet
+ *
+ */
+
+ this.push = function (packet) {
+ if (packet.type !== 'video') {
+ return;
+ }
+
+ trackId = packet.trackId;
+ currentPts = packet.pts;
+ currentDts = packet.dts;
+ nalByteStream.push(packet);
+ };
+ /*
+ * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
+ * for the NALUs to the next stream component.
+ * Also, preprocess caption and sequence parameter NALUs.
+ *
+ * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
+ * @see NalByteStream.push
+ */
+
+
+ nalByteStream.on('data', function (data) {
+ var event = {
+ trackId: trackId,
+ pts: currentPts,
+ dts: currentDts,
+ data: data,
+ nalUnitTypeCode: data[0] & 0x1f
+ };
+
+ switch (event.nalUnitTypeCode) {
+ case 0x05:
+ event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
+ break;
+
+ case 0x06:
+ event.nalUnitType = 'sei_rbsp';
+ event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
+ break;
+
+ case 0x07:
+ event.nalUnitType = 'seq_parameter_set_rbsp';
+ event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
+ event.config = readSequenceParameterSet(event.escapedRBSP);
+ break;
+
+ case 0x08:
+ event.nalUnitType = 'pic_parameter_set_rbsp';
+ break;
+
+ case 0x09:
+ event.nalUnitType = 'access_unit_delimiter_rbsp';
+ break;
+ } // This triggers data on the H264Stream
+
+
+ self.trigger('data', event);
+ });
+ nalByteStream.on('done', function () {
+ self.trigger('done');
+ });
+ nalByteStream.on('partialdone', function () {
+ self.trigger('partialdone');
+ });
+ nalByteStream.on('reset', function () {
+ self.trigger('reset');
+ });
+ nalByteStream.on('endedtimeline', function () {
+ self.trigger('endedtimeline');
+ });
+
+ this.flush = function () {
+ nalByteStream.flush();
+ };
+
+ this.partialFlush = function () {
+ nalByteStream.partialFlush();
+ };
+
+ this.reset = function () {
+ nalByteStream.reset();
+ };
+
+ this.endTimeline = function () {
+ nalByteStream.endTimeline();
+ };
+ /**
+ * Advance the ExpGolomb decoder past a scaling list. The scaling
+ * list is optionally transmitted as part of a sequence parameter
+ * set and is not relevant to transmuxing.
+ * @param count {number} the number of entries in this scaling list
+ * @param expGolombDecoder {object} an ExpGolomb pointed to the
+ * start of a scaling list
+ * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
+ */
+
+
+ skipScalingList = function skipScalingList(count, expGolombDecoder) {
+ var lastScale = 8,
+ nextScale = 8,
+ j,
+ deltaScale;
+
+ for (j = 0; j < count; j++) {
+ if (nextScale !== 0) {
+ deltaScale = expGolombDecoder.readExpGolomb();
+ nextScale = (lastScale + deltaScale + 256) % 256;
+ }
+
+ lastScale = nextScale === 0 ? lastScale : nextScale;
+ }
+ };
+ /**
+ * Expunge any "Emulation Prevention" bytes from a "Raw Byte
+ * Sequence Payload"
+ * @param data {Uint8Array} the bytes of a RBSP from a NAL
+ * unit
+ * @return {Uint8Array} the RBSP without any Emulation
+ * Prevention Bytes
+ */
+
+
+ discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
+ var length = data.byteLength,
+ emulationPreventionBytesPositions = [],
+ i = 1,
+ newLength,
+ newData; // Find all `Emulation Prevention Bytes`
+
+ while (i < length - 2) {
+ if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
+ emulationPreventionBytesPositions.push(i + 2);
+ i += 2;
+ } else {
+ i++;
+ }
+ } // If no Emulation Prevention Bytes were found just return the original
+ // array
+
+
+ if (emulationPreventionBytesPositions.length === 0) {
+ return data;
+ } // Create a new array to hold the NAL unit data
+
+
+ newLength = length - emulationPreventionBytesPositions.length;
+ newData = new Uint8Array(newLength);
+ var sourceIndex = 0;
+
+ for (i = 0; i < newLength; sourceIndex++, i++) {
+ if (sourceIndex === emulationPreventionBytesPositions[0]) {
+ // Skip this byte
+ sourceIndex++; // Remove this position index
+
+ emulationPreventionBytesPositions.shift();
+ }
+
+ newData[i] = data[sourceIndex];
+ }
+
+ return newData;
+ };
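+
+ /* Example of what discardEmulationPreventionBytes removes (bytes are
+ * illustrative): the escaped payload
+ *
+ *   40 00 00 03 01 00 00 03 02
+ *
+ * becomes
+ *
+ *   40 00 00 01 00 00 02
+ *
+ * Each 0x03 that follows two zero bytes was inserted by the encoder so the
+ * payload could never contain a start-code prefix, and it is dropped here.
+ */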
+ /**
+ * Read a sequence parameter set and return some interesting video
+ * properties. A sequence parameter set is the H264 metadata that
+ * describes the properties of upcoming video frames.
+ * @param data {Uint8Array} the bytes of a sequence parameter set
+ * @return {object} an object with configuration parsed from the
+ * sequence parameter set, including the dimensions of the
+ * associated video frames.
+ */
+
+
+ readSequenceParameterSet = function readSequenceParameterSet(data) {
+ var frameCropLeftOffset = 0,
+ frameCropRightOffset = 0,
+ frameCropTopOffset = 0,
+ frameCropBottomOffset = 0,
+ expGolombDecoder,
+ profileIdc,
+ levelIdc,
+ profileCompatibility,
+ chromaFormatIdc,
+ picOrderCntType,
+ numRefFramesInPicOrderCntCycle,
+ picWidthInMbsMinus1,
+ picHeightInMapUnitsMinus1,
+ frameMbsOnlyFlag,
+ scalingListCount,
+ sarRatio = [1, 1],
+ aspectRatioIdc,
+ i;
+ expGolombDecoder = new expGolomb(data);
+ profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
+
+ profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
+
+ levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
+
+ expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
+ // some profiles have more optional data we don't need
+
+ if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
+ chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
+
+ if (chromaFormatIdc === 3) {
+ expGolombDecoder.skipBits(1); // separate_colour_plane_flag
+ }
+
+ expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
+
+ expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
+
+ expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
+
+ if (expGolombDecoder.readBoolean()) {
+ // seq_scaling_matrix_present_flag
+ scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
+
+ for (i = 0; i < scalingListCount; i++) {
+ if (expGolombDecoder.readBoolean()) {
+ // seq_scaling_list_present_flag[ i ]
+ if (i < 6) {
+ skipScalingList(16, expGolombDecoder);
+ } else {
+ skipScalingList(64, expGolombDecoder);
+ }
+ }
+ }
+ }
+ }
+
+ expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
+
+ picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
+
+ if (picOrderCntType === 0) {
+ expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
+ } else if (picOrderCntType === 1) {
+ expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
+
+ expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
+
+ expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
+
+ numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
+
+ for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
+ expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
+ }
+ }
+
+ expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
+
+ expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
+
+ picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
+ picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
+ frameMbsOnlyFlag = expGolombDecoder.readBits(1);
+
+ if (frameMbsOnlyFlag === 0) {
+ expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
+ }
+
+ expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
+
+ if (expGolombDecoder.readBoolean()) {
+ // frame_cropping_flag
+ frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
+ frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
+ frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
+ frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
+ }
+
+ if (expGolombDecoder.readBoolean()) {
+ // vui_parameters_present_flag
+ if (expGolombDecoder.readBoolean()) {
+ // aspect_ratio_info_present_flag
+ aspectRatioIdc = expGolombDecoder.readUnsignedByte();
+
+ switch (aspectRatioIdc) {
+ case 1:
+ sarRatio = [1, 1];
+ break;
+
+ case 2:
+ sarRatio = [12, 11];
+ break;
+
+ case 3:
+ sarRatio = [10, 11];
+ break;
+
+ case 4:
+ sarRatio = [16, 11];
+ break;
+
+ case 5:
+ sarRatio = [40, 33];
+ break;
+
+ case 6:
+ sarRatio = [24, 11];
+ break;
+
+ case 7:
+ sarRatio = [20, 11];
+ break;
+
+ case 8:
+ sarRatio = [32, 11];
+ break;
+
+ case 9:
+ sarRatio = [80, 33];
+ break;
+
+ case 10:
+ sarRatio = [18, 11];
+ break;
+
+ case 11:
+ sarRatio = [15, 11];
+ break;
+
+ case 12:
+ sarRatio = [64, 33];
+ break;
+
+ case 13:
+ sarRatio = [160, 99];
+ break;
+
+ case 14:
+ sarRatio = [4, 3];
+ break;
+
+ case 15:
+ sarRatio = [3, 2];
+ break;
+
+ case 16:
+ sarRatio = [2, 1];
+ break;
+
+ case 255:
+ {
+ sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
+ break;
+ }
+ }
+
+ if (sarRatio) {
+ sarRatio[0] / sarRatio[1];
+ }
+ }
+ }
+
+ return {
+ profileIdc: profileIdc,
+ levelIdc: levelIdc,
+ profileCompatibility: profileCompatibility,
+ width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
+ height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
+ // sar is sample aspect ratio
+ sarRatio: sarRatio
+ };
+ };
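+
+ /* Worked example of the width/height math above (values are illustrative but
+ * typical for 1080p): pic_width_in_mbs_minus1 = 119 gives (119 + 1) * 16 = 1920;
+ * with frame_mbs_only_flag = 1, pic_height_in_map_units_minus1 = 67 gives
+ * (2 - 1) * 68 * 16 = 1088, and a frame_crop_bottom_offset of 4 trims
+ * 4 * 2 = 8 lines, yielding 1080.
+ */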
+ };
+
+ _H264Stream.prototype = new stream();
+ var h264 = {
+ H264Stream: _H264Stream,
+ NalByteStream: _NalByteStream
+ };
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ *
+ * Utilities to detect basic properties and metadata about AAC data.
+ */
+
+ var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
+
+ var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
+ var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
+ flags = header[byteIndex + 5],
+ footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
+
+ returnSize = returnSize >= 0 ? returnSize : 0;
+
+ if (footerPresent) {
+ return returnSize + 20;
+ }
+
+ return returnSize + 10;
+ };
+
+ var getId3Offset = function getId3Offset(data, offset) {
+ if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
+ return offset;
+ }
+
+ offset += parseId3TagSize(data, offset);
+ return getId3Offset(data, offset);
+ }; // TODO: use vhs-utils
+
+
+ var isLikelyAacData$1 = function isLikelyAacData(data) {
+ var offset = getId3Offset(data, 0);
+ return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the 2 layer bits are 0, aka this
+ // is not mp3 data but aac data.
+ (data[offset + 1] & 0x16) === 0x10;
+ };
+
+ var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
+ return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
+ }; // return a percent-encoded representation of the specified byte range
+ // @see http://en.wikipedia.org/wiki/Percent-encoding
+
+
+ var percentEncode = function percentEncode(bytes, start, end) {
+ var i,
+ result = '';
+
+ for (i = start; i < end; i++) {
+ result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
+ }
+
+ return result;
+ }; // return the string representation of the specified byte range,
+ // interpreted as ISO-8859-1.
+
+
+ var parseIso88591 = function parseIso88591(bytes, start, end) {
+ return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
+ };
+
+ var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
+ var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
+ middle = header[byteIndex + 4] << 3,
+ highTwo = (header[byteIndex + 3] & 0x3) << 11; // the low 2 bits of byte 3 are the top bits of the 13-bit frame length
+ return highTwo | middle | lowThree;
+ };
+
+ var parseType$2 = function parseType(header, byteIndex) {
+ if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
+ return 'timed-metadata';
+ } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
+ return 'audio';
+ }
+
+ return null;
+ };
+
+ var parseSampleRate = function parseSampleRate(packet) {
+ var i = 0;
+
+ while (i + 5 < packet.length) {
+ if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
+ // If a valid header was not found, jump one forward and attempt to
+ // find a valid ADTS header starting at the next byte
+ i++;
+ continue;
+ }
+
+ return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
+ }
+
+ return null;
+ };
+
+ var parseAacTimestamp = function parseAacTimestamp(packet) {
+ var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
+
+ frameStart = 10;
+
+ if (packet[5] & 0x40) {
+ // advance the frame start past the extended header
+ frameStart += 4; // header size field
+
+ frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
+ } // parse one or more ID3 frames
+ // http://id3.org/id3v2.3.0#ID3v2_frame_overview
+
+
+ do {
+ // determine the number of bytes in this frame
+ frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
+
+ if (frameSize < 1) {
+ return null;
+ }
+
+ frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
+
+ if (frameHeader === 'PRIV') {
+ frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
+
+ for (var i = 0; i < frame.byteLength; i++) {
+ if (frame[i] === 0) {
+ var owner = parseIso88591(frame, 0, i);
+
+ if (owner === 'com.apple.streaming.transportStreamTimestamp') {
+ var d = frame.subarray(i + 1);
+ var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
+ size *= 4;
+ size += d[7] & 0x03;
+ return size;
+ }
+
+ break;
+ }
+ }
+ }
+
+ frameStart += 10; // advance past the frame header
+
+ frameStart += frameSize; // advance past the frame body
+ } while (frameStart < packet.byteLength);
+
+ return null;
+ };
+
+ var utils = {
+ isLikelyAacData: isLikelyAacData$1,
+ parseId3TagSize: parseId3TagSize,
+ parseAdtsSize: parseAdtsSize,
+ parseType: parseType$2,
+ parseSampleRate: parseSampleRate,
+ parseAacTimestamp: parseAacTimestamp
+ };
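+
+ /* Two notes on the helpers above (numbers are illustrative): ID3 tag sizes are
+ * "syncsafe" integers, 7 bits per byte with the high bit clear, so
+ * parseSyncSafeInteger([0x00, 0x00, 0x02, 0x01]) is (2 << 7) | 1 = 257 and
+ * parseId3TagSize adds the 10-byte tag header (plus 10 more when a footer is
+ * present). parseAacTimestamp looks for a PRIV frame owned by
+ * 'com.apple.streaming.transportStreamTimestamp' and rebuilds a 33-bit MPEG-TS
+ * timestamp from the bytes after the owner string, using the same
+ * shift-then-multiply pattern as the PES PTS parsing earlier in this bundle.
+ */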
+
+ var _AacStream;
+ /**
+ * Splits an incoming stream of binary data into ADTS and ID3 Frames.
+ */
+
+
+ _AacStream = function AacStream() {
+ var everything = new Uint8Array(),
+ timeStamp = 0;
+
+ _AacStream.prototype.init.call(this);
+
+ this.setTimestamp = function (timestamp) {
+ timeStamp = timestamp;
+ };
+
+ this.push = function (bytes) {
+ var frameSize = 0,
+ byteIndex = 0,
+ bytesLeft,
+ chunk,
+ packet,
+ tempLength; // If there are bytes remaining from the last segment, prepend them to the
+ // bytes that were pushed in
+
+ if (everything.length) {
+ tempLength = everything.length;
+ var oldEverything = everything; // keep a reference to the previously buffered bytes
+ everything = new Uint8Array(bytes.byteLength + tempLength);
+ everything.set(oldEverything.subarray(0, tempLength));
+ everything.set(bytes, tempLength);
+ } else {
+ everything = bytes;
+ }
+
+ while (everything.length - byteIndex >= 3) {
+ if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
+ // Exit early because we don't have enough to parse
+ // the ID3 tag header
+ if (everything.length - byteIndex < 10) {
+ break;
+ } // check framesize
+
+
+ frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
+ // to emit a full packet
+ // Add to byteIndex to support multiple ID3 tags in sequence
+
+ if (byteIndex + frameSize > everything.length) {
+ break;
+ }
+
+ chunk = {
+ type: 'timed-metadata',
+ data: everything.subarray(byteIndex, byteIndex + frameSize)
+ };
+ this.trigger('data', chunk);
+ byteIndex += frameSize;
+ continue;
+ } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
+ // Exit early because we don't have enough to parse
+ // the ADTS frame header
+ if (everything.length - byteIndex < 7) {
+ break;
+ }
+
+ frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
+ // to emit a full packet
+
+ if (byteIndex + frameSize > everything.length) {
+ break;
+ }
+
+ packet = {
+ type: 'audio',
+ data: everything.subarray(byteIndex, byteIndex + frameSize),
+ pts: timeStamp,
+ dts: timeStamp
+ };
+ this.trigger('data', packet);
+ byteIndex += frameSize;
+ continue;
+ }
+
+ byteIndex++;
+ }
+
+ bytesLeft = everything.length - byteIndex;
+
+ if (bytesLeft > 0) {
+ everything = everything.subarray(byteIndex);
+ } else {
+ everything = new Uint8Array();
+ }
+ };
+
+ this.reset = function () {
+ everything = new Uint8Array();
+ this.trigger('reset');
+ };
+
+ this.endTimeline = function () {
+ everything = new Uint8Array();
+ this.trigger('endedtimeline');
+ };
+ };
+
+ _AacStream.prototype = new stream();
+ var aac = _AacStream; // constants
+
+ var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
+ var audioProperties = AUDIO_PROPERTIES;
+ var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
+ var videoProperties = VIDEO_PROPERTIES;
+ var H264Stream = h264.H264Stream;
+ var isLikelyAacData = utils.isLikelyAacData;
+ var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types
+
+ var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
+
+ var retriggerForStream = function retriggerForStream(key, event) {
+ event.stream = key;
+ this.trigger('log', event);
+ };
+
+ var addPipelineLogRetriggers = function addPipelineLogRetriggers(transmuxer, pipeline) {
+ var keys = Object.keys(pipeline);
+
+ for (var i = 0; i < keys.length; i++) {
+ var key = keys[i]; // skip non-stream keys and headOfPipeline
+ // which is just a duplicate
+
+ if (key === 'headOfPipeline' || !pipeline[key].on) {
+ continue;
+ }
+
+ pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
+ }
+ };
+ /**
+ * Compare two arrays (even typed) for same-ness
+ */
+
+
+ var arrayEquals = function arrayEquals(a, b) {
+ var i;
+
+ if (a.length !== b.length) {
+ return false;
+ } // compare the value of each element in the array
+
+
+ for (i = 0; i < a.length; i++) {
+ if (a[i] !== b[i]) {
+ return false;
+ }
+ }
+
+ return true;
+ };
+
+ var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
+ var ptsOffsetFromDts = startPts - startDts,
+ decodeDuration = endDts - startDts,
+ presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
+ // however, the player time values will reflect a start from the baseMediaDecodeTime.
+ // In order to provide relevant values for the player times, base timing info on the
+ // baseMediaDecodeTime and the DTS and PTS durations of the segment.
+
+ return {
+ start: {
+ dts: baseMediaDecodeTime,
+ pts: baseMediaDecodeTime + ptsOffsetFromDts
+ },
+ end: {
+ dts: baseMediaDecodeTime + decodeDuration,
+ pts: baseMediaDecodeTime + presentationDuration
+ },
+ prependedContentDuration: prependedContentDuration,
+ baseMediaDecodeTime: baseMediaDecodeTime
+ };
+ };
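+
+ /* Quick numeric illustration (made-up values): with baseMediaDecodeTime = 0,
+ * startDts = 900000, startPts = 903000, endDts = 1080000 and endPts = 1083000,
+ * the segment reports start { dts: 0, pts: 3000 } and
+ * end { dts: 180000, pts: 180000 } -- the stream's absolute clock is replaced by
+ * an offset from baseMediaDecodeTime while the PTS/DTS durations are preserved.
+ */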
+ /**
+ * Constructs a single-track, ISO BMFF media segment from AAC data
+ * events. The output of this stream can be fed to a SourceBuffer
+ * configured with a suitable initialization segment.
+ * @param track {object} track metadata configuration
+ * @param options {object} transmuxer options object
+ * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
+ * in the source; false to adjust the first segment to start at 0.
+ */
+
+
+ _AudioSegmentStream = function AudioSegmentStream(track, options) {
+ var adtsFrames = [],
+ sequenceNumber,
+ earliestAllowedDts = 0,
+ audioAppendStartTs = 0,
+ videoBaseMediaDecodeTime = Infinity;
+ options = options || {};
+ sequenceNumber = options.firstSequenceNumber || 0;
+
+ _AudioSegmentStream.prototype.init.call(this);
+
+ this.push = function (data) {
+ trackDecodeInfo.collectDtsInfo(track, data);
+
+ if (track) {
+ audioProperties.forEach(function (prop) {
+ track[prop] = data[prop];
+ });
+ } // buffer audio data until end() is called
+
+
+ adtsFrames.push(data);
+ };
+
+ this.setEarliestDts = function (earliestDts) {
+ earliestAllowedDts = earliestDts;
+ };
+
+ this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
+ videoBaseMediaDecodeTime = baseMediaDecodeTime;
+ };
+
+ this.setAudioAppendStart = function (timestamp) {
+ audioAppendStartTs = timestamp;
+ };
+
+ this.flush = function () {
+ var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed
+
+ if (adtsFrames.length === 0) {
+ this.trigger('done', 'AudioSegmentStream');
+ return;
+ }
+
+ frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
+ track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // amount of audio filled but the value is in video clock rather than audio clock
+
+ videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
+ // samples (that is, adts frames) in the audio data
+
+ track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
+
+ mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
+ adtsFrames = [];
+ moof = mp4Generator.moof(sequenceNumber, [track]);
+ boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time
+
+ sequenceNumber++;
+ boxes.set(moof);
+ boxes.set(mdat, moof.byteLength);
+ trackDecodeInfo.clearDtsInfo(track);
+ frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
+ // tests) on adding the timingInfo event. However, it seems unlikely that there's a
+ // valid use-case where an init segment/data should be triggered without associated
+ // frames. Leaving for now, but should be looked into.
+
+ if (frames.length) {
+ segmentDuration = frames.length * frameDuration;
+ this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
+ // frame info is in video clock cycles. Convert to match expectation of
+ // listeners (that all timestamps will be based on video clock cycles).
+ clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
+ frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
+ this.trigger('timingInfo', {
+ start: frames[0].pts,
+ end: frames[0].pts + segmentDuration
+ });
+ }
+
+ this.trigger('data', {
+ track: track,
+ boxes: boxes
+ });
+ this.trigger('done', 'AudioSegmentStream');
+ };
+
+ this.reset = function () {
+ trackDecodeInfo.clearDtsInfo(track);
+ adtsFrames = [];
+ this.trigger('reset');
+ };
+ };
+
+ _AudioSegmentStream.prototype = new stream();
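+
+ /* Note on frameDuration in AudioSegmentStream.flush above: every AAC frame
+ * carries 1024 samples, so at a 48000 Hz samplerate one frame spans
+ * Math.ceil(90000 * 1024 / 48000) = 1920 ticks of the 90 kHz MPEG-TS clock
+ * (clock.ONE_SECOND_IN_TS), and segmentDuration is frames.length * 1920.
+ */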
+ /**
+ * Constructs a single-track, ISO BMFF media segment from H264 data
+ * events. The output of this stream can be fed to a SourceBuffer
+ * configured with a suitable initialization segment.
+ * @param track {object} track metadata configuration
+ * @param options {object} transmuxer options object
+ * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
+ * gopsToAlignWith list when attempting to align gop pts
+ * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
+ * in the source; false to adjust the first segment to start at 0.
+ */
+
+ _VideoSegmentStream = function VideoSegmentStream(track, options) {
+ var sequenceNumber,
+ nalUnits = [],
+ gopsToAlignWith = [],
+ config,
+ pps;
+ options = options || {};
+ sequenceNumber = options.firstSequenceNumber || 0;
+
+ _VideoSegmentStream.prototype.init.call(this);
+
+ delete track.minPTS;
+ this.gopCache_ = [];
+ /**
+ * Constructs a ISO BMFF segment given H264 nalUnits
+ * @param {Object} nalUnit A data event representing a nalUnit
+ * @param {String} nalUnit.nalUnitType
+ * @param {Object} nalUnit.config Properties for a mp4 track
+ * @param {Uint8Array} nalUnit.data The nalUnit bytes
+ * @see lib/codecs/h264.js
+ **/
+
+ this.push = function (nalUnit) {
+ trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
+
+ if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
+ config = nalUnit.config;
+ track.sps = [nalUnit.data];
+ videoProperties.forEach(function (prop) {
+ track[prop] = config[prop];
+ }, this);
+ }
+
+ if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
+ pps = nalUnit.data;
+ track.pps = [nalUnit.data];
+ } // buffer video until flush() is called
+
+
+ nalUnits.push(nalUnit);
+ };
+ /**
+ * Pass constructed ISO BMFF track and boxes on to the
+ * next stream in the pipeline
+ **/
+
+
+ this.flush = function () {
+ var frames,
+ gopForFusion,
+ gops,
+ moof,
+ mdat,
+ boxes,
+ prependedContentDuration = 0,
+ firstGop,
+ lastGop; // Throw away nalUnits at the start of the byte stream until
+ // we find the first AUD
+
+ while (nalUnits.length) {
+ if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
+ break;
+ }
+
+ nalUnits.shift();
+ } // Return early if no video data has been observed
+
+
+ if (nalUnits.length === 0) {
+ this.resetStream_();
+ this.trigger('done', 'VideoSegmentStream');
+ return;
+ } // Organize the raw nal-units into arrays that represent
+ // higher-level constructs such as frames and gops
+ // (group-of-pictures)
+
+
+ frames = frameUtils.groupNalsIntoFrames(nalUnits);
+ gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
+ // a problem since MSE (on Chrome) requires a leading keyframe.
+ //
+ // We have two approaches to repairing this situation:
+ // 1) GOP-FUSION:
+ // This is where we keep track of the GOPS (group-of-pictures)
+ // from previous fragments and attempt to find one that we can
+ // prepend to the current fragment in order to create a valid
+ // fragment.
+ // 2) KEYFRAME-PULLING:
+ // Here we search for the first keyframe in the fragment and
+ // throw away all the frames between the start of the fragment
+ // and that keyframe. We then extend the duration and pull the
+ // PTS of the keyframe forward so that it covers the time range
+ // of the frames that were disposed of.
+ //
+ // #1 is far preferable to #2, which can cause "stuttering" but
+ // requires more things to be just right.
+
+ if (!gops[0][0].keyFrame) {
+ // Search for a gop for fusion from our gopCache
+ gopForFusion = this.getGopForFusion_(nalUnits[0], track);
+
+ if (gopForFusion) {
+ // in order to provide more accurate timing information about the segment, save
+ // the number of seconds prepended to the original segment due to GOP fusion
+ prependedContentDuration = gopForFusion.duration;
+ gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
+ // new gop at the beginning
+
+ gops.byteLength += gopForFusion.byteLength;
+ gops.nalCount += gopForFusion.nalCount;
+ gops.pts = gopForFusion.pts;
+ gops.dts = gopForFusion.dts;
+ gops.duration += gopForFusion.duration;
+ } else {
+ // If we didn't find a candidate gop fall back to keyframe-pulling
+ gops = frameUtils.extendFirstKeyFrame(gops);
+ }
+ } // Trim gops to align with gopsToAlignWith
+
+
+ if (gopsToAlignWith.length) {
+ var alignedGops;
+
+ if (options.alignGopsAtEnd) {
+ alignedGops = this.alignGopsAtEnd_(gops);
+ } else {
+ alignedGops = this.alignGopsAtStart_(gops);
+ }
+
+ if (!alignedGops) {
+ // save all the nals in the last GOP into the gop cache
+ this.gopCache_.unshift({
+ gop: gops.pop(),
+ pps: track.pps,
+ sps: track.sps
+ }); // Keep a maximum of 6 GOPs in the cache
+
+ this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
+
+ nalUnits = []; // return early; no gops can be aligned with the desired gopsToAlignWith
+
+ this.resetStream_();
+ this.trigger('done', 'VideoSegmentStream');
+ return;
+ } // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
+ // when recalculated before sending off to CoalesceStream
+
+
+ trackDecodeInfo.clearDtsInfo(track);
+ gops = alignedGops;
+ }
+
+ trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
+ // samples (that is, frames) in the video data
+
+ track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
+
+ mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
+ track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
+ this.trigger('processedGopsInfo', gops.map(function (gop) {
+ return {
+ pts: gop.pts,
+ dts: gop.dts,
+ byteLength: gop.byteLength
+ };
+ }));
+ firstGop = gops[0];
+ lastGop = gops[gops.length - 1];
+ this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
+ this.trigger('timingInfo', {
+ start: gops[0].pts,
+ end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
+ }); // save all the nals in the last GOP into the gop cache
+
+ this.gopCache_.unshift({
+ gop: gops.pop(),
+ pps: track.pps,
+ sps: track.sps
+ }); // Keep a maximum of 6 GOPs in the cache
+
+ this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
+
+ nalUnits = [];
+ this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
+ this.trigger('timelineStartInfo', track.timelineStartInfo);
+ moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
+ // throwing away hundreds of media segment fragments
+
+ boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
+
+ sequenceNumber++;
+ boxes.set(moof);
+ boxes.set(mdat, moof.byteLength);
+ this.trigger('data', {
+ track: track,
+ boxes: boxes
+ });
+ this.resetStream_(); // Continue with the flush process now
+
+ this.trigger('done', 'VideoSegmentStream');
+ };
+
+ this.reset = function () {
+ this.resetStream_();
+ nalUnits = [];
+ this.gopCache_.length = 0;
+ gopsToAlignWith.length = 0;
+ this.trigger('reset');
+ };
+
+ this.resetStream_ = function () {
+ trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
+ // for instance, when we are rendition switching
+
+ config = undefined;
+ pps = undefined;
+ }; // Search for a candidate Gop for gop-fusion from the gop cache and
+ // return it or return null if no good candidate was found
+
+
+ this.getGopForFusion_ = function (nalUnit) {
+ var halfSecond = 45000,
+ // Half-a-second in a 90khz clock
+ allowableOverlap = 10000,
+ // About 3 frames @ 30fps
+ nearestDistance = Infinity,
+ dtsDistance,
+ nearestGopObj,
+ currentGop,
+ currentGopObj,
+ i; // Search for the GOP nearest to the beginning of this nal unit
+
+ for (i = 0; i < this.gopCache_.length; i++) {
+ currentGopObj = this.gopCache_[i];
+ currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
+
+ if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
+ continue;
+ } // Reject Gops that would require a negative baseMediaDecodeTime
+
+
+ if (currentGop.dts < track.timelineStartInfo.dts) {
+ continue;
+ } // The distance between the end of the gop and the start of the nalUnit
+
+
+ dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within
+ // a half-second of the nal unit
+
+ if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
+ // Always use the closest GOP we found if there is more than
+ // one candidate
+ if (!nearestGopObj || nearestDistance > dtsDistance) {
+ nearestGopObj = currentGopObj;
+ nearestDistance = dtsDistance;
+ }
+ }
+ }
+
+ if (nearestGopObj) {
+ return nearestGopObj.gop;
+ }
+
+ return null;
+ }; // trim gop list to the first gop found that has a matching pts with a gop in the list
+ // of gopsToAlignWith starting from the START of the list
+
+
+ this.alignGopsAtStart_ = function (gops) {
+ var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
+ byteLength = gops.byteLength;
+ nalCount = gops.nalCount;
+ duration = gops.duration;
+ alignIndex = gopIndex = 0;
+
+ while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
+ align = gopsToAlignWith[alignIndex];
+ gop = gops[gopIndex];
+
+ if (align.pts === gop.pts) {
+ break;
+ }
+
+ if (gop.pts > align.pts) {
+ // this current gop starts after the current gop we want to align on, so increment
+ // align index
+ alignIndex++;
+ continue;
+ } // current gop starts before the current gop we want to align on. so increment gop
+ // index
+
+
+ gopIndex++;
+ byteLength -= gop.byteLength;
+ nalCount -= gop.nalCount;
+ duration -= gop.duration;
+ }
+
+ if (gopIndex === 0) {
+ // no gops to trim
+ return gops;
+ }
+
+ if (gopIndex === gops.length) {
+ // all gops trimmed, skip appending all gops
+ return null;
+ }
+
+ alignedGops = gops.slice(gopIndex);
+ alignedGops.byteLength = byteLength;
+ alignedGops.duration = duration;
+ alignedGops.nalCount = nalCount;
+ alignedGops.pts = alignedGops[0].pts;
+ alignedGops.dts = alignedGops[0].dts;
+ return alignedGops;
+ }; // trim gop list to the first gop found that has a matching pts with a gop in the list
+ // of gopsToAlignWith starting from the END of the list
+
+
+ this.alignGopsAtEnd_ = function (gops) {
+ var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
+ alignIndex = gopsToAlignWith.length - 1;
+ gopIndex = gops.length - 1;
+ alignEndIndex = null;
+ matchFound = false;
+
+ while (alignIndex >= 0 && gopIndex >= 0) {
+ align = gopsToAlignWith[alignIndex];
+ gop = gops[gopIndex];
+
+ if (align.pts === gop.pts) {
+ matchFound = true;
+ break;
+ }
+
+ if (align.pts > gop.pts) {
+ alignIndex--;
+ continue;
+ }
+
+ if (alignIndex === gopsToAlignWith.length - 1) {
+ // gop.pts is greater than the last alignment candidate. If no match is found
+ // by the end of this loop, we still want to append gops that come after this
+ // point
+ alignEndIndex = gopIndex;
+ }
+
+ gopIndex--;
+ }
+
+ if (!matchFound && alignEndIndex === null) {
+ return null;
+ }
+
+ var trimIndex;
+
+ if (matchFound) {
+ trimIndex = gopIndex;
+ } else {
+ trimIndex = alignEndIndex;
+ }
+
+ if (trimIndex === 0) {
+ return gops;
+ }
+
+ var alignedGops = gops.slice(trimIndex);
+ var metadata = alignedGops.reduce(function (total, gop) {
+ total.byteLength += gop.byteLength;
+ total.duration += gop.duration;
+ total.nalCount += gop.nalCount;
+ return total;
+ }, {
+ byteLength: 0,
+ duration: 0,
+ nalCount: 0
+ });
+ alignedGops.byteLength = metadata.byteLength;
+ alignedGops.duration = metadata.duration;
+ alignedGops.nalCount = metadata.nalCount;
+ alignedGops.pts = alignedGops[0].pts;
+ alignedGops.dts = alignedGops[0].dts;
+ return alignedGops;
+ };
+
+ this.alignGopsWith = function (newGopsToAlignWith) {
+ gopsToAlignWith = newGopsToAlignWith;
+ };
+ };
+
+ _VideoSegmentStream.prototype = new stream();
+ /**
+ * A Stream that can combine multiple streams (ie. audio & video)
+ * into a single output segment for MSE. Also supports audio-only
+ * and video-only streams.
+ * @param options {object} transmuxer options object
+ * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
+ * in the source; false to adjust the first segment to start at media timeline start.
+ */
+
+ _CoalesceStream = function CoalesceStream(options, metadataStream) {
+ // Number of Tracks per output segment
+ // If greater than 1, we combine multiple
+ // tracks into a single segment
+ this.numberOfTracks = 0;
+ this.metadataStream = metadataStream;
+ options = options || {};
+
+ if (typeof options.remux !== 'undefined') {
+ this.remuxTracks = !!options.remux;
+ } else {
+ this.remuxTracks = true;
+ }
+
+ if (typeof options.keepOriginalTimestamps === 'boolean') {
+ this.keepOriginalTimestamps = options.keepOriginalTimestamps;
+ } else {
+ this.keepOriginalTimestamps = false;
+ }
+
+ this.pendingTracks = [];
+ this.videoTrack = null;
+ this.pendingBoxes = [];
+ this.pendingCaptions = [];
+ this.pendingMetadata = [];
+ this.pendingBytes = 0;
+ this.emittedTracks = 0;
+
+ _CoalesceStream.prototype.init.call(this); // Take output from multiple
+
+
+ this.push = function (output) {
+ // buffer incoming captions until the associated video segment
+ // finishes
+ if (output.text) {
+ return this.pendingCaptions.push(output);
+ } // buffer incoming id3 tags until the final flush
+
+
+ if (output.frames) {
+ return this.pendingMetadata.push(output);
+ } // Add this track to the list of pending tracks and store
+ // important information required for the construction of
+ // the final segment
+
+
+ this.pendingTracks.push(output.track);
+ this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against chrome?
+ // We unshift audio and push video because
+ // as of Chrome 75 when switching from
+ // one init segment to another if the video
+ // mdat does not appear after the audio mdat
+ // only audio will play for the duration of our transmux.
+
+ if (output.track.type === 'video') {
+ this.videoTrack = output.track;
+ this.pendingBoxes.push(output.boxes);
+ }
+
+ if (output.track.type === 'audio') {
+ this.audioTrack = output.track;
+ this.pendingBoxes.unshift(output.boxes);
+ }
+ };
+ };
+
+ _CoalesceStream.prototype = new stream();
+
+ _CoalesceStream.prototype.flush = function (flushSource) {
+ var offset = 0,
+ event = {
+ captions: [],
+ captionStreams: {},
+ metadata: [],
+ info: {}
+ },
+ caption,
+ id3,
+ initSegment,
+ timelineStartPts = 0,
+ i;
+
+ if (this.pendingTracks.length < this.numberOfTracks) {
+ if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
+ // Return because we haven't received a flush from a data-generating
+ // portion of the segment (meaning that we have only received meta-data
+ // or captions.)
+ return;
+ } else if (this.remuxTracks) {
+ // Return until we have enough tracks from the pipeline to remux (if we
+ // are remuxing audio and video into a single MP4)
+ return;
+ } else if (this.pendingTracks.length === 0) {
+ // In the case where we receive a flush without any data having been
+ // received we consider it an emitted track for the purposes of coalescing
+ // `done` events.
+ // We do this for the case where there is an audio and video track in the
+ // segment but no audio data. (seen in several playlists with alternate
+ // audio tracks and no audio present in the main TS segments.)
+ this.emittedTracks++;
+
+ if (this.emittedTracks >= this.numberOfTracks) {
+ this.trigger('done');
+ this.emittedTracks = 0;
+ }
+
+ return;
+ }
+ }
+
+ if (this.videoTrack) {
+ timelineStartPts = this.videoTrack.timelineStartInfo.pts;
+ videoProperties.forEach(function (prop) {
+ event.info[prop] = this.videoTrack[prop];
+ }, this);
+ } else if (this.audioTrack) {
+ timelineStartPts = this.audioTrack.timelineStartInfo.pts;
+ audioProperties.forEach(function (prop) {
+ event.info[prop] = this.audioTrack[prop];
+ }, this);
+ }
+
+ if (this.videoTrack || this.audioTrack) {
+ if (this.pendingTracks.length === 1) {
+ event.type = this.pendingTracks[0].type;
+ } else {
+ event.type = 'combined';
+ }
+
+ this.emittedTracks += this.pendingTracks.length;
+ initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment
+
+ event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
+ // and track definitions
+
+ event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats
+
+ event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together
+
+ for (i = 0; i < this.pendingBoxes.length; i++) {
+ event.data.set(this.pendingBoxes[i], offset);
+ offset += this.pendingBoxes[i].byteLength;
+ } // Translate caption PTS times into second offsets to match the
+ // video timeline for the segment, and add track info
+
+
+ for (i = 0; i < this.pendingCaptions.length; i++) {
+ caption = this.pendingCaptions[i];
+ caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
+ caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
+ event.captionStreams[caption.stream] = true;
+ event.captions.push(caption);
+ } // Translate ID3 frame PTS times into second offsets to match the
+ // video timeline for the segment
+
+
+ for (i = 0; i < this.pendingMetadata.length; i++) {
+ id3 = this.pendingMetadata[i];
+ id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
+ event.metadata.push(id3);
+ } // We add this to every single emitted segment even though we only need
+ // it for the first
+
+
+ event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
+
+ this.pendingTracks.length = 0;
+ this.videoTrack = null;
+ this.pendingBoxes.length = 0;
+ this.pendingCaptions.length = 0;
+ this.pendingBytes = 0;
+ this.pendingMetadata.length = 0; // Emit the built segment
+ // We include captions and ID3 tags for backwards compatibility,
+ // ideally we should send only video and audio in the data event
+
+ this.trigger('data', event); // Emit each caption to the outside world
+ // Ideally, this would happen immediately on parsing captions,
+ // but we need to ensure that video data is sent back first
+ // so that caption timing can be adjusted to match video timing
+
+ for (i = 0; i < event.captions.length; i++) {
+ caption = event.captions[i];
+ this.trigger('caption', caption);
+ } // Emit each id3 tag to the outside world
+ // Ideally, this would happen immediately on parsing the tag,
+ // but we need to ensure that video data is sent back first
+ // so that ID3 frame timing can be adjusted to match video timing
+
+
+ for (i = 0; i < event.metadata.length; i++) {
+ id3 = event.metadata[i];
+ this.trigger('id3Frame', id3);
+ }
+ } // Only emit `done` if all tracks have been flushed and emitted
+
+
+ if (this.emittedTracks >= this.numberOfTracks) {
+ this.trigger('done');
+ this.emittedTracks = 0;
+ }
+ };
+
+ _CoalesceStream.prototype.setRemux = function (val) {
+ this.remuxTracks = val;
+ };
+ /**
+ * A Stream that expects MP2T binary data as input and produces
+ * corresponding media segments, suitable for use with Media Source
+ * Extension (MSE) implementations that support the ISO BMFF byte
+ * stream format, like Chrome.
+ */
+
+
+ _Transmuxer = function Transmuxer(options) {
+ var self = this,
+ hasFlushed = true,
+ videoTrack,
+ audioTrack;
+
+ _Transmuxer.prototype.init.call(this);
+
+ options = options || {};
+ this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
+ this.transmuxPipeline_ = {};
+
+ this.setupAacPipeline = function () {
+ var pipeline = {};
+ this.transmuxPipeline_ = pipeline;
+ pipeline.type = 'aac';
+ pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
+
+ pipeline.aacStream = new aac();
+ pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
+ pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
+ pipeline.adtsStream = new adts();
+ pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
+ pipeline.headOfPipeline = pipeline.aacStream;
+ pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
+ pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
+ pipeline.metadataStream.on('timestamp', function (frame) {
+ pipeline.aacStream.setTimestamp(frame.timeStamp);
+ });
+ pipeline.aacStream.on('data', function (data) {
+ if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
+ return;
+ }
+
+ audioTrack = audioTrack || {
+ timelineStartInfo: {
+ baseMediaDecodeTime: self.baseMediaDecodeTime
+ },
+ codec: 'adts',
+ type: 'audio'
+ }; // hook up the audio segment stream to the first track with aac data
+
+ pipeline.coalesceStream.numberOfTracks++;
+ pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
+ pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
+ pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
+
+ pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
+
+ self.trigger('trackinfo', {
+ hasAudio: !!audioTrack,
+ hasVideo: !!videoTrack
+ });
+ }); // Re-emit any data coming from the coalesce stream to the outside world
+
+ pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
+
+ pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
+ addPipelineLogRetriggers(this, pipeline);
+ };
+
+ this.setupTsPipeline = function () {
+ var pipeline = {};
+ this.transmuxPipeline_ = pipeline;
+ pipeline.type = 'ts';
+ pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
+
+ pipeline.packetStream = new m2ts_1.TransportPacketStream();
+ pipeline.parseStream = new m2ts_1.TransportParseStream();
+ pipeline.elementaryStream = new m2ts_1.ElementaryStream();
+ pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
+ pipeline.adtsStream = new adts();
+ pipeline.h264Stream = new H264Stream();
+ pipeline.captionStream = new m2ts_1.CaptionStream(options);
+ pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
+ pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
+
+ pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
+ // demux the streams
+
+ pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
+ pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
+ pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
+
+ pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
+ pipeline.elementaryStream.on('data', function (data) {
+ var i;
+
+ if (data.type === 'metadata') {
+ i = data.tracks.length; // scan the tracks listed in the metadata
+
+ while (i--) {
+ if (!videoTrack && data.tracks[i].type === 'video') {
+ videoTrack = data.tracks[i];
+ videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
+ } else if (!audioTrack && data.tracks[i].type === 'audio') {
+ audioTrack = data.tracks[i];
+ audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
+ }
+ } // hook up the video segment stream to the first track with h264 data
+
+
+ if (videoTrack && !pipeline.videoSegmentStream) {
+ pipeline.coalesceStream.numberOfTracks++;
+ pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
+ pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
+ pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
+ // When video emits timelineStartInfo data after a flush, we forward that
+ // info to the AudioSegmentStream, if it exists, because video timeline
+ // data takes precedence. Do not do this if keepOriginalTimestamps is set,
+ // because this is a particularly subtle form of timestamp alteration.
+ if (audioTrack && !options.keepOriginalTimestamps) {
+ audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
+ // very earliest DTS we have seen in video because Chrome will
+ // interpret any video track with a baseMediaDecodeTime that is
+ // non-zero as a gap.
+
+ pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
+ }
+ });
+ pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
+ pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
+ pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
+ if (audioTrack) {
+ pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
+ }
+ });
+ pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
+
+ pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
+ }
+
+ if (audioTrack && !pipeline.audioSegmentStream) {
+ // hook up the audio segment stream to the first track with aac data
+ pipeline.coalesceStream.numberOfTracks++;
+ pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
+ pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
+ pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
+ pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
+
+ pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
+ } // emit pmt info
+
+
+ self.trigger('trackinfo', {
+ hasAudio: !!audioTrack,
+ hasVideo: !!videoTrack
+ });
+ }
+ }); // Re-emit any data coming from the coalesce stream to the outside world
+
+ pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
+ pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
+ id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
+ self.trigger('id3Frame', id3Frame);
+ });
+ pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
+
+ pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
+ addPipelineLogRetriggers(this, pipeline);
+ }; // hook up the segment streams once track metadata is delivered
+
+
+ this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
+ var pipeline = this.transmuxPipeline_;
+
+ if (!options.keepOriginalTimestamps) {
+ this.baseMediaDecodeTime = baseMediaDecodeTime;
+ }
+
+ if (audioTrack) {
+ audioTrack.timelineStartInfo.dts = undefined;
+ audioTrack.timelineStartInfo.pts = undefined;
+ trackDecodeInfo.clearDtsInfo(audioTrack);
+
+ if (pipeline.audioTimestampRolloverStream) {
+ pipeline.audioTimestampRolloverStream.discontinuity();
+ }
+ }
+
+ if (videoTrack) {
+ if (pipeline.videoSegmentStream) {
+ pipeline.videoSegmentStream.gopCache_ = [];
+ }
+
+ videoTrack.timelineStartInfo.dts = undefined;
+ videoTrack.timelineStartInfo.pts = undefined;
+ trackDecodeInfo.clearDtsInfo(videoTrack);
+ pipeline.captionStream.reset();
+ }
+
+ if (pipeline.timestampRolloverStream) {
+ pipeline.timestampRolloverStream.discontinuity();
+ }
+ };
+
+ this.setAudioAppendStart = function (timestamp) {
+ if (audioTrack) {
+ this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
+ }
+ };
+
+ this.setRemux = function (val) {
+ var pipeline = this.transmuxPipeline_;
+ options.remux = val;
+
+ if (pipeline && pipeline.coalesceStream) {
+ pipeline.coalesceStream.setRemux(val);
+ }
+ };
+
+ this.alignGopsWith = function (gopsToAlignWith) {
+ if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
+ this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
+ }
+ };
+
+ this.getLogTrigger_ = function (key) {
+ var self = this;
+ return function (event) {
+ event.stream = key;
+ self.trigger('log', event);
+ };
+ }; // feed incoming data to the front of the parsing pipeline
+
+
+ this.push = function (data) {
+ if (hasFlushed) {
+ var isAac = isLikelyAacData(data);
+
+ if (isAac && this.transmuxPipeline_.type !== 'aac') {
+ this.setupAacPipeline();
+ } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
+ this.setupTsPipeline();
+ }
+
+ hasFlushed = false;
+ }
+
+ this.transmuxPipeline_.headOfPipeline.push(data);
+ }; // flush any buffered data
+
+
+ this.flush = function () {
+ hasFlushed = true; // Start at the top of the pipeline and flush all pending work
+
+ this.transmuxPipeline_.headOfPipeline.flush();
+ };
+
+ this.endTimeline = function () {
+ this.transmuxPipeline_.headOfPipeline.endTimeline();
+ };
+
+ this.reset = function () {
+ if (this.transmuxPipeline_.headOfPipeline) {
+ this.transmuxPipeline_.headOfPipeline.reset();
+ }
+ }; // Caption data has to be reset when seeking outside buffered range
+
+
+ this.resetCaptions = function () {
+ if (this.transmuxPipeline_.captionStream) {
+ this.transmuxPipeline_.captionStream.reset();
+ }
+ };
+ };
+
+ _Transmuxer.prototype = new stream();
+ var transmuxer = {
+ Transmuxer: _Transmuxer,
+ VideoSegmentStream: _VideoSegmentStream,
+ AudioSegmentStream: _AudioSegmentStream,
+ AUDIO_PROPERTIES: audioProperties,
+ VIDEO_PROPERTIES: videoProperties,
+ // exported for testing
+ generateSegmentTimingInfo: generateSegmentTimingInfo
+ };
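+ // Example (illustrative sketch): a consumer typically feeds MPEG-TS or ADTS/AAC bytes
+ // into a Transmuxer and listens for 'data' events carrying an fMP4 init segment plus
+ // moof/mdat payload; tsPacketBytes below is a placeholder Uint8Array.
+ //
+ //   var t = new transmuxer.Transmuxer({ remux: true });
+ //   t.on('data', function (segment) {
+ //     // segment.initSegment and segment.data are Uint8Arrays suitable for appending
+ //     // to Media Source Extensions SourceBuffers
+ //   });
+ //   t.push(tsPacketBytes); // 188-byte MPEG-TS packets (or raw AAC/ADTS data)
+ //   t.flush();             // flushes the pipeline, emitting 'data' and then 'done'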
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+ var toUnsigned$3 = function toUnsigned(value) {
+ return value >>> 0;
+ };
+
+ var toHexString$1 = function toHexString(value) {
+ return ('00' + value.toString(16)).slice(-2);
+ };
+
+ var bin = {
+ toUnsigned: toUnsigned$3,
+ toHexString: toHexString$1
+ };
+
+ var parseType$1 = function parseType(buffer) {
+ var result = '';
+ result += String.fromCharCode(buffer[0]);
+ result += String.fromCharCode(buffer[1]);
+ result += String.fromCharCode(buffer[2]);
+ result += String.fromCharCode(buffer[3]);
+ return result;
+ };
+
+ var parseType_1 = parseType$1;
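+ // For example, the four bytes [0x6d, 0x6f, 0x6f, 0x76] ('m', 'o', 'o', 'v') decode to
+ // the box type string 'moov':
+ //
+ //   parseType_1(new Uint8Array([0x6d, 0x6f, 0x6f, 0x76])); // => 'moov'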
+ var toUnsigned$2 = bin.toUnsigned;
+
+ var findBox = function findBox(data, path) {
+ var results = [],
+ i,
+ size,
+ type,
+ end,
+ subresults;
+
+ if (!path.length) {
+ // short-circuit the search for empty paths
+ return null;
+ }
+
+ for (i = 0; i < data.byteLength;) {
+ size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
+ type = parseType_1(data.subarray(i + 4, i + 8));
+ end = size > 1 ? i + size : data.byteLength;
+
+ if (type === path[0]) {
+ if (path.length === 1) {
+ // this is the end of the path and we've found the box we were
+ // looking for
+ results.push(data.subarray(i + 8, end));
+ } else {
+ // recursively search for the next box along the path
+ subresults = findBox(data.subarray(i + 8, end), path.slice(1));
+
+ if (subresults.length) {
+ results = results.concat(subresults);
+ }
+ }
+ }
+
+ i = end;
+ } // we've finished searching all of data
+
+
+ return results;
+ };
+
+ var findBox_1 = findBox;
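+ // Example (illustrative): given the bytes of a fragmented MP4 segment (segmentBytes is a
+ // placeholder Uint8Array), findBox_1 returns the payloads of every box matching the
+ // nested path, e.g. every 'traf' box contained in a 'moof':
+ //
+ //   var trafs = findBox_1(segmentBytes, ['moof', 'traf']); // => Array of Uint8Array payloads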
+ var toUnsigned$1 = bin.toUnsigned;
+ var getUint64$1 = numbers.getUint64;
+
+ var tfdt = function tfdt(data) {
+ var result = {
+ version: data[0],
+ flags: new Uint8Array(data.subarray(1, 4))
+ };
+
+ if (result.version === 1) {
+ result.baseMediaDecodeTime = getUint64$1(data.subarray(4));
+ } else {
+ result.baseMediaDecodeTime = toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]);
+ }
+
+ return result;
+ };
+
+ var parseTfdt = tfdt;
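+ // Worked example: a version-0 tfdt payload whose 32-bit baseMediaDecodeTime is
+ // 90000 (0x00015F90) parses to { version: 0, flags: [0, 0, 0], baseMediaDecodeTime: 90000 }:
+ //
+ //   parseTfdt(new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x5f, 0x90]));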
+
+ var parseSampleFlags = function parseSampleFlags(flags) {
+ return {
+ isLeading: (flags[0] & 0x0c) >>> 2,
+ dependsOn: flags[0] & 0x03,
+ isDependedOn: (flags[1] & 0xc0) >>> 6,
+ hasRedundancy: (flags[1] & 0x30) >>> 4,
+ paddingValue: (flags[1] & 0x0e) >>> 1,
+ isNonSyncSample: flags[1] & 0x01,
+ degradationPriority: flags[2] << 8 | flags[3]
+ };
+ };
+
+ var parseSampleFlags_1 = parseSampleFlags;
+
+ var trun = function trun(data) {
+ var result = {
+ version: data[0],
+ flags: new Uint8Array(data.subarray(1, 4)),
+ samples: []
+ },
+ view = new DataView(data.buffer, data.byteOffset, data.byteLength),
+ // Flag interpretation
+ dataOffsetPresent = result.flags[2] & 0x01,
+ // compare with 2nd byte of 0x1
+ firstSampleFlagsPresent = result.flags[2] & 0x04,
+ // compare with 2nd byte of 0x4
+ sampleDurationPresent = result.flags[1] & 0x01,
+ // compare with 2nd byte of 0x100
+ sampleSizePresent = result.flags[1] & 0x02,
+ // compare with 2nd byte of 0x200
+ sampleFlagsPresent = result.flags[1] & 0x04,
+ // compare with 2nd byte of 0x400
+ sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
+ // compare with 2nd byte of 0x800
+ sampleCount = view.getUint32(4),
+ offset = 8,
+ sample;
+
+ if (dataOffsetPresent) {
+ // 32 bit signed integer
+ result.dataOffset = view.getInt32(offset);
+ offset += 4;
+ } // Overrides the flags for the first sample only. The order of
+ // optional values will be: duration, size, compositionTimeOffset
+
+
+ if (firstSampleFlagsPresent && sampleCount) {
+ sample = {
+ flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
+ };
+ offset += 4;
+
+ if (sampleDurationPresent) {
+ sample.duration = view.getUint32(offset);
+ offset += 4;
+ }
+
+ if (sampleSizePresent) {
+ sample.size = view.getUint32(offset);
+ offset += 4;
+ }
+
+ if (sampleCompositionTimeOffsetPresent) {
+ if (result.version === 1) {
+ sample.compositionTimeOffset = view.getInt32(offset);
+ } else {
+ sample.compositionTimeOffset = view.getUint32(offset);
+ }
+
+ offset += 4;
+ }
+
+ result.samples.push(sample);
+ sampleCount--;
+ }
+
+ while (sampleCount--) {
+ sample = {};
+
+ if (sampleDurationPresent) {
+ sample.duration = view.getUint32(offset);
+ offset += 4;
+ }
+
+ if (sampleSizePresent) {
+ sample.size = view.getUint32(offset);
+ offset += 4;
+ }
+
+ if (sampleFlagsPresent) {
+ sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
+ offset += 4;
+ }
+
+ if (sampleCompositionTimeOffsetPresent) {
+ if (result.version === 1) {
+ sample.compositionTimeOffset = view.getInt32(offset);
+ } else {
+ sample.compositionTimeOffset = view.getUint32(offset);
+ }
+
+ offset += 4;
+ }
+
+ result.samples.push(sample);
+ }
+
+ return result;
+ };
+
+ var parseTrun = trun;
+
+ var tfhd = function tfhd(data) {
+ var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
+ result = {
+ version: data[0],
+ flags: new Uint8Array(data.subarray(1, 4)),
+ trackId: view.getUint32(4)
+ },
+ baseDataOffsetPresent = result.flags[2] & 0x01,
+ sampleDescriptionIndexPresent = result.flags[2] & 0x02,
+ defaultSampleDurationPresent = result.flags[2] & 0x08,
+ defaultSampleSizePresent = result.flags[2] & 0x10,
+ defaultSampleFlagsPresent = result.flags[2] & 0x20,
+ durationIsEmpty = result.flags[0] & 0x010000,
+ defaultBaseIsMoof = result.flags[0] & 0x020000,
+ i;
+ i = 8;
+
+ if (baseDataOffsetPresent) {
+ i += 4; // truncate top 4 bytes
+ // FIXME: should we read the full 64 bits?
+
+ result.baseDataOffset = view.getUint32(12);
+ i += 4;
+ }
+
+ if (sampleDescriptionIndexPresent) {
+ result.sampleDescriptionIndex = view.getUint32(i);
+ i += 4;
+ }
+
+ if (defaultSampleDurationPresent) {
+ result.defaultSampleDuration = view.getUint32(i);
+ i += 4;
+ }
+
+ if (defaultSampleSizePresent) {
+ result.defaultSampleSize = view.getUint32(i);
+ i += 4;
+ }
+
+ if (defaultSampleFlagsPresent) {
+ result.defaultSampleFlags = view.getUint32(i);
+ }
+
+ if (durationIsEmpty) {
+ result.durationIsEmpty = true;
+ }
+
+ if (!baseDataOffsetPresent && defaultBaseIsMoof) {
+ result.baseDataOffsetIsMoof = true;
+ }
+
+ return result;
+ };
+
+ var parseTfhd = tfhd;
+ var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
+ var win;
+
+ if (typeof window !== "undefined") {
+ win = window;
+ } else if (typeof commonjsGlobal !== "undefined") {
+ win = commonjsGlobal;
+ } else if (typeof self !== "undefined") {
+ win = self;
+ } else {
+ win = {};
+ }
+
+ var window_1 = win;
+ var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
+ var CaptionStream = captionStream.CaptionStream;
+ /**
+ * Maps an offset in the mdat to a sample based on the size of the samples.
+ * Assumes that `parseSamples` has been called first.
+ *
+ * @param {Number} offset - The offset into the mdat
+ * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
+ * @return {?Object} The matching sample, or null if no match was found.
+ *
+ * @see ISO-BMFF-12/2015, Section 8.8.8
+ **/
+
+ var mapToSample = function mapToSample(offset, samples) {
+ var approximateOffset = offset;
+
+ for (var i = 0; i < samples.length; i++) {
+ var sample = samples[i];
+
+ if (approximateOffset < sample.size) {
+ return sample;
+ }
+
+ approximateOffset -= sample.size;
+ }
+
+ return null;
+ };
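+ // For instance, with samples of sizes 10 and 20 bytes, an mdat offset of 15 falls past
+ // the first sample and maps to the second one:
+ //
+ //   mapToSample(15, [{ size: 10 }, { size: 20 }]); // => { size: 20 }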
+ /**
+ * Finds SEI nal units contained in a Media Data Box.
+ * Assumes that `parseSamples` has been called first.
+ *
+ * @param {Uint8Array} avcStream - The bytes of the mdat
+ * @param {Object[]} samples - The samples parsed out by `parseSamples`
+ * @param {Number} trackId - The trackId of this video track
+ * @return {Object[]} seiNals - the parsed SEI NALUs found.
+ * The contents of the seiNal should match what is expected by
+ * CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
+ *
+ * @see ISO-BMFF-12/2015, Section 8.1.1
+ * @see Rec. ITU-T H.264, 7.3.2.3.1
+ **/
+
+
+ var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
+ var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
+ result = {
+ logs: [],
+ seiNals: []
+ },
+ seiNal,
+ i,
+ length,
+ lastMatchedSample;
+
+ for (i = 0; i + 4 < avcStream.length; i += length) {
+ length = avcView.getUint32(i);
+ i += 4; // Bail if this doesn't appear to be an H264 stream
+
+ if (length <= 0) {
+ continue;
+ }
+
+ switch (avcStream[i] & 0x1F) {
+ case 0x06:
+ var data = avcStream.subarray(i + 1, i + 1 + length);
+ var matchingSample = mapToSample(i, samples);
+ seiNal = {
+ nalUnitType: 'sei_rbsp',
+ size: length,
+ data: data,
+ escapedRBSP: discardEmulationPreventionBytes(data),
+ trackId: trackId
+ };
+
+ if (matchingSample) {
+ seiNal.pts = matchingSample.pts;
+ seiNal.dts = matchingSample.dts;
+ lastMatchedSample = matchingSample;
+ } else if (lastMatchedSample) {
+ // If a matching sample cannot be found, use the last
+ // sample's values as they should be as close as possible
+ seiNal.pts = lastMatchedSample.pts;
+ seiNal.dts = lastMatchedSample.dts;
+ } else {
+ result.logs.push({
+ level: 'warn',
+ message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
+ });
+ break;
+ }
+
+ result.seiNals.push(seiNal);
+ break;
+ }
+ }
+
+ return result;
+ };
+ /**
+ * Parses sample information out of Track Run Boxes and calculates
+ * the absolute presentation and decode timestamps of each sample.
+ *
+ * @param {Array} truns - The Trun Run boxes to be parsed
+ * @param {Number|BigInt} baseMediaDecodeTime - base media decode time from tfdt
+ * @see ISO-BMFF-12/2015, Section 8.8.12
+ * @param {Object} tfhd - The parsed Track Fragment Header
+ * @see inspect.parseTfhd
+ * @return {Object[]} the parsed samples
+ *
+ * @see ISO-BMFF-12/2015, Section 8.8.8
+ **/
+
+
+ var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
+ var currentDts = baseMediaDecodeTime;
+ var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
+ var defaultSampleSize = tfhd.defaultSampleSize || 0;
+ var trackId = tfhd.trackId;
+ var allSamples = [];
+ truns.forEach(function (trun) {
+ // Note: We currently do not parse the sample table as well
+ // as the trun. It's possible some sources will require this.
+ // moov > trak > mdia > minf > stbl
+ var trackRun = parseTrun(trun);
+ var samples = trackRun.samples;
+ samples.forEach(function (sample) {
+ if (sample.duration === undefined) {
+ sample.duration = defaultSampleDuration;
+ }
+
+ if (sample.size === undefined) {
+ sample.size = defaultSampleSize;
+ }
+
+ sample.trackId = trackId;
+ sample.dts = currentDts;
+
+ if (sample.compositionTimeOffset === undefined) {
+ sample.compositionTimeOffset = 0;
+ }
+
+ if (typeof currentDts === 'bigint') {
+ sample.pts = currentDts + window_1.BigInt(sample.compositionTimeOffset);
+ currentDts += window_1.BigInt(sample.duration);
+ } else {
+ sample.pts = currentDts + sample.compositionTimeOffset;
+ currentDts += sample.duration;
+ }
+ });
+ allSamples = allSamples.concat(samples);
+ });
+ return allSamples;
+ };
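+ // The timing arithmetic above is cumulative: with a baseMediaDecodeTime of 90000 and two
+ // samples of duration 3000 each (compositionTimeOffset 0), the samples are assigned
+ // dts/pts values of 90000 and 93000; a non-zero compositionTimeOffset shifts only the
+ // pts of its own sample.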
+ /**
+ * Parses out caption nals from an FMP4 segment's video tracks.
+ *
+ * @param {Uint8Array} segment - The bytes of a single segment
+ * @param {Number} videoTrackId - The trackId of a video track in the segment
+ * @return {Object.<Number, Object[]>} A mapping of video trackId to
+ * a list of seiNals found in that track
+ **/
+
+
+ var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
+ // To get the samples
+ var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units
+
+ var mdats = findBox_1(segment, ['mdat']);
+ var captionNals = {};
+ var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs
+
+ mdats.forEach(function (mdat, index) {
+ var matchingTraf = trafs[index];
+ mdatTrafPairs.push({
+ mdat: mdat,
+ traf: matchingTraf
+ });
+ });
+ mdatTrafPairs.forEach(function (pair) {
+ var mdat = pair.mdat;
+ var traf = pair.traf;
+ var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf
+
+ var headerInfo = parseTfhd(tfhd[0]);
+ var trackId = headerInfo.trackId;
+ var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
+
+ var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
+ var truns = findBox_1(traf, ['trun']);
+ var samples;
+ var result; // Only parse video data for the chosen video track
+
+ if (videoTrackId === trackId && truns.length > 0) {
+ samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
+ result = findSeiNals(mdat, samples, trackId);
+
+ if (!captionNals[trackId]) {
+ captionNals[trackId] = {
+ seiNals: [],
+ logs: []
+ };
+ }
+
+ captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
+ captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
+ }
+ });
+ return captionNals;
+ };
+ /**
+ * Parses out inband captions from an MP4 container and returns
+ * caption objects that can be used by WebVTT and the TextTrack API.
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
+ * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
+ *
+ * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
+ * @param {Number} trackId - The id of the video track to parse
+ * @param {Number} timescale - The timescale for the video track from the init segment
+ *
+ * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
+ * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
+ * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
+ * @return {String} parsedCaptions[].text - The visible content of the caption
+ **/
+
+
+ var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
+ var captionNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
+
+ if (trackId === null) {
+ return null;
+ }
+
+ captionNals = parseCaptionNals(segment, trackId);
+ var trackNals = captionNals[trackId] || {};
+ return {
+ seiNals: trackNals.seiNals,
+ logs: trackNals.logs,
+ timescale: timescale
+ };
+ };
+ /**
+ * Converts SEI NALUs into captions that can be used by video.js
+ **/
+
+
+ var CaptionParser = function CaptionParser() {
+ var isInitialized = false;
+ var captionStream; // Stores segments seen before trackId and timescale are set
+
+ var segmentCache; // Stores video track ID of the track being parsed
+
+ var trackId; // Stores the timescale of the track being parsed
+
+ var timescale; // Stores captions parsed so far
+
+ var parsedCaptions; // Stores whether we are receiving partial data or not
+
+ var parsingPartial;
+ /**
+ * A method to indicate whether a CaptionParser has been initialized
+ * @returns {Boolean}
+ **/
+
+ this.isInitialized = function () {
+ return isInitialized;
+ };
+ /**
+ * Initializes the underlying CaptionStream, SEI NAL parsing
+ * and management, and caption collection
+ **/
+
+
+ this.init = function (options) {
+ captionStream = new CaptionStream();
+ isInitialized = true;
+ parsingPartial = options ? options.isPartial : false; // Collect dispatched captions
+
+ captionStream.on('data', function (event) {
+ // Convert to seconds in the source's timescale
+ event.startTime = event.startPts / timescale;
+ event.endTime = event.endPts / timescale;
+ parsedCaptions.captions.push(event);
+ parsedCaptions.captionStreams[event.stream] = true;
+ });
+ captionStream.on('log', function (log) {
+ parsedCaptions.logs.push(log);
+ });
+ };
+ /**
+ * Determines if a new video track will be selected
+ * or if the timescale changed
+ * @return {Boolean}
+ **/
+
+
+ this.isNewInit = function (videoTrackIds, timescales) {
+ if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
+ return false;
+ }
+
+ return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
+ };
+ /**
+ * Parses out SEI captions and interacts with underlying
+ * CaptionStream to return dispatched captions
+ *
+ * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
+ * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
+ * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
+ * @see parseEmbeddedCaptions
+ * @see m2ts/caption-stream.js
+ **/
+
+
+ this.parse = function (segment, videoTrackIds, timescales) {
+ var parsedData;
+
+ if (!this.isInitialized()) {
+ return null; // This is not likely to be a video segment
+ } else if (!videoTrackIds || !timescales) {
+ return null;
+ } else if (this.isNewInit(videoTrackIds, timescales)) {
+ // Use the first video track only as there is no
+ // mechanism to switch to other video tracks
+ trackId = videoTrackIds[0];
+ timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
+ // data until we have one.
+ // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
+ } else if (trackId === null || !timescale) {
+ segmentCache.push(segment);
+ return null;
+ } // Now that a timescale and trackId is set, parse cached segments
+
+
+ while (segmentCache.length > 0) {
+ var cachedSegment = segmentCache.shift();
+ this.parse(cachedSegment, videoTrackIds, timescales);
+ }
+
+ parsedData = parseEmbeddedCaptions(segment, trackId, timescale);
+
+ if (parsedData && parsedData.logs) {
+ parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
+ }
+
+ if (parsedData === null || !parsedData.seiNals) {
+ if (parsedCaptions.logs.length) {
+ return {
+ logs: parsedCaptions.logs,
+ captions: [],
+ captionStreams: []
+ };
+ }
+
+ return null;
+ }
+
+ this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched
+
+ this.flushStream();
+ return parsedCaptions;
+ };
+ /**
+ * Pushes SEI NALUs onto CaptionStream
+ * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
+ * Assumes that `parseCaptionNals` has been called first
+ * @see m2ts/caption-stream.js
+ **/
+
+
+ this.pushNals = function (nals) {
+ if (!this.isInitialized() || !nals || nals.length === 0) {
+ return null;
+ }
+
+ nals.forEach(function (nal) {
+ captionStream.push(nal);
+ });
+ };
+ /**
+ * Flushes underlying CaptionStream to dispatch processed, displayable captions
+ * @see m2ts/caption-stream.js
+ **/
+
+
+ this.flushStream = function () {
+ if (!this.isInitialized()) {
+ return null;
+ }
+
+ if (!parsingPartial) {
+ captionStream.flush();
+ } else {
+ captionStream.partialFlush();
+ }
+ };
+ /**
+ * Reset caption buckets for new data
+ **/
+
+
+ this.clearParsedCaptions = function () {
+ parsedCaptions.captions = [];
+ parsedCaptions.captionStreams = {};
+ parsedCaptions.logs = [];
+ };
+ /**
+ * Resets underlying CaptionStream
+ * @see m2ts/caption-stream.js
+ **/
+
+
+ this.resetCaptionStream = function () {
+ if (!this.isInitialized()) {
+ return null;
+ }
+
+ captionStream.reset();
+ };
+ /**
+ * Convenience method to clear all captions flushed from the
+ * CaptionStream and still being parsed
+ * @see m2ts/caption-stream.js
+ **/
+
+
+ this.clearAllCaptions = function () {
+ this.clearParsedCaptions();
+ this.resetCaptionStream();
+ };
+ /**
+ * Reset caption parser
+ **/
+
+
+ this.reset = function () {
+ segmentCache = [];
+ trackId = null;
+ timescale = null;
+
+ if (!parsedCaptions) {
+ parsedCaptions = {
+ captions: [],
+ // CC1, CC2, CC3, CC4
+ captionStreams: {},
+ logs: []
+ };
+ } else {
+ this.clearParsedCaptions();
+ }
+
+ this.resetCaptionStream();
+ };
+
+ this.reset();
+ };
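+ // Example (illustrative sketch): the parser is initialized once, then fed fMP4 segments
+ // along with the video track ids and timescales taken from the init segment (see
+ // probe.videoTrackIds and probe.timescale below); segmentBytes is a placeholder Uint8Array.
+ //
+ //   var parser = new CaptionParser();
+ //   parser.init();
+ //   var parsed = parser.parse(segmentBytes, [1], { 1: 90000 });
+ //   // parsed.captions holds cue-like objects with startTime, endTime, text and stream
+ //   parser.clearAllCaptions(); // e.g. after seeking outside the buffered range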
+
+ var captionParser = CaptionParser;
+ var toUnsigned = bin.toUnsigned;
+ var toHexString = bin.toHexString;
+ var getUint64 = numbers.getUint64;
+ var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader;
+ /**
+ * Parses an MP4 initialization segment and extracts the timescale
+ * values for any declared tracks. Timescale values indicate the
+ * number of clock ticks per second to assume for time-based values
+ * elsewhere in the MP4.
+ *
+ * To determine the start time of an MP4, you need two pieces of
+ * information: the timescale unit and the earliest base media decode
+ * time. Multiple timescales can be specified within an MP4 but the
+ * base media decode time is always expressed in the timescale from
+ * the media header box for the track:
+ * ```
+ * moov > trak > mdia > mdhd.timescale
+ * ```
+ * @param init {Uint8Array} the bytes of the init segment
+ * @return {object} a hash of track ids to timescale values or null if
+ * the init segment is malformed.
+ */
+
+ timescale = function timescale(init) {
+ var result = {},
+ traks = findBox_1(init, ['moov', 'trak']); // mdhd timescale
+
+ return traks.reduce(function (result, trak) {
+ var tkhd, version, index, id, mdhd;
+ tkhd = findBox_1(trak, ['tkhd'])[0];
+
+ if (!tkhd) {
+ return null;
+ }
+
+ version = tkhd[0];
+ index = version === 0 ? 12 : 20;
+ id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
+ mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
+
+ if (!mdhd) {
+ return null;
+ }
+
+ version = mdhd[0];
+ index = version === 0 ? 12 : 20;
+ result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
+ return result;
+ }, result);
+ };
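+ // Illustrative result: for an init segment declaring a 90kHz video track with id 1 and a
+ // 48kHz audio track with id 2 (initSegmentBytes is a placeholder Uint8Array):
+ //
+ //   timescale(initSegmentBytes); // => { 1: 90000, 2: 48000 }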
+ /**
+ * Determine the base media decode start time, in seconds, for an MP4
+ * fragment. If multiple fragments are specified, the earliest time is
+ * returned.
+ *
+ * The base media decode time can be parsed from track fragment
+ * metadata:
+ * ```
+ * moof > traf > tfdt.baseMediaDecodeTime
+ * ```
+ * It requires the timescale value from the mdhd to interpret.
+ *
+ * @param timescale {object} a hash of track ids to timescale values.
+ * @return {number} the earliest base media decode start time for the
+ * fragment, in seconds
+ */
+
+
+ startTime = function startTime(timescale, fragment) {
+ var trafs; // we need info from two children of each track fragment box
+
+ trafs = findBox_1(fragment, ['moof', 'traf']); // determine the start times for each track
+
+ var lowestTime = trafs.reduce(function (acc, traf) {
+ var tfhd = findBox_1(traf, ['tfhd'])[0]; // get the track id from the tfhd
+
+ var id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified
+
+ var scale = timescale[id] || 90e3; // get the base media decode time from the tfdt
+
+ var tfdt = findBox_1(traf, ['tfdt'])[0];
+ var dv = new DataView(tfdt.buffer, tfdt.byteOffset, tfdt.byteLength);
+ var baseTime; // version 1 is 64 bit
+
+ if (tfdt[0] === 1) {
+ baseTime = getUint64(tfdt.subarray(4, 12));
+ } else {
+ baseTime = dv.getUint32(4);
+ } // convert base time to seconds if it is a valid number.
+
+
+ var seconds;
+
+ if (typeof baseTime === 'bigint') {
+ seconds = baseTime / window_1.BigInt(scale);
+ } else if (typeof baseTime === 'number' && !isNaN(baseTime)) {
+ seconds = baseTime / scale;
+ }
+
+ if (seconds < Number.MAX_SAFE_INTEGER) {
+ seconds = Number(seconds);
+ }
+
+ if (seconds < acc) {
+ acc = seconds;
+ }
+
+ return acc;
+ }, Infinity);
+ return typeof lowestTime === 'bigint' || isFinite(lowestTime) ? lowestTime : 0;
+ };
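+ // Worked example: if the earliest tfdt in the fragment carries a baseMediaDecodeTime of
+ // 900000 ticks and that track's timescale is 90000, the returned start time is
+ // 900000 / 90000 = 10 seconds.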
+ /**
+ * Determine the composition start, in seconds, for an MP4
+ * fragment.
+ *
+ * The composition start time of a fragment can be calculated using the base
+ * media decode time, composition time offset, and timescale, as follows:
+ *
+ * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
+ *
+ * All of the aforementioned information is contained within a media fragment's
+ * `traf` box, except for timescale info, which comes from the initialization
+ * segment, so a track id (also contained within a `traf`) is also necessary to
+ * associate it with a timescale
+ *
+ *
+ * @param timescales {object} - a hash of track ids to timescale values.
+ * @param fragment {Uint8Array} - the bytes of a media segment
+ * @return {number} the composition start time for the fragment, in seconds
+ **/
+
+
+ compositionStartTime = function compositionStartTime(timescales, fragment) {
+ var trafBoxes = findBox_1(fragment, ['moof', 'traf']);
+ var baseMediaDecodeTime = 0;
+ var compositionTimeOffset = 0;
+ var trackId;
+
+ if (trafBoxes && trafBoxes.length) {
+ // The spec states that track run samples contained within a `traf` box are contiguous, but
+ // it does not explicitly state whether the `traf` boxes themselves are contiguous.
+ // We will assume that they are, so we only need the first to calculate start time.
+ var tfhd = findBox_1(trafBoxes[0], ['tfhd'])[0];
+ var trun = findBox_1(trafBoxes[0], ['trun'])[0];
+ var tfdt = findBox_1(trafBoxes[0], ['tfdt'])[0];
+
+ if (tfhd) {
+ var parsedTfhd = parseTfhd(tfhd);
+ trackId = parsedTfhd.trackId;
+ }
+
+ if (tfdt) {
+ var parsedTfdt = parseTfdt(tfdt);
+ baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
+ }
+
+ if (trun) {
+ var parsedTrun = parseTrun(trun);
+
+ if (parsedTrun.samples && parsedTrun.samples.length) {
+ compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
+ }
+ }
+ } // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
+ // specified.
+
+
+ var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds
+
+ if (typeof baseMediaDecodeTime === 'bigint') {
+ compositionTimeOffset = window_1.BigInt(compositionTimeOffset);
+ timescale = window_1.BigInt(timescale);
+ }
+
+ var result = (baseMediaDecodeTime + compositionTimeOffset) / timescale;
+
+ if (typeof result === 'bigint' && result < Number.MAX_SAFE_INTEGER) {
+ result = Number(result);
+ }
+
+ return result;
+ };
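+ // Worked example: a baseMediaDecodeTime of 900000, a first-sample compositionTimeOffset
+ // of 3000 and a 90000 timescale yield (900000 + 3000) / 90000 = 10.0333... seconds.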
+ /**
+ * Find the trackIds of the video tracks in this source.
+ * Found by parsing the Handler Reference and Track Header Boxes:
+ * moov > trak > mdia > hdlr
+ * moov > trak > tkhd
+ *
+ * @param {Uint8Array} init - The bytes of the init segment for this source
+ * @return {Number[]} A list of trackIds
+ *
+ * @see ISO-BMFF-12/2015, Section 8.4.3
+ **/
+
+
+ getVideoTrackIds = function getVideoTrackIds(init) {
+ var traks = findBox_1(init, ['moov', 'trak']);
+ var videoTrackIds = [];
+ traks.forEach(function (trak) {
+ var hdlrs = findBox_1(trak, ['mdia', 'hdlr']);
+ var tkhds = findBox_1(trak, ['tkhd']);
+ hdlrs.forEach(function (hdlr, index) {
+ var handlerType = parseType_1(hdlr.subarray(8, 12));
+ var tkhd = tkhds[index];
+ var view;
+ var version;
+ var trackId;
+
+ if (handlerType === 'vide') {
+ view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
+ version = view.getUint8(0);
+ trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);
+ videoTrackIds.push(trackId);
+ }
+ });
+ });
+ return videoTrackIds;
+ };
+
+ getTimescaleFromMediaHeader = function getTimescaleFromMediaHeader(mdhd) {
+ // mdhd is a FullBox, meaning it will have its own version as the first byte
+ var version = mdhd[0];
+ var index = version === 0 ? 12 : 20;
+ return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
+ };
+ /**
+ * Get all the video, audio, and hint tracks from a non fragmented
+ * mp4 segment
+ */
+
+
+ getTracks = function getTracks(init) {
+ var traks = findBox_1(init, ['moov', 'trak']);
+ var tracks = [];
+ traks.forEach(function (trak) {
+ var track = {};
+ var tkhd = findBox_1(trak, ['tkhd'])[0];
+ var view, tkhdVersion; // id
+
+ if (tkhd) {
+ view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
+ tkhdVersion = view.getUint8(0);
+ track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
+ }
+
+ var hdlr = findBox_1(trak, ['mdia', 'hdlr'])[0]; // type
+
+ if (hdlr) {
+ var type = parseType_1(hdlr.subarray(8, 12));
+
+ if (type === 'vide') {
+ track.type = 'video';
+ } else if (type === 'soun') {
+ track.type = 'audio';
+ } else {
+ track.type = type;
+ }
+ } // codec
+
+
+ var stsd = findBox_1(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
+
+ if (stsd) {
+ var sampleDescriptions = stsd.subarray(8); // gives the codec type string
+
+ track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
+ var codecBox = findBox_1(sampleDescriptions, [track.codec])[0];
+ var codecConfig, codecConfigType;
+
+ if (codecBox) {
+ // https://tools.ietf.org/html/rfc6381#section-3.3
+ if (/^[asm]vc[1-9]$/i.test(track.codec)) {
+ // we don't need anything but the "config" parameter of the
+ // avc1 codecBox
+ codecConfig = codecBox.subarray(78);
+ codecConfigType = parseType_1(codecConfig.subarray(4, 8));
+
+ if (codecConfigType === 'avcC' && codecConfig.length > 11) {
+ track.codec += '.'; // left padded with zeroes for single digit hex
+ // profile idc
+
+ track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags
+
+ track.codec += toHexString(codecConfig[10]); // level idc
+
+ track.codec += toHexString(codecConfig[11]);
+ } else {
+ // TODO: show a warning that we couldn't parse the codec
+ // and are using the default
+ track.codec = 'avc1.4d400d';
+ }
+ } else if (/^mp4[a,v]$/i.test(track.codec)) {
+ // we do not need anything but the streamDescriptor of the mp4a codecBox
+ codecConfig = codecBox.subarray(28);
+ codecConfigType = parseType_1(codecConfig.subarray(4, 8));
+
+ if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
+ track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit
+
+ track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
+ } else {
+ // TODO: show a warning that we couldn't parse the codec
+ // and are using the default
+ track.codec = 'mp4a.40.2';
+ }
+ } else {
+ // flac, opus, etc
+ track.codec = track.codec.toLowerCase();
+ }
+ }
+ }
+
+ var mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
+
+ if (mdhd) {
+ track.timescale = getTimescaleFromMediaHeader(mdhd);
+ }
+
+ tracks.push(track);
+ });
+ return tracks;
+ };
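+ // Illustrative output for an init segment with one AVC video track and one AAC audio
+ // track (the exact codec strings depend on the source; initSegmentBytes is a placeholder):
+ //
+ //   getTracks(initSegmentBytes);
+ //   // => [{ id: 1, type: 'video', codec: 'avc1.64001f', timescale: 90000 },
+ //   //     { id: 2, type: 'audio', codec: 'mp4a.40.2', timescale: 48000 }]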
+
+ var probe$2 = {
+ // export mp4 inspector's findBox and parseType for backwards compatibility
+ findBox: findBox_1,
+ parseType: parseType_1,
+ timescale: timescale,
+ startTime: startTime,
+ compositionStartTime: compositionStartTime,
+ videoTrackIds: getVideoTrackIds,
+ tracks: getTracks,
+ getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
+ };
+
+ var parsePid = function parsePid(packet) {
+ var pid = packet[1] & 0x1f;
+ pid <<= 8;
+ pid |= packet[2];
+ return pid;
+ };
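+ // For example, a TS packet whose first bytes are 0x47, 0x41, 0x00 carries
+ // PID ((0x41 & 0x1f) << 8) | 0x00 = 0x100 = 256:
+ //
+ //   parsePid(new Uint8Array([0x47, 0x41, 0x00, 0x10])); // => 256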
+
+ var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
+ return !!(packet[1] & 0x40);
+ };
+
+ var parseAdaptionField = function parseAdaptionField(packet) {
+ var offset = 0; // if an adaption field is present, its length is specified by the
+ // fifth byte of the TS packet header. The adaptation field is
+ // used to add stuffing to PES packets that don't fill a complete
+ // TS packet, and to specify some forms of timing and control data
+ // that we do not currently use.
+
+ if ((packet[3] & 0x30) >>> 4 > 0x01) {
+ offset += packet[4] + 1;
+ }
+
+ return offset;
+ };
+
+ var parseType = function parseType(packet, pmtPid) {
+ var pid = parsePid(packet);
+
+ if (pid === 0) {
+ return 'pat';
+ } else if (pid === pmtPid) {
+ return 'pmt';
+ } else if (pmtPid) {
+ return 'pes';
+ }
+
+ return null;
+ };
+
+ var parsePat = function parsePat(packet) {
+ var pusi = parsePayloadUnitStartIndicator(packet);
+ var offset = 4 + parseAdaptionField(packet);
+
+ if (pusi) {
+ offset += packet[offset] + 1;
+ }
+
+ return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
+ };
+
+ var parsePmt = function parsePmt(packet) {
+ var programMapTable = {};
+ var pusi = parsePayloadUnitStartIndicator(packet);
+ var payloadOffset = 4 + parseAdaptionField(packet);
+
+ if (pusi) {
+ payloadOffset += packet[payloadOffset] + 1;
+ } // PMTs can be sent ahead of the time when they should actually
+ // take effect. We don't believe this should ever be the case
+ // for HLS but we'll ignore "forward" PMT declarations if we see
+ // them. Future PMT declarations have the current_next_indicator
+ // set to zero.
+
+
+ if (!(packet[payloadOffset + 5] & 0x01)) {
+ return;
+ }
+
+ var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current section
+
+ sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
+ tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
+ // long the program info descriptors are
+
+ programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping table
+
+ var offset = 12 + programInfoLength;
+
+ while (offset < tableEnd) {
+ var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_type
+
+ programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry
+ // skip past the elementary stream descriptors, if present
+
+ offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
+ }
+
+ return programMapTable;
+ };
+
+ var parsePesType = function parsePesType(packet, programMapTable) {
+ var pid = parsePid(packet);
+ var type = programMapTable[pid];
+
+ switch (type) {
+ case streamTypes.H264_STREAM_TYPE:
+ return 'video';
+
+ case streamTypes.ADTS_STREAM_TYPE:
+ return 'audio';
+
+ case streamTypes.METADATA_STREAM_TYPE:
+ return 'timed-metadata';
+
+ default:
+ return null;
+ }
+ };
+
+ var parsePesTime = function parsePesTime(packet) {
+ var pusi = parsePayloadUnitStartIndicator(packet);
+
+ if (!pusi) {
+ return null;
+ }
+
+ var offset = 4 + parseAdaptionField(packet);
+
+ if (offset >= packet.byteLength) {
+ // From the H.222.0 MPEG-TS spec
+ // "For transport stream packets carrying PES packets, stuffing is needed when there
+ // is insufficient PES packet data to completely fill the transport stream packet
+ // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
+ // the sum of the lengths of the data elements in it, so that the payload bytes
+ // remaining after the adaptation field exactly accommodates the available PES packet
+ // data."
+ //
+ // If the offset is >= the length of the packet, then the packet contains no data
+ // and instead is just adaption field stuffing bytes
+ return null;
+ }
+
+ var pes = null;
+ var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value
+ // and a DTS value. Determine what combination of values is
+ // available to work with.
+
+ ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
+ // performs all bitwise operations on 32-bit integers but javascript
+ // supports a much greater range (52-bits) of integer using standard
+ // mathematical operations.
+ // We construct a 31-bit value using bitwise operators over the 31
+ // most significant bits and then multiply by 4 (equal to a left-shift
+ // of 2) before we add the final 2 least significant bits of the
+ // timestamp (equal to an OR.)
+
+ if (ptsDtsFlags & 0xC0) {
+ pes = {}; // the PTS and DTS are not written out directly. For information
+ // on how they are encoded, see
+ // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
+
+ pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
+ pes.pts *= 4; // Left shift by 2
+
+ pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
+
+ pes.dts = pes.pts;
+
+ if (ptsDtsFlags & 0x40) {
+ pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
+ pes.dts *= 4; // Left shift by 2
+
+ pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
+ }
+ }
+
+ return pes;
+ };
+
+ var parseNalUnitType = function parseNalUnitType(type) {
+ switch (type) {
+ case 0x05:
+ return 'slice_layer_without_partitioning_rbsp_idr';
+
+ case 0x06:
+ return 'sei_rbsp';
+
+ case 0x07:
+ return 'seq_parameter_set_rbsp';
+
+ case 0x08:
+ return 'pic_parameter_set_rbsp';
+
+ case 0x09:
+ return 'access_unit_delimiter_rbsp';
+
+ default:
+ return null;
+ }
+ };
+
+ var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
+ var offset = 4 + parseAdaptionField(packet);
+ var frameBuffer = packet.subarray(offset);
+ var frameI = 0;
+ var frameSyncPoint = 0;
+ var foundKeyFrame = false;
+ var nalType; // advance the sync point to a NAL start, if necessary
+
+ for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
+ if (frameBuffer[frameSyncPoint + 2] === 1) {
+ // the sync point is properly aligned
+ frameI = frameSyncPoint + 5;
+ break;
+ }
+ }
+
+ while (frameI < frameBuffer.byteLength) {
+ // look at the current byte to determine if we've hit the end of
+ // a NAL unit boundary
+ switch (frameBuffer[frameI]) {
+ case 0:
+ // skip past non-sync sequences
+ if (frameBuffer[frameI - 1] !== 0) {
+ frameI += 2;
+ break;
+ } else if (frameBuffer[frameI - 2] !== 0) {
+ frameI++;
+ break;
+ }
+
+ if (frameSyncPoint + 3 !== frameI - 2) {
+ nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
+
+ if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
+ foundKeyFrame = true;
+ }
+ } // drop trailing zeroes
+
+
+ do {
+ frameI++;
+ } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
+
+ frameSyncPoint = frameI - 2;
+ frameI += 3;
+ break;
+
+ case 1:
+ // skip past non-sync sequences
+ if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
+ frameI += 3;
+ break;
+ }
+
+ nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
+
+ if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
+ foundKeyFrame = true;
+ }
+
+ frameSyncPoint = frameI - 2;
+ frameI += 3;
+ break;
+
+ default:
+ // the current byte isn't a one or zero, so it cannot be part
+ // of a sync sequence
+ frameI += 3;
+ break;
+ }
+ }
+
+ frameBuffer = frameBuffer.subarray(frameSyncPoint);
+ frameI -= frameSyncPoint;
+ frameSyncPoint = 0; // parse the final nal
+
+ if (frameBuffer && frameBuffer.byteLength > 3) {
+ nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
+
+ if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
+ foundKeyFrame = true;
+ }
+ }
+
+ return foundKeyFrame;
+ };
+
+ var probe$1 = {
+ parseType: parseType,
+ parsePat: parsePat,
+ parsePmt: parsePmt,
+ parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
+ parsePesType: parsePesType,
+ parsePesTime: parsePesTime,
+ videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
+ };
+ var handleRollover = timestampRolloverStream.handleRollover;
+ var probe = {};
+ probe.ts = probe$1;
+ probe.aac = utils;
+ var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
+ var MP2T_PACKET_LENGTH = 188,
+ // bytes
+ SYNC_BYTE = 0x47;
+ /**
+ * walks through segment data looking for pat and pmt packets to parse out
+ * program map table information
+ */
+
+ var parsePsi_ = function parsePsi_(bytes, pmt) {
+ var startIndex = 0,
+ endIndex = MP2T_PACKET_LENGTH,
+ packet,
+ type;
+
+ while (endIndex < bytes.byteLength) {
+ // Look for a pair of start and end sync bytes in the data..
+ if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
+ // We found a packet
+ packet = bytes.subarray(startIndex, endIndex);
+ type = probe.ts.parseType(packet, pmt.pid);
+
+ switch (type) {
+ case 'pat':
+ pmt.pid = probe.ts.parsePat(packet);
+ break;
+
+ case 'pmt':
+ var table = probe.ts.parsePmt(packet);
+ pmt.table = pmt.table || {};
+ Object.keys(table).forEach(function (key) {
+ pmt.table[key] = table[key];
+ });
+ break;
+ }
+
+ startIndex += MP2T_PACKET_LENGTH;
+ endIndex += MP2T_PACKET_LENGTH;
+ continue;
+ } // If we get here, we have somehow become de-synchronized and we need to step
+ // forward one byte at a time until we find a pair of sync bytes that denote
+ // a packet
+
+
+ startIndex++;
+ endIndex++;
+ }
+ };
+ /**
+ * walks through the segment data from the start and end to get timing information
+ * for the first and last audio pes packets
+ */
+
+
+ var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
+ var startIndex = 0,
+ endIndex = MP2T_PACKET_LENGTH,
+ packet,
+ type,
+ pesType,
+ pusi,
+ parsed;
+ var endLoop = false; // Start walking from start of segment to get first audio packet
+
+ while (endIndex <= bytes.byteLength) {
+ // Look for a pair of start and end sync bytes in the data..
+ if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
+ // We found a packet
+ packet = bytes.subarray(startIndex, endIndex);
+ type = probe.ts.parseType(packet, pmt.pid);
+
+ switch (type) {
+ case 'pes':
+ pesType = probe.ts.parsePesType(packet, pmt.table);
+ pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
+
+ if (pesType === 'audio' && pusi) {
+ parsed = probe.ts.parsePesTime(packet);
+
+ if (parsed) {
+ parsed.type = 'audio';
+ result.audio.push(parsed);
+ endLoop = true;
+ }
+ }
+
+ break;
+ }
+
+ if (endLoop) {
+ break;
+ }
+
+ startIndex += MP2T_PACKET_LENGTH;
+ endIndex += MP2T_PACKET_LENGTH;
+ continue;
+ } // If we get here, we have somehow become de-synchronized and we need to step
+ // forward one byte at a time until we find a pair of sync bytes that denote
+ // a packet
+
+
+ startIndex++;
+ endIndex++;
+ } // Start walking from end of segment to get last audio packet
+
+
+ endIndex = bytes.byteLength;
+ startIndex = endIndex - MP2T_PACKET_LENGTH;
+ endLoop = false;
+
+ while (startIndex >= 0) {
+ // Look for a pair of start and end sync bytes in the data..
+ if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
+ // We found a packet
+ packet = bytes.subarray(startIndex, endIndex);
+ type = probe.ts.parseType(packet, pmt.pid);
+
+ switch (type) {
+ case 'pes':
+ pesType = probe.ts.parsePesType(packet, pmt.table);
+ pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
+
+ if (pesType === 'audio' && pusi) {
+ parsed = probe.ts.parsePesTime(packet);
+
+ if (parsed) {
+ parsed.type = 'audio';
+ result.audio.push(parsed);
+ endLoop = true;
+ }
+ }
+
+ break;
+ }
+
+ if (endLoop) {
+ break;
+ }
+
+ startIndex -= MP2T_PACKET_LENGTH;
+ endIndex -= MP2T_PACKET_LENGTH;
+ continue;
+ } // If we get here, we have somehow become de-synchronized and we need to step
+ // backward one byte at a time until we find a pair of sync bytes that denote
+ // a packet
+
+
+ startIndex--;
+ endIndex--;
+ }
+ };
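+ // Because the first walk pushes the earliest audio PES time and the second walk
+ // pushes the latest, result.audio normally ends up with exactly two entries
+ // (start and end), which is what the probeTs handler later relies on when it
+ // checks `timeInfo.audio.length === 2`. The video walk below follows the same
+ // first/last pattern.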
+ /**
+ * walks through the segment data from the start and end to get timing information
+ * for the first and last video pes packets as well as timing information for the first
+ * key frame.
+ */
+
+
+ var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
+ var startIndex = 0,
+ endIndex = MP2T_PACKET_LENGTH,
+ packet,
+ type,
+ pesType,
+ pusi,
+ parsed,
+ frame,
+ i,
+ pes;
+ var endLoop = false;
+ var currentFrame = {
+ data: [],
+ size: 0
+ }; // Start walking from start of segment to get first video packet
+
+ while (endIndex < bytes.byteLength) {
+ // Look for a pair of start and end sync bytes in the data..
+ if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
+ // We found a packet
+ packet = bytes.subarray(startIndex, endIndex);
+ type = probe.ts.parseType(packet, pmt.pid);
+
+ switch (type) {
+ case 'pes':
+ pesType = probe.ts.parsePesType(packet, pmt.table);
+ pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
+
+ if (pesType === 'video') {
+ if (pusi && !endLoop) {
+ parsed = probe.ts.parsePesTime(packet);
+
+ if (parsed) {
+ parsed.type = 'video';
+ result.video.push(parsed);
+ endLoop = true;
+ }
+ }
+
+ if (!result.firstKeyFrame) {
+ if (pusi) {
+ if (currentFrame.size !== 0) {
+ frame = new Uint8Array(currentFrame.size);
+ i = 0;
+
+ while (currentFrame.data.length) {
+ pes = currentFrame.data.shift();
+ frame.set(pes, i);
+ i += pes.byteLength;
+ }
+
+ if (probe.ts.videoPacketContainsKeyFrame(frame)) {
+ var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
+ // the keyframe seems to work fine with HLS playback
+ // and is definitely preferable to a crash with a TypeError...
+
+ if (firstKeyFrame) {
+ result.firstKeyFrame = firstKeyFrame;
+ result.firstKeyFrame.type = 'video';
+ } else {
+ // eslint-disable-next-line
+ console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
+ }
+ }
+
+ currentFrame.size = 0;
+ }
+ }
+
+ currentFrame.data.push(packet);
+ currentFrame.size += packet.byteLength;
+ }
+ }
+
+ break;
+ }
+
+ if (endLoop && result.firstKeyFrame) {
+ break;
+ }
+
+ startIndex += MP2T_PACKET_LENGTH;
+ endIndex += MP2T_PACKET_LENGTH;
+ continue;
+ } // If we get here, we have somehow become de-synchronized and we need to step
+ // forward one byte at a time until we find a pair of sync bytes that denote
+ // a packet
+
+
+ startIndex++;
+ endIndex++;
+ } // Start walking from end of segment to get last video packet
+
+
+ endIndex = bytes.byteLength;
+ startIndex = endIndex - MP2T_PACKET_LENGTH;
+ endLoop = false;
+
+ while (startIndex >= 0) {
+ // Look for a pair of start and end sync bytes in the data..
+ if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
+ // We found a packet
+ packet = bytes.subarray(startIndex, endIndex);
+ type = probe.ts.parseType(packet, pmt.pid);
+
+ switch (type) {
+ case 'pes':
+ pesType = probe.ts.parsePesType(packet, pmt.table);
+ pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
+
+ if (pesType === 'video' && pusi) {
+ parsed = probe.ts.parsePesTime(packet);
+
+ if (parsed) {
+ parsed.type = 'video';
+ result.video.push(parsed);
+ endLoop = true;
+ }
+ }
+
+ break;
+ }
+
+ if (endLoop) {
+ break;
+ }
+
+ startIndex -= MP2T_PACKET_LENGTH;
+ endIndex -= MP2T_PACKET_LENGTH;
+ continue;
+ } // If we get here, we have somehow become de-synchronized and we need to step
+ // backward one byte at a time until we find a pair of sync bytes that denote
+ // a packet
+
+
+ startIndex--;
+ endIndex--;
+ }
+ };
+ /**
+ * Adjusts the timestamp information for the segment to account for
+ * rollover and converts to seconds based on the PES packet timescale (90kHz clock)
+ */
+
+
+ var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
+ if (segmentInfo.audio && segmentInfo.audio.length) {
+ var audioBaseTimestamp = baseTimestamp;
+
+ if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
+ audioBaseTimestamp = segmentInfo.audio[0].dts;
+ }
+
+ segmentInfo.audio.forEach(function (info) {
+ info.dts = handleRollover(info.dts, audioBaseTimestamp);
+ info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in seconds
+
+ info.dtsTime = info.dts / ONE_SECOND_IN_TS;
+ info.ptsTime = info.pts / ONE_SECOND_IN_TS;
+ });
+ }
+
+ if (segmentInfo.video && segmentInfo.video.length) {
+ var videoBaseTimestamp = baseTimestamp;
+
+ if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
+ videoBaseTimestamp = segmentInfo.video[0].dts;
+ }
+
+ segmentInfo.video.forEach(function (info) {
+ info.dts = handleRollover(info.dts, videoBaseTimestamp);
+ info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in seconds
+
+ info.dtsTime = info.dts / ONE_SECOND_IN_TS;
+ info.ptsTime = info.pts / ONE_SECOND_IN_TS;
+ });
+
+ if (segmentInfo.firstKeyFrame) {
+ var frame = segmentInfo.firstKeyFrame;
+ frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
+ frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in seconds
+
+ frame.dtsTime = frame.dts / ONE_SECOND_IN_TS;
+ frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;
+ }
+ }
+ };
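+ // ONE_SECOND_IN_TS corresponds to the 90kHz PES clock (90000 ticks per second),
+ // so the divisions above are plain unit conversions; a dts of 180000 ticks, for
+ // example, becomes a dtsTime of 2 seconds.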
+ /**
+ * inspects the aac data stream for start and end time information
+ */
+
+
+ var inspectAac_ = function inspectAac_(bytes) {
+ var endLoop = false,
+ audioCount = 0,
+ sampleRate = null,
+ timestamp = null,
+ frameSize = 0,
+ byteIndex = 0,
+ packet;
+
+ while (bytes.length - byteIndex >= 3) {
+ var type = probe.aac.parseType(bytes, byteIndex);
+
+ switch (type) {
+ case 'timed-metadata':
+ // Exit early because we don't have enough to parse
+ // the ID3 tag header
+ if (bytes.length - byteIndex < 10) {
+ endLoop = true;
+ break;
+ }
+
+ frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
+ // to emit a full packet
+
+ if (frameSize > bytes.length) {
+ endLoop = true;
+ break;
+ }
+
+ if (timestamp === null) {
+ packet = bytes.subarray(byteIndex, byteIndex + frameSize);
+ timestamp = probe.aac.parseAacTimestamp(packet);
+ }
+
+ byteIndex += frameSize;
+ break;
+
+ case 'audio':
+ // Exit early because we don't have enough to parse
+ // the ADTS frame header
+ if (bytes.length - byteIndex < 7) {
+ endLoop = true;
+ break;
+ }
+
+ frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
+ // to emit a full packet
+
+ if (frameSize > bytes.length) {
+ endLoop = true;
+ break;
+ }
+
+ if (sampleRate === null) {
+ packet = bytes.subarray(byteIndex, byteIndex + frameSize);
+ sampleRate = probe.aac.parseSampleRate(packet);
+ }
+
+ audioCount++;
+ byteIndex += frameSize;
+ break;
+
+ default:
+ byteIndex++;
+ break;
+ }
+
+ if (endLoop) {
+ return null;
+ }
+ }
+
+ if (sampleRate === null || timestamp === null) {
+ return null;
+ }
+
+ var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
+ var result = {
+ audio: [{
+ type: 'audio',
+ dts: timestamp,
+ pts: timestamp
+ }, {
+ type: 'audio',
+ dts: timestamp + audioCount * 1024 * audioTimescale,
+ pts: timestamp + audioCount * 1024 * audioTimescale
+ }]
+ };
+ return result;
+ };
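+ // Each ADTS AAC frame decodes to 1024 PCM samples, so the second entry above
+ // estimates the end time as start + (audioCount * 1024 samples) converted to
+ // 90kHz ticks via audioTimescale (ONE_SECOND_IN_TS / sampleRate). For example,
+ // at 48000Hz a single frame spans 1024 / 48000 * 90000 = 1920 ticks (~21.3ms).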
+ /**
+ * inspects the transport stream segment data for start and end time information
+ * of the audio and video tracks (when present) as well as the first key frame's
+ * start time.
+ */
+
+
+ var inspectTs_ = function inspectTs_(bytes) {
+ var pmt = {
+ pid: null,
+ table: null
+ };
+ var result = {};
+ parsePsi_(bytes, pmt);
+
+ for (var pid in pmt.table) {
+ if (pmt.table.hasOwnProperty(pid)) {
+ var type = pmt.table[pid];
+
+ switch (type) {
+ case streamTypes.H264_STREAM_TYPE:
+ result.video = [];
+ parseVideoPes_(bytes, pmt, result);
+
+ if (result.video.length === 0) {
+ delete result.video;
+ }
+
+ break;
+
+ case streamTypes.ADTS_STREAM_TYPE:
+ result.audio = [];
+ parseAudioPes_(bytes, pmt, result);
+
+ if (result.audio.length === 0) {
+ delete result.audio;
+ }
+
+ break;
+ }
+ }
+ }
+
+ return result;
+ };
+ /**
+ * Inspects segment byte data and returns an object with start and end timing information
+ *
+ * @param {Uint8Array} bytes The segment byte data
+ * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
+ * timestamps for rollover. This value must be in 90khz clock.
+ * @return {Object} Object containing start and end frame timing info of segment.
+ */
+
+
+ var inspect = function inspect(bytes, baseTimestamp) {
+ var isAacData = probe.aac.isLikelyAacData(bytes);
+ var result;
+
+ if (isAacData) {
+ result = inspectAac_(bytes);
+ } else {
+ result = inspectTs_(bytes);
+ }
+
+ if (!result || !result.audio && !result.video) {
+ return null;
+ }
+
+ adjustTimestamp_(result, baseTimestamp);
+ return result;
+ };
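+ // Illustrative shape of a non-null inspect() result (each field is only present
+ // when the corresponding track was found; firstKeyFrame only for TS segments
+ // with a parseable key frame):
+ //   {
+ //     video: [{ type: 'video', dts, pts, dtsTime, ptsTime }, ...],
+ //     audio: [{ type: 'audio', dts, pts, dtsTime, ptsTime }, ...],
+ //     firstKeyFrame: { type: 'video', dts, pts, dtsTime, ptsTime }
+ //   }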
+
+ var tsInspector = {
+ inspect: inspect,
+ parseAudioPes_: parseAudioPes_
+ };
+ /* global self */
+
+ /**
+ * Re-emits transmuxer events by converting them into messages to the
+ * world outside the worker.
+ *
+ * @param {Object} transmuxer the transmuxer to wire events on
+ * @private
+ */
+
+ var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer) {
+ transmuxer.on('data', function (segment) {
+ // transfer ownership of the underlying ArrayBuffer
+ // instead of doing a copy to save memory
+ // ArrayBuffers are transferable but generic TypedArrays are not
+ // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
+ var initArray = segment.initSegment;
+ segment.initSegment = {
+ data: initArray.buffer,
+ byteOffset: initArray.byteOffset,
+ byteLength: initArray.byteLength
+ };
+ var typedArray = segment.data;
+ segment.data = typedArray.buffer;
+ self.postMessage({
+ action: 'data',
+ segment: segment,
+ byteOffset: typedArray.byteOffset,
+ byteLength: typedArray.byteLength
+ }, [segment.data]);
+ });
+ transmuxer.on('done', function (data) {
+ self.postMessage({
+ action: 'done'
+ });
+ });
+ transmuxer.on('gopInfo', function (gopInfo) {
+ self.postMessage({
+ action: 'gopInfo',
+ gopInfo: gopInfo
+ });
+ });
+ transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
+ var videoSegmentTimingInfo = {
+ start: {
+ decode: clock.videoTsToSeconds(timingInfo.start.dts),
+ presentation: clock.videoTsToSeconds(timingInfo.start.pts)
+ },
+ end: {
+ decode: clock.videoTsToSeconds(timingInfo.end.dts),
+ presentation: clock.videoTsToSeconds(timingInfo.end.pts)
+ },
+ baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
+ };
+
+ if (timingInfo.prependedContentDuration) {
+ videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
+ }
+
+ self.postMessage({
+ action: 'videoSegmentTimingInfo',
+ videoSegmentTimingInfo: videoSegmentTimingInfo
+ });
+ });
+ transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
+ // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
+ var audioSegmentTimingInfo = {
+ start: {
+ decode: clock.videoTsToSeconds(timingInfo.start.dts),
+ presentation: clock.videoTsToSeconds(timingInfo.start.pts)
+ },
+ end: {
+ decode: clock.videoTsToSeconds(timingInfo.end.dts),
+ presentation: clock.videoTsToSeconds(timingInfo.end.pts)
+ },
+ baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
+ };
+
+ if (timingInfo.prependedContentDuration) {
+ audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
+ }
+
+ self.postMessage({
+ action: 'audioSegmentTimingInfo',
+ audioSegmentTimingInfo: audioSegmentTimingInfo
+ });
+ });
+ transmuxer.on('id3Frame', function (id3Frame) {
+ self.postMessage({
+ action: 'id3Frame',
+ id3Frame: id3Frame
+ });
+ });
+ transmuxer.on('caption', function (caption) {
+ self.postMessage({
+ action: 'caption',
+ caption: caption
+ });
+ });
+ transmuxer.on('trackinfo', function (trackInfo) {
+ self.postMessage({
+ action: 'trackinfo',
+ trackInfo: trackInfo
+ });
+ });
+ transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
+ // convert to video TS since we prioritize video time over audio
+ self.postMessage({
+ action: 'audioTimingInfo',
+ audioTimingInfo: {
+ start: clock.videoTsToSeconds(audioTimingInfo.start),
+ end: clock.videoTsToSeconds(audioTimingInfo.end)
+ }
+ });
+ });
+ transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
+ self.postMessage({
+ action: 'videoTimingInfo',
+ videoTimingInfo: {
+ start: clock.videoTsToSeconds(videoTimingInfo.start),
+ end: clock.videoTsToSeconds(videoTimingInfo.end)
+ }
+ });
+ });
+ transmuxer.on('log', function (log) {
+ self.postMessage({
+ action: 'log',
+ log: log
+ });
+ });
+ };
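+ // Note: only segment.data's ArrayBuffer is listed as transferable above, so it
+ // is detached inside the worker after posting; the main-thread handleData_
+ // helper rebuilds Uint8Array views from that buffer plus the
+ // byteOffset/byteLength bookkeeping sent alongside it.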
+ /**
+ * All incoming messages route through this hash. If no function exists
+ * to handle an incoming message, then we ignore the message.
+ *
+ * @class MessageHandlers
+ * @param {Object} options the options to initialize with
+ */
+
+
+ var MessageHandlers = /*#__PURE__*/function () {
+ function MessageHandlers(self, options) {
+ this.options = options || {};
+ this.self = self;
+ this.init();
+ }
+ /**
+ * initialize our web worker and wire all the events.
+ */
+
+
+ var _proto = MessageHandlers.prototype;
+
+ _proto.init = function init() {
+ if (this.transmuxer) {
+ this.transmuxer.dispose();
+ }
+
+ this.transmuxer = new transmuxer.Transmuxer(this.options);
+ wireTransmuxerEvents(this.self, this.transmuxer);
+ };
+
+ _proto.pushMp4Captions = function pushMp4Captions(data) {
+ if (!this.captionParser) {
+ this.captionParser = new captionParser();
+ this.captionParser.init();
+ }
+
+ var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
+ var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
+ this.self.postMessage({
+ action: 'mp4Captions',
+ captions: parsed && parsed.captions || [],
+ logs: parsed && parsed.logs || [],
+ data: segment.buffer
+ }, [segment.buffer]);
+ };
+
+ _proto.probeMp4StartTime = function probeMp4StartTime(_ref) {
+ var timescales = _ref.timescales,
+ data = _ref.data;
+ var startTime = probe$2.startTime(timescales, data);
+ this.self.postMessage({
+ action: 'probeMp4StartTime',
+ startTime: startTime,
+ data: data
+ }, [data.buffer]);
+ };
+
+ _proto.probeMp4Tracks = function probeMp4Tracks(_ref2) {
+ var data = _ref2.data;
+ var tracks = probe$2.tracks(data);
+ this.self.postMessage({
+ action: 'probeMp4Tracks',
+ tracks: tracks,
+ data: data
+ }, [data.buffer]);
+ }
+ /**
+ * Probe an mpeg2-ts segment to determine the start time of the segment in its
+ * internal "media time," as well as whether it contains video and/or audio.
+ *
+ * @private
+ * @param {Uint8Array} bytes - segment bytes
+ * @param {number} baseStartTime
+ * Relative reference timestamp used when adjusting frame timestamps for rollover.
+ * This value should be in seconds, as it's converted to a 90khz clock within the
+ * function body.
+ * @return {Object} The start time of the current segment in "media time" as well as
+ * whether it contains video and/or audio
+ */
+ ;
+
+ _proto.probeTs = function probeTs(_ref3) {
+ var data = _ref3.data,
+ baseStartTime = _ref3.baseStartTime;
+ var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
+ var timeInfo = tsInspector.inspect(data, tsStartTime);
+ var result = null;
+
+ if (timeInfo) {
+ result = {
+ // each type's time info comes back as an array of 2 times, start and end
+ hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
+ hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
+ };
+
+ if (result.hasVideo) {
+ result.videoStart = timeInfo.video[0].ptsTime;
+ }
+
+ if (result.hasAudio) {
+ result.audioStart = timeInfo.audio[0].ptsTime;
+ }
+ }
+
+ this.self.postMessage({
+ action: 'probeTs',
+ result: result,
+ data: data
+ }, [data.buffer]);
+ };
+
+ _proto.clearAllMp4Captions = function clearAllMp4Captions() {
+ if (this.captionParser) {
+ this.captionParser.clearAllCaptions();
+ }
+ };
+
+ _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
+ if (this.captionParser) {
+ this.captionParser.clearParsedCaptions();
+ }
+ }
+ /**
+ * Adds data (a ts segment) to the start of the transmuxer pipeline for
+ * processing.
+ *
+ * @param {ArrayBuffer} data data to push into the muxer
+ */
+ ;
+
+ _proto.push = function push(data) {
+ // Cast array buffer to correct type for transmuxer
+ var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
+ this.transmuxer.push(segment);
+ }
+ /**
+ * Recreate the transmuxer so that the next segment added via `push`
+ * starts with a fresh transmuxer.
+ */
+ ;
+
+ _proto.reset = function reset() {
+ this.transmuxer.reset();
+ }
+ /**
+ * Set the value that will be used as the `baseMediaDecodeTime` time for the
+ * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
+ * set relative to the first based on the PTS values.
+ *
+ * @param {Object} data used to set the timestamp offset in the muxer
+ */
+ ;
+
+ _proto.setTimestampOffset = function setTimestampOffset(data) {
+ var timestampOffset = data.timestampOffset || 0;
+ this.transmuxer.setBaseMediaDecodeTime(Math.round(clock.secondsToVideoTs(timestampOffset)));
+ };
+
+ _proto.setAudioAppendStart = function setAudioAppendStart(data) {
+ this.transmuxer.setAudioAppendStart(Math.ceil(clock.secondsToVideoTs(data.appendStart)));
+ };
+
+ _proto.setRemux = function setRemux(data) {
+ this.transmuxer.setRemux(data.remux);
+ }
+ /**
+ * Forces the pipeline to finish processing the last segment and emit its
+ * results.
+ *
+ * @param {Object} data event data, not really used
+ */
+ ;
+
+ _proto.flush = function flush(data) {
+ this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
+
+ self.postMessage({
+ action: 'done',
+ type: 'transmuxed'
+ });
+ };
+
+ _proto.endTimeline = function endTimeline() {
+ this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
+ // timelines
+
+ self.postMessage({
+ action: 'endedtimeline',
+ type: 'transmuxed'
+ });
+ };
+
+ _proto.alignGopsWith = function alignGopsWith(data) {
+ this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
+ };
+
+ return MessageHandlers;
+ }();
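+ // The worker protocol is driven entirely by `action` names that match the
+ // MessageHandlers methods above (push, flush, reset, endTimeline, setRemux,
+ // setTimestampOffset, setAudioAppendStart, alignGopsWith, probeTs,
+ // probeMp4StartTime, probeMp4Tracks, pushMp4Captions, clearAllMp4Captions,
+ // clearParsedMp4Captions), plus the special 'init' action handled below.
+ // An illustrative call from the main thread looks like
+ //   transmuxer.postMessage({ action: 'push', data, byteOffset, byteLength }, [data]);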
+ /**
+ * Our web worker interface so that things can talk to mux.js
+ * that will be running in a web worker. The scope is passed to this by
+ * webworkify.
+ *
+ * @param {Object} self the scope for the web worker
+ */
+
+
+ self.onmessage = function (event) {
+ if (event.data.action === 'init' && event.data.options) {
+ this.messageHandlers = new MessageHandlers(self, event.data.options);
+ return;
+ }
+
+ if (!this.messageHandlers) {
+ this.messageHandlers = new MessageHandlers(self);
+ }
+
+ if (event.data && event.data.action && event.data.action !== 'init') {
+ if (this.messageHandlers[event.data.action]) {
+ this.messageHandlers[event.data.action](event.data);
+ }
+ }
+ };
+}));
+var TransmuxWorker = factory(workerCode$1);
+/* rollup-plugin-worker-factory end for worker!/Users/bclifford/Code/vhs-release-test/src/transmuxer-worker.js */
+
+var handleData_ = function handleData_(event, transmuxedData, callback) {
+ var _event$data$segment = event.data.segment,
+ type = _event$data$segment.type,
+ initSegment = _event$data$segment.initSegment,
+ captions = _event$data$segment.captions,
+ captionStreams = _event$data$segment.captionStreams,
+ metadata = _event$data$segment.metadata,
+ videoFrameDtsTime = _event$data$segment.videoFrameDtsTime,
+ videoFramePtsTime = _event$data$segment.videoFramePtsTime;
+ transmuxedData.buffer.push({
+ captions: captions,
+ captionStreams: captionStreams,
+ metadata: metadata
+ });
+ var boxes = event.data.segment.boxes || {
+ data: event.data.segment.data
+ };
+ var result = {
+ type: type,
+ // cast ArrayBuffer to TypedArray
+ data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
+ initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
+ };
+
+ if (typeof videoFrameDtsTime !== 'undefined') {
+ result.videoFrameDtsTime = videoFrameDtsTime;
+ }
+
+ if (typeof videoFramePtsTime !== 'undefined') {
+ result.videoFramePtsTime = videoFramePtsTime;
+ }
+
+ callback(result);
+};
+
+var handleDone_ = function handleDone_(_ref) {
+ var transmuxedData = _ref.transmuxedData,
+ callback = _ref.callback; // Previously we only returned data on data events,
+ // not on done events. Clear out the buffer to keep that consistent.
+
+ transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
+ // have received
+
+ callback(transmuxedData);
+};
+
+var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
+ transmuxedData.gopInfo = event.data.gopInfo;
+};
+
+var processTransmux = function processTransmux(options) {
+ var transmuxer = options.transmuxer,
+ bytes = options.bytes,
+ audioAppendStart = options.audioAppendStart,
+ gopsToAlignWith = options.gopsToAlignWith,
+ remux = options.remux,
+ onData = options.onData,
+ onTrackInfo = options.onTrackInfo,
+ onAudioTimingInfo = options.onAudioTimingInfo,
+ onVideoTimingInfo = options.onVideoTimingInfo,
+ onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
+ onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
+ onId3 = options.onId3,
+ onCaptions = options.onCaptions,
+ onDone = options.onDone,
+ onEndedTimeline = options.onEndedTimeline,
+ onTransmuxerLog = options.onTransmuxerLog,
+ isEndOfTimeline = options.isEndOfTimeline;
+ var transmuxedData = {
+ buffer: []
+ };
+ var waitForEndedTimelineEvent = isEndOfTimeline;
+
+ var handleMessage = function handleMessage(event) {
+ if (transmuxer.currentTransmux !== options) {
+ // disposed
+ return;
+ }
+
+ if (event.data.action === 'data') {
+ handleData_(event, transmuxedData, onData);
+ }
+
+ if (event.data.action === 'trackinfo') {
+ onTrackInfo(event.data.trackInfo);
+ }
+
+ if (event.data.action === 'gopInfo') {
+ handleGopInfo_(event, transmuxedData);
+ }
+
+ if (event.data.action === 'audioTimingInfo') {
+ onAudioTimingInfo(event.data.audioTimingInfo);
+ }
+
+ if (event.data.action === 'videoTimingInfo') {
+ onVideoTimingInfo(event.data.videoTimingInfo);
+ }
+
+ if (event.data.action === 'videoSegmentTimingInfo') {
+ onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
+ }
+
+ if (event.data.action === 'audioSegmentTimingInfo') {
+ onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
+ }
+
+ if (event.data.action === 'id3Frame') {
+ onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
+ }
+
+ if (event.data.action === 'caption') {
+ onCaptions(event.data.caption);
+ }
+
+ if (event.data.action === 'endedtimeline') {
+ waitForEndedTimelineEvent = false;
+ onEndedTimeline();
+ }
+
+ if (event.data.action === 'log') {
+ onTransmuxerLog(event.data.log);
+ } // wait for the transmuxed event since we may have audio and video
+
+
+ if (event.data.type !== 'transmuxed') {
+ return;
+ } // If the "endedtimeline" event has not yet fired, and this segment represents the end
+ // of a timeline, that means there may still be data events before the segment
+ // processing can be considered complete. In that case, the final event should be
+ // an "endedtimeline" event with the type "transmuxed."
+
+
+ if (waitForEndedTimelineEvent) {
+ return;
+ }
+
+ transmuxer.onmessage = null;
+ handleDone_({
+ transmuxedData: transmuxedData,
+ callback: onDone
+ });
+ /* eslint-disable no-use-before-define */
+
+ dequeue(transmuxer);
+ /* eslint-enable */
+ };
+
+ transmuxer.onmessage = handleMessage;
+
+ if (audioAppendStart) {
+ transmuxer.postMessage({
+ action: 'setAudioAppendStart',
+ appendStart: audioAppendStart
+ });
+ } // allow empty arrays to be passed to clear out GOPs
+
+
+ if (Array.isArray(gopsToAlignWith)) {
+ transmuxer.postMessage({
+ action: 'alignGopsWith',
+ gopsToAlignWith: gopsToAlignWith
+ });
+ }
+
+ if (typeof remux !== 'undefined') {
+ transmuxer.postMessage({
+ action: 'setRemux',
+ remux: remux
+ });
+ }
+
+ if (bytes.byteLength) {
+ var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
+ var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
+ transmuxer.postMessage({
+ action: 'push',
+ // Send the typed-array of data as an ArrayBuffer so that
+ // it can be sent as a "Transferable" and avoid the costly
+ // memory copy
+ data: buffer,
+ // To recreate the original typed-array, we need information
+ // about what portion of the ArrayBuffer it was a view into
+ byteOffset: byteOffset,
+ byteLength: bytes.byteLength
+ }, [buffer]);
+ }
+
+ if (isEndOfTimeline) {
+ transmuxer.postMessage({
+ action: 'endTimeline'
+ });
+ } // even if we didn't push any bytes, we have to make sure we flush in case we reached
+ // the end of the segment
+
+
+ transmuxer.postMessage({
+ action: 'flush'
+ });
+};
+
+var dequeue = function dequeue(transmuxer) {
+ transmuxer.currentTransmux = null;
+
+ if (transmuxer.transmuxQueue.length) {
+ transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
+
+ if (typeof transmuxer.currentTransmux === 'function') {
+ transmuxer.currentTransmux();
+ } else {
+ processTransmux(transmuxer.currentTransmux);
+ }
+ }
+};
+
+var processAction = function processAction(transmuxer, action) {
+ transmuxer.postMessage({
+ action: action
+ });
+ dequeue(transmuxer);
+};
+
+var enqueueAction = function enqueueAction(action, transmuxer) {
+ if (!transmuxer.currentTransmux) {
+ transmuxer.currentTransmux = action;
+ processAction(transmuxer, action);
+ return;
+ }
+
+ transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
+};
+
+var reset = function reset(transmuxer) {
+ enqueueAction('reset', transmuxer);
+};
+
+var endTimeline = function endTimeline(transmuxer) {
+ enqueueAction('endTimeline', transmuxer);
+};
+
+var transmux = function transmux(options) {
+ if (!options.transmuxer.currentTransmux) {
+ options.transmuxer.currentTransmux = options;
+ processTransmux(options);
+ return;
+ }
+
+ options.transmuxer.transmuxQueue.push(options);
+};
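+// The transmuxer runs at most one job at a time: `currentTransmux` marks the
+// active segment (or action) and everything else waits in `transmuxQueue`.
+// A transmux job is dequeued once its final message with type 'transmuxed'
+// arrives (queued actions dequeue immediately after posting); dequeue() then
+// pulls the next entry and either invokes it (queued actions are bound
+// functions) or hands it to processTransmux.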
+
+var createTransmuxer = function createTransmuxer(options) {
+ var transmuxer = new TransmuxWorker();
+ transmuxer.currentTransmux = null;
+ transmuxer.transmuxQueue = [];
+ var term = transmuxer.terminate;
+
+ transmuxer.terminate = function () {
+ transmuxer.currentTransmux = null;
+ transmuxer.transmuxQueue.length = 0;
+ return term.call(transmuxer);
+ };
+
+ transmuxer.postMessage({
+ action: 'init',
+ options: options
+ });
+ return transmuxer;
+};
+
+var segmentTransmuxer = {
+ reset: reset,
+ endTimeline: endTimeline,
+ transmux: transmux,
+ createTransmuxer: createTransmuxer
+};
+
+var workerCallback = function workerCallback(options) {
+ var transmuxer = options.transmuxer;
+ var endAction = options.endAction || options.action;
+ var callback = options.callback;
+
+ var message = _extends__default['default']({}, options, {
+ endAction: null,
+ transmuxer: null,
+ callback: null
+ });
+
+ var listenForEndEvent = function listenForEndEvent(event) {
+ if (event.data.action !== endAction) {
+ return;
+ }
+
+ transmuxer.removeEventListener('message', listenForEndEvent); // transfer ownership of bytes back to us.
+
+ if (event.data.data) {
+ event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);
+
+ if (options.data) {
+ options.data = event.data.data;
+ }
+ }
+
+ callback(event.data);
+ };
+
+ transmuxer.addEventListener('message', listenForEndEvent);
+
+ if (options.data) {
+ var isArrayBuffer = options.data instanceof ArrayBuffer;
+ message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
+ message.byteLength = options.data.byteLength;
+ var transfers = [isArrayBuffer ? options.data : options.data.buffer];
+ transmuxer.postMessage(message, transfers);
+ } else {
+ transmuxer.postMessage(message);
+ }
+};
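+// Illustrative use of workerCallback (mirroring the calls further below):
+//   workerCallback({
+//     action: 'probeMp4StartTime',
+//     timescales: segment.map.timescales,
+//     data: bytesAsUint8Array,
+//     transmuxer: segment.transmuxer,
+//     callback: function (probed) { /* probed.data holds the transferred-back bytes */ }
+//   });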
+
+var REQUEST_ERRORS = {
+ FAILURE: 2,
+ TIMEOUT: -101,
+ ABORTED: -102
+};
+/**
+ * Abort all requests
+ *
+ * @param {Object} activeXhrs - an object that tracks all XHR requests
+ */
+
+var abortAll = function abortAll(activeXhrs) {
+ activeXhrs.forEach(function (xhr) {
+ xhr.abort();
+ });
+};
+/**
+ * Gather important bandwidth stats once a request has completed
+ *
+ * @param {Object} request - the XHR request from which to gather stats
+ */
+
+
+var getRequestStats = function getRequestStats(request) {
+ return {
+ bandwidth: request.bandwidth,
+ bytesReceived: request.bytesReceived || 0,
+ roundTripTime: request.roundTripTime || 0
+ };
+};
+/**
+ * If possible gather bandwidth stats as a request is in
+ * progress
+ *
+ * @param {Event} progressEvent - an event object from an XHR's progress event
+ */
+
+
+var getProgressStats = function getProgressStats(progressEvent) {
+ var request = progressEvent.target;
+ var roundTripTime = Date.now() - request.requestTime;
+ var stats = {
+ bandwidth: Infinity,
+ bytesReceived: 0,
+ roundTripTime: roundTripTime || 0
+ };
+ stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok
+ // because we should only use bandwidth stats on progress to determine when to
+ // abort a request early due to insufficient bandwidth
+
+ stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
+ return stats;
+};
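+// The bandwidth estimate above is bytes/ms * 8 * 1000, i.e. bits per second.
+// For example, 500000 bytes received over a 2000ms round trip gives
+// Math.floor(500000 / 2000 * 8 * 1000) = 2,000,000 bits per second (2 Mbps).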
+/**
+ * Handle all error conditions in one place and return an object
+ * with all the information
+ *
+ * @param {Error|null} error - if non-null, signals an error occurred with the XHR
+ * @param {Object} request - the XHR request that possibly generated the error
+ */
+
+
+var handleErrors = function handleErrors(error, request) {
+ if (request.timedout) {
+ return {
+ status: request.status,
+ message: 'HLS request timed-out at URL: ' + request.uri,
+ code: REQUEST_ERRORS.TIMEOUT,
+ xhr: request
+ };
+ }
+
+ if (request.aborted) {
+ return {
+ status: request.status,
+ message: 'HLS request aborted at URL: ' + request.uri,
+ code: REQUEST_ERRORS.ABORTED,
+ xhr: request
+ };
+ }
+
+ if (error) {
+ return {
+ status: request.status,
+ message: 'HLS request errored at URL: ' + request.uri,
+ code: REQUEST_ERRORS.FAILURE,
+ xhr: request
+ };
+ }
+
+ if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
+ return {
+ status: request.status,
+ message: 'Empty HLS response at URL: ' + request.uri,
+ code: REQUEST_ERRORS.FAILURE,
+ xhr: request
+ };
+ }
+
+ return null;
+};
+/**
+ * Handle responses for key data and convert the key data to the correct format
+ * for the decryption step later
+ *
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ * @param {Array} objects - objects to add the key bytes to.
+ * @param {Function} finishProcessingFn - a callback to execute to continue processing
+ * this request
+ */
+
+
+var handleKeyResponse = function handleKeyResponse(segment, objects, finishProcessingFn) {
+ return function (error, request) {
+ var response = request.response;
+ var errorObj = handleErrors(error, request);
+
+ if (errorObj) {
+ return finishProcessingFn(errorObj, segment);
+ }
+
+ if (response.byteLength !== 16) {
+ return finishProcessingFn({
+ status: request.status,
+ message: 'Invalid HLS key at URL: ' + request.uri,
+ code: REQUEST_ERRORS.FAILURE,
+ xhr: request
+ }, segment);
+ }
+
+ var view = new DataView(response);
+ var bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
+
+ for (var i = 0; i < objects.length; i++) {
+ objects[i].bytes = bytes;
+ }
+
+ return finishProcessingFn(null, segment);
+ };
+};
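+// HLS AES-128 keys are exactly 16 bytes; the DataView reads above convert them
+// into four big-endian 32-bit words, which is the Uint32Array form that the
+// `decrypt` helper below forwards to the decryption worker.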
+
+var parseInitSegment = function parseInitSegment(segment, _callback) {
+ var type = containers.detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
+ // only know how to parse mp4 init segments at the moment
+
+ if (type !== 'mp4') {
+ var uri = segment.map.resolvedUri || segment.map.uri;
+ return _callback({
+ internal: true,
+ message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + uri,
+ code: REQUEST_ERRORS.FAILURE
+ });
+ }
+
+ workerCallback({
+ action: 'probeMp4Tracks',
+ data: segment.map.bytes,
+ transmuxer: segment.transmuxer,
+ callback: function callback(_ref) {
+ var tracks = _ref.tracks,
+ data = _ref.data; // transfer bytes back to us
+
+ segment.map.bytes = data;
+ tracks.forEach(function (track) {
+ segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
+
+ if (segment.map.tracks[track.type]) {
+ return;
+ }
+
+ segment.map.tracks[track.type] = track;
+
+ if (typeof track.id === 'number' && track.timescale) {
+ segment.map.timescales = segment.map.timescales || {};
+ segment.map.timescales[track.id] = track.timescale;
+ }
+ });
+ return _callback(null);
+ }
+ });
+};
+/**
+ * Handle init-segment responses
+ *
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ * @param {Function} finishProcessingFn - a callback to execute to continue processing
+ * this request
+ */
+
+
+var handleInitSegmentResponse = function handleInitSegmentResponse(_ref2) {
+ var segment = _ref2.segment,
+ finishProcessingFn = _ref2.finishProcessingFn;
+ return function (error, request) {
+ var errorObj = handleErrors(error, request);
+
+ if (errorObj) {
+ return finishProcessingFn(errorObj, segment);
+ }
+
+ var bytes = new Uint8Array(request.response); // init segment is encrypted, we will have to wait
+ // until the key request is done to decrypt.
+
+ if (segment.map.key) {
+ segment.map.encryptedBytes = bytes;
+ return finishProcessingFn(null, segment);
+ }
+
+ segment.map.bytes = bytes;
+ parseInitSegment(segment, function (parseError) {
+ if (parseError) {
+ parseError.xhr = request;
+ parseError.status = request.status;
+ return finishProcessingFn(parseError, segment);
+ }
+
+ finishProcessingFn(null, segment);
+ });
+ };
+};
+/**
+ * Response handler for segment requests, being sure to set the correct
+ * property depending on whether the segment is encrypted or not.
+ * Also records and keeps track of stats that are used for ABR purposes
+ *
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ * @param {Function} finishProcessingFn - a callback to execute to continue processing
+ * this request
+ */
+
+
+var handleSegmentResponse = function handleSegmentResponse(_ref3) {
+ var segment = _ref3.segment,
+ finishProcessingFn = _ref3.finishProcessingFn,
+ responseType = _ref3.responseType;
+ return function (error, request) {
+ var errorObj = handleErrors(error, request);
+
+ if (errorObj) {
+ return finishProcessingFn(errorObj, segment);
+ }
+
+ var newBytes = // although responseText "should" exist, this guard serves to prevent an error being
+ // thrown for two primary cases:
+ // 1. the mime type override stops working, or is not implemented for a specific
+ // browser
+ // 2. when using mock XHR libraries like sinon that do not allow the override behavior
+ responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
+ segment.stats = getRequestStats(request);
+
+ if (segment.key) {
+ segment.encryptedBytes = new Uint8Array(newBytes);
+ } else {
+ segment.bytes = new Uint8Array(newBytes);
+ }
+
+ return finishProcessingFn(null, segment);
+ };
+};
+
+var transmuxAndNotify = function transmuxAndNotify(_ref4) {
+ var segment = _ref4.segment,
+ bytes = _ref4.bytes,
+ trackInfoFn = _ref4.trackInfoFn,
+ timingInfoFn = _ref4.timingInfoFn,
+ videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
+ id3Fn = _ref4.id3Fn,
+ captionsFn = _ref4.captionsFn,
+ isEndOfTimeline = _ref4.isEndOfTimeline,
+ endedTimelineFn = _ref4.endedTimelineFn,
+ dataFn = _ref4.dataFn,
+ doneFn = _ref4.doneFn,
+ onTransmuxerLog = _ref4.onTransmuxerLog;
+ var fmp4Tracks = segment.map && segment.map.tracks || {};
+ var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
+ // One reason for this is that in the case of full segments, we want to trust start
+ // times from the probe, rather than the transmuxer.
+
+ var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
+ var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
+ var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
+ var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');
+
+ var finish = function finish() {
+ return transmux({
+ bytes: bytes,
+ transmuxer: segment.transmuxer,
+ audioAppendStart: segment.audioAppendStart,
+ gopsToAlignWith: segment.gopsToAlignWith,
+ remux: isMuxed,
+ onData: function onData(result) {
+ result.type = result.type === 'combined' ? 'video' : result.type;
+ dataFn(segment, result);
+ },
+ onTrackInfo: function onTrackInfo(trackInfo) {
+ if (trackInfoFn) {
+ if (isMuxed) {
+ trackInfo.isMuxed = true;
+ }
+
+ trackInfoFn(segment, trackInfo);
+ }
+ },
+ onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
+ // we only want the first start value we encounter
+ if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
+ audioStartFn(audioTimingInfo.start);
+ audioStartFn = null;
+ } // we want to continually update the end time
+
+
+ if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
+ audioEndFn(audioTimingInfo.end);
+ }
+ },
+ onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
+ // we only want the first start value we encounter
+ if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
+ videoStartFn(videoTimingInfo.start);
+ videoStartFn = null;
+ } // we want to continually update the end time
+
+
+ if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
+ videoEndFn(videoTimingInfo.end);
+ }
+ },
+ onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
+ videoSegmentTimingInfoFn(videoSegmentTimingInfo);
+ },
+ onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
+ audioSegmentTimingInfoFn(audioSegmentTimingInfo);
+ },
+ onId3: function onId3(id3Frames, dispatchType) {
+ id3Fn(segment, id3Frames, dispatchType);
+ },
+ onCaptions: function onCaptions(captions) {
+ captionsFn(segment, [captions]);
+ },
+ isEndOfTimeline: isEndOfTimeline,
+ onEndedTimeline: function onEndedTimeline() {
+ endedTimelineFn();
+ },
+ onTransmuxerLog: onTransmuxerLog,
+ onDone: function onDone(result) {
+ if (!doneFn) {
+ return;
+ }
+
+ result.type = result.type === 'combined' ? 'video' : result.type;
+ doneFn(null, segment, result);
+ }
+ });
+ }; // In the transmuxer, we don't yet have the ability to extract a "proper" start time.
+ // This means cached frame data may corrupt our notion of where this segment
+ // really starts. To get around this, probe for the info needed.
+
+
+ workerCallback({
+ action: 'probeTs',
+ transmuxer: segment.transmuxer,
+ data: bytes,
+ baseStartTime: segment.baseStartTime,
+ callback: function callback(data) {
+ segment.bytes = bytes = data.data;
+ var probeResult = data.result;
+
+ if (probeResult) {
+ trackInfoFn(segment, {
+ hasAudio: probeResult.hasAudio,
+ hasVideo: probeResult.hasVideo,
+ isMuxed: isMuxed
+ });
+ trackInfoFn = null;
+
+ if (probeResult.hasAudio && !isMuxed) {
+ audioStartFn(probeResult.audioStart);
+ }
+
+ if (probeResult.hasVideo) {
+ videoStartFn(probeResult.videoStart);
+ }
+
+ audioStartFn = null;
+ videoStartFn = null;
+ }
+
+ finish();
+ }
+ });
+};
+
+var handleSegmentBytes = function handleSegmentBytes(_ref5) {
+ var segment = _ref5.segment,
+ bytes = _ref5.bytes,
+ trackInfoFn = _ref5.trackInfoFn,
+ timingInfoFn = _ref5.timingInfoFn,
+ videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
+ id3Fn = _ref5.id3Fn,
+ captionsFn = _ref5.captionsFn,
+ isEndOfTimeline = _ref5.isEndOfTimeline,
+ endedTimelineFn = _ref5.endedTimelineFn,
+ dataFn = _ref5.dataFn,
+ doneFn = _ref5.doneFn,
+ onTransmuxerLog = _ref5.onTransmuxerLog;
+ var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
+ // We should have a handler that fetches the number of bytes required
+ // to check if something is fmp4. This will allow us to save bandwidth
+ // because we can only blacklist a playlist and abort requests
+ // by codec after trackinfo triggers.
+
+ if (containers.isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
+ segment.isFmp4 = true;
+ var tracks = segment.map.tracks;
+ var trackInfo = {
+ isFmp4: true,
+ hasVideo: !!tracks.video,
+ hasAudio: !!tracks.audio
+ }; // if we have an audio track with a codec that is not set to
+ // encrypted audio
+
+ if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
+ trackInfo.audioCodec = tracks.audio.codec;
+ } // if we have a video track, with a codec that is not set to
+ // encrypted video
+
+
+ if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
+ trackInfo.videoCodec = tracks.video.codec;
+ }
+
+ if (tracks.video && tracks.audio) {
+ trackInfo.isMuxed = true;
+ } // since we don't support appending fmp4 data on progress, we know we have the full
+ // segment here
+
+
+ trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
+ // time. The end time can be roughly calculated by the receiver using the duration.
+ //
+ // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
+ // that is the true start of the segment (where the playback engine should begin
+ // decoding).
+
+ var finishLoading = function finishLoading(captions) {
+ // if the track still has audio at this point it is only possible
+ // for it to be audio only. See `tracks.video && tracks.audio` if statement
+ // above.
+ // we make sure to use segment.bytes here as that is reassigned when the
+ // worker transfers the data back in the callbacks below
+ dataFn(segment, {
+ data: bytesAsUint8Array,
+ type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
+ });
+
+ if (captions && captions.length) {
+ captionsFn(segment, captions);
+ }
+
+ doneFn(null, segment, {});
+ };
+
+ workerCallback({
+ action: 'probeMp4StartTime',
+ timescales: segment.map.timescales,
+ data: bytesAsUint8Array,
+ transmuxer: segment.transmuxer,
+ callback: function callback(_ref6) {
+ var data = _ref6.data,
+ startTime = _ref6.startTime; // transfer bytes back to us
+
+ bytes = data.buffer;
+ segment.bytes = bytesAsUint8Array = data;
+
+ if (trackInfo.hasAudio && !trackInfo.isMuxed) {
+ timingInfoFn(segment, 'audio', 'start', startTime);
+ }
+
+ if (trackInfo.hasVideo) {
+ timingInfoFn(segment, 'video', 'start', startTime);
+ } // Run through the CaptionParser in case there are captions.
+ // Initialize the CaptionParser if it hasn't been initialized yet
+
+
+ if (!tracks.video || !data.byteLength || !segment.transmuxer) {
+ finishLoading();
+ return;
+ }
+
+ workerCallback({
+ action: 'pushMp4Captions',
+ endAction: 'mp4Captions',
+ transmuxer: segment.transmuxer,
+ data: bytesAsUint8Array,
+ timescales: segment.map.timescales,
+ trackIds: [tracks.video.id],
+ callback: function callback(message) {
+ // transfer bytes back to us
+ bytes = message.data.buffer;
+ segment.bytes = bytesAsUint8Array = message.data;
+ message.logs.forEach(function (log) {
+ onTransmuxerLog(videojs.mergeOptions(log, {
+ stream: 'mp4CaptionParser'
+ }));
+ });
+ finishLoading(message.captions);
+ }
+ });
+ }
+ });
+ return;
+ } // VTT or other segments that don't need processing
+
+
+ if (!segment.transmuxer) {
+ doneFn(null, segment, {});
+ return;
+ }
+
+ if (typeof segment.container === 'undefined') {
+ segment.container = containers.detectContainerForBytes(bytesAsUint8Array);
+ }
+
+ if (segment.container !== 'ts' && segment.container !== 'aac') {
+ trackInfoFn(segment, {
+ hasAudio: false,
+ hasVideo: false
+ });
+ doneFn(null, segment, {});
+ return;
+ } // ts or aac
+
+
+ transmuxAndNotify({
+ segment: segment,
+ bytes: bytes,
+ trackInfoFn: trackInfoFn,
+ timingInfoFn: timingInfoFn,
+ videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
+ id3Fn: id3Fn,
+ captionsFn: captionsFn,
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: endedTimelineFn,
+ dataFn: dataFn,
+ doneFn: doneFn,
+ onTransmuxerLog: onTransmuxerLog
+ });
+};
+
+var decrypt = function decrypt(_ref7, callback) {
+ var id = _ref7.id,
+ key = _ref7.key,
+ encryptedBytes = _ref7.encryptedBytes,
+ decryptionWorker = _ref7.decryptionWorker;
+
+ var decryptionHandler = function decryptionHandler(event) {
+ if (event.data.source === id) {
+ decryptionWorker.removeEventListener('message', decryptionHandler);
+ var decrypted = event.data.decrypted;
+ callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
+ }
+ };
+
+ decryptionWorker.addEventListener('message', decryptionHandler);
+ var keyBytes;
+
+ if (key.bytes.slice) {
+ keyBytes = key.bytes.slice();
+ } else {
+ keyBytes = new Uint32Array(Array.prototype.slice.call(key.bytes));
+ } // incrementally decrypt the bytes
+
+
+ decryptionWorker.postMessage(createTransferableMessage({
+ source: id,
+ encrypted: encryptedBytes,
+ key: keyBytes,
+ iv: key.iv
+ }), [encryptedBytes.buffer, keyBytes.buffer]);
+};
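+// The decryption worker protocol used above: we post
+// { source, encrypted, key, iv } (transferring the encrypted bytes and key),
+// then resolve when a message whose event.data.source matches our id comes back
+// carrying the decrypted { bytes, byteOffset, byteLength }.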
+/**
+ * Decrypt the segment via the decryption web worker
+ *
+ * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
+ * routines
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ * @param {Function} trackInfoFn - a callback that receives track info
+ * @param {Function} timingInfoFn - a callback that receives timing info
+ * @param {Function} videoSegmentTimingInfoFn
+ * a callback that receives video timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {Function} audioSegmentTimingInfoFn
+ * a callback that receives audio timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {boolean} isEndOfTimeline
+ * true if this segment represents the last segment in a timeline
+ * @param {Function} endedTimelineFn
+ * a callback made when a timeline is ended, will only be called if
+ * isEndOfTimeline is true
+ * @param {Function} dataFn - a callback that is executed when segment bytes are available
+ * and ready to use
+ * @param {Function} doneFn - a callback that is executed after decryption has completed
+ */
+
+
+var decryptSegment = function decryptSegment(_ref8) {
+ var decryptionWorker = _ref8.decryptionWorker,
+ segment = _ref8.segment,
+ trackInfoFn = _ref8.trackInfoFn,
+ timingInfoFn = _ref8.timingInfoFn,
+ videoSegmentTimingInfoFn = _ref8.videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn = _ref8.audioSegmentTimingInfoFn,
+ id3Fn = _ref8.id3Fn,
+ captionsFn = _ref8.captionsFn,
+ isEndOfTimeline = _ref8.isEndOfTimeline,
+ endedTimelineFn = _ref8.endedTimelineFn,
+ dataFn = _ref8.dataFn,
+ doneFn = _ref8.doneFn,
+ onTransmuxerLog = _ref8.onTransmuxerLog;
+ decrypt({
+ id: segment.requestId,
+ key: segment.key,
+ encryptedBytes: segment.encryptedBytes,
+ decryptionWorker: decryptionWorker
+ }, function (decryptedBytes) {
+ segment.bytes = decryptedBytes;
+ handleSegmentBytes({
+ segment: segment,
+ bytes: segment.bytes,
+ trackInfoFn: trackInfoFn,
+ timingInfoFn: timingInfoFn,
+ videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
+ id3Fn: id3Fn,
+ captionsFn: captionsFn,
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: endedTimelineFn,
+ dataFn: dataFn,
+ doneFn: doneFn,
+ onTransmuxerLog: onTransmuxerLog
+ });
+ });
+};
+/**
+ * This function waits for all XHRs to finish (with either success or failure)
+ * before continuing processing via its callback. The function gathers errors
+ * from each request into a single errors array so that the error status for
+ * each request can be examined later.
+ *
+ * @param {Object} activeXhrs - an object that tracks all XHR requests
+ * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
+ * routines
+ * @param {Function} trackInfoFn - a callback that receives track info
+ * @param {Function} timingInfoFn - a callback that receives timing info
+ * @param {Function} videoSegmentTimingInfoFn
+ * a callback that receives video timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {Function} audioSegmentTimingInfoFn
+ * a callback that receives audio timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {Function} id3Fn - a callback that receives ID3 metadata
+ * @param {Function} captionsFn - a callback that receives captions
+ * @param {boolean} isEndOfTimeline
+ * true if this segment represents the last segment in a timeline
+ * @param {Function} endedTimelineFn
+ * a callback made when a timeline is ended, will only be called if
+ * isEndOfTimeline is true
+ * @param {Function} dataFn - a callback that is executed when segment bytes are available
+ * and ready to use
+ * @param {Function} doneFn - a callback that is executed after all resources have been
+ * downloaded and any decryption completed
+ */
+
+
+var waitForCompletion = function waitForCompletion(_ref9) {
+ var activeXhrs = _ref9.activeXhrs,
+ decryptionWorker = _ref9.decryptionWorker,
+ trackInfoFn = _ref9.trackInfoFn,
+ timingInfoFn = _ref9.timingInfoFn,
+ videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
+ id3Fn = _ref9.id3Fn,
+ captionsFn = _ref9.captionsFn,
+ isEndOfTimeline = _ref9.isEndOfTimeline,
+ endedTimelineFn = _ref9.endedTimelineFn,
+ dataFn = _ref9.dataFn,
+ doneFn = _ref9.doneFn,
+ onTransmuxerLog = _ref9.onTransmuxerLog;
+ var count = 0;
+ var didError = false;
+ return function (error, segment) {
+ if (didError) {
+ return;
+ }
+
+ if (error) {
+ didError = true; // If there are errors, we have to abort any outstanding requests
+
+ abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
+ // handle the aborted events from those requests, there are some cases where we may
+ // never get an aborted event. For instance, if the network connection is lost and
+ // there were two requests, the first may have triggered an error immediately, while
+ // the second request remains unsent. In that case, the aborted algorithm will not
+ // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
+ //
+ // We also can't rely on the ready state of the XHR, since the request that
+ // triggered the connection error may also show as a ready state of 0 (unsent).
+ // Therefore, we have to finish this group of requests immediately after the first
+ // seen error.
+
+ return doneFn(error, segment);
+ }
+
+ count += 1;
+
+ if (count === activeXhrs.length) {
+ var segmentFinish = function segmentFinish() {
+ if (segment.encryptedBytes) {
+ return decryptSegment({
+ decryptionWorker: decryptionWorker,
+ segment: segment,
+ trackInfoFn: trackInfoFn,
+ timingInfoFn: timingInfoFn,
+ videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
+ id3Fn: id3Fn,
+ captionsFn: captionsFn,
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: endedTimelineFn,
+ dataFn: dataFn,
+ doneFn: doneFn,
+ onTransmuxerLog: onTransmuxerLog
+ });
+ } // Otherwise, everything is ready, so just continue
+
+
+ handleSegmentBytes({
+ segment: segment,
+ bytes: segment.bytes,
+ trackInfoFn: trackInfoFn,
+ timingInfoFn: timingInfoFn,
+ videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
+ id3Fn: id3Fn,
+ captionsFn: captionsFn,
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: endedTimelineFn,
+ dataFn: dataFn,
+ doneFn: doneFn,
+ onTransmuxerLog: onTransmuxerLog
+ });
+ }; // Keep track of when *all* of the requests have completed
+
+
+ segment.endOfAllRequests = Date.now();
+
+ if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
+ return decrypt({
+ decryptionWorker: decryptionWorker,
+ // add -init to the "id" to differentiate between segment
+ // and init segment decryption, just in case they happen
+ // at the same time at some point in the future.
+ id: segment.requestId + '-init',
+ encryptedBytes: segment.map.encryptedBytes,
+ key: segment.map.key
+ }, function (decryptedBytes) {
+ segment.map.bytes = decryptedBytes;
+ parseInitSegment(segment, function (parseError) {
+ if (parseError) {
+ abortAll(activeXhrs);
+ return doneFn(parseError, segment);
+ }
+
+ segmentFinish();
+ });
+ });
+ }
+
+ segmentFinish();
+ }
+ };
+};
+/**
+ * Calls the abort callback if any request within the batch was aborted. Will only call
+ * the callback once per batch of requests, even if multiple were aborted.
+ *
+ * @param {Object} loadendState - state to check to see if the abort function was called
+ * @param {Function} abortFn - callback to call for abort
+ */
+
+
+var handleLoadEnd = function handleLoadEnd(_ref10) {
+ var loadendState = _ref10.loadendState,
+ abortFn = _ref10.abortFn;
+ return function (event) {
+ var request = event.target;
+
+ if (request.aborted && abortFn && !loadendState.calledAbortFn) {
+ abortFn();
+ loadendState.calledAbortFn = true;
+ }
+ };
+};
+/**
+ * Simple progress event callback handler that gathers some stats before
+ * executing a provided callback with the `segment` object
+ *
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ * @param {Function} progressFn - a callback that is executed each time a progress event
+ * is received
+ * @param {Function} trackInfoFn - a callback that receives track info
+ * @param {Function} timingInfoFn - a callback that receives timing info
+ * @param {Function} videoSegmentTimingInfoFn
+ * a callback that receives video timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {Function} audioSegmentTimingInfoFn
+ * a callback that receives audio timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {boolean} isEndOfTimeline
+ * true if this segment represents the last segment in a timeline
+ * @param {Function} endedTimelineFn
+ * a callback made when a timeline is ended, will only be called if
+ * isEndOfTimeline is true
+ * @param {Function} dataFn - a callback that is executed when segment bytes are available
+ * and ready to use
+ * @param {Event} event - the progress event object from XMLHttpRequest
+ */
+
+
+var handleProgress = function handleProgress(_ref11) {
+ var segment = _ref11.segment,
+ progressFn = _ref11.progressFn;
+ _ref11.trackInfoFn;
+ _ref11.timingInfoFn;
+ _ref11.videoSegmentTimingInfoFn;
+ _ref11.audioSegmentTimingInfoFn;
+ _ref11.id3Fn;
+ _ref11.captionsFn;
+ _ref11.isEndOfTimeline;
+ _ref11.endedTimelineFn;
+ _ref11.dataFn;
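+ // (The bare `_ref11.*` property accesses above are transpilation artifacts left
+ // over from destructured parameters that this progress handler never uses.)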
+ return function (event) {
+ var request = event.target;
+
+ if (request.aborted) {
+ return;
+ }
+
+ segment.stats = videojs.mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
+
+ if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
+ segment.stats.firstBytesReceivedAt = Date.now();
+ }
+
+ return progressFn(event, segment);
+ };
+};
+/**
+ * Loads all resources and does any processing necessary for a media-segment
+ *
+ * Features:
+ * decrypts the media-segment if it has a key uri and an iv
+ * aborts *all* requests if *any* one request fails
+ *
+ * The segment object, at minimum, has the following format:
+ * {
+ * resolvedUri: String,
+ * [transmuxer]: Object,
+ * [byterange]: {
+ * offset: Number,
+ * length: Number
+ * },
+ * [key]: {
+ * resolvedUri: String
+ * [byterange]: {
+ * offset: Number,
+ * length: Number
+ * },
+ * iv: {
+ * bytes: Uint32Array
+ * }
+ * },
+ * [map]: {
+ * resolvedUri: String,
+ * [byterange]: {
+ * offset: Number,
+ * length: Number
+ * },
+ * [bytes]: Uint8Array
+ * }
+ * }
+ * ...where [name] denotes optional properties
+ *
+ * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
+ * @param {Object} xhrOptions - the base options to provide to all xhr requests
+ * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
+ * decryption routines
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ * @param {Function} abortFn - a callback called (only once) if any piece of a request was
+ * aborted
+ * @param {Function} progressFn - a callback that receives progress events from the main
+ * segment's xhr request
+ * @param {Function} trackInfoFn - a callback that receives track info
+ * @param {Function} timingInfoFn - a callback that receives timing info
+ * @param {Function} videoSegmentTimingInfoFn
+ * a callback that receives video timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {Function} audioSegmentTimingInfoFn
+ * a callback that receives audio timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {Function} id3Fn - a callback that receives ID3 metadata
+ * @param {Function} captionsFn - a callback that receives captions
+ * @param {boolean} isEndOfTimeline
+ * true if this segment represents the last segment in a timeline
+ * @param {Function} endedTimelineFn
+ * a callback made when a timeline is ended, will only be called if
+ * isEndOfTimeline is true
+ * @param {Function} dataFn - a callback that receives data from the main segment's xhr
+ * request, transmuxed if needed
+ * @param {Function} doneFn - a callback that is executed only once all requests have
+ * succeeded or failed
+ * @return {Function} a function that, when invoked, immediately aborts all
+ * outstanding requests
+ */
+
+
+var mediaSegmentRequest = function mediaSegmentRequest(_ref12) {
+ var xhr = _ref12.xhr,
+ xhrOptions = _ref12.xhrOptions,
+ decryptionWorker = _ref12.decryptionWorker,
+ segment = _ref12.segment,
+ abortFn = _ref12.abortFn,
+ progressFn = _ref12.progressFn,
+ trackInfoFn = _ref12.trackInfoFn,
+ timingInfoFn = _ref12.timingInfoFn,
+ videoSegmentTimingInfoFn = _ref12.videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn = _ref12.audioSegmentTimingInfoFn,
+ id3Fn = _ref12.id3Fn,
+ captionsFn = _ref12.captionsFn,
+ isEndOfTimeline = _ref12.isEndOfTimeline,
+ endedTimelineFn = _ref12.endedTimelineFn,
+ dataFn = _ref12.dataFn,
+ doneFn = _ref12.doneFn,
+ onTransmuxerLog = _ref12.onTransmuxerLog;
+ var activeXhrs = [];
+ var finishProcessingFn = waitForCompletion({
+ activeXhrs: activeXhrs,
+ decryptionWorker: decryptionWorker,
+ trackInfoFn: trackInfoFn,
+ timingInfoFn: timingInfoFn,
+ videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
+ id3Fn: id3Fn,
+ captionsFn: captionsFn,
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: endedTimelineFn,
+ dataFn: dataFn,
+ doneFn: doneFn,
+ onTransmuxerLog: onTransmuxerLog
+ }); // optionally, request the decryption key
+
+ if (segment.key && !segment.key.bytes) {
+ var objects = [segment.key];
+
+ if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
+ objects.push(segment.map.key);
+ }
+
+ var keyRequestOptions = videojs.mergeOptions(xhrOptions, {
+ uri: segment.key.resolvedUri,
+ responseType: 'arraybuffer'
+ });
+ var keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
+ var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
+ activeXhrs.push(keyXhr);
+ } // optionally, request the associated media init segment
+
+
+ if (segment.map && !segment.map.bytes) {
+ var differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);
+
+ if (differentMapKey) {
+ var mapKeyRequestOptions = videojs.mergeOptions(xhrOptions, {
+ uri: segment.map.key.resolvedUri,
+ responseType: 'arraybuffer'
+ });
+ var mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
+ var mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
+ activeXhrs.push(mapKeyXhr);
+ }
+
+ var initSegmentOptions = videojs.mergeOptions(xhrOptions, {
+ uri: segment.map.resolvedUri,
+ responseType: 'arraybuffer',
+ headers: segmentXhrHeaders(segment.map)
+ });
+ var initSegmentRequestCallback = handleInitSegmentResponse({
+ segment: segment,
+ finishProcessingFn: finishProcessingFn
+ });
+ var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
+ activeXhrs.push(initSegmentXhr);
+ }
+
+ var segmentRequestOptions = videojs.mergeOptions(xhrOptions, {
+ uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
+ responseType: 'arraybuffer',
+ headers: segmentXhrHeaders(segment)
+ });
+ var segmentRequestCallback = handleSegmentResponse({
+ segment: segment,
+ finishProcessingFn: finishProcessingFn,
+ responseType: segmentRequestOptions.responseType
+ });
+ var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
+ segmentXhr.addEventListener('progress', handleProgress({
+ segment: segment,
+ progressFn: progressFn,
+ trackInfoFn: trackInfoFn,
+ timingInfoFn: timingInfoFn,
+ videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
+ id3Fn: id3Fn,
+ captionsFn: captionsFn,
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: endedTimelineFn,
+ dataFn: dataFn
+ }));
+ activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but should not make callbacks
+ // multiple times, provide a shared state object
+
+ var loadendState = {};
+ activeXhrs.forEach(function (activeXhr) {
+ activeXhr.addEventListener('loadend', handleLoadEnd({
+ loadendState: loadendState,
+ abortFn: abortFn
+ }));
+ });
+ return function () {
+ return abortAll(activeXhrs);
+ };
+};
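+
+// Illustrative sketch only, not part of the upstream VHS bundle: roughly how a
+// caller can wire up mediaSegmentRequest using the minimal segment shape
+// documented above. `exampleXhr`, `exampleDecryptionWorker` and the no-op
+// callbacks are hypothetical placeholders supplied by the caller.
+var _exampleMediaSegmentRequestUsage = function _exampleMediaSegmentRequestUsage(exampleXhr, exampleDecryptionWorker) {
+ var noop = function noop() {};
+ var abortSegmentRequests = mediaSegmentRequest({
+ xhr: exampleXhr,
+ xhrOptions: { timeout: 45000 },
+ decryptionWorker: exampleDecryptionWorker,
+ segment: {
+ resolvedUri: 'https://example.com/segment-1.ts',
+ byterange: { offset: 0, length: 1024 }
+ },
+ abortFn: noop,
+ progressFn: noop,
+ trackInfoFn: noop,
+ timingInfoFn: noop,
+ videoSegmentTimingInfoFn: noop,
+ audioSegmentTimingInfoFn: noop,
+ id3Fn: noop,
+ captionsFn: noop,
+ isEndOfTimeline: false,
+ endedTimelineFn: noop,
+ dataFn: noop,
+ doneFn: noop,
+ onTransmuxerLog: noop
+ }); // the return value aborts every outstanding request (key, init segment, media segment)
+
+ return abortSegmentRequests;
+};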
+/**
+ * @file - codecs.js - Handles tasks regarding codec strings, such as translating
+ * codec strings into objects that can be examined.
+ */
+
+
+var logFn$1 = logger('CodecUtils');
+/**
+ * Returns a set of codec strings parsed from the playlist or the default
+ * codec strings if no codecs were specified in the playlist
+ *
+ * @param {Playlist} media the current media playlist
+ * @return {Object} an object with the video and audio codecs
+ */
+
+var getCodecs = function getCodecs(media) {
+ // if the codecs were explicitly specified, use them instead of the
+ // defaults
+ var mediaAttributes = media.attributes || {};
+
+ if (mediaAttributes.CODECS) {
+ return codecs_js.parseCodecs(mediaAttributes.CODECS);
+ }
+};
+
+var isMaat = function isMaat(master, media) {
+ var mediaAttributes = media.attributes || {};
+ return master && master.mediaGroups && master.mediaGroups.AUDIO && mediaAttributes.AUDIO && master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
+};
+
+var isMuxed = function isMuxed(master, media) {
+ if (!isMaat(master, media)) {
+ return true;
+ }
+
+ var mediaAttributes = media.attributes || {};
+ var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
+
+ for (var groupId in audioGroup) {
+ // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
+ // or there are listed playlists (the case for DASH, as the manifest will have already
+ // provided all of the details necessary to generate the audio playlist, as opposed to
+ // HLS' externally requested playlists), then the content is demuxed.
+ if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
+ return true;
+ }
+ }
+
+ return false;
+};
+
+var unwrapCodecList = function unwrapCodecList(codecList) {
+ var codecs = {};
+ codecList.forEach(function (_ref) {
+ var mediaType = _ref.mediaType,
+ type = _ref.type,
+ details = _ref.details;
+ codecs[mediaType] = codecs[mediaType] || [];
+ codecs[mediaType].push(codecs_js.translateLegacyCodec("" + type + details));
+ });
+ Object.keys(codecs).forEach(function (mediaType) {
+ if (codecs[mediaType].length > 1) {
+ logFn$1("multiple " + mediaType + " codecs found as attributes: " + codecs[mediaType].join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
+ codecs[mediaType] = null;
+ return;
+ }
+
+ codecs[mediaType] = codecs[mediaType][0];
+ });
+ return codecs;
+};
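+
+// Illustrative sketch only, not part of the upstream bundle: the shape that
+// unwrapCodecList consumes and produces, assuming translateLegacyCodec leaves
+// already-modern codec strings untouched. If two codecs of the same media type
+// are listed, that type is logged and nulled so mux.js can probe the segments.
+var _exampleUnwrapCodecList = function _exampleUnwrapCodecList() {
+ // roughly what parseCodecs('avc1.64001f,mp4a.40.2') yields
+ var parsed = [{
+ mediaType: 'video',
+ type: 'avc1',
+ details: '.64001f'
+ }, {
+ mediaType: 'audio',
+ type: 'mp4a',
+ details: '.40.2'
+ }];
+
+ // expected result: { video: 'avc1.64001f', audio: 'mp4a.40.2' }
+ return unwrapCodecList(parsed);
+};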
+
+var codecCount = function codecCount(codecObj) {
+ var count = 0;
+
+ if (codecObj.audio) {
+ count++;
+ }
+
+ if (codecObj.video) {
+ count++;
+ }
+
+ return count;
+};
+/**
+ * Calculates the codec strings for a working configuration of
+ * SourceBuffers to play variant streams in a master playlist. If
+ * there is no possible working configuration, an empty object will be
+ * returned.
+ *
+ * @param {Object} master the m3u8 object for the master playlist
+ * @param {Object} media the m3u8 object for the variant playlist
+ * @return {Object} the codec strings.
+ *
+ * @private
+ */
+
+
+var codecsForPlaylist = function codecsForPlaylist(master, media) {
+ var mediaAttributes = media.attributes || {};
+ var codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
+ // Put another way, there is no way to have a video-only multiple-audio HLS!
+
+ if (isMaat(master, media) && !codecInfo.audio) {
+ if (!isMuxed(master, media)) {
+ // It is possible for codecs to be specified on the audio media group playlist but
+ // not on the rendition playlist. This is mostly the case for DASH, where audio and
+ // video are always separate (and separately specified).
+ var defaultCodecs = unwrapCodecList(codecs_js.codecsFromDefault(master, mediaAttributes.AUDIO) || []);
+
+ if (defaultCodecs.audio) {
+ codecInfo.audio = defaultCodecs.audio;
+ }
+ }
+ }
+
+ return codecInfo;
+};
+
+var logFn = logger('PlaylistSelector');
+
+var representationToString = function representationToString(representation) {
+ if (!representation || !representation.playlist) {
+ return;
+ }
+
+ var playlist = representation.playlist;
+ return JSON.stringify({
+ id: playlist.id,
+ bandwidth: representation.bandwidth,
+ width: representation.width,
+ height: representation.height,
+ codecs: playlist.attributes && playlist.attributes.CODECS || ''
+ });
+}; // Utilities
+
+/**
+ * Returns the CSS value for the specified property on an element
+ * using `getComputedStyle`. Firefox has a long-standing issue where
+ * getComputedStyle() may return null when running in an iframe with
+ * `display: none`.
+ *
+ * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
+ * @param {HTMLElement} el the htmlelement to work on
+ * @param {string} property the property to get the style for
+ */
+
+
+var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
+ if (!el) {
+ return '';
+ }
+
+ var result = window__default['default'].getComputedStyle(el);
+
+ if (!result) {
+ return '';
+ }
+
+ return result[property];
+};
+/**
+ * Reusable stable sort function
+ *
+ * @param {Playlists} array
+ * @param {Function} sortFn Different comparators
+ * @function stableSort
+ */
+
+
+var stableSort = function stableSort(array, sortFn) {
+ var newArray = array.slice();
+ array.sort(function (left, right) {
+ var cmp = sortFn(left, right);
+
+ if (cmp === 0) {
+ return newArray.indexOf(left) - newArray.indexOf(right);
+ }
+
+ return cmp;
+ });
+};
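+
+// Illustrative sketch only, not part of the upstream bundle: stableSort keeps
+// the original relative order of entries the comparator reports as equal,
+// which matters when several renditions share the same bandwidth.
+var _exampleStableSort = function _exampleStableSort() {
+ var reps = [{
+ id: 'a',
+ bandwidth: 500000
+ }, {
+ id: 'b',
+ bandwidth: 500000
+ }, {
+ id: 'c',
+ bandwidth: 250000
+ }];
+
+ // sorts in place; resulting order is c, a, b ('a' stays ahead of 'b')
+ stableSort(reps, function (left, right) {
+ return left.bandwidth - right.bandwidth;
+ });
+ return reps;
+};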
+/**
+ * A comparator function to sort two playlist object by bandwidth.
+ *
+ * @param {Object} left a media playlist object
+ * @param {Object} right a media playlist object
+ * @return {number} Greater than zero if the bandwidth attribute of
+ * left is greater than the corresponding attribute of right. Less
+ * than zero if the bandwidth of right is greater than left and
+ * exactly zero if the two are equal.
+ */
+
+
+var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
+ var leftBandwidth;
+ var rightBandwidth;
+
+ if (left.attributes.BANDWIDTH) {
+ leftBandwidth = left.attributes.BANDWIDTH;
+ }
+
+ leftBandwidth = leftBandwidth || window__default['default'].Number.MAX_VALUE;
+
+ if (right.attributes.BANDWIDTH) {
+ rightBandwidth = right.attributes.BANDWIDTH;
+ }
+
+ rightBandwidth = rightBandwidth || window__default['default'].Number.MAX_VALUE;
+ return leftBandwidth - rightBandwidth;
+};
+/**
+ * A comparator function to sort two playlist object by resolution (width).
+ *
+ * @param {Object} left a media playlist object
+ * @param {Object} right a media playlist object
+ * @return {number} Greater than zero if the resolution.width attribute of
+ * left is greater than the corresponding attribute of right. Less
+ * than zero if the resolution.width of right is greater than left and
+ * exactly zero if the two are equal.
+ */
+
+
+var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
+ var leftWidth;
+ var rightWidth;
+
+ if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
+ leftWidth = left.attributes.RESOLUTION.width;
+ }
+
+ leftWidth = leftWidth || window__default['default'].Number.MAX_VALUE;
+
+ if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
+ rightWidth = right.attributes.RESOLUTION.width;
+ }
+
+ rightWidth = rightWidth || window__default['default'].Number.MAX_VALUE; // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
+ // have the same media dimensions/resolution
+
+ if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
+ return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
+ }
+
+ return leftWidth - rightWidth;
+};
+/**
+ * Chooses the appropriate media playlist based on bandwidth and player size
+ *
+ * @param {Object} master
+ * Object representation of the master manifest
+ * @param {number} playerBandwidth
+ * Current calculated bandwidth of the player
+ * @param {number} playerWidth
+ * Current width of the player element (should account for the device pixel ratio)
+ * @param {number} playerHeight
+ * Current height of the player element (should account for the device pixel ratio)
+ * @param {boolean} limitRenditionByPlayerDimensions
+ * True if the player width and height should be used during the selection, false otherwise
+ * @param {Object} masterPlaylistController
+ * the current masterPlaylistController object
+ * @return {Playlist} the highest bitrate playlist less than the
+ * currently detected bandwidth, accounting for some amount of
+ * bandwidth variance
+ */
+
+
+var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
+ // If we end up getting called before `master` is available, exit early
+ if (!master) {
+ return;
+ }
+
+ var options = {
+ bandwidth: playerBandwidth,
+ width: playerWidth,
+ height: playerHeight,
+ limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
+ };
+ var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.
+
+ if (Playlist.isAudioOnly(master)) {
+ playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
+ // at the bottom of this function for debugging.
+
+ options.audioOnly = true;
+ } // convert the playlists to an intermediary representation to make comparisons easier
+
+
+ var sortedPlaylistReps = playlists.map(function (playlist) {
+ var bandwidth;
+ var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
+ var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
+ bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
+ bandwidth = bandwidth || window__default['default'].Number.MAX_VALUE;
+ return {
+ bandwidth: bandwidth,
+ width: width,
+ height: height,
+ playlist: playlist
+ };
+ });
+ stableSort(sortedPlaylistReps, function (left, right) {
+ return left.bandwidth - right.bandwidth;
+ }); // filter out any playlists that have been excluded due to
+ // incompatible configurations
+
+ sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
+ return !Playlist.isIncompatible(rep.playlist);
+ }); // filter out any playlists that have been disabled manually through the representations
+ // api or blacklisted temporarily due to playback errors.
+
+ var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
+ return Playlist.isEnabled(rep.playlist);
+ });
+
+ if (!enabledPlaylistReps.length) {
+ // if there are no enabled playlists, then they have all been blacklisted or disabled
+ // by the user through the representations api. In this case, ignore blacklisting and
+ // fallback to what the user wants by using playlists the user has not disabled.
+ enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
+ return !Playlist.isDisabled(rep.playlist);
+ });
+ } // filter out any variant that has greater effective bitrate
+ // than the current estimated bandwidth
+
+
+ var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
+ return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
+ });
+ var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
+ // and then take the very first element
+
+ var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
+ return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
+ })[0]; // if we're not going to limit renditions by player size, make an early decision.
+
+ if (limitRenditionByPlayerDimensions === false) {
+ var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
+
+ if (_chosenRep && _chosenRep.playlist) {
+ var type = 'sortedPlaylistReps';
+
+ if (bandwidthBestRep) {
+ type = 'bandwidthBestRep';
+ }
+
+ if (enabledPlaylistReps[0]) {
+ type = 'enabledPlaylistReps';
+ }
+
+ logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
+ return _chosenRep.playlist;
+ }
+
+ logFn('could not choose a playlist with options', options);
+ return null;
+ } // filter out playlists without resolution information
+
+
+ var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
+ return rep.width && rep.height;
+ }); // sort variants by resolution
+
+ stableSort(haveResolution, function (left, right) {
+ return left.width - right.width;
+ }); // if we have the exact resolution as the player use it
+
+ var resolutionBestRepList = haveResolution.filter(function (rep) {
+ return rep.width === playerWidth && rep.height === playerHeight;
+ });
+ highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that has the exact resolution
+
+ var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
+ return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
+ })[0];
+ var resolutionPlusOneList;
+ var resolutionPlusOneSmallest;
+ var resolutionPlusOneRep; // find the smallest variant that is larger than the player
+ // if there is no exact resolution match
+
+ if (!resolutionBestRep) {
+ resolutionPlusOneList = haveResolution.filter(function (rep) {
+ return rep.width > playerWidth || rep.height > playerHeight;
+ }); // find all the variants that have the same smallest resolution
+
+ resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
+ return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
+ }); // ensure that we also pick the highest bandwidth variant that
+ // is just-larger-than the video player
+
+ highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
+ resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
+ return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
+ })[0];
+ }
+
+ var leastPixelDiffRep; // If this selector proves to be better than others,
+ // resolutionPlusOneRep and resolutionBestRep and all
+ // the code involving them should be removed.
+
+ if (masterPlaylistController.experimentalLeastPixelDiffSelector) {
+ // find the variant that is closest to the player's pixel size
+ var leastPixelDiffList = haveResolution.map(function (rep) {
+ rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
+ return rep;
+ }); // get the highest bandwidth, closest resolution playlist
+
+ stableSort(leastPixelDiffList, function (left, right) {
+ // sort by highest bandwidth if pixelDiff is the same
+ if (left.pixelDiff === right.pixelDiff) {
+ return right.bandwidth - left.bandwidth;
+ }
+
+ return left.pixelDiff - right.pixelDiff;
+ });
+ leastPixelDiffRep = leastPixelDiffList[0];
+ } // fallback chain of variants
+
+
+ var chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
+
+ if (chosenRep && chosenRep.playlist) {
+ var _type = 'sortedPlaylistReps';
+
+ if (leastPixelDiffRep) {
+ _type = 'leastPixelDiffRep';
+ } else if (resolutionPlusOneRep) {
+ _type = 'resolutionPlusOneRep';
+ } else if (resolutionBestRep) {
+ _type = 'resolutionBestRep';
+ } else if (bandwidthBestRep) {
+ _type = 'bandwidthBestRep';
+ } else if (enabledPlaylistReps[0]) {
+ _type = 'enabledPlaylistReps';
+ }
+
+ logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
+ return chosenRep.playlist;
+ }
+
+ logFn('could not choose a playlist with options', options);
+ return null;
+};
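+
+// Illustrative sketch only, not part of the upstream bundle: with dimension
+// limiting turned off, simpleSelector reduces to "highest rendition whose
+// bandwidth * Config.BANDWIDTH_VARIANCE fits under the measured bandwidth".
+// `exampleMaster` and `exampleMpc` are hypothetical stand-ins for a parsed
+// master playlist and the masterPlaylistController.
+var _exampleSimpleSelector = function _exampleSimpleSelector(exampleMaster, exampleMpc) {
+ // e.g. with renditions of 300kbps / 800kbps / 2500kbps, a measured bandwidth
+ // of 1e6 and the default variance of 1.2, the 800kbps rendition is chosen
+ // (800000 * 1.2 < 1e6, while 2500000 * 1.2 is not).
+ return simpleSelector(exampleMaster, 1e6, 640, 360, false, exampleMpc);
+};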
+/**
+ * Chooses the appropriate media playlist based on the most recent
+ * bandwidth estimate and the player size.
+ *
+ * Expects to be called within the context of an instance of VhsHandler
+ *
+ * @return {Playlist} the highest bitrate playlist less than the
+ * currently detected bandwidth, accounting for some amount of
+ * bandwidth variance
+ */
+
+
+var lastBandwidthSelector = function lastBandwidthSelector() {
+ var pixelRatio = this.useDevicePixelRatio ? window__default['default'].devicePixelRatio || 1 : 1;
+ return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
+};
+/**
+ * Chooses the appropriate media playlist based on an
+ * exponential-weighted moving average of the bandwidth after
+ * filtering for player size.
+ *
+ * Expects to be called within the context of an instance of VhsHandler
+ *
+ * @param {number} decay - a number between 0 and 1. Higher values of
+ * this parameter will cause previous bandwidth estimates to lose
+ * significance more quickly.
+ * @return {Function} a function which can be invoked to create a new
+ * playlist selector function.
+ * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
+ */
+
+
+var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
+ var average = -1;
+ var lastSystemBandwidth = -1;
+
+ if (decay < 0 || decay > 1) {
+ throw new Error('Moving average bandwidth decay must be between 0 and 1.');
+ }
+
+ return function () {
+ var pixelRatio = this.useDevicePixelRatio ? window__default['default'].devicePixelRatio || 1 : 1;
+
+ if (average < 0) {
+ average = this.systemBandwidth;
+ lastSystemBandwidth = this.systemBandwidth;
+ } // stop the average value from decaying for every 250ms
+ // when the systemBandwidth is constant
+ // and
+ // stop average from setting to a very low value when the
+ // systemBandwidth becomes 0 in case of chunk cancellation
+
+
+ if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
+ average = decay * this.systemBandwidth + (1 - decay) * average;
+ lastSystemBandwidth = this.systemBandwidth;
+ }
+
+ return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
+ };
+};
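+
+// Illustrative numbers only, not part of the upstream bundle: with a decay of
+// 0.5 the moving average reacts to each new systemBandwidth sample as
+// average = decay * sample + (1 - decay) * previousAverage.
+var _exampleMovingAverage = function _exampleMovingAverage() {
+ var decay = 0.5;
+ var average = 4e6; // seeded from the first systemBandwidth sample
+ var nextSample = 2e6;
+
+ average = decay * nextSample + (1 - decay) * average; // 3e6
+ return average;
+};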
+/**
+ * Chooses the appropriate media playlist based on the potential to rebuffer
+ *
+ * @param {Object} settings
+ * Object of information required to use this selector
+ * @param {Object} settings.master
+ * Object representation of the master manifest
+ * @param {number} settings.currentTime
+ * The current time of the player
+ * @param {number} settings.bandwidth
+ * Current measured bandwidth
+ * @param {number} settings.duration
+ * Duration of the media
+ * @param {number} settings.segmentDuration
+ * Segment duration to be used in round trip time calculations
+ * @param {number} settings.timeUntilRebuffer
+ * Time left in seconds until the player has to rebuffer
+ * @param {number} settings.currentTimeline
+ * The current timeline segments are being loaded from
+ * @param {SyncController} settings.syncController
+ * SyncController for determining if we have a sync point for a given playlist
+ * @return {Object|null}
+ * {Object} return.playlist
+ * The highest bandwidth playlist with the least amount of rebuffering
+ * {Number} return.rebufferingImpact
+ * The amount of time in seconds switching to this playlist will rebuffer. A
+ * negative value means that switching will cause zero rebuffering.
+ */
+
+
+var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
+ var master = settings.master,
+ currentTime = settings.currentTime,
+ bandwidth = settings.bandwidth,
+ duration = settings.duration,
+ segmentDuration = settings.segmentDuration,
+ timeUntilRebuffer = settings.timeUntilRebuffer,
+ currentTimeline = settings.currentTimeline,
+ syncController = settings.syncController; // filter out any playlists that have been excluded due to
+ // incompatible configurations
+
+ var compatiblePlaylists = master.playlists.filter(function (playlist) {
+ return !Playlist.isIncompatible(playlist);
+ }); // filter out any playlists that have been disabled manually through the representations
+ // api or blacklisted temporarily due to playback errors.
+
+ var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
+
+ if (!enabledPlaylists.length) {
+ // if there are no enabled playlists, then they have all been blacklisted or disabled
+ // by the user through the representations api. In this case, ignore blacklisting and
+ // fallback to what the user wants by using playlists the user has not disabled.
+ enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
+ return !Playlist.isDisabled(playlist);
+ });
+ }
+
+ var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
+ var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
+ var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
+ // sync request first. This will double the request time
+
+ var numRequests = syncPoint ? 1 : 2;
+ var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
+ var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
+ return {
+ playlist: playlist,
+ rebufferingImpact: rebufferingImpact
+ };
+ });
+ var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
+ return estimate.rebufferingImpact <= 0;
+ }); // Sort by bandwidth DESC
+
+ stableSort(noRebufferingPlaylists, function (a, b) {
+ return comparePlaylistBandwidth(b.playlist, a.playlist);
+ });
+
+ if (noRebufferingPlaylists.length) {
+ return noRebufferingPlaylists[0];
+ }
+
+ stableSort(rebufferingEstimates, function (a, b) {
+ return a.rebufferingImpact - b.rebufferingImpact;
+ });
+ return rebufferingEstimates[0] || null;
+};
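+
+// Illustrative numbers only, not part of the upstream bundle: how the
+// rebufferingImpact estimate above behaves. With a 4s per-request estimate,
+// no sync point (so two requests) and 10s of buffer left, the impact is
+// 4 * 2 - 10 = -2, i.e. switching is expected to cause no rebuffering.
+var _exampleRebufferingImpact = function _exampleRebufferingImpact() {
+ var requestTimeEstimate = 4; // seconds, as from Playlist.estimateSegmentRequestTime
+ var numRequests = 2; // no sync point, so an extra request is needed first
+ var timeUntilRebuffer = 10; // seconds of buffer left
+
+ return requestTimeEstimate * numRequests - timeUntilRebuffer; // -2
+};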
+/**
+ * Chooses the appropriate media playlist, which in this case is the lowest bitrate
+ * one with video. If no renditions with video exist, return the lowest audio rendition.
+ *
+ * Expects to be called within the context of an instance of VhsHandler
+ *
+ * @return {Object|null}
+ * {Object} return.playlist
+ * The lowest bitrate playlist that contains a video codec. If no such rendition
+ * exists pick the lowest audio rendition.
+ */
+
+
+var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
+ var _this = this; // filter out any playlists that have been excluded due to
+ // incompatible configurations or playback errors
+
+
+ var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
+
+ stableSort(playlists, function (a, b) {
+ return comparePlaylistBandwidth(a, b);
+ }); // Parse and assume that playlists with no video codec have no video
+ // (this is not necessarily true, although it is generally true).
+ //
+ // If an entire manifest has no valid videos everything will get filtered
+ // out.
+
+ var playlistsWithVideo = playlists.filter(function (playlist) {
+ return !!codecsForPlaylist(_this.playlists.master, playlist).video;
+ });
+ return playlistsWithVideo[0] || null;
+};
+/**
+ * Combine all segments into a single Uint8Array
+ *
+ * @param {Object} segmentObj
+ * @return {Uint8Array} concatenated bytes
+ * @private
+ */
+
+
+var concatSegments = function concatSegments(segmentObj) {
+ var offset = 0;
+ var tempBuffer;
+
+ if (segmentObj.bytes) {
+ tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
+
+ segmentObj.segments.forEach(function (segment) {
+ tempBuffer.set(segment, offset);
+ offset += segment.byteLength;
+ });
+ }
+
+ return tempBuffer;
+};
+/**
+ * @file text-tracks.js
+ */
+
+/**
+ * Create captions text tracks on video.js if they do not exist
+ *
+ * @param {Object} inbandTextTracks a reference to current inbandTextTracks
+ * @param {Object} tech the video.js tech
+ * @param {Object} captionStream the caption stream to create
+ * @private
+ */
+
+
+var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
+ if (!inbandTextTracks[captionStream]) {
+ tech.trigger({
+ type: 'usage',
+ name: 'vhs-608'
+ });
+ tech.trigger({
+ type: 'usage',
+ name: 'hls-608'
+ });
+ var instreamId = captionStream; // we need to translate SERVICEn for 708 to how mux.js currently labels them
+
+ if (/^cc708_/.test(captionStream)) {
+ instreamId = 'SERVICE' + captionStream.split('_')[1];
+ }
+
+ var track = tech.textTracks().getTrackById(instreamId);
+
+ if (track) {
+ // Reuse an existing track with a CC# id because this was
+ // very likely created by videojs-contrib-hls from information
+ // in the m3u8 for us to use
+ inbandTextTracks[captionStream] = track;
+ } else {
+ // This section gets called when we have caption services that aren't specified in the manifest.
+ // Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
+ var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
+ var label = captionStream;
+ var language = captionStream;
+ var def = false;
+ var captionService = captionServices[instreamId];
+
+ if (captionService) {
+ label = captionService.label;
+ language = captionService.language;
+ def = captionService["default"];
+ } // Otherwise, create a track with the default `CC#` label and
+ // without a language
+
+
+ inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
+ kind: 'captions',
+ id: instreamId,
+ // TODO: investigate why this doesn't seem to turn the caption on by default
+ "default": def,
+ label: label,
+ language: language
+ }, false).track;
+ }
+ }
+};
+/**
+ * Add caption text track data to a source handler given an array of captions
+ *
+ * @param {Object}
+ * @param {Object} inbandTextTracks the inband text tracks
+ * @param {number} timestampOffset the timestamp offset of the source buffer
+ * @param {Array} captionArray an array of caption data
+ * @private
+ */
+
+
+var addCaptionData = function addCaptionData(_ref) {
+ var inbandTextTracks = _ref.inbandTextTracks,
+ captionArray = _ref.captionArray,
+ timestampOffset = _ref.timestampOffset;
+
+ if (!captionArray) {
+ return;
+ }
+
+ var Cue = window__default['default'].WebKitDataCue || window__default['default'].VTTCue;
+ captionArray.forEach(function (caption) {
+ var track = caption.stream;
+ inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
+ });
+};
+/**
+ * Define properties on a cue for backwards compatibility,
+ * but warn the user that the way they are using it
+ * is deprecated and will be removed at a later date.
+ *
+ * @param {Cue} cue the cue to add the properties on
+ * @private
+ */
+
+
+var deprecateOldCue = function deprecateOldCue(cue) {
+ Object.defineProperties(cue.frame, {
+ id: {
+ get: function get() {
+ videojs.log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
+ return cue.value.key;
+ }
+ },
+ value: {
+ get: function get() {
+ videojs.log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
+ return cue.value.data;
+ }
+ },
+ privateData: {
+ get: function get() {
+ videojs.log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
+ return cue.value.data;
+ }
+ }
+ });
+};
+/**
+ * Add metadata text track data to a source handler given an array of metadata
+ *
+ * @param {Object}
+ * @param {Object} inbandTextTracks the inband text tracks
+ * @param {Array} metadataArray an array of meta data
+ * @param {number} timestampOffset the timestamp offset of the source buffer
+ * @param {number} videoDuration the duration of the video
+ * @private
+ */
+
+
+var addMetadata = function addMetadata(_ref2) {
+ var inbandTextTracks = _ref2.inbandTextTracks,
+ metadataArray = _ref2.metadataArray,
+ timestampOffset = _ref2.timestampOffset,
+ videoDuration = _ref2.videoDuration;
+
+ if (!metadataArray) {
+ return;
+ }
+
+ var Cue = window__default['default'].WebKitDataCue || window__default['default'].VTTCue;
+ var metadataTrack = inbandTextTracks.metadataTrack_;
+
+ if (!metadataTrack) {
+ return;
+ }
+
+ metadataArray.forEach(function (metadata) {
+ var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
+ // ignore this bit of metadata.
+ // This likely occurs when you have a non-timed ID3 tag like TIT2,
+ // which is the "Title/Songname/Content description" frame
+
+ if (typeof time !== 'number' || window__default['default'].isNaN(time) || time < 0 || !(time < Infinity)) {
+ return;
+ }
+
+ metadata.frames.forEach(function (frame) {
+ var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
+ cue.frame = frame;
+ cue.value = frame;
+ deprecateOldCue(cue);
+ metadataTrack.addCue(cue);
+ });
+ });
+
+ if (!metadataTrack.cues || !metadataTrack.cues.length) {
+ return;
+ } // Updating the metadata cues so that
+ // the endTime of each cue is the startTime of the next cue
+ // the endTime of last cue is the duration of the video
+
+
+ var cues = metadataTrack.cues;
+ var cuesArray = []; // Create a copy of the TextTrackCueList...
+ // ...disregarding cues with a falsy value
+
+ for (var i = 0; i < cues.length; i++) {
+ if (cues[i]) {
+ cuesArray.push(cues[i]);
+ }
+ } // Group cues by their startTime value
+
+
+ var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
+ var timeSlot = obj[cue.startTime] || [];
+ timeSlot.push(cue);
+ obj[cue.startTime] = timeSlot;
+ return obj;
+ }, {}); // Sort startTimes by ascending order
+
+ var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
+ return Number(a) - Number(b);
+ }); // Map each cue group's endTime to the next group's startTime
+
+ sortedStartTimes.forEach(function (startTime, idx) {
+ var cueGroup = cuesGroupedByStartTime[startTime];
+ var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime to the next group's startTime
+
+ cueGroup.forEach(function (cue) {
+ cue.endTime = nextTime;
+ });
+ });
+};
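+
+// Illustrative sketch only, not part of the upstream bundle: the endTime
+// fix-up above, simplified to one cue per start time. Cues starting at 0s and
+// 10s on a 30s video end up with endTimes of 10s and 30s respectively.
+var _exampleMetadataEndTimes = function _exampleMetadataEndTimes() {
+ var videoDuration = 30;
+ var cues = [{ startTime: 0, endTime: 0 }, { startTime: 10, endTime: 10 }];
+ var startTimes = [0, 10];
+
+ startTimes.forEach(function (startTime, idx) {
+ // NaN || videoDuration falls through to videoDuration for the last cue
+ cues[idx].endTime = Number(startTimes[idx + 1]) || videoDuration;
+ });
+ return cues;
+};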
+/**
+ * Create metadata text track on video.js if it does not exist
+ *
+ * @param {Object} inbandTextTracks a reference to current inbandTextTracks
+ * @param {string} dispatchType the inband metadata track dispatch type
+ * @param {Object} tech the video.js tech
+ * @private
+ */
+
+
+var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
+ if (inbandTextTracks.metadataTrack_) {
+ return;
+ }
+
+ inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
+ kind: 'metadata',
+ label: 'Timed Metadata'
+ }, false).track;
+ inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
+};
+/**
+ * Remove cues from a track on video.js.
+ *
+ * @param {Double} start start of where we should remove the cue
+ * @param {Double} end end of where we should remove the cue
+ * @param {Object} track the text track to remove the cues from
+ * @private
+ */
+
+
+var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
+ var i;
+ var cue;
+
+ if (!track) {
+ return;
+ }
+
+ if (!track.cues) {
+ return;
+ }
+
+ i = track.cues.length;
+
+ while (i--) {
+ cue = track.cues[i]; // Remove any cue within the provided start and end time
+
+ if (cue.startTime >= start && cue.endTime <= end) {
+ track.removeCue(cue);
+ }
+ }
+};
+/**
+ * Remove duplicate cues from a track on video.js (a cue is considered a
+ * duplicate if it has the same time interval and text as another)
+ *
+ * @param {Object} track the text track to remove the duplicate cues from
+ * @private
+ */
+
+
+var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
+ var cues = track.cues;
+
+ if (!cues) {
+ return;
+ }
+
+ for (var i = 0; i < cues.length; i++) {
+ var duplicates = [];
+ var occurrences = 0;
+
+ for (var j = 0; j < cues.length; j++) {
+ if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
+ occurrences++;
+
+ if (occurrences > 1) {
+ duplicates.push(cues[j]);
+ }
+ }
+ }
+
+ if (duplicates.length) {
+ duplicates.forEach(function (dupe) {
+ return track.removeCue(dupe);
+ });
+ }
+ }
+};
+/**
+ * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
+ * front of current time.
+ *
+ * @param {Array} buffer
+ * The current buffer of gop information
+ * @param {number} currentTime
+ * The current time
+ * @param {Double} mapping
+ * Offset to map display time to stream presentation time
+ * @return {Array}
+ * List of gops considered safe to append over
+ */
+
+
+var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
+ if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
+ return [];
+ } // pts value for current time + 3 seconds to give a bit more wiggle room
+
+
+ var currentTimePts = Math.ceil((currentTime - mapping + 3) * clock.ONE_SECOND_IN_TS);
+ var i;
+
+ for (i = 0; i < buffer.length; i++) {
+ if (buffer[i].pts > currentTimePts) {
+ break;
+ }
+ }
+
+ return buffer.slice(i);
+};
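+
+// Illustrative numbers only, not part of the upstream bundle: with the 90kHz
+// transport stream clock (clock.ONE_SECOND_IN_TS), a currentTime of 10s and a
+// mapping of 0, only gops whose pts exceeds (10 + 3) * 90000 = 1170000 are
+// considered safe to align with.
+var _exampleGopsSafeToAlignWith = function _exampleGopsSafeToAlignWith() {
+ var buffer = [{ pts: 900000 }, { pts: 1170001 }, { pts: 1260000 }];
+
+ // returns the last two entries; the first is within 3s of the playhead
+ return gopsSafeToAlignWith(buffer, 10, 0);
+};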
+/**
+ * Appends gop information (timing and byteLength) received by the transmuxer for the
+ * gops appended in the last call to appendBuffer
+ *
+ * @param {Array} buffer
+ * The current buffer of gop information
+ * @param {Array} gops
+ * List of new gop information
+ * @param {boolean} replace
+ * If true, replace the buffer with the new gop information. If false, append the
+ * new gop information to the buffer in the right location of time.
+ * @return {Array}
+ * Updated list of gop information
+ */
+
+
+var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
+ if (!gops.length) {
+ return buffer;
+ }
+
+ if (replace) {
+ // If we are in safe append mode, then completely overwrite the gop buffer
+ // with the most recent appended data. This will make sure that when appending
+ // future segments, we only try to align with gops that are both ahead of current
+ // time and in the last segment appended.
+ return gops.slice();
+ }
+
+ var start = gops[0].pts;
+ var i = 0;
+
+ for (i; i < buffer.length; i++) {
+ if (buffer[i].pts >= start) {
+ break;
+ }
+ }
+
+ return buffer.slice(0, i).concat(gops);
+};
+/**
+ * Removes gop information in buffer that overlaps with provided start and end
+ *
+ * @param {Array} buffer
+ * The current buffer of gop information
+ * @param {Double} start
+ * position to start the remove at
+ * @param {Double} end
+ * position to end the remove at
+ * @param {Double} mapping
+ * Offset to map display time to stream presentation time
+ */
+
+
+var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
+ var startPts = Math.ceil((start - mapping) * clock.ONE_SECOND_IN_TS);
+ var endPts = Math.ceil((end - mapping) * clock.ONE_SECOND_IN_TS);
+ var updatedBuffer = buffer.slice();
+ var i = buffer.length;
+
+ while (i--) {
+ if (buffer[i].pts <= endPts) {
+ break;
+ }
+ }
+
+ if (i === -1) {
+ // no removal because end of remove range is before start of buffer
+ return updatedBuffer;
+ }
+
+ var j = i + 1;
+
+ while (j--) {
+ if (buffer[j].pts <= startPts) {
+ break;
+ }
+ } // clamp remove range start to 0 index
+
+
+ j = Math.max(j, 0);
+ updatedBuffer.splice(j, i - j + 1);
+ return updatedBuffer;
+};
+
+var shallowEqual = function shallowEqual(a, b) {
+ // if both are undefined
+ // or one or the other is undefined
+ // they are not equal
+ if (!a && !b || !a && b || a && !b) {
+ return false;
+ } // they are the same object and thus, equal
+
+
+ if (a === b) {
+ return true;
+ } // sort keys so we can make sure they have
+ // all the same keys later.
+
+
+ var akeys = Object.keys(a).sort();
+ var bkeys = Object.keys(b).sort(); // different number of keys, not equal
+
+ if (akeys.length !== bkeys.length) {
+ return false;
+ }
+
+ for (var i = 0; i < akeys.length; i++) {
+ var key = akeys[i]; // different sorted keys, not equal
+
+ if (key !== bkeys[i]) {
+ return false;
+ } // different values, not equal
+
+
+ if (a[key] !== b[key]) {
+ return false;
+ }
+ }
+
+ return true;
+}; // https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
+
+
+var QUOTA_EXCEEDED_ERR = 22;
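+
+// Illustrative sketch only, not part of the upstream bundle: shallowEqual
+// (defined above) compares a single level of own keys, so nested objects are
+// only equal when they are the same reference.
+var _exampleShallowEqual = function _exampleShallowEqual() {
+ var equal = shallowEqual({ hasAudio: true, hasVideo: true }, { hasVideo: true, hasAudio: true }); // true
+ var notEqual = shallowEqual({ codecs: { audio: 'mp4a' } }, { codecs: { audio: 'mp4a' } }); // false, different object references
+ return equal && !notEqual; // true
+};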
+/**
+ * The segment loader has no recourse except to fetch a segment in the
+ * current playlist and use the internal timestamps in that segment to
+ * generate a syncPoint. This function returns a good candidate index
+ * for that process.
+ *
+ * @param {Array} segments - the segments array from a playlist.
+ * @return {number} An index of a segment from the playlist to load
+ */
+
+var getSyncSegmentCandidate = function getSyncSegmentCandidate(currentTimeline, segments, targetTime) {
+ segments = segments || [];
+ var timelineSegments = [];
+ var time = 0;
+
+ for (var i = 0; i < segments.length; i++) {
+ var segment = segments[i];
+
+ if (currentTimeline === segment.timeline) {
+ timelineSegments.push(i);
+ time += segment.duration;
+
+ if (time > targetTime) {
+ return i;
+ }
+ }
+ }
+
+ if (timelineSegments.length === 0) {
+ return 0;
+ } // default to the last timeline segment
+
+
+ return timelineSegments[timelineSegments.length - 1];
+}; // In the event of a quota exceeded error, keep at least one second of back buffer. This
+// number was arbitrarily chosen and may be updated in the future, but seemed reasonable
+// as a start to prevent any potential issues with removing content too close to the
+// playhead.
+
+
+var MIN_BACK_BUFFER = 1; // in seconds
+
+// in ms
+var CHECK_BUFFER_DELAY = 500;
+
+var finite = function finite(num) {
+ return typeof num === 'number' && isFinite(num);
+}; // With most content hovering around 30fps, if a segment has a duration less than a half
+// frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
+// not accurately reflect the rest of the content.
+
+
+var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
+
+var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
+ // Although these checks should most likely cover non 'main' types, for now it narrows
+ // the scope of our checks.
+ if (loaderType !== 'main' || !startingMedia || !trackInfo) {
+ return null;
+ }
+
+ if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
+ return 'Neither audio nor video found in segment.';
+ }
+
+ if (startingMedia.hasVideo && !trackInfo.hasVideo) {
+ return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
+ }
+
+ if (!startingMedia.hasVideo && trackInfo.hasVideo) {
+ return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
+ }
+
+ return null;
+};
+/**
+ * Calculates a time value that is safe to remove from the back buffer without interrupting
+ * playback.
+ *
+ * @param {TimeRange} seekable
+ * The current seekable range
+ * @param {number} currentTime
+ * The current time of the player
+ * @param {number} targetDuration
+ * The target duration of the current playlist
+ * @return {number}
+ * Time that is safe to remove from the back buffer without interrupting playback
+ */
+
+
+var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
+ // 30 seconds before the playhead provides a safe default for trimming.
+ //
+ // Choosing a reasonable default is particularly important for high bitrate content and
+ // VOD videos/live streams with large windows, as the buffer may end up overfilled and
+ // throw an APPEND_BUFFER_ERR.
+ var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
+
+ if (seekable.length) {
+ // Some live playlists may have a shorter window of content than the full allowed back
+ // buffer. For these playlists, don't save content that's no longer within the window.
+ trimTime = Math.max(trimTime, seekable.start(0));
+ } // Don't remove within target duration of the current time to avoid the possibility of
+ // removing the GOP currently being played, as removing it can cause playback stalls.
+
+
+ var maxTrimTime = currentTime - targetDuration;
+ return Math.min(maxTrimTime, trimTime);
+};
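+
+// Illustrative numbers only, not part of the upstream bundle, assuming the
+// default Config.BACK_BUFFER_LENGTH of 30: with the playhead at 40s, a
+// seekable start of 20s and a 6s target duration, the function above yields
+// min(40 - 6, max(40 - 30, 20)) = 20s.
+var _exampleSafeBackBufferTrimTime = function _exampleSafeBackBufferTrimTime() {
+ var currentTime = 40;
+ var targetDuration = 6;
+ var seekableStart = 20;
+
+ var trimTime = Math.max(currentTime - 30, seekableStart); // 20
+ return Math.min(currentTime - targetDuration, trimTime); // 20
+};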
+
+var segmentInfoString = function segmentInfoString(segmentInfo) {
+ var startOfSegment = segmentInfo.startOfSegment,
+ duration = segmentInfo.duration,
+ segment = segmentInfo.segment,
+ part = segmentInfo.part,
+ _segmentInfo$playlist = segmentInfo.playlist,
+ seq = _segmentInfo$playlist.mediaSequence,
+ id = _segmentInfo$playlist.id,
+ _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
+ segments = _segmentInfo$playlist2 === void 0 ? [] : _segmentInfo$playlist2,
+ index = segmentInfo.mediaIndex,
+ partIndex = segmentInfo.partIndex,
+ timeline = segmentInfo.timeline;
+ var segmentLen = segments.length - 1;
+ var selection = 'mediaIndex/partIndex increment';
+
+ if (segmentInfo.getMediaInfoForTime) {
+ selection = "getMediaInfoForTime (" + segmentInfo.getMediaInfoForTime + ")";
+ } else if (segmentInfo.isSyncRequest) {
+ selection = 'getSyncSegmentCandidate (isSyncRequest)';
+ }
+
+ if (segmentInfo.independent) {
+ selection += " with independent " + segmentInfo.independent;
+ }
+
+ var hasPartIndex = typeof partIndex === 'number';
+ var name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
+ var zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
+ preloadSegment: segment
+ }) - 1 : 0;
+ return name + " [" + (seq + index) + "/" + (seq + segmentLen) + "]" + (hasPartIndex ? " part [" + partIndex + "/" + zeroBasedPartCount + "]" : '') + (" segment start/end [" + segment.start + " => " + segment.end + "]") + (hasPartIndex ? " part start/end [" + part.start + " => " + part.end + "]" : '') + (" startOfSegment [" + startOfSegment + "]") + (" duration [" + duration + "]") + (" timeline [" + timeline + "]") + (" selected by [" + selection + "]") + (" playlist [" + id + "]");
+};
+
+var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
+ return mediaType + "TimingInfo";
+};
+/**
+ * Returns the timestamp offset to use for the segment.
+ *
+ * @param {number} segmentTimeline
+ * The timeline of the segment
+ * @param {number} currentTimeline
+ * The timeline currently being followed by the loader
+ * @param {number} startOfSegment
+ * The estimated segment start
+ * @param {TimeRange[]} buffered
+ * The loader's buffer
+ * @param {boolean} overrideCheck
+ * If true, no checks are made to see if the timestamp offset value should be set;
+ * it is set directly.
+ *
+ * @return {number|null}
+ * Either a number representing a new timestamp offset, or null if the segment is
+ * part of the same timeline
+ */
+
+
+var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
+ var segmentTimeline = _ref.segmentTimeline,
+ currentTimeline = _ref.currentTimeline,
+ startOfSegment = _ref.startOfSegment,
+ buffered = _ref.buffered,
+ overrideCheck = _ref.overrideCheck; // Check to see if we are crossing a discontinuity to see if we need to set the
+ // timestamp offset on the transmuxer and source buffer.
+ //
+ // Previously, we changed the timestampOffset if the start of this segment was less than
+ // the currently set timestampOffset, but this isn't desirable as it can produce bad
+ // behavior, especially around long running live streams.
+
+ if (!overrideCheck && segmentTimeline === currentTimeline) {
+ return null;
+ } // When changing renditions, it's possible to request a segment on an older timeline. For
+ // instance, given two renditions with the following:
+ //
+ // #EXTINF:10
+ // segment1
+ // #EXT-X-DISCONTINUITY
+ // #EXTINF:10
+ // segment2
+ // #EXTINF:10
+ // segment3
+ //
+ // And the current player state:
+ //
+ // current time: 8
+ // buffer: 0 => 20
+ //
+ // The next segment on the current rendition would be segment3, filling the buffer from
+ // 20s onwards. However, if a rendition switch happens after segment2 was requested,
+ // then the next segment to be requested will be segment1 from the new rendition in
+ // order to fill time 8 and onwards. Using the buffered end would result in repeated
+ // content (since it would position segment1 of the new rendition starting at 20s). This
+ // case can be identified when the new segment's timeline is a prior value. Instead of
+ // using the buffered end, the startOfSegment can be used, which, hopefully, will be
+ // more accurate to the actual start time of the segment.
+
+
+ if (segmentTimeline < currentTimeline) {
+ return startOfSegment;
+ } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
+ // value uses the end of the last segment if it is available. While this value
+ // should often be correct, it's better to rely on the buffered end, as the new
+ // content post discontinuity should line up with the buffered end as if it were
+ // time 0 for the new content.
+
+
+ return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
+};
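+
+// Illustrative sketch only, not part of the upstream bundle: two of the
+// outcomes documented above. Staying on the same timeline without an override
+// returns null; switching to an older timeline falls back to the estimated
+// segment start. The empty buffered object is a TimeRanges-like stand-in.
+var _exampleTimestampOffsetForSegment = function _exampleTimestampOffsetForSegment() {
+ // null: still on the same timeline
+ var sameTimeline = timestampOffsetForSegment({
+ segmentTimeline: 1,
+ currentTimeline: 1,
+ startOfSegment: 20,
+ buffered: { length: 0 },
+ overrideCheck: false
+ });
+
+ // 8: a rendition switch landed on an earlier timeline, so use startOfSegment
+ var olderTimeline = timestampOffsetForSegment({
+ segmentTimeline: 0,
+ currentTimeline: 1,
+ startOfSegment: 8,
+ buffered: { length: 0 },
+ overrideCheck: false
+ });
+
+ return { sameTimeline: sameTimeline, olderTimeline: olderTimeline };
+};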
+/**
+ * Returns whether or not the loader should wait for a timeline change from the timeline
+ * change controller before processing the segment.
+ *
+ * Primary timing in VHS goes by video. This is different from most media players, as
+ * audio is more often used as the primary timing source. For the foreseeable future, VHS
+ * will continue to use video as the primary timing source, due to the current logic and
+ * expectations built around it.
+
+ * Since the timing follows video, in order to maintain sync, the video loader is
+ * responsible for setting both audio and video source buffer timestamp offsets.
+ *
+ * Setting different values for audio and video source buffers could lead to
+ * desyncing. The following examples demonstrate some of the situations where this
+ * distinction is important. Note that all of these cases involve demuxed content. When
+ * content is muxed, the audio and video are packaged together, therefore syncing
+ * separate media playlists is not an issue.
+ *
+ * CASE 1: Audio prepares to load a new timeline before video:
+ *
+ * Timeline: 0 1
+ * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
+ * Audio Loader: ^
+ * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
+ * Video Loader ^
+ *
+ * In the above example, the audio loader is preparing to load the 6th segment, the first
+ * after a discontinuity, while the video loader is still loading the 5th segment, before
+ * the discontinuity.
+ *
+ * If the audio loader goes ahead and loads and appends the 6th segment before the video
+ * loader crosses the discontinuity, then when appended, the 6th audio segment will use
+ * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
+ * the audio loader must provide the audioAppendStart value to trim the content in the
+ * transmuxer, and that value relies on the audio timestamp offset. Since the audio
+ * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
+ * segment until that value is provided.
+ *
+ * CASE 2: Video prepares to load a new timeline before audio:
+ *
+ * Timeline: 0 1
+ * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
+ * Audio Loader: ^
+ * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
+ * Video Loader ^
+ *
+ * In the above example, the video loader is preparing to load the 6th segment, the first
+ * after a discontinuity, while the audio loader is still loading the 5th segment, before
+ * the discontinuity.
+ *
+ * If the video loader goes ahead and loads and appends the 6th segment, then once the
+ * segment is loaded and processed, both the video and audio timestamp offsets will be
+ * set, since video is used as the primary timing source. This is to ensure content lines
+ * up appropriately, as any modifications to the video timing are reflected by audio when
+ * the video loader sets the audio and video timestamp offsets to the same value. However,
+ * setting the timestamp offset for audio before audio has had a chance to change
+ * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
+ * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
+ *
+ * CASE 3: When seeking, audio prepares to load a new timeline before video
+ *
+ * Timeline: 0 1
+ * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
+ * Audio Loader: ^
+ * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
+ * Video Loader ^
+ *
+ * In the above example, both audio and video loaders are loading segments from timeline
+ * 0, but imagine that the seek originated from timeline 1.
+ *
+ * When seeking to a new timeline, the timestamp offset will be set based on the expected
+ * segment start of the loaded video segment. In order to maintain sync, the audio loader
+ * must wait for the video loader to load its segment and update both the audio and video
+ * timestamp offsets before it may load and append its own segment. This is the case
+ * whether the seek results in a mismatched segment request (e.g., the audio loader
+ * chooses to load segment 3 and the video loader chooses to load segment 4) or the
+ * loaders choose to load the same segment index from each playlist, as the segments may
+ * not be aligned perfectly, even for matching segment indexes.
+ *
+ * @param {Object} timelineChangeController
+ * @param {number} currentTimeline
+ * The timeline currently being followed by the loader
+ * @param {number} segmentTimeline
+ * The timeline of the segment being loaded
+ * @param {('main'|'audio')} loaderType
+ * The loader type
+ * @param {boolean} audioDisabled
+ * Whether the audio is disabled for the loader. This should only be true when the
+ * loader may have muxed audio in its segment, but should not append it, e.g., for
+ * the main loader when an alternate audio playlist is active.
+ *
+ * @return {boolean}
+ * Whether the loader should wait for a timeline change from the timeline change
+ * controller before processing the segment
+ */
+
+
+var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
+ var timelineChangeController = _ref2.timelineChangeController,
+ currentTimeline = _ref2.currentTimeline,
+ segmentTimeline = _ref2.segmentTimeline,
+ loaderType = _ref2.loaderType,
+ audioDisabled = _ref2.audioDisabled;
+
+ if (currentTimeline === segmentTimeline) {
+ return false;
+ }
+
+ if (loaderType === 'audio') {
+ var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
+ type: 'main'
+ }); // Audio loader should wait if:
+ //
+ // * main hasn't had a timeline change yet (thus has not loaded its first segment)
+ // * main hasn't yet changed to the timeline audio is looking to load
+
+ return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
+ } // The main loader only needs to wait for timeline changes if there's demuxed audio.
+ // Otherwise, there's nothing to wait for, since audio would be muxed into the main
+ // loader's segments (or the content is audio/video only and handled by the main
+ // loader).
+
+
+ if (loaderType === 'main' && audioDisabled) {
+ var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
+ type: 'audio'
+ }); // Main loader should wait for the audio loader if audio is not pending a timeline
+ // change to the current timeline.
+ //
+ // Since the main loader is responsible for setting the timestamp offset for both
+ // audio and video, the main loader must wait for audio to be about to change to its
+ // timeline before setting the offset, otherwise, if audio is behind in loading,
+ // segments from the previous timeline would be adjusted by the new timestamp offset.
+ //
+ // This requirement means that video will not cross a timeline until the audio is
+ // about to cross to it, so that way audio and video will always cross the timeline
+ // together.
+ //
+ // In addition to normal timeline changes, these rules also apply to the start of a
+ // stream (going from a non-existent timeline, -1, to timeline 0). It's important
+ // that these rules apply to the first timeline change because if they did not, it's
+ // possible that the main loader will cross two timelines before the audio loader has
+ // crossed one. Logic may be implemented to handle the startup as a special case, but
+ // it's easier to simply treat all timeline changes the same.
+
+ if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
+ return false;
+ }
+
+ return true;
+ }
+
+ return false;
+};
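+
+// For illustration only (hypothetical controller state, not part of this module): in
+// CASE 1 above, an audio loader on timeline 0 that wants to load a timeline 1 segment
+// waits until the main loader has reported a timeline change to 1:
+//
+//   shouldWaitForTimelineChange({
+//     timelineChangeController: controller, // lastTimelineChange({type: 'main'}) => {from: 0, to: 1}
+//     currentTimeline: 0,
+//     segmentTimeline: 1,
+//     loaderType: 'audio'
+//   }); // => false, main has already crossed to timeline 1, so audio may proceed
+//
+// If main had not yet reported that change (undefined, or `to` !== 1), the same call
+// would return true and the audio load would remain queued.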
+
+var mediaDuration = function mediaDuration(timingInfos) {
+ var maxDuration = 0;
+ ['video', 'audio'].forEach(function (type) {
+ var typeTimingInfo = timingInfos[type + "TimingInfo"];
+
+ if (!typeTimingInfo) {
+ return;
+ }
+
+ var start = typeTimingInfo.start,
+ end = typeTimingInfo.end;
+ var duration;
+
+ if (typeof start === 'bigint' || typeof end === 'bigint') {
+ duration = window__default['default'].BigInt(end) - window__default['default'].BigInt(start);
+ } else if (typeof start === 'number' && typeof end === 'number') {
+ duration = end - start;
+ }
+
+ if (typeof duration !== 'undefined' && duration > maxDuration) {
+ maxDuration = duration;
+ }
+ }); // convert back to a number if it is lower than MAX_SAFE_INTEGER
+ // as we only need BigInt when we are above that.
+
+ if (typeof maxDuration === 'bigint' && maxDuration < Number.MAX_SAFE_INTEGER) {
+ maxDuration = Number(maxDuration);
+ }
+
+ return maxDuration;
+};
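+
+// For illustration only (hypothetical timing info): the longest available per-type
+// duration is returned. BigInt is only used when the timing values require it, and the
+// result is converted back to a number when it is below Number.MAX_SAFE_INTEGER.
+//
+//   mediaDuration({
+//     videoTimingInfo: { start: 0, end: 5.005 },
+//     audioTimingInfo: { start: 0, end: 4.991 }
+//   }); // => 5.005 (video is the longer of the two)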
+
+var segmentTooLong = function segmentTooLong(_ref3) {
+ var segmentDuration = _ref3.segmentDuration,
+ maxDuration = _ref3.maxDuration; // 0 duration segments are most likely due to metadata only segments or a lack of
+ // information.
+
+ if (!segmentDuration) {
+ return false;
+ } // For HLS:
+ //
+ // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
+ // The EXTINF duration of each Media Segment in the Playlist
+ // file, when rounded to the nearest integer, MUST be less than or equal
+ // to the target duration; longer segments can trigger playback stalls
+ // or other errors.
+ //
+ // For DASH, the mpd-parser uses the largest reported segment duration as the target
+ // duration. Although that reported duration is occasionally approximate (i.e., not
+ // exact), a strict check may report that a segment is too long more often in DASH.
+
+
+ return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
+};
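+
+// For illustration only (hypothetical durations): the measured duration is rounded
+// before being compared against maxDuration plus the small TIME_FUDGE_FACTOR used above.
+//
+//   segmentTooLong({ segmentDuration: 10.4, maxDuration: 10 }); // => false (rounds to 10)
+//   segmentTooLong({ segmentDuration: 11.2, maxDuration: 10 }); // => true  (rounds to 11)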
+
+var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
+ // Right now we aren't following DASH's timing model exactly, so only perform
+ // this check for HLS content.
+ if (sourceType !== 'hls') {
+ return null;
+ }
+
+ var segmentDuration = mediaDuration({
+ audioTimingInfo: segmentInfo.audioTimingInfo,
+ videoTimingInfo: segmentInfo.videoTimingInfo
+ }); // Don't report if we lack information.
+ //
+ // If the segment has a duration of 0 it is either a lack of information or a
+ // metadata only segment and shouldn't be reported here.
+
+ if (!segmentDuration) {
+ return null;
+ }
+
+ var targetDuration = segmentInfo.playlist.targetDuration;
+ var isSegmentWayTooLong = segmentTooLong({
+ segmentDuration: segmentDuration,
+ maxDuration: targetDuration * 2
+ });
+ var isSegmentSlightlyTooLong = segmentTooLong({
+ segmentDuration: segmentDuration,
+ maxDuration: targetDuration
+ });
+ var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
+
+ if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
+ return {
+ severity: isSegmentWayTooLong ? 'warn' : 'info',
+ message: segmentTooLongMessage
+ };
+ }
+
+ return null;
+};
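+
+// For illustration only (hypothetical segmentInfo objects): with a target duration of
+// 10 seconds, a measured duration of 12 yields an 'info' message, a measured duration
+// of 25 (more than double the target) yields a 'warn', and non-HLS sources return null.
+//
+//   getTroublesomeSegmentDurationMessage(segmentInfoLasting12s, 'hls').severity; // => 'info'
+//   getTroublesomeSegmentDurationMessage(segmentInfoLasting25s, 'hls').severity; // => 'warn'
+//   getTroublesomeSegmentDurationMessage(segmentInfoLasting25s, 'dash');         // => null
+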
+/**
+ * An object that manages segment loading and appending.
+ *
+ * @class SegmentLoader
+ * @param {Object} options required and optional options
+ * @extends videojs.EventTarget
+ */
+
+
+var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
+ _inheritsLoose__default['default'](SegmentLoader, _videojs$EventTarget);
+
+ function SegmentLoader(settings, options) {
+ var _this;
+
+ _this = _videojs$EventTarget.call(this) || this; // check pre-conditions
+
+ if (!settings) {
+ throw new TypeError('Initialization settings are required');
+ }
+
+ if (typeof settings.currentTime !== 'function') {
+ throw new TypeError('No currentTime getter specified');
+ }
+
+ if (!settings.mediaSource) {
+ throw new TypeError('No MediaSource specified');
+ } // public properties
+
+
+ _this.bandwidth = settings.bandwidth;
+ _this.throughput = {
+ rate: 0,
+ count: 0
+ };
+ _this.roundTrip = NaN;
+
+ _this.resetStats_();
+
+ _this.mediaIndex = null;
+ _this.partIndex = null; // private settings
+
+ _this.hasPlayed_ = settings.hasPlayed;
+ _this.currentTime_ = settings.currentTime;
+ _this.seekable_ = settings.seekable;
+ _this.seeking_ = settings.seeking;
+ _this.duration_ = settings.duration;
+ _this.mediaSource_ = settings.mediaSource;
+ _this.vhs_ = settings.vhs;
+ _this.loaderType_ = settings.loaderType;
+ _this.currentMediaInfo_ = void 0;
+ _this.startingMediaInfo_ = void 0;
+ _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
+ _this.goalBufferLength_ = settings.goalBufferLength;
+ _this.sourceType_ = settings.sourceType;
+ _this.sourceUpdater_ = settings.sourceUpdater;
+ _this.inbandTextTracks_ = settings.inbandTextTracks;
+ _this.state_ = 'INIT';
+ _this.timelineChangeController_ = settings.timelineChangeController;
+ _this.shouldSaveSegmentTimingInfo_ = true;
+ _this.parse708captions_ = settings.parse708captions;
+ _this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;
+ _this.captionServices_ = settings.captionServices;
+ _this.experimentalExactManifestTimings = settings.experimentalExactManifestTimings; // private instance variables
+
+ _this.checkBufferTimeout_ = null;
+ _this.error_ = void 0;
+ _this.currentTimeline_ = -1;
+ _this.pendingSegment_ = null;
+ _this.xhrOptions_ = null;
+ _this.pendingSegments_ = [];
+ _this.audioDisabled_ = false;
+ _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller
+
+ _this.gopBuffer_ = [];
+ _this.timeMapping_ = 0;
+ _this.safeAppend_ = videojs.browser.IE_VERSION >= 11;
+ _this.appendInitSegment_ = {
+ audio: true,
+ video: true
+ };
+ _this.playlistOfLastInitSegment_ = {
+ audio: null,
+ video: null
+ };
+ _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
+ // information yet to start the loading process (e.g., if the audio loader wants to
+ // load a segment from the next timeline but the main loader hasn't yet crossed that
+ // timeline), then the load call will be added to the queue until it is ready to be
+ // processed.
+
+ _this.loadQueue_ = [];
+ _this.metadataQueue_ = {
+ id3: [],
+ caption: []
+ };
+ _this.waitingOnRemove_ = false;
+ _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback
+
+ _this.activeInitSegmentId_ = null;
+ _this.initSegments_ = {}; // HLSe playback
+
+ _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
+ _this.keyCache_ = {};
+ _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
+ // between a time in the display time and a segment index within
+ // a playlist
+
+ _this.syncController_ = settings.syncController;
+ _this.syncPoint_ = {
+ segmentIndex: 0,
+ time: 0
+ };
+ _this.transmuxer_ = _this.createTransmuxer_();
+
+ _this.triggerSyncInfoUpdate_ = function () {
+ return _this.trigger('syncinfoupdate');
+ };
+
+ _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);
+
+ _this.mediaSource_.addEventListener('sourceopen', function () {
+ if (!_this.isEndOfStream_()) {
+ _this.ended_ = false;
+ }
+ }); // ...for determining the fetch location
+
+
+ _this.fetchAtBuffer_ = false;
+ _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
+ Object.defineProperty(_assertThisInitialized__default['default'](_this), 'state', {
+ get: function get() {
+ return this.state_;
+ },
+ set: function set(newState) {
+ if (newState !== this.state_) {
+ this.logger_(this.state_ + " -> " + newState);
+ this.state_ = newState;
+ this.trigger('statechange');
+ }
+ }
+ });
+
+ _this.sourceUpdater_.on('ready', function () {
+ if (_this.hasEnoughInfoToAppend_()) {
+ _this.processCallQueue_();
+ }
+ }); // Only the main loader needs to listen for pending timeline changes, as the main
+ // loader should wait for audio to be ready to change its timeline so that both main
+ // and audio timelines change together. For more details, see the
+ // shouldWaitForTimelineChange function.
+
+
+ if (_this.loaderType_ === 'main') {
+ _this.timelineChangeController_.on('pendingtimelinechange', function () {
+ if (_this.hasEnoughInfoToAppend_()) {
+ _this.processCallQueue_();
+ }
+ });
+ } // The main loader only listens on pending timeline changes, but the audio loader,
+ // since its loads follow main, needs to listen on timeline changes. For more details,
+ // see the shouldWaitForTimelineChange function.
+
+
+ if (_this.loaderType_ === 'audio') {
+ _this.timelineChangeController_.on('timelinechange', function () {
+ if (_this.hasEnoughInfoToLoad_()) {
+ _this.processLoadQueue_();
+ }
+
+ if (_this.hasEnoughInfoToAppend_()) {
+ _this.processCallQueue_();
+ }
+ });
+ }
+
+ return _this;
+ }
+
+ var _proto = SegmentLoader.prototype;
+
+ _proto.createTransmuxer_ = function createTransmuxer_() {
+ return segmentTransmuxer.createTransmuxer({
+ remux: false,
+ alignGopsAtEnd: this.safeAppend_,
+ keepOriginalTimestamps: true,
+ parse708captions: this.parse708captions_,
+ captionServices: this.captionServices_
+ });
+ }
+ /**
+ * reset all of our media stats
+ *
+ * @private
+ */
+ ;
+
+ _proto.resetStats_ = function resetStats_() {
+ this.mediaBytesTransferred = 0;
+ this.mediaRequests = 0;
+ this.mediaRequestsAborted = 0;
+ this.mediaRequestsTimedout = 0;
+ this.mediaRequestsErrored = 0;
+ this.mediaTransferDuration = 0;
+ this.mediaSecondsLoaded = 0;
+ this.mediaAppends = 0;
+ }
+ /**
+ * dispose of the SegmentLoader and reset to the default state
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ this.trigger('dispose');
+ this.state = 'DISPOSED';
+ this.pause();
+ this.abort_();
+
+ if (this.transmuxer_) {
+ this.transmuxer_.terminate();
+ }
+
+ this.resetStats_();
+
+ if (this.checkBufferTimeout_) {
+ window__default['default'].clearTimeout(this.checkBufferTimeout_);
+ }
+
+ if (this.syncController_ && this.triggerSyncInfoUpdate_) {
+ this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
+ }
+
+ this.off();
+ };
+
+ _proto.setAudio = function setAudio(enable) {
+ this.audioDisabled_ = !enable;
+
+ if (enable) {
+ this.appendInitSegment_.audio = true;
+ } else {
+ // remove current track audio if it gets disabled
+ this.sourceUpdater_.removeAudio(0, this.duration_());
+ }
+ }
+ /**
+   * abort anything that is currently going on with the SegmentLoader
+ * and reset to a default state
+ */
+ ;
+
+ _proto.abort = function abort() {
+ if (this.state !== 'WAITING') {
+ if (this.pendingSegment_) {
+ this.pendingSegment_ = null;
+ }
+
+ return;
+ }
+
+ this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
+ // since we are no longer "waiting" on any requests. XHR callback is not always run
+ // when the request is aborted. This will prevent the loader from being stuck in the
+ // WAITING state indefinitely.
+
+ this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
+ // next segment
+
+ if (!this.paused()) {
+ this.monitorBuffer_();
+ }
+ }
+ /**
+   * abort all pending xhr requests and null any pending segments
+ *
+ * @private
+ */
+ ;
+
+ _proto.abort_ = function abort_() {
+ if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
+ this.pendingSegment_.abortRequests();
+ } // clear out the segment being processed
+
+
+ this.pendingSegment_ = null;
+ this.callQueue_ = [];
+ this.loadQueue_ = [];
+ this.metadataQueue_.id3 = [];
+ this.metadataQueue_.caption = [];
+ this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
+ this.waitingOnRemove_ = false;
+ window__default['default'].clearTimeout(this.quotaExceededErrorRetryTimeout_);
+ this.quotaExceededErrorRetryTimeout_ = null;
+ };
+
+ _proto.checkForAbort_ = function checkForAbort_(requestId) {
+ // If the state is APPENDING, then aborts will not modify the state, meaning the first
+ // callback that happens should reset the state to READY so that loading can continue.
+ if (this.state === 'APPENDING' && !this.pendingSegment_) {
+ this.state = 'READY';
+ return true;
+ }
+
+ if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
+ return true;
+ }
+
+ return false;
+ }
+ /**
+   * set an error on the segment loader and null out any pending segments
+ *
+ * @param {Error} error the error to set on the SegmentLoader
+ * @return {Error} the error that was set or that is currently set
+ */
+ ;
+
+ _proto.error = function error(_error) {
+ if (typeof _error !== 'undefined') {
+ this.logger_('error occurred:', _error);
+ this.error_ = _error;
+ }
+
+ this.pendingSegment_ = null;
+ return this.error_;
+ };
+
+ _proto.endOfStream = function endOfStream() {
+ this.ended_ = true;
+
+ if (this.transmuxer_) {
+ // need to clear out any cached data to prepare for the new segment
+ segmentTransmuxer.reset(this.transmuxer_);
+ }
+
+ this.gopBuffer_.length = 0;
+ this.pause();
+ this.trigger('ended');
+ }
+ /**
+ * Indicates which time ranges are buffered
+ *
+ * @return {TimeRange}
+ * TimeRange object representing the current buffered ranges
+ */
+ ;
+
+ _proto.buffered_ = function buffered_() {
+ var trackInfo = this.getMediaInfo_();
+
+ if (!this.sourceUpdater_ || !trackInfo) {
+ return videojs.createTimeRanges();
+ }
+
+ if (this.loaderType_ === 'main') {
+ var hasAudio = trackInfo.hasAudio,
+ hasVideo = trackInfo.hasVideo,
+ isMuxed = trackInfo.isMuxed;
+
+ if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
+ return this.sourceUpdater_.buffered();
+ }
+
+ if (hasVideo) {
+ return this.sourceUpdater_.videoBuffered();
+ }
+ } // One case that can be ignored for now is audio only with alt audio,
+ // as we don't yet have proper support for that.
+
+
+ return this.sourceUpdater_.audioBuffered();
+ }
+ /**
+ * Gets and sets init segment for the provided map
+ *
+ * @param {Object} map
+ * The map object representing the init segment to get or set
+ * @param {boolean=} set
+ * If true, the init segment for the provided map should be saved
+ * @return {Object}
+ * map object for desired init segment
+ */
+ ;
+
+ _proto.initSegmentForMap = function initSegmentForMap(map, set) {
+ if (set === void 0) {
+ set = false;
+ }
+
+ if (!map) {
+ return null;
+ }
+
+ var id = initSegmentId(map);
+ var storedMap = this.initSegments_[id];
+
+ if (set && !storedMap && map.bytes) {
+ this.initSegments_[id] = storedMap = {
+ resolvedUri: map.resolvedUri,
+ byterange: map.byterange,
+ bytes: map.bytes,
+ tracks: map.tracks,
+ timescales: map.timescales
+ };
+ }
+
+ return storedMap || map;
+ }
+ /**
+ * Gets and sets key for the provided key
+ *
+ * @param {Object} key
+ * The key object representing the key to get or set
+ * @param {boolean=} set
+ * If true, the key for the provided key should be saved
+ * @return {Object}
+ * Key object for desired key
+ */
+ ;
+
+ _proto.segmentKey = function segmentKey(key, set) {
+ if (set === void 0) {
+ set = false;
+ }
+
+ if (!key) {
+ return null;
+ }
+
+ var id = segmentKeyId(key);
+ var storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
+ // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
+
+ if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
+ this.keyCache_[id] = storedKey = {
+ resolvedUri: key.resolvedUri,
+ bytes: key.bytes
+ };
+ }
+
+ var result = {
+ resolvedUri: (storedKey || key).resolvedUri
+ };
+
+ if (storedKey) {
+ result.bytes = storedKey.bytes;
+ }
+
+ return result;
+ }
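+
+  // For illustration only (hypothetical key object and loader instance): when
+  // cacheEncryptionKeys_ is enabled, the first call that carries bytes populates the
+  // cache, and a later call for the same key gets the cached bytes back.
+  //
+  //   loader.segmentKey({ resolvedUri: keyUri, bytes: keyBytes }, true); // caches the bytes
+  //   loader.segmentKey({ resolvedUri: keyUri }); // => { resolvedUri: keyUri, bytes: keyBytes }
+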
+ /**
+ * Returns true if all configuration required for loading is present, otherwise false.
+ *
+   * @return {boolean} True if all the configuration is ready for loading
+ * @private
+ */
+ ;
+
+ _proto.couldBeginLoading_ = function couldBeginLoading_() {
+ return this.playlist_ && !this.paused();
+ }
+ /**
+ * load a playlist and start to fill the buffer
+ */
+ ;
+
+ _proto.load = function load() {
+ // un-pause
+ this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
+ // specified
+
+ if (!this.playlist_) {
+ return;
+ } // if all the configuration is ready, initialize and begin loading
+
+
+ if (this.state === 'INIT' && this.couldBeginLoading_()) {
+ return this.init_();
+ } // if we're in the middle of processing a segment already, don't
+ // kick off an additional segment request
+
+
+ if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
+ return;
+ }
+
+ this.state = 'READY';
+ }
+ /**
+ * Once all the starting parameters have been specified, begin
+ * operation. This method should only be invoked from the INIT
+ * state.
+ *
+ * @private
+ */
+ ;
+
+ _proto.init_ = function init_() {
+ this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
+ // audio data from the muxed content should be removed
+
+ this.resetEverything();
+ return this.monitorBuffer_();
+ }
+ /**
+ * set a playlist on the segment loader
+ *
+ * @param {PlaylistLoader} media the playlist to set on the segment loader
+ */
+ ;
+
+ _proto.playlist = function playlist(newPlaylist, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ if (!newPlaylist) {
+ return;
+ }
+
+ var oldPlaylist = this.playlist_;
+ var segmentInfo = this.pendingSegment_;
+ this.playlist_ = newPlaylist;
+ this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
+ // is always our zero-time so force a sync update each time the playlist
+ // is refreshed from the server
+ //
+ // Use the INIT state to determine if playback has started, as the playlist sync info
+ // should be fixed once requests begin (as sync points are generated based on sync
+ // info), but not before then.
+
+ if (this.state === 'INIT') {
+ newPlaylist.syncInfo = {
+ mediaSequence: newPlaylist.mediaSequence,
+ time: 0
+ }; // Setting the date time mapping means mapping the program date time (if available)
+ // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
+ // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
+ // be updated as the playlist is refreshed before the loader starts loading, the
+ // program date time mapping needs to be updated as well.
+ //
+ // This mapping is only done for the main loader because a program date time should
+ // map equivalently between playlists.
+
+ if (this.loaderType_ === 'main') {
+ this.syncController_.setDateTimeMappingForStart(newPlaylist);
+ }
+ }
+
+ var oldId = null;
+
+ if (oldPlaylist) {
+ if (oldPlaylist.id) {
+ oldId = oldPlaylist.id;
+ } else if (oldPlaylist.uri) {
+ oldId = oldPlaylist.uri;
+ }
+ }
+
+ this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
+ // in LIVE, we always want to update with new playlists (including refreshes)
+
+ this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
+ // buffering now
+
+ if (this.state === 'INIT' && this.couldBeginLoading_()) {
+ return this.init_();
+ }
+
+ if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
+ if (this.mediaIndex !== null) {
+ // we must reset/resync the segment loader when we switch renditions and
+ // the segment loader is already synced to the previous rendition
+ // on playlist changes we want it to be possible to fetch
+ // at the buffer for vod but not for live. So we use resetLoader
+ // for live and resyncLoader for vod. We want this because
+ // if a playlist uses independent and non-independent segments/parts the
+ // buffer may not accurately reflect the next segment that we should try
+ // downloading.
+ if (!newPlaylist.endList) {
+ this.resetLoader();
+ } else {
+ this.resyncLoader();
+ }
+ }
+
+ this.currentMediaInfo_ = void 0;
+ this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined
+
+ return;
+ } // we reloaded the same playlist so we are in a live scenario
+ // and we will likely need to adjust the mediaIndex
+
+
+ var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
+ this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
+ // this is important because we can abort a request and this value must be
+ // equal to the last appended mediaIndex
+
+ if (this.mediaIndex !== null) {
+ this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
+ // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
+ // new playlist was incremented by 1.
+
+ if (this.mediaIndex < 0) {
+ this.mediaIndex = null;
+ this.partIndex = null;
+ } else {
+ var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
+ // unless parts fell off of the playlist for this segment.
+ // In that case we need to reset partIndex and resync
+
+ if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
+ var mediaIndex = this.mediaIndex;
+ this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
+ this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
+          // as the part was dropped from our current playlist's segment.
+ // The mediaIndex will still be valid so keep that around.
+
+ this.mediaIndex = mediaIndex;
+ }
+ }
+ } // update the mediaIndex on the SegmentInfo object
+ // this is important because we will update this.mediaIndex with this value
+ // in `handleAppendsDone_` after the segment has been successfully appended
+
+
+ if (segmentInfo) {
+ segmentInfo.mediaIndex -= mediaSequenceDiff;
+
+ if (segmentInfo.mediaIndex < 0) {
+ segmentInfo.mediaIndex = null;
+ segmentInfo.partIndex = null;
+ } else {
+ // we need to update the referenced segment so that timing information is
+ // saved for the new playlist's segment, however, if the segment fell off the
+ // playlist, we can leave the old reference and just lose the timing info
+ if (segmentInfo.mediaIndex >= 0) {
+ segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
+ }
+
+ if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
+ segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
+ }
+ }
+ }
+
+ this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
+ }
+ /**
+ * Prevent the loader from fetching additional segments. If there
+ * is a segment request outstanding, it will finish processing
+ * before the loader halts. A segment loader can be unpaused by
+ * calling load().
+ */
+ ;
+
+ _proto.pause = function pause() {
+ if (this.checkBufferTimeout_) {
+ window__default['default'].clearTimeout(this.checkBufferTimeout_);
+ this.checkBufferTimeout_ = null;
+ }
+ }
+ /**
+ * Returns whether the segment loader is fetching additional
+ * segments when given the opportunity. This property can be
+ * modified through calls to pause() and load().
+ */
+ ;
+
+ _proto.paused = function paused() {
+ return this.checkBufferTimeout_ === null;
+ }
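+
+  // For illustration only (hypothetical loader instance): paused() simply reflects
+  // whether the buffer check timer is active, so pause() and load() toggle it.
+  //
+  //   loader.pause(); loader.paused(); // => true  (timer cleared)
+  //   loader.load();  loader.paused(); // => false (monitorBuffer_ rescheduled)
+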
+ /**
+ * Delete all the buffered data and reset the SegmentLoader
+ *
+ * @param {Function} [done] an optional callback to be executed when the remove
+ * operation is complete
+ */
+ ;
+
+ _proto.resetEverything = function resetEverything(done) {
+ this.ended_ = false;
+ this.appendInitSegment_ = {
+ audio: true,
+ video: true
+ };
+ this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
+ // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
+ // we then clamp the value to duration if necessary.
+
+ this.remove(0, Infinity, done); // clears fmp4 captions
+
+ if (this.transmuxer_) {
+ this.transmuxer_.postMessage({
+ action: 'clearAllMp4Captions'
+ }); // reset the cache in the transmuxer
+
+ this.transmuxer_.postMessage({
+ action: 'reset'
+ });
+ }
+ }
+ /**
+ * Force the SegmentLoader to resync and start loading around the currentTime instead
+ * of starting at the end of the buffer
+ *
+ * Useful for fast quality changes
+ */
+ ;
+
+ _proto.resetLoader = function resetLoader() {
+ this.fetchAtBuffer_ = false;
+ this.resyncLoader();
+ }
+ /**
+ * Force the SegmentLoader to restart synchronization and make a conservative guess
+ * before returning to the simple walk-forward method
+ */
+ ;
+
+ _proto.resyncLoader = function resyncLoader() {
+ if (this.transmuxer_) {
+ // need to clear out any cached data to prepare for the new segment
+ segmentTransmuxer.reset(this.transmuxer_);
+ }
+
+ this.mediaIndex = null;
+ this.partIndex = null;
+ this.syncPoint_ = null;
+ this.isPendingTimestampOffset_ = false;
+ this.callQueue_ = [];
+ this.loadQueue_ = [];
+ this.metadataQueue_.id3 = [];
+ this.metadataQueue_.caption = [];
+ this.abort();
+
+ if (this.transmuxer_) {
+ this.transmuxer_.postMessage({
+ action: 'clearParsedMp4Captions'
+ });
+ }
+ }
+ /**
+ * Remove any data in the source buffer between start and end times
+ *
+ * @param {number} start - the start time of the region to remove from the buffer
+ * @param {number} end - the end time of the region to remove from the buffer
+   * @param {Function} [done] - an optional callback to be executed when the remove
+   * operation is complete
+   * @param {boolean} force - force all remove operations to happen
+ */
+ ;
+
+ _proto.remove = function remove(start, end, done, force) {
+ if (done === void 0) {
+ done = function done() {};
+ }
+
+ if (force === void 0) {
+ force = false;
+ } // clamp end to duration if we need to remove everything.
+ // This is due to a browser bug that causes issues if we remove to Infinity.
+ // videojs/videojs-contrib-hls#1225
+
+
+ if (end === Infinity) {
+ end = this.duration_();
+ } // skip removes that would throw an error
+ // commonly happens during a rendition switch at the start of a video
+ // from start 0 to end 0
+
+
+ if (end <= start) {
+      this.logger_("skipping remove because end " + end + " is <= start " + start);
+ return;
+ }
+
+ if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
+ this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
+
+ return;
+ } // set it to one to complete this function's removes
+
+
+ var removesRemaining = 1;
+
+ var removeFinished = function removeFinished() {
+ removesRemaining--;
+
+ if (removesRemaining === 0) {
+ done();
+ }
+ };
+
+ if (force || !this.audioDisabled_) {
+ removesRemaining++;
+ this.sourceUpdater_.removeAudio(start, end, removeFinished);
+ } // While it would be better to only remove video if the main loader has video, this
+ // should be safe with audio only as removeVideo will call back even if there's no
+ // video buffer.
+ //
+ // In theory we can check to see if there's video before calling the remove, but in
+ // the event that we're switching between renditions and from video to audio only
+ // (when we add support for that), we may need to clear the video contents despite
+ // what the new media will contain.
+
+
+ if (force || this.loaderType_ === 'main') {
+ this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
+ removesRemaining++;
+ this.sourceUpdater_.removeVideo(start, end, removeFinished);
+ } // remove any captions and ID3 tags
+
+
+ for (var track in this.inbandTextTracks_) {
+ removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
+ }
+
+ removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
+
+ removeFinished();
+ }
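+
+  // For illustration only: removesRemaining starts at 1 for this function's own
+  // synchronous removeFinished() call and is incremented once for each source buffer
+  // remove that is scheduled. With audio enabled on the main loader that makes 3, and
+  // done() fires only after the audio and video removes have both called back.
+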
+ /**
+ * (re-)schedule monitorBufferTick_ to run as soon as possible
+ *
+ * @private
+ */
+ ;
+
+ _proto.monitorBuffer_ = function monitorBuffer_() {
+ if (this.checkBufferTimeout_) {
+ window__default['default'].clearTimeout(this.checkBufferTimeout_);
+ }
+
+ this.checkBufferTimeout_ = window__default['default'].setTimeout(this.monitorBufferTick_.bind(this), 1);
+ }
+ /**
+ * As long as the SegmentLoader is in the READY state, periodically
+ * invoke fillBuffer_().
+ *
+ * @private
+ */
+ ;
+
+ _proto.monitorBufferTick_ = function monitorBufferTick_() {
+ if (this.state === 'READY') {
+ this.fillBuffer_();
+ }
+
+ if (this.checkBufferTimeout_) {
+ window__default['default'].clearTimeout(this.checkBufferTimeout_);
+ }
+
+ this.checkBufferTimeout_ = window__default['default'].setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
+ }
+ /**
+   * fill the buffer with segments unless the sourceBuffers are
+ * currently updating
+ *
+ * Note: this function should only ever be called by monitorBuffer_
+ * and never directly
+ *
+ * @private
+ */
+ ;
+
+ _proto.fillBuffer_ = function fillBuffer_() {
+ // TODO since the source buffer maintains a queue, and we shouldn't call this function
+ // except when we're ready for the next segment, this check can most likely be removed
+ if (this.sourceUpdater_.updating()) {
+ return;
+ } // see if we need to begin loading immediately
+
+
+ var segmentInfo = this.chooseNextRequest_();
+
+ if (!segmentInfo) {
+ return;
+ }
+
+ if (typeof segmentInfo.timestampOffset === 'number') {
+ this.isPendingTimestampOffset_ = false;
+ this.timelineChangeController_.pendingTimelineChange({
+ type: this.loaderType_,
+ from: this.currentTimeline_,
+ to: segmentInfo.timeline
+ });
+ }
+
+ this.loadSegment_(segmentInfo);
+ }
+ /**
+ * Determines if we should call endOfStream on the media source based
+   * on the state of the buffer or if the appended segment was the final
+ * segment in the playlist.
+ *
+ * @param {number} [mediaIndex] the media index of segment we last appended
+ * @param {Object} [playlist] a media playlist object
+ * @return {boolean} do we need to call endOfStream on the MediaSource
+ */
+ ;
+
+ _proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
+ if (mediaIndex === void 0) {
+ mediaIndex = this.mediaIndex;
+ }
+
+ if (playlist === void 0) {
+ playlist = this.playlist_;
+ }
+
+ if (partIndex === void 0) {
+ partIndex = this.partIndex;
+ }
+
+ if (!playlist || !this.mediaSource_) {
+ return false;
+ }
+
+ var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero based but length is 1 based
+
+ var appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.
+
+ var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
+    // so that the MediaSource can trigger the `ended` event when it runs out of
+    // buffered data instead of waiting for more data to be appended
+
+ return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
+ }
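+
+  // For illustration only (hypothetical playlist and loader instance): with an endList
+  // playlist of 5 segments, an open media source, and no parts on the final segment:
+  //
+  //   loader.isEndOfStream_(4, fiveSegmentPlaylist); // => true  (last segment appended)
+  //   loader.isEndOfStream_(3, fiveSegmentPlaylist); // => false (one more segment remains)
+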
+ /**
+ * Determines what request should be made given current segment loader state.
+ *
+ * @return {Object} a request object that describes the segment/part to load
+ */
+ ;
+
+ _proto.chooseNextRequest_ = function chooseNextRequest_() {
+ var buffered = this.buffered_();
+ var bufferedEnd = lastBufferedEnd(buffered) || 0;
+ var bufferedTime = timeAheadOf(buffered, this.currentTime_());
+ var preloaded = !this.hasPlayed_() && bufferedTime >= 1;
+ var haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
+ var segments = this.playlist_.segments; // return no segment if:
+ // 1. we don't have segments
+ // 2. The video has not yet played and we already downloaded a segment
+ // 3. we already have enough buffered time
+
+ if (!segments.length || preloaded || haveEnoughBuffer) {
+ return null;
+ }
+
+ this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
+ var next = {
+ partIndex: null,
+ mediaIndex: null,
+ startOfSegment: null,
+ playlist: this.playlist_,
+ isSyncRequest: Boolean(!this.syncPoint_)
+ };
+
+ if (next.isSyncRequest) {
+ next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
+ } else if (this.mediaIndex !== null) {
+ var segment = segments[this.mediaIndex];
+ var partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
+ next.startOfSegment = segment.end ? segment.end : bufferedEnd;
+
+ if (segment.parts && segment.parts[partIndex + 1]) {
+ next.mediaIndex = this.mediaIndex;
+ next.partIndex = partIndex + 1;
+ } else {
+ next.mediaIndex = this.mediaIndex + 1;
+ }
+ } else {
+ // Find the segment containing the end of the buffer or current time.
+ var _Playlist$getMediaInf = Playlist.getMediaInfoForTime({
+ experimentalExactManifestTimings: this.experimentalExactManifestTimings,
+ playlist: this.playlist_,
+ currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),
+ startingPartIndex: this.syncPoint_.partIndex,
+ startingSegmentIndex: this.syncPoint_.segmentIndex,
+ startTime: this.syncPoint_.time
+ }),
+ segmentIndex = _Playlist$getMediaInf.segmentIndex,
+ startTime = _Playlist$getMediaInf.startTime,
+ _partIndex = _Playlist$getMediaInf.partIndex;
+
+ next.getMediaInfoForTime = this.fetchAtBuffer_ ? "bufferedEnd " + bufferedEnd : "currentTime " + this.currentTime_();
+ next.mediaIndex = segmentIndex;
+ next.startOfSegment = startTime;
+ next.partIndex = _partIndex;
+ }
+
+ var nextSegment = segments[next.mediaIndex];
+ var nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
+ // the next partIndex is invalid do not choose a next segment.
+
+ if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
+ return null;
+ } // if the next segment has parts, and we don't have a partIndex.
+ // Set partIndex to 0
+
+
+ if (typeof next.partIndex !== 'number' && nextSegment.parts) {
+ next.partIndex = 0;
+ nextPart = nextSegment.parts[0];
+ } // if we have no buffered data then we need to make sure
+ // that the next part we append is "independent" if possible.
+ // So we check if the previous part is independent, and request
+ // it if it is.
+
+
+ if (!bufferedTime && nextPart && !nextPart.independent) {
+ if (next.partIndex === 0) {
+ var lastSegment = segments[next.mediaIndex - 1];
+ var lastSegmentLastPart = lastSegment.parts && lastSegment.parts.length && lastSegment.parts[lastSegment.parts.length - 1];
+
+ if (lastSegmentLastPart && lastSegmentLastPart.independent) {
+ next.mediaIndex -= 1;
+ next.partIndex = lastSegment.parts.length - 1;
+ next.independent = 'previous segment';
+ }
+ } else if (nextSegment.parts[next.partIndex - 1].independent) {
+ next.partIndex -= 1;
+ next.independent = 'previous part';
+ }
+ }
+
+ var ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
+ // 1. this is the last segment in the playlist
+ // 2. end of stream has been called on the media source already
+ // 3. the player is not seeking
+
+ if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
+ return null;
+ }
+
+ return this.generateSegmentInfo_(next);
+ };
+
+ _proto.generateSegmentInfo_ = function generateSegmentInfo_(options) {
+ var independent = options.independent,
+ playlist = options.playlist,
+ mediaIndex = options.mediaIndex,
+ startOfSegment = options.startOfSegment,
+ isSyncRequest = options.isSyncRequest,
+ partIndex = options.partIndex,
+ forceTimestampOffset = options.forceTimestampOffset,
+ getMediaInfoForTime = options.getMediaInfoForTime;
+ var segment = playlist.segments[mediaIndex];
+ var part = typeof partIndex === 'number' && segment.parts[partIndex];
+ var segmentInfo = {
+ requestId: 'segment-loader-' + Math.random(),
+ // resolve the segment URL relative to the playlist
+ uri: part && part.resolvedUri || segment.resolvedUri,
+ // the segment's mediaIndex at the time it was requested
+ mediaIndex: mediaIndex,
+ partIndex: part ? partIndex : null,
+ // whether or not to update the SegmentLoader's state with this
+ // segment's mediaIndex
+ isSyncRequest: isSyncRequest,
+ startOfSegment: startOfSegment,
+ // the segment's playlist
+ playlist: playlist,
+ // unencrypted bytes of the segment
+ bytes: null,
+ // when a key is defined for this segment, the encrypted bytes
+ encryptedBytes: null,
+ // The target timestampOffset for this segment when we append it
+ // to the source buffer
+ timestampOffset: null,
+ // The timeline that the segment is in
+ timeline: segment.timeline,
+ // The expected duration of the segment in seconds
+ duration: part && part.duration || segment.duration,
+ // retain the segment in case the playlist updates while doing an async process
+ segment: segment,
+ part: part,
+ byteLength: 0,
+ transmuxer: this.transmuxer_,
+ // type of getMediaInfoForTime that was used to get this segment
+ getMediaInfoForTime: getMediaInfoForTime,
+ independent: independent
+ };
+ var overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
+ segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
+ segmentTimeline: segment.timeline,
+ currentTimeline: this.currentTimeline_,
+ startOfSegment: startOfSegment,
+ buffered: this.buffered_(),
+ overrideCheck: overrideCheck
+ });
+ var audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());
+
+ if (typeof audioBufferedEnd === 'number') {
+ // since the transmuxer is using the actual timing values, but the buffer is
+ // adjusted by the timestamp offset, we must adjust the value here
+ segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
+ }
+
+ if (this.sourceUpdater_.videoBuffered().length) {
+ segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
+      // adjusted by the timestamp offset, we must adjust the value here
+ this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
+ }
+
+ return segmentInfo;
+ } // get the timestampoffset for a segment,
+ // added so that vtt segment loader can override and prevent
+ // adding timestamp offsets.
+ ;
+
+ _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_(options) {
+ return timestampOffsetForSegment(options);
+ }
+ /**
+ * Determines if the network has enough bandwidth to complete the current segment
+ * request in a timely manner. If not, the request will be aborted early and bandwidth
+ * updated to trigger a playlist switch.
+ *
+ * @param {Object} stats
+ * Object containing stats about the request timing and size
+ * @private
+ */
+ ;
+
+ _proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
+ if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
+ // TODO: Replace using timeout with a boolean indicating whether this playlist is
+ // the lowestEnabledRendition.
+ !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
+ !this.playlist_.attributes.BANDWIDTH) {
+ return;
+ } // Wait at least 1 second since the first byte of data has been received before
+ // using the calculated bandwidth from the progress event to allow the bitrate
+ // to stabilize
+
+
+ if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
+ return;
+ }
+
+ var currentTime = this.currentTime_();
+ var measuredBandwidth = stats.bandwidth;
+ var segmentDuration = this.pendingSegment_.duration;
+ var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
+ // if we are only left with less than 1 second when the request completes.
+ // A negative timeUntilRebuffering indicates we are already rebuffering
+
+ var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
+ // is larger than the estimated time until the player runs out of forward buffer
+
+ if (requestTimeRemaining <= timeUntilRebuffer$1) {
+ return;
+ }
+
+ var switchCandidate = minRebufferMaxBandwidthSelector({
+ master: this.vhs_.playlists.master,
+ currentTime: currentTime,
+ bandwidth: measuredBandwidth,
+ duration: this.duration_(),
+ segmentDuration: segmentDuration,
+ timeUntilRebuffer: timeUntilRebuffer$1,
+ currentTimeline: this.currentTimeline_,
+ syncController: this.syncController_
+ });
+
+ if (!switchCandidate) {
+ return;
+ }
+
+ var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
+ var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
+ var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
+ // potential round trip time of the new request so that we are not too aggressive
+ // with switching to a playlist that might save us a fraction of a second.
+
+ if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
+ minimumTimeSaving = 1;
+ }
+
+ if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
+ return;
+ } // set the bandwidth to that of the desired playlist being sure to scale by
+ // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
+    // don't trigger a bandwidthupdate as the bandwidth is artificial
+
+
+ this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
+ this.trigger('earlyabort');
+ };
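+
+  // For illustration only (hypothetical numbers): if the remaining download time for the
+  // current segment is estimated at 8 seconds but only 3 seconds of forward buffer remain
+  // (2 seconds after the 1 second safety margin), the request risks a rebuffer. When a
+  // lower-bandwidth candidate playlist would save more than the minimum (0.5 seconds, or
+  // 1 second when already rebuffering), this.bandwidth is pinned to that candidate's
+  // BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1 and 'earlyabort' is triggered.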
+
+ _proto.handleAbort_ = function handleAbort_(segmentInfo) {
+ this.logger_("Aborting " + segmentInfoString(segmentInfo));
+ this.mediaRequestsAborted += 1;
+ }
+ /**
+ * XHR `progress` event handler
+ *
+   * @param {Event} event
+ * The XHR `progress` event
+ * @param {Object} simpleSegment
+ * A simplified segment object copy
+ * @private
+ */
+ ;
+
+ _proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
+ this.earlyAbortWhenNeeded_(simpleSegment.stats);
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ }
+
+ this.trigger('progress');
+ };
+
+ _proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
+ this.earlyAbortWhenNeeded_(simpleSegment.stats);
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ }
+
+ if (this.checkForIllegalMediaSwitch(trackInfo)) {
+ return;
+ }
+
+ trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
+ // Guard against cases where we're not getting track info at all until we are
+ // certain that all streams will provide it.
+
+ if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
+ this.appendInitSegment_ = {
+ audio: true,
+ video: true
+ };
+ this.startingMediaInfo_ = trackInfo;
+ this.currentMediaInfo_ = trackInfo;
+ this.logger_('trackinfo update', trackInfo);
+ this.trigger('trackinfo');
+ } // trackinfo may cause an abort if the trackinfo
+ // causes a codec change to an unsupported codec.
+
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ } // set trackinfo on the pending segment so that
+ // it can append.
+
+
+ this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info
+
+ if (this.hasEnoughInfoToAppend_()) {
+ this.processCallQueue_();
+ }
+ };
+
+ _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
+ this.earlyAbortWhenNeeded_(simpleSegment.stats);
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_;
+ var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
+ segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
+ segmentInfo[timingInfoProperty][timeType] = time;
+ this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info
+
+ if (this.hasEnoughInfoToAppend_()) {
+ this.processCallQueue_();
+ }
+ };
+
+ _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
+ var _this2 = this;
+
+ this.earlyAbortWhenNeeded_(simpleSegment.stats);
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ } // This could only happen with fmp4 segments, but
+ // should still not happen in general
+
+
+ if (captionData.length === 0) {
+ this.logger_('SegmentLoader received no captions from a caption event');
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
+ // can be adjusted by the timestamp offset
+
+ if (!segmentInfo.hasAppendedData_) {
+ this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
+ return;
+ }
+
+ var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
+ var captionTracks = {}; // get total start/end and captions for each track/stream
+
+ captionData.forEach(function (caption) {
+ // caption.stream is actually a track name...
+ // set to the existing values in tracks or default values
+ captionTracks[caption.stream] = captionTracks[caption.stream] || {
+ // Infinity, as any other value will be less than this
+ startTime: Infinity,
+ captions: [],
+        // 0, as any other value will be more than this
+ endTime: 0
+ };
+ var captionTrack = captionTracks[caption.stream];
+ captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
+ captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
+ captionTrack.captions.push(caption);
+ });
+ Object.keys(captionTracks).forEach(function (trackName) {
+ var _captionTracks$trackN = captionTracks[trackName],
+ startTime = _captionTracks$trackN.startTime,
+ endTime = _captionTracks$trackN.endTime,
+ captions = _captionTracks$trackN.captions;
+ var inbandTextTracks = _this2.inbandTextTracks_;
+
+ _this2.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);
+
+ createCaptionsTrackIfNotExists(inbandTextTracks, _this2.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
+ // We do this because a rendition change that also changes the timescale for captions
+ // will result in captions being re-parsed for certain segments. If we add them again
+ // without clearing we will have two of the same captions visible.
+
+ removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
+ addCaptionData({
+ captionArray: captions,
+ inbandTextTracks: inbandTextTracks,
+ timestampOffset: timestampOffset
+ });
+ }); // Reset stored captions since we added parsed
+ // captions to a text track at this point
+
+ if (this.transmuxer_) {
+ this.transmuxer_.postMessage({
+ action: 'clearParsedMp4Captions'
+ });
+ }
+ };
+
+ _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
+ this.earlyAbortWhenNeeded_(simpleSegment.stats);
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
+
+ if (!segmentInfo.hasAppendedData_) {
+ this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
+ return;
+ }
+
+ var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
+ // audio/video source with a metadata track, and an alt audio with a metadata track.
+ // However, this probably won't happen, and if it does it can be handled then.
+
+ createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
+ addMetadata({
+ inbandTextTracks: this.inbandTextTracks_,
+ metadataArray: id3Frames,
+ timestampOffset: timestampOffset,
+ videoDuration: this.duration_()
+ });
+ };
+
+ _proto.processMetadataQueue_ = function processMetadataQueue_() {
+ this.metadataQueue_.id3.forEach(function (fn) {
+ return fn();
+ });
+ this.metadataQueue_.caption.forEach(function (fn) {
+ return fn();
+ });
+ this.metadataQueue_.id3 = [];
+ this.metadataQueue_.caption = [];
+ };
+
+ _proto.processCallQueue_ = function processCallQueue_() {
+ var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
+ // functions may check the length of the load queue and default to pushing themselves
+ // back onto the queue.
+
+ this.callQueue_ = [];
+ callQueue.forEach(function (fun) {
+ return fun();
+ });
+ };
+
+ _proto.processLoadQueue_ = function processLoadQueue_() {
+ var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
+ // functions may check the length of the load queue and default to pushing themselves
+ // back onto the queue.
+
+ this.loadQueue_ = [];
+ loadQueue.forEach(function (fun) {
+ return fun();
+ });
+ }
+ /**
+ * Determines whether the loader has enough info to load the next segment.
+ *
+ * @return {boolean}
+ * Whether or not the loader has enough info to load the next segment
+ */
+ ;
+
+ _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
+ // Since primary timing goes by video, only the audio loader potentially needs to wait
+ // to load.
+ if (this.loaderType_ !== 'audio') {
+ return true;
+ }
+
+ var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
+ // enough info to load.
+
+ if (!segmentInfo) {
+ return false;
+ } // The first segment can and should be loaded immediately so that source buffers are
+ // created together (before appending). Source buffer creation uses the presence of
+ // audio and video data to determine whether to create audio/video source buffers, and
+ // uses processed (transmuxed or parsed) media to determine the types required.
+
+
+ if (!this.getCurrentMediaInfo_()) {
+ return true;
+ }
+
+ if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
+ // can be requested and downloaded and only wait before it is transmuxed or parsed.
+ // But in practice, there are a few reasons why it is better to wait until a loader
+ // is ready to append that segment before requesting and downloading:
+ //
+ // 1. Because audio and main loaders cross discontinuities together, if this loader
+ // is waiting for the other to catch up, then instead of requesting another
+ // segment and using up more bandwidth, by not yet loading, more bandwidth is
+ // allotted to the loader currently behind.
+ // 2. media-segment-request doesn't have to have logic to consider whether a segment
+ // is ready to be processed or not, isolating the queueing behavior to the loader.
+ // 3. The audio loader bases some of its segment properties on timing information
+ // provided by the main loader, meaning that, if the logic for waiting on
+ // processing was in media-segment-request, then it would also need to know how
+ // to re-generate the segment information after the main loader caught up.
+ shouldWaitForTimelineChange({
+ timelineChangeController: this.timelineChangeController_,
+ currentTimeline: this.currentTimeline_,
+ segmentTimeline: segmentInfo.timeline,
+ loaderType: this.loaderType_,
+ audioDisabled: this.audioDisabled_
+ })) {
+ return false;
+ }
+
+ return true;
+ };
+
+ _proto.getCurrentMediaInfo_ = function getCurrentMediaInfo_(segmentInfo) {
+ if (segmentInfo === void 0) {
+ segmentInfo = this.pendingSegment_;
+ }
+
+ return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;
+ };
+
+ _proto.getMediaInfo_ = function getMediaInfo_(segmentInfo) {
+ if (segmentInfo === void 0) {
+ segmentInfo = this.pendingSegment_;
+ }
+
+ return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;
+ };
+
+ _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
+ if (!this.sourceUpdater_.ready()) {
+ return false;
+ } // If content needs to be removed or the loader is waiting on an append reattempt,
+ // then no additional content should be appended until the prior append is resolved.
+
+
+ if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
+ return false;
+ }
+
+ var segmentInfo = this.pendingSegment_;
+ var trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or
+ // we do not have information on this specific
+ // segment yet
+
+ if (!segmentInfo || !trackInfo) {
+ return false;
+ }
+
+ var hasAudio = trackInfo.hasAudio,
+ hasVideo = trackInfo.hasVideo,
+ isMuxed = trackInfo.isMuxed;
+
+ if (hasVideo && !segmentInfo.videoTimingInfo) {
+ return false;
+ } // muxed content only relies on video timing information for now.
+
+
+ if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
+ return false;
+ }
+
+ if (shouldWaitForTimelineChange({
+ timelineChangeController: this.timelineChangeController_,
+ currentTimeline: this.currentTimeline_,
+ segmentTimeline: segmentInfo.timeline,
+ loaderType: this.loaderType_,
+ audioDisabled: this.audioDisabled_
+ })) {
+ return false;
+ }
+
+ return true;
+ };
+
+ _proto.handleData_ = function handleData_(simpleSegment, result) {
+ this.earlyAbortWhenNeeded_(simpleSegment.stats);
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ } // If there's anything in the call queue, then this data came later and should be
+ // executed after the calls currently queued.
+
+
+ if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
+ this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time
+
+ this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats
+
+ this.updateMediaSecondsLoaded_(segmentInfo.part || segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
+ // logic may change behavior depending on the state, and changing state too early may
+ // inflate our estimates of bandwidth. In the future this should be re-examined to
+ // note more granular states.
+ // don't process and append data if the mediaSource is closed
+
+ if (this.mediaSource_.readyState === 'closed') {
+ return;
+ } // if this request included an initialization segment, save that data
+ // to the initSegment cache
+
+
+ if (simpleSegment.map) {
+ simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request
+
+ segmentInfo.segment.map = simpleSegment.map;
+ } // if this request included a segment key, save that data in the cache
+
+
+ if (simpleSegment.key) {
+ this.segmentKey(simpleSegment.key, true);
+ }
+
+ segmentInfo.isFmp4 = simpleSegment.isFmp4;
+ segmentInfo.timingInfo = segmentInfo.timingInfo || {};
+
+ if (segmentInfo.isFmp4) {
+ this.trigger('fmp4');
+ segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
+ } else {
+ var trackInfo = this.getCurrentMediaInfo_();
+ var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
+ var firstVideoFrameTimeForData;
+
+ if (useVideoTimingInfo) {
+ firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
+ } // Segment loader knows more about segment timing than the transmuxer (in certain
+ // aspects), so make any changes required for a more accurate start time.
+ // Don't set the end time yet, as the segment may not be finished processing.
+
+
+ segmentInfo.timingInfo.start = this.trueSegmentStart_({
+ currentStart: segmentInfo.timingInfo.start,
+ playlist: segmentInfo.playlist,
+ mediaIndex: segmentInfo.mediaIndex,
+ currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
+ useVideoTimingInfo: useVideoTimingInfo,
+ firstVideoFrameTimeForData: firstVideoFrameTimeForData,
+ videoTimingInfo: segmentInfo.videoTimingInfo,
+ audioTimingInfo: segmentInfo.audioTimingInfo
+ });
+ } // Init segments for audio and video only need to be appended in certain cases. Now
+ // that data is about to be appended, we can check the final cases to determine
+ // whether we should append an init segment.
+
+
+ this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
+ // as we use the start of the segment to offset the best guess (playlist provided)
+ // timestamp offset.
+
+ this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should
+ // be appended or not.
+
+ if (segmentInfo.isSyncRequest) {
+ // first save/update our timing info for this segment.
+ // this is what allows us to choose an accurate segment
+ // and the main reason we make a sync request.
+ this.updateTimingInfoEnd_(segmentInfo);
+ this.syncController_.saveSegmentTimingInfo({
+ segmentInfo: segmentInfo,
+ shouldSaveTimelineMapping: this.loaderType_ === 'main'
+ });
+ var next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
+ // after taking into account its timing info, do not append it.
+
+ if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
+ this.logger_('sync segment was incorrect, not appending');
+ return;
+ } // otherwise append it like any other segment as our guess was correct.
+
+
+ this.logger_('sync segment was correct, appending');
+ } // Save some state so that in the future anything waiting on first append (and/or
+ // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
+ // we need some notion of whether the timestamp offset or other relevant information
+ // has had a chance to be set.
+
+
+ segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.
+
+ this.processMetadataQueue_();
+ this.appendData_(segmentInfo, result);
+ };
+
+ _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
+ // alt audio doesn't manage timestamp offset
+ if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
+ // segment for each chunk
+ !segmentInfo.changedTimestampOffset) {
+ // if the timestamp offset changed, the timeline may have changed, so we have to re-
+ // append init segments
+ this.appendInitSegment_ = {
+ audio: true,
+ video: true
+ };
+ }
+
+ if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
+ // make sure we append init segment on playlist changes, in case the media config
+ // changed
+ this.appendInitSegment_[type] = true;
+ }
+ };
+
+ _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
+ var type = _ref4.type,
+ initSegment = _ref4.initSegment,
+ map = _ref4.map,
+ playlist = _ref4.playlist; // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
+ // (Section 3) required to parse the applicable Media Segments. It applies to every
+ // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
+ // or until the end of the playlist."
+ // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
+
+ if (map) {
+ var id = initSegmentId(map);
+
+ if (this.activeInitSegmentId_ === id) {
+ // don't need to re-append the init segment if the ID matches
+ return null;
+ } // a map-specified init segment takes priority over any transmuxed (or otherwise
+ // obtained) init segment
+ //
+ // this also caches the init segment for later use
+
+
+ initSegment = this.initSegmentForMap(map, true).bytes;
+ this.activeInitSegmentId_ = id;
+ } // We used to always prepend init segments for video, however, that shouldn't be
+ // necessary. Instead, we should only append on changes, similar to what we've always
+ // done for audio. This is more important (though may not be that important) for
+ // frame-by-frame appending for LHLS, simply because of the increased quantity of
+ // appends.
+
+
+ if (initSegment && this.appendInitSegment_[type]) {
+ // Make sure we track the playlist that we last used for the init segment, so that
+ // we can re-append the init segment in the event that we get data from a new
+ // playlist. Discontinuities and track changes are handled in other sections.
+ this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type until a change is necessary.
+
+ this.appendInitSegment_[type] = false; // we need to clear out the fmp4 active init segment id, since
+ // we are appending the muxer init segment
+
+ this.activeInitSegmentId_ = null;
+ return initSegment;
+ }
+
+ return null;
+ };
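+ // As a concrete illustration: on a rendition switch the playlist changes, so
+ // updateAppendInitSegmentStatus sets appendInitSegment_[type] back to true,
+ // and the next call to getInitSegmentAndUpdateState_ returns the new init
+ // segment so that it is prepended before the media data.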
+
+ _proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
+ var _this3 = this;
+
+ var segmentInfo = _ref5.segmentInfo,
+ type = _ref5.type,
+ bytes = _ref5.bytes;
+ var audioBuffered = this.sourceUpdater_.audioBuffered();
+ var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
+ // should be cleared out during the buffer removals. However, log in case it helps
+ // debug.
+
+ if (audioBuffered.length > 1) {
+ this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
+ }
+
+ if (videoBuffered.length > 1) {
+ this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
+ }
+
+ var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
+ var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
+ var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
+ var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;
+
+ if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
+ // Can't remove enough buffer to make room for new segment (or the browser doesn't
+ // allow for appends of segments this size). In the future, it may be possible to
+ // split up the segment and append in pieces, but for now, error out this playlist
+ // in an attempt to switch to a more manageable rendition.
+ this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ') + ", "));
+ this.error({
+ message: 'Quota exceeded error with append of a single segment of content',
+ excludeUntil: Infinity
+ });
+ this.trigger('error');
+ return;
+ } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
+ // that the segment-loader should block on future events until this one is handled, so
+ // that it doesn't keep moving onto further segments. Adding the call to the call
+ // queue will prevent further appends until waitingOnRemove_ and
+ // quotaExceededErrorRetryTimeout_ are cleared.
+ //
+ // Note that this will only block the current loader. In the case of demuxed content,
+ // the other load may keep filling as fast as possible. In practice, this should be
+ // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
+ // source buffer, or video fills without enough room for audio to append (and without
+ // the availability of clearing out seconds of back buffer to make room for audio).
+ // But it might still be good to handle this case in the future as a TODO.
+
+
+ this.waitingOnRemove_ = true;
+ this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
+ segmentInfo: segmentInfo,
+ type: type,
+ bytes: bytes
+ }));
+ var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
+ // before retrying.
+
+ var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
+ this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
+ this.remove(0, timeToRemoveUntil, function () {
+ _this3.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");
+
+ _this3.waitingOnRemove_ = false; // wait the length of time allotted in the back buffer to prevent wasted
+ // attempts (since we can't clear less than the minimum)
+
+ _this3.quotaExceededErrorRetryTimeout_ = window__default['default'].setTimeout(function () {
+ _this3.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');
+
+ _this3.quotaExceededErrorRetryTimeout_ = null;
+
+ _this3.processCallQueue_();
+ }, MIN_BACK_BUFFER * 1000);
+ }, true);
+ };
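+ // Illustrative walk-through of the recovery above, assuming MIN_BACK_BUFFER is
+ // 30 seconds (its exact value is defined elsewhere in this bundle): with
+ // currentTime_() === 100, the loader removes buffered media from 0 to
+ // 100 - 30 = 70, waits MIN_BACK_BUFFER * 1000 ms, and then re-processes the
+ // call queue so the queued appendToSourceBuffer_ call is retried.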
+
+ _proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
+ var segmentInfo = _ref6.segmentInfo,
+ type = _ref6.type,
+ bytes = _ref6.bytes; // if there's no error, nothing to do
+
+ if (!error) {
+ return;
+ }
+
+ if (error.code === QUOTA_EXCEEDED_ERR) {
+ this.handleQuotaExceededError_({
+ segmentInfo: segmentInfo,
+ type: type,
+ bytes: bytes
+ }); // A quota exceeded error should be recoverable with a future re-append, so no need
+ // to trigger an append error.
+
+ return;
+ }
+
+ this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
+ this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id)); // If an append errors, we often can't recover.
+ // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
+ //
+ // Trigger a special error so that it can be handled separately from normal,
+ // recoverable errors.
+
+ this.trigger('appenderror');
+ };
+
+ _proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
+ var segmentInfo = _ref7.segmentInfo,
+ type = _ref7.type,
+ initSegment = _ref7.initSegment,
+ data = _ref7.data,
+ bytes = _ref7.bytes; // If this is a re-append, bytes were already created and don't need to be recreated
+
+ if (!bytes) {
+ var segments = [data];
+ var byteLength = data.byteLength;
+
+ if (initSegment) {
+ // if the media initialization segment is changing, append it before the content
+ // segment
+ segments.unshift(initSegment);
+ byteLength += initSegment.byteLength;
+ } // Technically we should be OK appending the init segment separately, however, we
+ // haven't yet tested that, and prepending is how we have always done things.
+
+
+ bytes = concatSegments({
+ bytes: byteLength,
+ segments: segments
+ });
+ }
+
+ this.sourceUpdater_.appendBuffer({
+ segmentInfo: segmentInfo,
+ type: type,
+ bytes: bytes
+ }, this.handleAppendError_.bind(this, {
+ segmentInfo: segmentInfo,
+ type: type,
+ bytes: bytes
+ }));
+ };
+
+ _proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
+ if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
+ return;
+ }
+
+ var segment = this.pendingSegment_.segment;
+ var timingInfoProperty = type + "TimingInfo";
+
+ if (!segment[timingInfoProperty]) {
+ segment[timingInfoProperty] = {};
+ }
+
+ segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
+ segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
+ segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
+ segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
+ segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
+
+ segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
+ };
+
+ _proto.appendData_ = function appendData_(segmentInfo, result) {
+ var type = result.type,
+ data = result.data;
+
+ if (!data || !data.byteLength) {
+ return;
+ }
+
+ if (type === 'audio' && this.audioDisabled_) {
+ return;
+ }
+
+ var initSegment = this.getInitSegmentAndUpdateState_({
+ type: type,
+ initSegment: result.initSegment,
+ playlist: segmentInfo.playlist,
+ map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
+ });
+ this.appendToSourceBuffer_({
+ segmentInfo: segmentInfo,
+ type: type,
+ initSegment: initSegment,
+ data: data
+ });
+ }
+ /**
+ * load a specific segment from a request into the buffer
+ *
+ * @private
+ */
+ ;
+
+ _proto.loadSegment_ = function loadSegment_(segmentInfo) {
+ var _this4 = this;
+
+ this.state = 'WAITING';
+ this.pendingSegment_ = segmentInfo;
+ this.trimBackBuffer_(segmentInfo);
+
+ if (typeof segmentInfo.timestampOffset === 'number') {
+ if (this.transmuxer_) {
+ this.transmuxer_.postMessage({
+ action: 'clearAllMp4Captions'
+ });
+ }
+ }
+
+ if (!this.hasEnoughInfoToLoad_()) {
+ this.loadQueue_.push(function () {
+ // regenerate the audioAppendStart, timestampOffset, etc as they
+ // may have changed since this function was added to the queue.
+ var options = _extends__default['default']({}, segmentInfo, {
+ forceTimestampOffset: true
+ });
+
+ _extends__default['default'](segmentInfo, _this4.generateSegmentInfo_(options));
+
+ _this4.isPendingTimestampOffset_ = false;
+
+ _this4.updateTransmuxerAndRequestSegment_(segmentInfo);
+ });
+ return;
+ }
+
+ this.updateTransmuxerAndRequestSegment_(segmentInfo);
+ };
+
+ _proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
+ var _this5 = this; // We'll update the source buffer's timestamp offset once we have transmuxed data, but
+ // the transmuxer still needs to be updated before then.
+ //
+ // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
+ // offset must be passed to the transmuxer for stream correcting adjustments.
+
+
+ if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
+ this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared
+
+ segmentInfo.gopsToAlignWith = [];
+ this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh
+
+ this.transmuxer_.postMessage({
+ action: 'reset'
+ });
+ this.transmuxer_.postMessage({
+ action: 'setTimestampOffset',
+ timestampOffset: segmentInfo.timestampOffset
+ });
+ }
+
+ var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
+ var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
+ var isWalkingForward = this.mediaIndex !== null;
+ var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
+ // the first timeline
+ segmentInfo.timeline > 0;
+ var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
+ this.logger_("Requesting " + segmentInfoString(segmentInfo)); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
+ // then this init segment has never been seen before and should be appended.
+ //
+ // At this point the content type (audio/video or both) is not yet known, but it should be safe to set
+ // both to true and leave the decision of whether to append the init segment to append time.
+
+ if (simpleSegment.map && !simpleSegment.map.bytes) {
+ this.logger_('going to request init segment.');
+ this.appendInitSegment_ = {
+ video: true,
+ audio: true
+ };
+ }
+
+ segmentInfo.abortRequests = mediaSegmentRequest({
+ xhr: this.vhs_.xhr,
+ xhrOptions: this.xhrOptions_,
+ decryptionWorker: this.decrypter_,
+ segment: simpleSegment,
+ abortFn: this.handleAbort_.bind(this, segmentInfo),
+ progressFn: this.handleProgress_.bind(this),
+ trackInfoFn: this.handleTrackInfo_.bind(this),
+ timingInfoFn: this.handleTimingInfo_.bind(this),
+ videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
+ audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
+ captionsFn: this.handleCaptions_.bind(this),
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: function endedTimelineFn() {
+ _this5.logger_('received endedtimeline callback');
+ },
+ id3Fn: this.handleId3_.bind(this),
+ dataFn: this.handleData_.bind(this),
+ doneFn: this.segmentRequestFinished_.bind(this),
+ onTransmuxerLog: function onTransmuxerLog(_ref8) {
+ var message = _ref8.message,
+ level = _ref8.level,
+ stream = _ref8.stream;
+
+ _this5.logger_(segmentInfoString(segmentInfo) + " logged from transmuxer stream " + stream + " as a " + level + ": " + message);
+ }
+ });
+ }
+ /**
+ * trim the back buffer so that we don't have too much data
+ * in the source buffer
+ *
+ * @private
+ *
+ * @param {Object} segmentInfo - the current segment
+ */
+ ;
+
+ _proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
+ var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
+ // buffer and a very conservative "garbage collector"
+ // We manually clear out the old buffer to ensure
+ // we don't trigger the QuotaExceeded error
+ // on the source buffer during subsequent appends
+
+ if (removeToTime > 0) {
+ this.remove(0, removeToTime);
+ }
+ }
+ /**
+ * creates a simplified copy of the segment object with just the
+ * information necessary to perform the XHR and decryption
+ *
+ * @private
+ *
+ * @param {Object} segmentInfo - the current segment
+ * @return {Object} a simplified segment object copy
+ */
+ ;
+
+ _proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
+ var segment = segmentInfo.segment;
+ var part = segmentInfo.part;
+ var simpleSegment = {
+ resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
+ byterange: part ? part.byterange : segment.byterange,
+ requestId: segmentInfo.requestId,
+ transmuxer: segmentInfo.transmuxer,
+ audioAppendStart: segmentInfo.audioAppendStart,
+ gopsToAlignWith: segmentInfo.gopsToAlignWith,
+ part: segmentInfo.part
+ };
+ var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
+
+ if (previousSegment && previousSegment.timeline === segment.timeline) {
+ // The baseStartTime of a segment is used to handle rollover when probing the TS
+ // segment to retrieve timing information. Since the probe only looks at the media's
+ // times (e.g., PTS and DTS values of the segment), and doesn't consider the
+ // player's time (e.g., player.currentTime()), baseStartTime should reflect the
+ // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
+ // seconds of media time, so should be used here. The previous segment is used since
+ // the end of the previous segment should represent the beginning of the current
+ // segment, so long as they are on the same timeline.
+ if (previousSegment.videoTimingInfo) {
+ simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
+ } else if (previousSegment.audioTimingInfo) {
+ simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
+ }
+ }
+
+ if (segment.key) {
+ // if the media sequence is greater than 2^32, the IV will be incorrect
+ // assuming 10s segments, that would be about 1300 years
+ var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
+ simpleSegment.key = this.segmentKey(segment.key);
+ simpleSegment.key.iv = iv;
+ }
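+ // e.g. with no explicit IV, a playlist mediaSequence of 100 and a mediaIndex
+ // of 3 yield an IV of Uint32Array([0, 0, 0, 103]), i.e. the segment's media
+ // sequence number, mirroring the HLS default of using the sequence number as
+ // the IV.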
+
+ if (segment.map) {
+ simpleSegment.map = this.initSegmentForMap(segment.map);
+ }
+
+ return simpleSegment;
+ };
+
+ _proto.saveTransferStats_ = function saveTransferStats_(stats) {
+ // every request counts as a media request even if it has been aborted
+ // or canceled due to a timeout
+ this.mediaRequests += 1;
+
+ if (stats) {
+ this.mediaBytesTransferred += stats.bytesReceived;
+ this.mediaTransferDuration += stats.roundTripTime;
+ }
+ };
+
+ _proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
+ // byteLength will be used for throughput, and should be based on bytes received,
+ // which we only know at the end of the request and should reflect total bytes
+ // downloaded rather than just bytes processed from components of the segment
+ this.pendingSegment_.byteLength = stats.bytesReceived;
+
+ if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
+ this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
+ return;
+ }
+
+ this.bandwidth = stats.bandwidth;
+ this.roundTrip = stats.roundTripTime;
+ };
+
+ _proto.handleTimeout_ = function handleTimeout_() {
+ // although the VTT segment loader bandwidth isn't really used, it's good to
+ // maintain functionality between segment loaders
+ this.mediaRequestsTimedout += 1;
+ this.bandwidth = 1;
+ this.roundTrip = NaN;
+ this.trigger('bandwidthupdate');
+ }
+ /**
+ * Handle the callback from the segmentRequest function and set the
+ * associated SegmentLoader state and errors if necessary
+ *
+ * @private
+ */
+ ;
+
+ _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
+ // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
+ // check the call queue directly since this function doesn't need to deal with any
+ // data, and can continue even if the source buffers are not set up and we didn't get
+ // any data from the segment
+ if (this.callQueue_.length) {
+ this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
+ return;
+ }
+
+ this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset
+
+ if (!this.pendingSegment_) {
+ return;
+ } // the request was aborted and the SegmentLoader has already started
+ // another request. this can happen when the timeout for an aborted
+ // request triggers due to a limitation in the XHR library
+ // do not count this as any sort of request or we risk double-counting
+
+
+ if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
+ return;
+ } // an error occurred from the active pendingSegment_ so reset everything
+
+
+ if (error) {
+ this.pendingSegment_ = null;
+ this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done
+
+ if (error.code === REQUEST_ERRORS.ABORTED) {
+ return;
+ }
+
+ this.pause(); // the error is really just that at least one of the requests timed out
+ // set the bandwidth to a very low value and trigger an ABR switch to
+ // take emergency action
+
+ if (error.code === REQUEST_ERRORS.TIMEOUT) {
+ this.handleTimeout_();
+ return;
+ } // if control-flow has arrived here, then the error is real
+ // emit an error event to blacklist the current playlist
+
+
+ this.mediaRequestsErrored += 1;
+ this.error(error);
+ this.trigger('error');
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
+ // generated for ABR purposes
+
+ this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
+ segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
+
+ if (result.gopInfo) {
+ this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
+ } // Although we may have already started appending on progress, we shouldn't switch the
+ // state away from loading until we are officially done loading the segment data.
+
+
+ this.state = 'APPENDING'; // used for testing
+
+ this.trigger('appending');
+ this.waitForAppendsToComplete_(segmentInfo);
+ };
+
+ _proto.setTimeMapping_ = function setTimeMapping_(timeline) {
+ var timelineMapping = this.syncController_.mappingForTimeline(timeline);
+
+ if (timelineMapping !== null) {
+ this.timeMapping_ = timelineMapping;
+ }
+ };
+
+ _proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
+ if (typeof segment.start === 'number' && typeof segment.end === 'number') {
+ this.mediaSecondsLoaded += segment.end - segment.start;
+ } else {
+ this.mediaSecondsLoaded += segment.duration;
+ }
+ };
+
+ _proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
+ if (timestampOffset === null) {
+ return false;
+ } // note that we're potentially using the same timestamp offset for both video and
+ // audio
+
+
+ if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
+ return true;
+ }
+
+ if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
+ return true;
+ }
+
+ return false;
+ };
+
+ _proto.trueSegmentStart_ = function trueSegmentStart_(_ref9) {
+ var currentStart = _ref9.currentStart,
+ playlist = _ref9.playlist,
+ mediaIndex = _ref9.mediaIndex,
+ firstVideoFrameTimeForData = _ref9.firstVideoFrameTimeForData,
+ currentVideoTimestampOffset = _ref9.currentVideoTimestampOffset,
+ useVideoTimingInfo = _ref9.useVideoTimingInfo,
+ videoTimingInfo = _ref9.videoTimingInfo,
+ audioTimingInfo = _ref9.audioTimingInfo;
+
+ if (typeof currentStart !== 'undefined') {
+ // if start was set once, keep using it
+ return currentStart;
+ }
+
+ if (!useVideoTimingInfo) {
+ return audioTimingInfo.start;
+ }
+
+ var previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
+ // within that segment. Since the transmuxer maintains a cache of incomplete data
+ // from and/or the last frame seen, the start time may reflect a frame that starts
+ // in the previous segment. Check for that case and ensure the start time is
+ // accurate for the segment.
+
+ if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
+ return firstVideoFrameTimeForData;
+ }
+
+ return videoTimingInfo.start;
+ };
+
+ _proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
+ var trackInfo = this.getCurrentMediaInfo_(segmentInfo);
+
+ if (!trackInfo) {
+ this.error({
+ message: 'No starting media returned, likely due to an unsupported media format.',
+ blacklistDuration: Infinity
+ });
+ this.trigger('error');
+ return;
+ } // Although transmuxing is done, appends may not yet be finished. Throw a marker
+ // on each queue this loader is responsible for to ensure that the appends are
+ // complete.
+
+
+ var hasAudio = trackInfo.hasAudio,
+ hasVideo = trackInfo.hasVideo,
+ isMuxed = trackInfo.isMuxed;
+ var waitForVideo = this.loaderType_ === 'main' && hasVideo;
+ var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
+ segmentInfo.waitingOnAppends = 0; // segments with no data
+
+ if (!segmentInfo.hasAppendedData_) {
+ if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
+ // When there's no audio or video data in the segment, there's no audio or video
+ // timing information.
+ //
+ // If there's no audio or video timing information, then the timestamp offset
+ // can't be adjusted to the appropriate value for the transmuxer and source
+ // buffers.
+ //
+ // Therefore, the next segment should be used to set the timestamp offset.
+ this.isPendingTimestampOffset_ = true;
+ } // override settings for metadata only segments
+
+
+ segmentInfo.timingInfo = {
+ start: 0
+ };
+ segmentInfo.waitingOnAppends++;
+
+ if (!this.isPendingTimestampOffset_) {
+ // update the timestampoffset
+ this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
+ // no video/audio data.
+
+ this.processMetadataQueue_();
+ } // append is "done" instantly with no data.
+
+
+ this.checkAppendsDone_(segmentInfo);
+ return;
+ } // Since source updater could call back synchronously, do the increments first.
+
+
+ if (waitForVideo) {
+ segmentInfo.waitingOnAppends++;
+ }
+
+ if (waitForAudio) {
+ segmentInfo.waitingOnAppends++;
+ }
+
+ if (waitForVideo) {
+ this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
+ }
+
+ if (waitForAudio) {
+ this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
+ }
+ };
+
+ _proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
+ if (this.checkForAbort_(segmentInfo.requestId)) {
+ return;
+ }
+
+ segmentInfo.waitingOnAppends--;
+
+ if (segmentInfo.waitingOnAppends === 0) {
+ this.handleAppendsDone_();
+ }
+ };
+
+ _proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
+ var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);
+
+ if (illegalMediaSwitchError) {
+ this.error({
+ message: illegalMediaSwitchError,
+ blacklistDuration: Infinity
+ });
+ this.trigger('error');
+ return true;
+ }
+
+ return false;
+ };
+
+ _proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
+ if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
+ // priority, timing-wise, so we must wait
+ typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
+ segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
+ this.loaderType_ !== 'main') {
+ return;
+ }
+
+ var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
+ // the timing info here comes from video. In the event that the audio is longer than
+ // the video, this will trim the start of the audio.
+ // This also trims any offset from 0 at the beginning of the media
+
+ segmentInfo.timestampOffset -= this.getSegmentStartTimeForTimestampOffsetCalculation_({
+ videoTimingInfo: segmentInfo.segment.videoTimingInfo,
+ audioTimingInfo: segmentInfo.segment.audioTimingInfo,
+ timingInfo: segmentInfo.timingInfo
+ }); // In the event that there are part segment downloads, each will try to update the
+ // timestamp offset. Retaining this bit of state prevents us from updating in the
+ // future (within the same segment), however, there may be a better way to handle it.
+
+ segmentInfo.changedTimestampOffset = true;
+
+ if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
+ this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
+ didChange = true;
+ }
+
+ if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
+ this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
+ didChange = true;
+ }
+
+ if (didChange) {
+ this.trigger('timestampoffset');
+ }
+ };
+
+ _proto.getSegmentStartTimeForTimestampOffsetCalculation_ = function getSegmentStartTimeForTimestampOffsetCalculation_(_ref10) {
+ var videoTimingInfo = _ref10.videoTimingInfo,
+ audioTimingInfo = _ref10.audioTimingInfo,
+ timingInfo = _ref10.timingInfo;
+
+ if (!this.useDtsForTimestampOffset_) {
+ return timingInfo.start;
+ }
+
+ if (videoTimingInfo && typeof videoTimingInfo.transmuxedDecodeStart === 'number') {
+ return videoTimingInfo.transmuxedDecodeStart;
+ } // handle audio only
+
+
+ if (audioTimingInfo && typeof audioTimingInfo.transmuxedDecodeStart === 'number') {
+ return audioTimingInfo.transmuxedDecodeStart;
+ } // handle content not transmuxed (e.g., MP4)
+
+
+ return timingInfo.start;
+ };
+
+ _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
+ segmentInfo.timingInfo = segmentInfo.timingInfo || {};
+ var trackInfo = this.getMediaInfo_();
+ var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
+ var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
+
+ if (!prioritizedTimingInfo) {
+ return;
+ }
+
+ segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
+ // current example is the case of fmp4), so use the rough duration to calculate an
+ // end time.
+ prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
+ }
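+ // e.g. for an fmp4 segment that is not fully parsed, prioritizedTimingInfo.end
+ // is unavailable, so the end time falls back to start + segmentInfo.duration.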
+ /**
+ * callback to run when appendBuffer is finished. detects if we are
+ * in a good state to do things with the data we got, or if we need
+ * to wait for more
+ *
+ * @private
+ */
+ ;
+
+ _proto.handleAppendsDone_ = function handleAppendsDone_() {
+ // appendsdone can cause an abort
+ if (this.pendingSegment_) {
+ this.trigger('appendsdone');
+ }
+
+ if (!this.pendingSegment_) {
+ this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
+ // all appending cases?
+
+ if (!this.paused()) {
+ this.monitorBuffer_();
+ }
+
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
+ // best to wait until all appends are done so we're sure that the primary media is
+ // finished (and we have its end time).
+
+ this.updateTimingInfoEnd_(segmentInfo);
+
+ if (this.shouldSaveSegmentTimingInfo_) {
+ // Timeline mappings should only be saved for the main loader. This is for multiple
+ // reasons:
+ //
+ // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
+ // and the main loader try to save the timeline mapping, whichever comes later
+ // will overwrite the first. In theory this is OK, as the mappings should be the
+ // same, however, it breaks for (2)
+ // 2) In the event of a live stream, the initial live point will make for a somewhat
+ // arbitrary mapping. If audio and video streams are not perfectly in-sync, then
+ // the mapping will be off for one of the streams, dependent on which one was
+ // first saved (see (1)).
+ // 3) Primary timing goes by video in VHS, so the mapping should be video.
+ //
+ // Since the audio loader will wait for the main loader to load the first segment,
+ // the main loader will save the first timeline mapping, and ensure that there won't
+ // be a case where audio loads two segments without saving a mapping (thus leading
+ // to missing segment timing info).
+ this.syncController_.saveSegmentTimingInfo({
+ segmentInfo: segmentInfo,
+ shouldSaveTimelineMapping: this.loaderType_ === 'main'
+ });
+ }
+
+ var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
+
+ if (segmentDurationMessage) {
+ if (segmentDurationMessage.severity === 'warn') {
+ videojs.log.warn(segmentDurationMessage.message);
+ } else {
+ this.logger_(segmentDurationMessage.message);
+ }
+ }
+
+ this.recordThroughput_(segmentInfo);
+ this.pendingSegment_ = null;
+ this.state = 'READY';
+
+ if (segmentInfo.isSyncRequest) {
+ this.trigger('syncinfoupdate'); // if the sync request was not appended
+ // then it was not the correct segment.
+ // throw it away and use the data it gave us
+ // to get the correct one.
+
+ if (!segmentInfo.hasAppendedData_) {
+ this.logger_("Throwing away un-appended sync request " + segmentInfoString(segmentInfo));
+ return;
+ }
+ }
+
+ this.logger_("Appended " + segmentInfoString(segmentInfo));
+ this.addSegmentMetadataCue_(segmentInfo);
+ this.fetchAtBuffer_ = true;
+
+ if (this.currentTimeline_ !== segmentInfo.timeline) {
+ this.timelineChangeController_.lastTimelineChange({
+ type: this.loaderType_,
+ from: this.currentTimeline_,
+ to: segmentInfo.timeline
+ }); // If audio is not disabled, the main segment loader is responsible for updating
+ // the audio timeline as well. If the content is video only, this won't have any
+ // impact.
+
+ if (this.loaderType_ === 'main' && !this.audioDisabled_) {
+ this.timelineChangeController_.lastTimelineChange({
+ type: 'audio',
+ from: this.currentTimeline_,
+ to: segmentInfo.timeline
+ });
+ }
+ }
+
+ this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
+ // the following conditional otherwise it may consider this a bad "guess"
+ // and attempt to resync when the post-update seekable window and live
+ // point would mean that this was the perfect segment to fetch
+
+ this.trigger('syncinfoupdate');
+ var segment = segmentInfo.segment;
+ var part = segmentInfo.part;
+ var badSegmentGuess = segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3;
+ var badPartGuess = part && part.end && this.currentTime_() - part.end > segmentInfo.playlist.partTargetDuration * 3; // If we previously appended a segment/part that ends more than 3 part/targetDurations before
+ // the currentTime_ that means that our conservative guess was too conservative.
+ // In that case, reset the loader state so that we try to use any information gained
+ // from the previous request to create a new, more accurate, sync-point.
+
+ if (badSegmentGuess || badPartGuess) {
+ this.logger_("bad " + (badSegmentGuess ? 'segment' : 'part') + " " + segmentInfoString(segmentInfo));
+ this.resetEverything();
+ return;
+ }
+
+ var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
+ // and conservatively guess
+
+ if (isWalkingForward) {
+ this.trigger('bandwidthupdate');
+ }
+
+ this.trigger('progress');
+ this.mediaIndex = segmentInfo.mediaIndex;
+ this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
+ // buffer, end the stream. this ensures the "ended" event will
+ // fire if playback reaches that point.
+
+ if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
+ this.endOfStream();
+ } // used for testing
+
+
+ this.trigger('appended');
+
+ if (segmentInfo.hasAppendedData_) {
+ this.mediaAppends++;
+ }
+
+ if (!this.paused()) {
+ this.monitorBuffer_();
+ }
+ }
+ /**
+ * Records the current throughput of the decrypt, transmux, and append
+ * portion of the segment pipeline. `throughput.rate` is the cumulative
+ * moving average of the throughput. `throughput.count` is the number of
+ * data points in the average.
+ *
+ * @private
+ * @param {Object} segmentInfo the object returned by loadSegment
+ */
+ ;
+
+ _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
+ if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
+ this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
+ return;
+ }
+
+ var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
+ // by zero in the case where the throughput is ridiculously high
+
+ var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
+
+ var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
+ // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
+
+ this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
+ }
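+ // Worked example of the cumulative moving average above (illustrative numbers
+ // only): with throughput.rate = 4e6 bits/s over 4 samples and a new sample of
+ // 6e6 bits/s, the update gives 4e6 + (6e6 - 4e6) / 5 = 4.4e6 bits/s.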
+ /**
+ * Adds a cue to the segment-metadata track with some metadata information about the
+ * segment
+ *
+ * @private
+ * @param {Object} segmentInfo
+ * the object returned by loadSegment
+ * @method addSegmentMetadataCue_
+ */
+ ;
+
+ _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
+ if (!this.segmentMetadataTrack_) {
+ return;
+ }
+
+ var segment = segmentInfo.segment;
+ var start = segment.start;
+ var end = segment.end; // Do not try adding the cue if the start and end times are invalid.
+
+ if (!finite(start) || !finite(end)) {
+ return;
+ }
+
+ removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
+ var Cue = window__default['default'].WebKitDataCue || window__default['default'].VTTCue;
+ var value = {
+ custom: segment.custom,
+ dateTimeObject: segment.dateTimeObject,
+ dateTimeString: segment.dateTimeString,
+ bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
+ resolution: segmentInfo.playlist.attributes.RESOLUTION,
+ codecs: segmentInfo.playlist.attributes.CODECS,
+ byteLength: segmentInfo.byteLength,
+ uri: segmentInfo.uri,
+ timeline: segmentInfo.timeline,
+ playlist: segmentInfo.playlist.id,
+ start: start,
+ end: end
+ };
+ var data = JSON.stringify(value);
+ var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency between
+ // the differences of WebKitDataCue in safari and VTTCue in other browsers
+
+ cue.value = value;
+ this.segmentMetadataTrack_.addCue(cue);
+ };
+
+ return SegmentLoader;
+}(videojs.EventTarget);
+
+function noop() {}
+
+var toTitleCase = function toTitleCase(string) {
+ if (typeof string !== 'string') {
+ return string;
+ }
+
+ return string.replace(/./, function (w) {
+ return w.toUpperCase();
+ });
+};
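+// e.g. toTitleCase('video') === 'Video'; the source updater relies on this
+// below to look up handlers by name, such as onVideoUpdateEnd_ and
+// onAudioError_.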
+
+var bufferTypes = ['video', 'audio'];
+
+var _updating = function updating(type, sourceUpdater) {
+ var sourceBuffer = sourceUpdater[type + "Buffer"];
+ return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
+};
+
+var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
+ for (var i = 0; i < queue.length; i++) {
+ var queueEntry = queue[i];
+
+ if (queueEntry.type === 'mediaSource') {
+ // If the next entry is a media source entry (uses multiple source buffers), block
+ // processing to allow it to go through first.
+ return null;
+ }
+
+ if (queueEntry.type === type) {
+ return i;
+ }
+ }
+
+ return null;
+};
+
+var shiftQueue = function shiftQueue(type, sourceUpdater) {
+ if (sourceUpdater.queue.length === 0) {
+ return;
+ }
+
+ var queueIndex = 0;
+ var queueEntry = sourceUpdater.queue[queueIndex];
+
+ if (queueEntry.type === 'mediaSource') {
+ if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
+ sourceUpdater.queue.shift();
+ queueEntry.action(sourceUpdater);
+
+ if (queueEntry.doneFn) {
+ queueEntry.doneFn();
+ } // Only specific source buffer actions must wait for async updateend events. Media
+ // Source actions process synchronously. Therefore, both audio and video source
+ // buffers are now clear to process the next queue entries.
+
+
+ shiftQueue('audio', sourceUpdater);
+ shiftQueue('video', sourceUpdater);
+ } // Media Source actions require both source buffers, so if the media source action
+ // couldn't process yet (because one or both source buffers are busy), block other
+ // queue actions until both are available and the media source action can process.
+
+
+ return;
+ }
+
+ if (type === 'mediaSource') {
+ // If the queue was shifted by a media source action (this happens when pushing a
+ // media source action onto the queue), then it wasn't from an updateend event from an
+ // audio or video source buffer, so there's no change from previous state, and no
+ // processing should be done.
+ return;
+ } // Media source queue entries don't need to consider whether the source updater is
+ // started (i.e., source buffers are created) as they don't need the source buffers, but
+ // source buffer queue entries do.
+
+
+ if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
+ return;
+ }
+
+ if (queueEntry.type !== type) {
+ queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
+
+ if (queueIndex === null) {
+ // Either there's no queue entry that uses this source buffer type in the queue, or
+ // there's a media source queue entry before the next entry of this type, in which
+ // case wait for that action to process first.
+ return;
+ }
+
+ queueEntry = sourceUpdater.queue[queueIndex];
+ }
+
+ sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
+ //
+ // The queue pending operation must be set before the action is performed in the event
+ // that the action results in a synchronous event that is acted upon. For instance, if
+ // an exception is thrown that can be handled, it's possible that new actions will be
+ // appended to an empty queue and immediately executed, but would not have the correct
+ // pending information if this property was set after the action was performed.
+
+ sourceUpdater.queuePending[type] = queueEntry;
+ queueEntry.action(type, sourceUpdater);
+
+ if (!queueEntry.doneFn) {
+ // synchronous operation, process next entry
+ sourceUpdater.queuePending[type] = null;
+ shiftQueue(type, sourceUpdater);
+ return;
+ }
+};
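+// For example (illustrative): while a video appendBuffer is pending, a queued
+// video remove waits for that buffer's updateend, a queued audio append can
+// still run, and a queued 'mediaSource' entry (such as endOfStream) blocks
+// both types until neither source buffer is updating.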
+
+var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
+ var buffer = sourceUpdater[type + "Buffer"];
+ var titleType = toTitleCase(type);
+
+ if (!buffer) {
+ return;
+ }
+
+ buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
+ buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
+ sourceUpdater.codecs[type] = null;
+ sourceUpdater[type + "Buffer"] = null;
+};
+
+var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
+ return mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
+};
+
+var actions = {
+ appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
+ return function (type, sourceUpdater) {
+ var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
+ // or the media source does not contain this source buffer.
+
+ if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
+ return;
+ }
+
+ sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");
+
+ try {
+ sourceBuffer.appendBuffer(bytes);
+ } catch (e) {
+ sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
+ sourceUpdater.queuePending[type] = null;
+ onError(e);
+ }
+ };
+ },
+ remove: function remove(start, end) {
+ return function (type, sourceUpdater) {
+ var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
+ // or the media source does not contain this source buffer.
+
+ if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
+ return;
+ }
+
+ sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");
+
+ try {
+ sourceBuffer.remove(start, end);
+ } catch (e) {
+ sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
+ }
+ };
+ },
+ timestampOffset: function timestampOffset(offset) {
+ return function (type, sourceUpdater) {
+ var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
+ // or the media source does not contain this source buffer.
+
+ if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
+ return;
+ }
+
+ sourceUpdater.logger_("Setting " + type + "timestampOffset to " + offset);
+ sourceBuffer.timestampOffset = offset;
+ };
+ },
+ callback: function callback(_callback) {
+ return function (type, sourceUpdater) {
+ _callback();
+ };
+ },
+ endOfStream: function endOfStream(error) {
+ return function (sourceUpdater) {
+ if (sourceUpdater.mediaSource.readyState !== 'open') {
+ return;
+ }
+
+ sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");
+
+ try {
+ sourceUpdater.mediaSource.endOfStream(error);
+ } catch (e) {
+ videojs.log.warn('Failed to call media source endOfStream', e);
+ }
+ };
+ },
+ duration: function duration(_duration) {
+ return function (sourceUpdater) {
+ sourceUpdater.logger_("Setting mediaSource duration to " + _duration);
+
+ try {
+ sourceUpdater.mediaSource.duration = _duration;
+ } catch (e) {
+ videojs.log.warn('Failed to set media source duration', e);
+ }
+ };
+ },
+ abort: function abort() {
+ return function (type, sourceUpdater) {
+ if (sourceUpdater.mediaSource.readyState !== 'open') {
+ return;
+ }
+
+ var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
+ // or the media source does not contain this source buffer.
+
+ if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
+ return;
+ }
+
+ sourceUpdater.logger_("calling abort on " + type + "Buffer");
+
+ try {
+ sourceBuffer.abort();
+ } catch (e) {
+ videojs.log.warn("Failed to abort on " + type + "Buffer", e);
+ }
+ };
+ },
+ addSourceBuffer: function addSourceBuffer(type, codec) {
+ return function (sourceUpdater) {
+ var titleType = toTitleCase(type);
+ var mime = codecs_js.getMimeForCodec(codec);
+ sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
+ var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
+ sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
+ sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
+ sourceUpdater.codecs[type] = codec;
+ sourceUpdater[type + "Buffer"] = sourceBuffer;
+ };
+ },
+ removeSourceBuffer: function removeSourceBuffer(type) {
+ return function (sourceUpdater) {
+ var sourceBuffer = sourceUpdater[type + "Buffer"];
+ cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
+ // or the media source does not contain this source buffer.
+
+ if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
+ return;
+ }
+
+ sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");
+
+ try {
+ sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
+ } catch (e) {
+ videojs.log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
+ }
+ };
+ },
+ changeType: function changeType(codec) {
+ return function (type, sourceUpdater) {
+ var sourceBuffer = sourceUpdater[type + "Buffer"];
+ var mime = codecs_js.getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
+ // or the media source does not contain this source buffer.
+
+ if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
+ return;
+ } // do not update codec if we don't need to.
+
+
+ if (sourceUpdater.codecs[type] === codec) {
+ return;
+ }
+
+ sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
+ sourceBuffer.changeType(mime);
+ sourceUpdater.codecs[type] = codec;
+ };
+ }
+};
+
+var pushQueue = function pushQueue(_ref) {
+ var type = _ref.type,
+ sourceUpdater = _ref.sourceUpdater,
+ action = _ref.action,
+ doneFn = _ref.doneFn,
+ name = _ref.name;
+ sourceUpdater.queue.push({
+ type: type,
+ action: action,
+ doneFn: doneFn,
+ name: name
+ });
+ shiftQueue(type, sourceUpdater);
+};
+
+var onUpdateend = function onUpdateend(type, sourceUpdater) {
+ return function (e) {
+ // Although there should, in theory, be a pending action for any updateend received,
+ // there are some actions that may trigger updateend events without set definitions in
+ // the w3c spec. For instance, setting the duration on the media source may trigger
+ // updateend events on source buffers. This does not appear to be in the spec. As such,
+ // if we encounter an updateend without a corresponding pending action from our queue
+ // for that source buffer type, process the next action.
+ if (sourceUpdater.queuePending[type]) {
+ var doneFn = sourceUpdater.queuePending[type].doneFn;
+ sourceUpdater.queuePending[type] = null;
+
+ if (doneFn) {
+ // if there's an error, report it
+ doneFn(sourceUpdater[type + "Error_"]);
+ }
+ }
+
+ shiftQueue(type, sourceUpdater);
+ };
+};
+/**
+ * A queue of callbacks to be serialized and applied when a
+ * MediaSource and its associated SourceBuffers are not in the
+ * updating state. It is used by the segment loader to update the
+ * underlying SourceBuffers when new data is loaded, for instance.
+ *
+ * @class SourceUpdater
+ * @param {MediaSource} mediaSource the MediaSource to create the SourceBuffer from
+ * @param {string} mimeType the desired MIME type of the underlying SourceBuffer
+ */
+
+
+var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
+ _inheritsLoose__default['default'](SourceUpdater, _videojs$EventTarget);
+
+ function SourceUpdater(mediaSource) {
+ var _this;
+
+ _this = _videojs$EventTarget.call(this) || this;
+ _this.mediaSource = mediaSource;
+
+ _this.sourceopenListener_ = function () {
+ return shiftQueue('mediaSource', _assertThisInitialized__default['default'](_this));
+ };
+
+ _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);
+
+ _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0
+
+ _this.audioTimestampOffset_ = 0;
+ _this.videoTimestampOffset_ = 0;
+ _this.queue = [];
+ _this.queuePending = {
+ audio: null,
+ video: null
+ };
+ _this.delayedAudioAppendQueue_ = [];
+ _this.videoAppendQueued_ = false;
+ _this.codecs = {};
+ _this.onVideoUpdateEnd_ = onUpdateend('video', _assertThisInitialized__default['default'](_this));
+ _this.onAudioUpdateEnd_ = onUpdateend('audio', _assertThisInitialized__default['default'](_this));
+
+ _this.onVideoError_ = function (e) {
+ // used for debugging
+ _this.videoError_ = e;
+ };
+
+ _this.onAudioError_ = function (e) {
+ // used for debugging
+ _this.audioError_ = e;
+ };
+
+ _this.createdSourceBuffers_ = false;
+ _this.initializedEme_ = false;
+ _this.triggeredReady_ = false;
+ return _this;
+ }
+
+ var _proto = SourceUpdater.prototype;
+
+ _proto.initializedEme = function initializedEme() {
+ this.initializedEme_ = true;
+ this.triggerReady();
+ };
+
+ _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
+ // if false, likely waiting on one of the segment loaders to get enough data to create
+ // source buffers
+ return this.createdSourceBuffers_;
+ };
+
+ _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
+ return this.initializedEme_;
+ };
+
+ _proto.ready = function ready() {
+ return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
+ };
+
+ _proto.createSourceBuffers = function createSourceBuffers(codecs) {
+ if (this.hasCreatedSourceBuffers()) {
+ // already created them before
+ return;
+    } // the initial addOrChangeSourceBuffers will always be
+ // two add buffers.
+
+
+ this.addOrChangeSourceBuffers(codecs);
+ this.createdSourceBuffers_ = true;
+ this.trigger('createdsourcebuffers');
+ this.triggerReady();
+ };
+
+ _proto.triggerReady = function triggerReady() {
+    // only allow ready to be triggered once; this prevents the case
+ // where:
+ // 1. we trigger createdsourcebuffers
+    // 2. IE 11 synchronously initializes eme
+ // 3. the synchronous initialization causes us to trigger ready
+ // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
+ if (this.ready() && !this.triggeredReady_) {
+ this.triggeredReady_ = true;
+ this.trigger('ready');
+ }
+ }
+ /**
+ * Add a type of source buffer to the media source.
+ *
+ * @param {string} type
+ * The type of source buffer to add.
+ *
+ * @param {string} codec
+ * The codec to add the source buffer with.
+ */
+ ;
+
+ _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
+ pushQueue({
+ type: 'mediaSource',
+ sourceUpdater: this,
+ action: actions.addSourceBuffer(type, codec),
+ name: 'addSourceBuffer'
+ });
+ }
+ /**
+ * call abort on a source buffer.
+ *
+ * @param {string} type
+ * The type of source buffer to call abort on.
+ */
+ ;
+
+ _proto.abort = function abort(type) {
+ pushQueue({
+ type: type,
+ sourceUpdater: this,
+ action: actions.abort(type),
+ name: 'abort'
+ });
+ }
+ /**
+ * Call removeSourceBuffer and remove a specific type
+ * of source buffer on the mediaSource.
+ *
+ * @param {string} type
+ * The type of source buffer to remove.
+ */
+ ;
+
+ _proto.removeSourceBuffer = function removeSourceBuffer(type) {
+ if (!this.canRemoveSourceBuffer()) {
+ videojs.log.error('removeSourceBuffer is not supported!');
+ return;
+ }
+
+ pushQueue({
+ type: 'mediaSource',
+ sourceUpdater: this,
+ action: actions.removeSourceBuffer(type),
+ name: 'removeSourceBuffer'
+ });
+ }
+ /**
+ * Whether or not the removeSourceBuffer function is supported
+ * on the mediaSource.
+ *
+ * @return {boolean}
+ * if removeSourceBuffer can be called.
+ */
+ ;
+
+ _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
+ // IE reports that it supports removeSourceBuffer, but often throws
+ // errors when attempting to use the function. So we report that it
+ // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
+ // throws errors, so we report that it does not support this as well.
+ return !videojs.browser.IE_VERSION && !videojs.browser.IS_FIREFOX && window__default['default'].MediaSource && window__default['default'].MediaSource.prototype && typeof window__default['default'].MediaSource.prototype.removeSourceBuffer === 'function';
+ }
+ /**
+ * Whether or not the changeType function is supported
+ * on our SourceBuffers.
+ *
+ * @return {boolean}
+ * if changeType can be called.
+ */
+ ;
+
+ SourceUpdater.canChangeType = function canChangeType() {
+ return window__default['default'].SourceBuffer && window__default['default'].SourceBuffer.prototype && typeof window__default['default'].SourceBuffer.prototype.changeType === 'function';
+ }
+ /**
+ * Whether or not the changeType function is supported
+ * on our SourceBuffers.
+ *
+ * @return {boolean}
+ * if changeType can be called.
+ */
+ ;
+
+ _proto.canChangeType = function canChangeType() {
+ return this.constructor.canChangeType();
+ }
+ /**
+   * Call the changeType function on a source buffer, given the codec and type.
+ *
+ * @param {string} type
+ * The type of source buffer to call changeType on.
+ *
+ * @param {string} codec
+ * The codec string to change type with on the source buffer.
+ */
+ ;
+
+ _proto.changeType = function changeType(type, codec) {
+ if (!this.canChangeType()) {
+ videojs.log.error('changeType is not supported!');
+ return;
+ }
+
+ pushQueue({
+ type: type,
+ sourceUpdater: this,
+ action: actions.changeType(codec),
+ name: 'changeType'
+ });
+ }
+ /**
+ * Add source buffers with a codec or, if they are already created,
+   * call changeType on the existing source buffers.
+ *
+ * @param {Object} codecs
+ * Codecs to switch to
+ */
+ ;
+
+ _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
+ var _this2 = this;
+
+ if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
+ throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
+ }
+
+ Object.keys(codecs).forEach(function (type) {
+ var codec = codecs[type];
+
+ if (!_this2.hasCreatedSourceBuffers()) {
+ return _this2.addSourceBuffer(type, codec);
+ }
+
+ if (_this2.canChangeType()) {
+ _this2.changeType(type, codec);
+ }
+ });
+ }
+ /**
+ * Queue an update to append an ArrayBuffer.
+ *
+ * @param {MediaObject} object containing audioBytes and/or videoBytes
+ * @param {Function} done the function to call when done
+ * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
+ */
+ ;
+
+ _proto.appendBuffer = function appendBuffer(options, doneFn) {
+ var _this3 = this;
+
+ var segmentInfo = options.segmentInfo,
+ type = options.type,
+ bytes = options.bytes;
+ this.processedAppend_ = true;
+
+ if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
+ this.delayedAudioAppendQueue_.push([options, doneFn]);
+ this.logger_("delayed audio append of " + bytes.length + " until video append");
+ return;
+ } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
+ // not be fired. This means that the queue will be blocked until the next action
+ // taken by the segment-loader. Provide a mechanism for segment-loader to handle
+ // these errors by calling the doneFn with the specific error.
+
+
+ var onError = doneFn;
+ pushQueue({
+ type: type,
+ sourceUpdater: this,
+ action: actions.appendBuffer(bytes, segmentInfo || {
+ mediaIndex: -1
+ }, onError),
+ doneFn: doneFn,
+ name: 'appendBuffer'
+ });
+
+ if (type === 'video') {
+ this.videoAppendQueued_ = true;
+
+ if (!this.delayedAudioAppendQueue_.length) {
+ return;
+ }
+
+ var queue = this.delayedAudioAppendQueue_.slice();
+ this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
+ this.delayedAudioAppendQueue_.length = 0;
+ queue.forEach(function (que) {
+ _this3.appendBuffer.apply(_this3, que);
+ });
+ }
+ }
+ /**
+ * Get the audio buffer's buffered timerange.
+ *
+ * @return {TimeRange}
+ * The audio buffer's buffered time range
+ */
+ ;
+
+ _proto.audioBuffered = function audioBuffered() {
+ // no media source/source buffer or it isn't in the media sources
+ // source buffer list
+ if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
+ return videojs.createTimeRange();
+ }
+
+ return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs.createTimeRange();
+ }
+ /**
+ * Get the video buffer's buffered timerange.
+ *
+ * @return {TimeRange}
+ * The video buffer's buffered time range
+ */
+ ;
+
+ _proto.videoBuffered = function videoBuffered() {
+ // no media source/source buffer or it isn't in the media sources
+ // source buffer list
+ if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
+ return videojs.createTimeRange();
+ }
+
+ return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs.createTimeRange();
+ }
+ /**
+ * Get a combined video/audio buffer's buffered timerange.
+ *
+ * @return {TimeRange}
+ * the combined time range
+ */
+ ;
+
+ _proto.buffered = function buffered() {
+ var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
+ var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
+
+ if (audio && !video) {
+ return this.audioBuffered();
+ }
+
+ if (video && !audio) {
+ return this.videoBuffered();
+ }
+
+ return bufferIntersection(this.audioBuffered(), this.videoBuffered());
+ }
+ /**
+ * Add a callback to the queue that will set duration on the mediaSource.
+ *
+ * @param {number} duration
+ * The duration to set
+ *
+ * @param {Function} [doneFn]
+ * function to run after duration has been set.
+ */
+ ;
+
+ _proto.setDuration = function setDuration(duration, doneFn) {
+ if (doneFn === void 0) {
+ doneFn = noop;
+ } // In order to set the duration on the media source, it's necessary to wait for all
+ // source buffers to no longer be updating. "If the updating attribute equals true on
+ // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
+ // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
+
+
+ pushQueue({
+ type: 'mediaSource',
+ sourceUpdater: this,
+ action: actions.duration(duration),
+ name: 'duration',
+ doneFn: doneFn
+ });
+ }
+ /**
+ * Add a mediaSource endOfStream call to the queue
+ *
+ * @param {Error} [error]
+ * Call endOfStream with an error
+ *
+ * @param {Function} [doneFn]
+ * A function that should be called when the
+ * endOfStream call has finished.
+ */
+ ;
+
+ _proto.endOfStream = function endOfStream(error, doneFn) {
+ if (error === void 0) {
+ error = null;
+ }
+
+ if (doneFn === void 0) {
+ doneFn = noop;
+ }
+
+ if (typeof error !== 'string') {
+ error = undefined;
+    } // As with setDuration, calling endOfStream requires waiting for all
+ // source buffers to no longer be updating. "If the updating attribute equals true on
+ // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
+ // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
+
+
+ pushQueue({
+ type: 'mediaSource',
+ sourceUpdater: this,
+ action: actions.endOfStream(error),
+ name: 'endOfStream',
+ doneFn: doneFn
+ });
+ }
+ /**
+ * Queue an update to remove a time range from the buffer.
+ *
+ * @param {number} start where to start the removal
+ * @param {number} end where to end the removal
+ * @param {Function} [done=noop] optional callback to be executed when the remove
+ * operation is complete
+ * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
+ */
+ ;
+
+ _proto.removeAudio = function removeAudio(start, end, done) {
+ if (done === void 0) {
+ done = noop;
+ }
+
+ if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
+ done();
+ return;
+ }
+
+ pushQueue({
+ type: 'audio',
+ sourceUpdater: this,
+ action: actions.remove(start, end),
+ doneFn: done,
+ name: 'remove'
+ });
+ }
+ /**
+ * Queue an update to remove a time range from the buffer.
+ *
+ * @param {number} start where to start the removal
+ * @param {number} end where to end the removal
+ * @param {Function} [done=noop] optional callback to be executed when the remove
+ * operation is complete
+ * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
+ */
+ ;
+
+ _proto.removeVideo = function removeVideo(start, end, done) {
+ if (done === void 0) {
+ done = noop;
+ }
+
+ if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
+ done();
+ return;
+ }
+
+ pushQueue({
+ type: 'video',
+ sourceUpdater: this,
+ action: actions.remove(start, end),
+ doneFn: done,
+ name: 'remove'
+ });
+ }
+ /**
+ * Whether the underlying sourceBuffer is updating or not
+ *
+ * @return {boolean} the updating status of the SourceBuffer
+ */
+ ;
+
+ _proto.updating = function updating() {
+ // the audio/video source buffer is updating
+ if (_updating('audio', this) || _updating('video', this)) {
+ return true;
+ }
+
+ return false;
+ }
+ /**
+ * Set/get the timestampoffset on the audio SourceBuffer
+ *
+ * @return {number} the timestamp offset
+ */
+ ;
+
+ _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
+ if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
+ this.audioTimestampOffset_ !== offset) {
+ pushQueue({
+ type: 'audio',
+ sourceUpdater: this,
+ action: actions.timestampOffset(offset),
+ name: 'timestampOffset'
+ });
+ this.audioTimestampOffset_ = offset;
+ }
+
+ return this.audioTimestampOffset_;
+ }
+ /**
+ * Set/get the timestampoffset on the video SourceBuffer
+ *
+ * @return {number} the timestamp offset
+ */
+ ;
+
+ _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
+ if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
+    this.videoTimestampOffset_ !== offset) {
+ pushQueue({
+ type: 'video',
+ sourceUpdater: this,
+ action: actions.timestampOffset(offset),
+ name: 'timestampOffset'
+ });
+ this.videoTimestampOffset_ = offset;
+ }
+
+ return this.videoTimestampOffset_;
+ }
+ /**
+ * Add a function to the queue that will be called
+ * when it is its turn to run in the audio queue.
+ *
+ * @param {Function} callback
+ * The callback to queue.
+ */
+ ;
+
+ _proto.audioQueueCallback = function audioQueueCallback(callback) {
+ if (!this.audioBuffer) {
+ return;
+ }
+
+ pushQueue({
+ type: 'audio',
+ sourceUpdater: this,
+ action: actions.callback(callback),
+ name: 'callback'
+ });
+ }
+ /**
+ * Add a function to the queue that will be called
+ * when it is its turn to run in the video queue.
+ *
+ * @param {Function} callback
+ * The callback to queue.
+ */
+ ;
+
+ _proto.videoQueueCallback = function videoQueueCallback(callback) {
+ if (!this.videoBuffer) {
+ return;
+ }
+
+ pushQueue({
+ type: 'video',
+ sourceUpdater: this,
+ action: actions.callback(callback),
+ name: 'callback'
+ });
+ }
+ /**
+ * dispose of the source updater and the underlying sourceBuffer
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ var _this4 = this;
+
+ this.trigger('dispose');
+ bufferTypes.forEach(function (type) {
+ _this4.abort(type);
+
+ if (_this4.canRemoveSourceBuffer()) {
+ _this4.removeSourceBuffer(type);
+ } else {
+ _this4[type + "QueueCallback"](function () {
+ return cleanupBuffer(type, _this4);
+ });
+ }
+ });
+ this.videoAppendQueued_ = false;
+ this.delayedAudioAppendQueue_.length = 0;
+
+ if (this.sourceopenListener_) {
+ this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
+ }
+
+ this.off();
+ };
+
+ return SourceUpdater;
+}(videojs.EventTarget);
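+
+// Usage sketch (illustrative; `ms`, the codec strings and `segmentBytes` are
+// placeholders): every SourceBuffer operation above is serialized through the queue.
+//
+//   var updater = new SourceUpdater(ms); // ms: a MediaSource attached to a media element
+//   updater.createSourceBuffers({ video: 'avc1.4d400d', audio: 'mp4a.40.2' });
+//   updater.appendBuffer({ type: 'video', bytes: segmentBytes }, function (error) {
+//     // runs after `updateend`, or with an error such as QUOTA_EXCEEDED_ERR
+//   });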
+
+var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
+ return decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
+};
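+
+// e.g. uint8ToUtf8(new Uint8Array([0xe2, 0x9c, 0x93])) === '✓': escape() percent-encodes
+// each byte and decodeURIComponent() reassembles the bytes as a UTF-8 string.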
+
+var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (_char3) {
+ return _char3.charCodeAt(0);
+}));
+/**
+ * An object that manages segment loading and appending.
+ *
+ * @class VTTSegmentLoader
+ * @param {Object} options required and optional options
+ * @extends videojs.EventTarget
+ */
+
+var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
+ _inheritsLoose__default['default'](VTTSegmentLoader, _SegmentLoader);
+
+ function VTTSegmentLoader(settings, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _SegmentLoader.call(this, settings, options) || this; // SegmentLoader requires a MediaSource be specified or it will throw an error;
+ // however, VTTSegmentLoader has no need of a media source, so delete the reference
+
+ _this.mediaSource_ = null;
+ _this.subtitlesTrack_ = null;
+ _this.loaderType_ = 'subtitle';
+ _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
+ // the sync controller leads to improper behavior.
+
+ _this.shouldSaveSegmentTimingInfo_ = false;
+ return _this;
+ }
+
+ var _proto = VTTSegmentLoader.prototype;
+
+ _proto.createTransmuxer_ = function createTransmuxer_() {
+ // don't need to transmux any subtitles
+ return null;
+ }
+ /**
+ * Indicates which time ranges are buffered
+ *
+ * @return {TimeRange}
+ * TimeRange object representing the current buffered ranges
+ */
+ ;
+
+ _proto.buffered_ = function buffered_() {
+ if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
+ return videojs.createTimeRanges();
+ }
+
+ var cues = this.subtitlesTrack_.cues;
+ var start = cues[0].startTime;
+ var end = cues[cues.length - 1].startTime;
+ return videojs.createTimeRanges([[start, end]]);
+ }
+ /**
+ * Gets and sets init segment for the provided map
+ *
+ * @param {Object} map
+ * The map object representing the init segment to get or set
+ * @param {boolean=} set
+ * If true, the init segment for the provided map should be saved
+ * @return {Object}
+ * map object for desired init segment
+ */
+ ;
+
+ _proto.initSegmentForMap = function initSegmentForMap(map, set) {
+ if (set === void 0) {
+ set = false;
+ }
+
+ if (!map) {
+ return null;
+ }
+
+ var id = initSegmentId(map);
+ var storedMap = this.initSegments_[id];
+
+ if (set && !storedMap && map.bytes) {
+ // append WebVTT line terminators to the media initialization segment if it exists
+ // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
+ // requires two or more WebVTT line terminators between the WebVTT header and the
+ // rest of the file
+ var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
+ var combinedSegment = new Uint8Array(combinedByteLength);
+ combinedSegment.set(map.bytes);
+ combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
+ this.initSegments_[id] = storedMap = {
+ resolvedUri: map.resolvedUri,
+ byterange: map.byterange,
+ bytes: combinedSegment
+ };
+ }
+
+ return storedMap || map;
+ }
+ /**
+ * Returns true if all configuration required for loading is present, otherwise false.
+ *
+   * @return {boolean} True if all configuration is ready for loading
+ * @private
+ */
+ ;
+
+ _proto.couldBeginLoading_ = function couldBeginLoading_() {
+ return this.playlist_ && this.subtitlesTrack_ && !this.paused();
+ }
+ /**
+ * Once all the starting parameters have been specified, begin
+ * operation. This method should only be invoked from the INIT
+ * state.
+ *
+ * @private
+ */
+ ;
+
+ _proto.init_ = function init_() {
+ this.state = 'READY';
+ this.resetEverything();
+ return this.monitorBuffer_();
+ }
+ /**
+ * Set a subtitle track on the segment loader to add subtitles to
+ *
+ * @param {TextTrack=} track
+ * The text track to add loaded subtitles to
+ * @return {TextTrack}
+ * Returns the subtitles track
+ */
+ ;
+
+ _proto.track = function track(_track) {
+ if (typeof _track === 'undefined') {
+ return this.subtitlesTrack_;
+ }
+
+ this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
+ // buffering now
+
+ if (this.state === 'INIT' && this.couldBeginLoading_()) {
+ this.init_();
+ }
+
+ return this.subtitlesTrack_;
+ }
+ /**
+ * Remove any data in the source buffer between start and end times
+ *
+ * @param {number} start - the start time of the region to remove from the buffer
+ * @param {number} end - the end time of the region to remove from the buffer
+ */
+ ;
+
+ _proto.remove = function remove(start, end) {
+ removeCuesFromTrack(start, end, this.subtitlesTrack_);
+ }
+ /**
+   * fill the buffer with segments unless the sourceBuffers are
+ * currently updating
+ *
+ * Note: this function should only ever be called by monitorBuffer_
+ * and never directly
+ *
+ * @private
+ */
+ ;
+
+ _proto.fillBuffer_ = function fillBuffer_() {
+ var _this2 = this; // see if we need to begin loading immediately
+
+
+ var segmentInfo = this.chooseNextRequest_();
+
+ if (!segmentInfo) {
+ return;
+ }
+
+ if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
+ // We don't have the timestamp offset that we need to sync subtitles.
+ // Rerun on a timestamp offset or user interaction.
+ var checkTimestampOffset = function checkTimestampOffset() {
+ _this2.state = 'READY';
+
+ if (!_this2.paused()) {
+ // if not paused, queue a buffer check as soon as possible
+ _this2.monitorBuffer_();
+ }
+ };
+
+ this.syncController_.one('timestampoffset', checkTimestampOffset);
+ this.state = 'WAITING_ON_TIMELINE';
+ return;
+ }
+
+ this.loadSegment_(segmentInfo);
+ } // never set a timestamp offset for vtt segments.
+ ;
+
+ _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_() {
+ return null;
+ };
+
+ _proto.chooseNextRequest_ = function chooseNextRequest_() {
+ return this.skipEmptySegments_(_SegmentLoader.prototype.chooseNextRequest_.call(this));
+ }
+ /**
+ * Prevents the segment loader from requesting segments we know contain no subtitles
+   * by walking forward until we find the next segment that is not already known to be
+   * empty.
+ *
+ * @param {Object} segmentInfo
+ * a segment info object that describes the current segment
+ * @return {Object}
+ * a segment info object that describes the current segment
+ */
+ ;
+
+ _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
+ while (segmentInfo && segmentInfo.segment.empty) {
+ // stop at the last possible segmentInfo
+ if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
+ segmentInfo = null;
+ break;
+ }
+
+ segmentInfo = this.generateSegmentInfo_({
+ playlist: segmentInfo.playlist,
+ mediaIndex: segmentInfo.mediaIndex + 1,
+ startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
+ isSyncRequest: segmentInfo.isSyncRequest
+ });
+ }
+
+ return segmentInfo;
+ };
+
+ _proto.stopForError = function stopForError(error) {
+ this.error(error);
+ this.state = 'READY';
+ this.pause();
+ this.trigger('error');
+ }
+ /**
+   * append a decrypted segment to the SourceBuffer through a SourceUpdater
+ *
+ * @private
+ */
+ ;
+
+ _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
+ var _this3 = this;
+
+ if (!this.subtitlesTrack_) {
+ this.state = 'READY';
+ return;
+ }
+
+ this.saveTransferStats_(simpleSegment.stats); // the request was aborted
+
+ if (!this.pendingSegment_) {
+ this.state = 'READY';
+ this.mediaRequestsAborted += 1;
+ return;
+ }
+
+ if (error) {
+ if (error.code === REQUEST_ERRORS.TIMEOUT) {
+ this.handleTimeout_();
+ }
+
+ if (error.code === REQUEST_ERRORS.ABORTED) {
+ this.mediaRequestsAborted += 1;
+ } else {
+ this.mediaRequestsErrored += 1;
+ }
+
+ this.stopForError(error);
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
+ // maintain functionality between segment loaders
+
+ this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
+ this.state = 'APPENDING'; // used for tests
+
+ this.trigger('appending');
+ var segment = segmentInfo.segment;
+
+ if (segment.map) {
+ segment.map.bytes = simpleSegment.map.bytes;
+ }
+
+    segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded; otherwise, wait until it has finished loading
+
+ if (typeof window__default['default'].WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {
+ var loadHandler;
+
+ var errorHandler = function errorHandler() {
+ _this3.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
+
+ _this3.stopForError({
+ message: 'Error loading vtt.js'
+ });
+
+ return;
+ };
+
+ loadHandler = function loadHandler() {
+ _this3.subtitlesTrack_.tech_.off('vttjserror', errorHandler);
+
+ _this3.segmentRequestFinished_(error, simpleSegment, result);
+ };
+
+ this.state = 'WAITING_ON_VTTJS';
+ this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
+ this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);
+ return;
+ }
+
+ segment.requested = true;
+
+ try {
+ this.parseVTTCues_(segmentInfo);
+ } catch (e) {
+ this.stopForError({
+ message: e.message
+ });
+ return;
+ }
+
+ this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
+
+ if (segmentInfo.cues.length) {
+ segmentInfo.timingInfo = {
+ start: segmentInfo.cues[0].startTime,
+ end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
+ };
+ } else {
+ segmentInfo.timingInfo = {
+ start: segmentInfo.startOfSegment,
+ end: segmentInfo.startOfSegment + segmentInfo.duration
+ };
+ }
+
+ if (segmentInfo.isSyncRequest) {
+ this.trigger('syncinfoupdate');
+ this.pendingSegment_ = null;
+ this.state = 'READY';
+ return;
+ }
+
+ segmentInfo.byteLength = segmentInfo.bytes.byteLength;
+ this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
+ // the subtitle track
+
+ segmentInfo.cues.forEach(function (cue) {
+ _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window__default['default'].VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
+ }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
+ // cues to have identical time-intervals, but if the text is also identical
+ // we can safely assume it is a duplicate that can be removed (ex. when a cue
+ // "overlaps" VTT segments)
+
+ removeDuplicateCuesFromTrack(this.subtitlesTrack_);
+ this.handleAppendsDone_();
+ };
+
+  _proto.handleData_ = function handleData_() {// noop, as we shouldn't be getting video/audio data or captions,
+    // which we do not support here.
+ };
+
+ _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
+ }
+ /**
+ * Uses the WebVTT parser to parse the segment response
+ *
+ * @param {Object} segmentInfo
+ * a segment info object that describes the current segment
+ * @private
+ */
+ ;
+
+ _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
+ var decoder;
+ var decodeBytesToString = false;
+
+ if (typeof window__default['default'].TextDecoder === 'function') {
+ decoder = new window__default['default'].TextDecoder('utf8');
+ } else {
+ decoder = window__default['default'].WebVTT.StringDecoder();
+ decodeBytesToString = true;
+ }
+
+ var parser = new window__default['default'].WebVTT.Parser(window__default['default'], window__default['default'].vttjs, decoder);
+ segmentInfo.cues = [];
+ segmentInfo.timestampmap = {
+ MPEGTS: 0,
+ LOCAL: 0
+ };
+ parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
+
+ parser.ontimestampmap = function (map) {
+ segmentInfo.timestampmap = map;
+ };
+
+ parser.onparsingerror = function (error) {
+ videojs.log.warn('Error encountered when parsing cues: ' + error.message);
+ };
+
+ if (segmentInfo.segment.map) {
+ var mapData = segmentInfo.segment.map.bytes;
+
+ if (decodeBytesToString) {
+ mapData = uint8ToUtf8(mapData);
+ }
+
+ parser.parse(mapData);
+ }
+
+ var segmentData = segmentInfo.bytes;
+
+ if (decodeBytesToString) {
+ segmentData = uint8ToUtf8(segmentData);
+ }
+
+ parser.parse(segmentData);
+ parser.flush();
+ }
+ /**
+ * Updates the start and end times of any cues parsed by the WebVTT parser using
+ * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
+ * from the SyncController
+ *
+ * @param {Object} segmentInfo
+ * a segment info object that describes the current segment
+ * @param {Object} mappingObj
+ * object containing a mapping from TS to media time
+ * @param {Object} playlist
+ * the playlist object containing the segment
+ * @private
+ */
+ ;
+
+ _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
+ var segment = segmentInfo.segment;
+
+ if (!mappingObj) {
+ // If the sync controller does not have a mapping of TS to Media Time for the
+ // timeline, then we don't have enough information to update the cue
+ // start/end times
+ return;
+ }
+
+ if (!segmentInfo.cues.length) {
+ // If there are no cues, we also do not have enough information to figure out
+ // segment timing. Mark that the segment contains no cues so we don't re-request
+ // an empty segment.
+ segment.empty = true;
+ return;
+ }
+
+ var timestampmap = segmentInfo.timestampmap;
+ var diff = timestampmap.MPEGTS / clock.ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
+ segmentInfo.cues.forEach(function (cue) {
+ // First convert cue time to TS time using the timestamp-map provided within the vtt
+ cue.startTime += diff;
+ cue.endTime += diff;
+ });
+
+ if (!playlist.syncInfo) {
+ var firstStart = segmentInfo.cues[0].startTime;
+ var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
+ playlist.syncInfo = {
+ mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
+ time: Math.min(firstStart, lastStart - segment.duration)
+ };
+ }
+ };
+
+ return VTTSegmentLoader;
+}(SegmentLoader);
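+
+// Worked example for updateTimeMapping_ above (hypothetical numbers): with
+// X-TIMESTAMP-MAP=MPEGTS:900000,LOCAL:00:00:00.000 and mappingObj.mapping === -10,
+// diff = 900000 / 90000 - 0 + (-10) = 0, so cue times pass through unchanged;
+// increasing MPEGTS by 90000 (one second at the 90kHz TS clock) shifts every cue
+// startTime/endTime one second later.
+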
+/**
+ * @file ad-cue-tags.js
+ */
+
+/**
+ * Searches for an ad cue that overlaps with the given mediaTime
+ *
+ * @param {Object} track
+ * the track to find the cue for
+ *
+ * @param {number} mediaTime
+ * the time to find the cue at
+ *
+ * @return {Object|null}
+ * the found cue or null
+ */
+
+
+var findAdCue = function findAdCue(track, mediaTime) {
+ var cues = track.cues;
+
+ for (var i = 0; i < cues.length; i++) {
+ var cue = cues[i];
+
+ if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
+ return cue;
+ }
+ }
+
+ return null;
+};
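+
+// e.g. (hypothetical cue): if a cue has adStartTime 10 and adEndTime 40, then
+// findAdCue(track, 25) returns that cue, while findAdCue(track, 45) returns null.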
+
+var updateAdCues = function updateAdCues(media, track, offset) {
+ if (offset === void 0) {
+ offset = 0;
+ }
+
+ if (!media.segments) {
+ return;
+ }
+
+ var mediaTime = offset;
+ var cue;
+
+ for (var i = 0; i < media.segments.length; i++) {
+ var segment = media.segments[i];
+
+ if (!cue) {
+ // Since the cues will span for at least the segment duration, adding a fudge
+ // factor of half segment duration will prevent duplicate cues from being
+ // created when timing info is not exact (e.g. cue start time initialized
+ // at 10.006677, but next call mediaTime is 10.003332 )
+ cue = findAdCue(track, mediaTime + segment.duration / 2);
+ }
+
+ if (cue) {
+ if ('cueIn' in segment) {
+ // Found a CUE-IN so end the cue
+ cue.endTime = mediaTime;
+ cue.adEndTime = mediaTime;
+ mediaTime += segment.duration;
+ cue = null;
+ continue;
+ }
+
+ if (mediaTime < cue.endTime) {
+ // Already processed this mediaTime for this cue
+ mediaTime += segment.duration;
+ continue;
+ } // otherwise extend cue until a CUE-IN is found
+
+
+ cue.endTime += segment.duration;
+ } else {
+ if ('cueOut' in segment) {
+ cue = new window__default['default'].VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
+ cue.adStartTime = mediaTime; // Assumes tag format to be
+ // #EXT-X-CUE-OUT:30
+
+ cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
+ track.addCue(cue);
+ }
+
+ if ('cueOutCont' in segment) {
+ // Entered into the middle of an ad cue
+        // Assumes tag format to be
+ // #EXT-X-CUE-OUT-CONT:10/30
+ var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
+ adOffset = _segment$cueOutCont$s[0],
+ adTotal = _segment$cueOutCont$s[1];
+
+ cue = new window__default['default'].VTTCue(mediaTime, mediaTime + segment.duration, '');
+ cue.adStartTime = mediaTime - adOffset;
+ cue.adEndTime = cue.adStartTime + adTotal;
+ track.addCue(cue);
+ }
+ }
+
+ mediaTime += segment.duration;
+ }
+}; // synchronize expired playlist segments.
+// the max media sequence diff is 48 hours of live stream
+// content with two second segments. Anything larger than that
+// will likely be invalid.
+
+
+var MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;
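+// For reference: 48 hours * 3600 seconds/hour / 2 seconds per segment = 86400
+// segments, which is where the limit above comes from.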
+var syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
+// the equivalence display-time 0 === segment-index 0
+{
+ name: 'VOD',
+ run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
+ if (duration !== Infinity) {
+ var syncPoint = {
+ time: 0,
+ segmentIndex: 0,
+ partIndex: null
+ };
+ return syncPoint;
+ }
+
+ return null;
+ }
+}, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
+{
+ name: 'ProgramDateTime',
+ run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
+ if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
+ return null;
+ }
+
+ var syncPoint = null;
+ var lastDistance = null;
+ var partsAndSegments = getPartsAndSegments(playlist);
+ currentTime = currentTime || 0;
+
+ for (var i = 0; i < partsAndSegments.length; i++) {
+ // start from the end and loop backwards for live
+ // or start from the front and loop forwards for non-live
+ var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
+ var partAndSegment = partsAndSegments[index];
+ var segment = partAndSegment.segment;
+ var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];
+
+ if (!datetimeMapping || !segment.dateTimeObject) {
+ continue;
+ }
+
+ var segmentTime = segment.dateTimeObject.getTime() / 1000;
+ var start = segmentTime + datetimeMapping; // take part duration into account.
+
+ if (segment.parts && typeof partAndSegment.partIndex === 'number') {
+ for (var z = 0; z < partAndSegment.partIndex; z++) {
+ start += segment.parts[z].duration;
+ }
+ }
+
+ var distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
+ // currentTime and can stop looking for better candidates
+
+ if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
+ break;
+ }
+
+ lastDistance = distance;
+ syncPoint = {
+ time: start,
+ segmentIndex: partAndSegment.segmentIndex,
+ partIndex: partAndSegment.partIndex
+ };
+ }
+
+ return syncPoint;
+ }
+}, // Strategy "Segment": We have a known time mapping for a timeline and a
+// segment in the current timeline with timing data
+{
+ name: 'Segment',
+ run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
+ var syncPoint = null;
+ var lastDistance = null;
+ currentTime = currentTime || 0;
+ var partsAndSegments = getPartsAndSegments(playlist);
+
+ for (var i = 0; i < partsAndSegments.length; i++) {
+ // start from the end and loop backwards for live
+ // or start from the front and loop forwards for non-live
+ var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
+ var partAndSegment = partsAndSegments[index];
+ var segment = partAndSegment.segment;
+ var start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;
+
+ if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
+ var distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
+ // currentTime and can stop looking for better candidates
+
+ if (lastDistance !== null && lastDistance < distance) {
+ break;
+ }
+
+ if (!syncPoint || lastDistance === null || lastDistance >= distance) {
+ lastDistance = distance;
+ syncPoint = {
+ time: start,
+ segmentIndex: partAndSegment.segmentIndex,
+ partIndex: partAndSegment.partIndex
+ };
+ }
+ }
+ }
+
+ return syncPoint;
+ }
+}, // Strategy "Discontinuity": We have a discontinuity with a known
+// display-time
+{
+ name: 'Discontinuity',
+ run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
+ var syncPoint = null;
+ currentTime = currentTime || 0;
+
+ if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
+ var lastDistance = null;
+
+ for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
+ var segmentIndex = playlist.discontinuityStarts[i];
+ var discontinuity = playlist.discontinuitySequence + i + 1;
+ var discontinuitySync = syncController.discontinuities[discontinuity];
+
+ if (discontinuitySync) {
+ var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
+ // currentTime and can stop looking for better candidates
+
+ if (lastDistance !== null && lastDistance < distance) {
+ break;
+ }
+
+ if (!syncPoint || lastDistance === null || lastDistance >= distance) {
+ lastDistance = distance;
+ syncPoint = {
+ time: discontinuitySync.time,
+ segmentIndex: segmentIndex,
+ partIndex: null
+ };
+ }
+ }
+ }
+ }
+
+ return syncPoint;
+ }
+}, // Strategy "Playlist": We have a playlist with a known mapping of
+// segment index to display time
+{
+ name: 'Playlist',
+ run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
+ if (playlist.syncInfo) {
+ var syncPoint = {
+ time: playlist.syncInfo.time,
+ segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
+ partIndex: null
+ };
+ return syncPoint;
+ }
+
+ return null;
+ }
+}];
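+
+// Selection sketch (illustrative): each strategy above returns either null or a
+// candidate of the form { time, segmentIndex, partIndex }. runStrategies_ collects
+// the non-null candidates and selectSyncPoint_ keeps whichever is nearest the
+// target, so e.g. getSyncPoint(playlist, Infinity, 0, 30) (defined below) returns
+// the candidate whose `time` is closest to 30.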
+
+var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
+ _inheritsLoose__default['default'](SyncController, _videojs$EventTarget);
+
+ function SyncController(options) {
+ var _this;
+
+    _this = _videojs$EventTarget.call(this) || this; // ...for syncing across variants
+
+ _this.timelines = [];
+ _this.discontinuities = [];
+ _this.timelineToDatetimeMappings = {};
+ _this.logger_ = logger('SyncController');
+ return _this;
+ }
+ /**
+ * Find a sync-point for the playlist specified
+ *
+ * A sync-point is defined as a known mapping from display-time to
+ * a segment-index in the current playlist.
+ *
+ * @param {Playlist} playlist
+ * The playlist that needs a sync-point
+ * @param {number} duration
+   * Duration of the MediaSource (Infinity if playing a live source)
+ * @param {number} currentTimeline
+ * The last timeline from which a segment was loaded
+ * @return {Object}
+ * A sync-point object
+ */
+
+
+ var _proto = SyncController.prototype;
+
+ _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
+ var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
+
+ if (!syncPoints.length) {
+ // Signal that we need to attempt to get a sync-point manually
+ // by fetching a segment in the playlist and constructing
+ // a sync-point from that information
+ return null;
+ } // Now find the sync-point that is closest to the currentTime because
+ // that should result in the most accurate guess about which segment
+ // to fetch
+
+
+ return this.selectSyncPoint_(syncPoints, {
+ key: 'time',
+ value: currentTime
+ });
+ }
+ /**
+ * Calculate the amount of time that has expired off the playlist during playback
+ *
+ * @param {Playlist} playlist
+ * Playlist object to calculate expired from
+ * @param {number} duration
+   * Duration of the MediaSource (Infinity if playing a live source)
+ * @return {number|null}
+ * The amount of time that has expired off the playlist during playback. Null
+ * if no sync-points for the playlist can be found.
+ */
+ ;
+
+ _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
+ if (!playlist || !playlist.segments) {
+ return null;
+ }
+
+ var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
+
+ if (!syncPoints.length) {
+ return null;
+ }
+
+ var syncPoint = this.selectSyncPoint_(syncPoints, {
+ key: 'segmentIndex',
+ value: 0
+ }); // If the sync-point is beyond the start of the playlist, we want to subtract the
+ // duration from index 0 to syncPoint.segmentIndex instead of adding.
+
+ if (syncPoint.segmentIndex > 0) {
+ syncPoint.time *= -1;
+ }
+
+ return Math.abs(syncPoint.time + sumDurations({
+ defaultDuration: playlist.targetDuration,
+ durationList: playlist.segments,
+ startIndex: syncPoint.segmentIndex,
+ endIndex: 0
+ }));
+ }
+ /**
+ * Runs each sync-point strategy and returns a list of sync-points returned by the
+ * strategies
+ *
+ * @private
+ * @param {Playlist} playlist
+ * The playlist that needs a sync-point
+ * @param {number} duration
+ * Duration of the MediaSource (Infinity if playing a live source)
+ * @param {number} currentTimeline
+ * The last timeline from which a segment was loaded
+ * @return {Array}
+ * A list of sync-point objects
+ */
+ ;
+
+ _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
+    var syncPoints = []; // Try to find a sync-point by utilizing various strategies...
+
+ for (var i = 0; i < syncPointStrategies.length; i++) {
+ var strategy = syncPointStrategies[i];
+ var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
+
+ if (syncPoint) {
+ syncPoint.strategy = strategy.name;
+ syncPoints.push({
+ strategy: strategy.name,
+ syncPoint: syncPoint
+ });
+ }
+ }
+
+ return syncPoints;
+ }
+ /**
+ * Selects the sync-point nearest the specified target
+ *
+ * @private
+ * @param {Array} syncPoints
+ * List of sync-points to select from
+ * @param {Object} target
+ * Object specifying the property and value we are targeting
+ * @param {string} target.key
+ * Specifies the property to target. Must be either 'time' or 'segmentIndex'
+ * @param {number} target.value
+ * The value to target for the specified key.
+ * @return {Object}
+ * The sync-point nearest the target
+ */
+ ;
+
+ _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
+ var bestSyncPoint = syncPoints[0].syncPoint;
+ var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
+ var bestStrategy = syncPoints[0].strategy;
+
+ for (var i = 1; i < syncPoints.length; i++) {
+ var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
+
+ if (newDistance < bestDistance) {
+ bestDistance = newDistance;
+ bestSyncPoint = syncPoints[i].syncPoint;
+ bestStrategy = syncPoints[i].strategy;
+ }
+ }
+
+ this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex) + (typeof bestSyncPoint.partIndex === 'number' ? ",partIndex:" + bestSyncPoint.partIndex : '') + ']');
+ return bestSyncPoint;
+ }
+ /**
+ * Save any meta-data present on the segments when segments leave
+ * the live window to the playlist to allow for synchronization at the
+ * playlist level later.
+ *
+ * @param {Playlist} oldPlaylist - The previous active playlist
+ * @param {Playlist} newPlaylist - The updated and most current playlist
+ */
+ ;
+
+ _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
+ var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gaps
+
+ if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
+ videojs.log.warn("Not saving expired segment info. Media sequence gap " + mediaSequenceDiff + " is too large.");
+ return;
+ } // When a segment expires from the playlist and it has a start time
+ // save that information as a possible sync-point reference in future
+
+
+ for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
+ var lastRemovedSegment = oldPlaylist.segments[i];
+
+ if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
+ newPlaylist.syncInfo = {
+ mediaSequence: oldPlaylist.mediaSequence + i,
+ time: lastRemovedSegment.start
+ };
+ this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
+ this.trigger('syncinfoupdate');
+ break;
+ }
+ }
+ }
+ /**
+   * Save the mapping from playlist's ProgramDateTime to display time. This should only happen
+ * before segments start to load.
+ *
+ * @param {Playlist} playlist - The currently active playlist
+ */
+ ;
+
+ _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
+ // It's possible for the playlist to be updated before playback starts, meaning time
+ // zero is not yet set. If, during these playlist refreshes, a discontinuity is
+ // crossed, then the old time zero mapping (for the prior timeline) would be retained
+ // unless the mappings are cleared.
+ this.timelineToDatetimeMappings = {};
+
+ if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
+ var firstSegment = playlist.segments[0];
+ var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
+ this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
+ }
+ }
+ /**
+ * Calculates and saves timeline mappings, playlist sync info, and segment timing values
+ * based on the latest timing information.
+ *
+ * @param {Object} options
+ * Options object
+ * @param {SegmentInfo} options.segmentInfo
+ * The current active request information
+ * @param {boolean} options.shouldSaveTimelineMapping
+ * If there's a timeline change, determines if the timeline mapping should be
+ * saved for timeline mapping and program date time mappings.
+ */
+ ;
+
+ _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
+ var segmentInfo = _ref.segmentInfo,
+ shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
+ var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
+ var segment = segmentInfo.segment;
+
+ if (didCalculateSegmentTimeMapping) {
+ this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
+ // now with segment timing information
+
+ if (!segmentInfo.playlist.syncInfo) {
+ segmentInfo.playlist.syncInfo = {
+ mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
+ time: segment.start
+ };
+ }
+ }
+
+ var dateTime = segment.dateTimeObject;
+
+ if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
+ this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
+ }
+ };
+
+ _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
+ if (typeof this.timelines[timeline] === 'undefined') {
+ return null;
+ }
+
+ return this.timelines[timeline].time;
+ };
+
+ _proto.mappingForTimeline = function mappingForTimeline(timeline) {
+ if (typeof this.timelines[timeline] === 'undefined') {
+ return null;
+ }
+
+ return this.timelines[timeline].mapping;
+ }
+ /**
+ * Use the "media time" for a segment to generate a mapping to "display time" and
+ * save that display time to the segment.
+ *
+ * @private
+ * @param {SegmentInfo} segmentInfo
+ * The current active request information
+ * @param {Object} timingInfo
+ * The start and end time of the current segment in "media time"
+ * @param {boolean} shouldSaveTimelineMapping
+ * If there's a timeline change, determines if the timeline mapping should be
+ * saved in timelines.
+ * @return {boolean}
+ * Returns false if segment time mapping could not be calculated
+ */
+ ;
+
+ _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
+ // TODO: remove side effects
+ var segment = segmentInfo.segment;
+ var part = segmentInfo.part;
+ var mappingObj = this.timelines[segmentInfo.timeline];
+ var start;
+ var end;
+
+ if (typeof segmentInfo.timestampOffset === 'number') {
+ mappingObj = {
+ time: segmentInfo.startOfSegment,
+ mapping: segmentInfo.startOfSegment - timingInfo.start
+ };
+
+ if (shouldSaveTimelineMapping) {
+ this.timelines[segmentInfo.timeline] = mappingObj;
+ this.trigger('timestampoffset');
+ this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
+ }
+
+ start = segmentInfo.startOfSegment;
+ end = timingInfo.end + mappingObj.mapping;
+ } else if (mappingObj) {
+ start = timingInfo.start + mappingObj.mapping;
+ end = timingInfo.end + mappingObj.mapping;
+ } else {
+ return false;
+ }
+
+ if (part) {
+ part.start = start;
+ part.end = end;
+ } // If we don't have a segment start yet or the start value we got
+ // is less than our current segment.start value, save a new start value.
+ // We have to do this because parts will have segment timing info saved
+ // multiple times and we want segment start to be the earliest part start
+ // value for that segment.
+
+
+ if (!segment.start || start < segment.start) {
+ segment.start = start;
+ }
+
+ segment.end = end;
+ return true;
+ }
+ /**
+   * Each time we have a discontinuity in the playlist, attempt to calculate the location
+   * in display time of the start of the discontinuity and save that. We also save an accuracy
+   * value so that we save values with the most accuracy (closest to 0).
+ *
+ * @private
+ * @param {SegmentInfo} segmentInfo - The current active request information
+ */
+ ;
+
+ _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
+ var playlist = segmentInfo.playlist;
+ var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
+    // the range starts and its accuracy is 0 (greater accuracy values
+ // mean more approximation)
+
+ if (segment.discontinuity) {
+ this.discontinuities[segment.timeline] = {
+ time: segment.start,
+ accuracy: 0
+ };
+ } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
+ // Search for future discontinuities that we can provide better timing
+ // information for and save that information for sync purposes
+ for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
+ var segmentIndex = playlist.discontinuityStarts[i];
+ var discontinuity = playlist.discontinuitySequence + i + 1;
+ var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
+ var accuracy = Math.abs(mediaIndexDiff);
+
+ if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
+ var time = void 0;
+
+ if (mediaIndexDiff < 0) {
+ time = segment.start - sumDurations({
+ defaultDuration: playlist.targetDuration,
+ durationList: playlist.segments,
+ startIndex: segmentInfo.mediaIndex,
+ endIndex: segmentIndex
+ });
+ } else {
+ time = segment.end + sumDurations({
+ defaultDuration: playlist.targetDuration,
+ durationList: playlist.segments,
+ startIndex: segmentInfo.mediaIndex + 1,
+ endIndex: segmentIndex
+ });
+ }
+
+ this.discontinuities[discontinuity] = {
+ time: time,
+ accuracy: accuracy
+ };
+ }
+ }
+ }
+ };
+
+ _proto.dispose = function dispose() {
+ this.trigger('dispose');
+ this.off();
+ };
+
+ return SyncController;
+}(videojs.EventTarget);
+/**
+ * The TimelineChangeController acts as a source for segment loaders to listen for and
+ * keep track of latest and pending timeline changes. This is useful to ensure proper
+ * sync, as each loader may need to make a consideration for what timeline the other
+ * loader is on before making changes which could impact the other loader's media.
+ *
+ * @class TimelineChangeController
+ * @extends videojs.EventTarget
+ */
+
+
+var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
+ _inheritsLoose__default['default'](TimelineChangeController, _videojs$EventTarget);
+
+ function TimelineChangeController() {
+ var _this;
+
+ _this = _videojs$EventTarget.call(this) || this;
+ _this.pendingTimelineChanges_ = {};
+ _this.lastTimelineChanges_ = {};
+ return _this;
+ }
+
+ var _proto = TimelineChangeController.prototype;
+
+ _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
+ this.pendingTimelineChanges_[type] = null;
+ this.trigger('pendingtimelinechange');
+ };
+
+ _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
+ var type = _ref.type,
+ from = _ref.from,
+ to = _ref.to;
+
+ if (typeof from === 'number' && typeof to === 'number') {
+ this.pendingTimelineChanges_[type] = {
+ type: type,
+ from: from,
+ to: to
+ };
+ this.trigger('pendingtimelinechange');
+ }
+
+ return this.pendingTimelineChanges_[type];
+ };
+
+ _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
+ var type = _ref2.type,
+ from = _ref2.from,
+ to = _ref2.to;
+
+ if (typeof from === 'number' && typeof to === 'number') {
+ this.lastTimelineChanges_[type] = {
+ type: type,
+ from: from,
+ to: to
+ };
+ delete this.pendingTimelineChanges_[type];
+ this.trigger('timelinechange');
+ }
+
+ return this.lastTimelineChanges_[type];
+ };
+
+ _proto.dispose = function dispose() {
+ this.trigger('dispose');
+ this.pendingTimelineChanges_ = {};
+ this.lastTimelineChanges_ = {};
+ this.off();
+ };
+
+ return TimelineChangeController;
+}(videojs.EventTarget);
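+
+// Usage sketch (illustrative; `tcc` is a placeholder instance):
+//
+//   var tcc = new TimelineChangeController();
+//   tcc.pendingTimelineChange({ type: 'main', from: 0, to: 1 }); // triggers 'pendingtimelinechange'
+//   tcc.lastTimelineChange({ type: 'main', from: 0, to: 1 });    // triggers 'timelinechange'
+//   tcc.lastTimelineChange({ type: 'main' });                    // reads back { type: 'main', from: 0, to: 1 }
+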
+/* rollup-plugin-worker-factory start for worker!/Users/bclifford/Code/vhs-release-test/src/decrypter-worker.js */
+
+
+var workerCode = transform(getWorkerString(function () {
+ var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
+
+ function createCommonjsModule(fn, basedir, module) {
+ return module = {
+ path: basedir,
+ exports: {},
+ require: function require(path, base) {
+ return commonjsRequire(path, base === undefined || base === null ? module.path : base);
+ }
+ }, fn(module, module.exports), module.exports;
+ }
+
+ function commonjsRequire() {
+ throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
+ }
+
+ var createClass = createCommonjsModule(function (module) {
+ function _defineProperties(target, props) {
+ for (var i = 0; i < props.length; i++) {
+ var descriptor = props[i];
+ descriptor.enumerable = descriptor.enumerable || false;
+ descriptor.configurable = true;
+ if ("value" in descriptor) descriptor.writable = true;
+ Object.defineProperty(target, descriptor.key, descriptor);
+ }
+ }
+
+ function _createClass(Constructor, protoProps, staticProps) {
+ if (protoProps) _defineProperties(Constructor.prototype, protoProps);
+ if (staticProps) _defineProperties(Constructor, staticProps);
+ return Constructor;
+ }
+
+ module.exports = _createClass;
+ module.exports["default"] = module.exports, module.exports.__esModule = true;
+ });
+ var setPrototypeOf = createCommonjsModule(function (module) {
+ function _setPrototypeOf(o, p) {
+ module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
+ o.__proto__ = p;
+ return o;
+ };
+
+ module.exports["default"] = module.exports, module.exports.__esModule = true;
+ return _setPrototypeOf(o, p);
+ }
+
+ module.exports = _setPrototypeOf;
+ module.exports["default"] = module.exports, module.exports.__esModule = true;
+ });
+ var inheritsLoose = createCommonjsModule(function (module) {
+ function _inheritsLoose(subClass, superClass) {
+ subClass.prototype = Object.create(superClass.prototype);
+ subClass.prototype.constructor = subClass;
+ setPrototypeOf(subClass, superClass);
+ }
+
+ module.exports = _inheritsLoose;
+ module.exports["default"] = module.exports, module.exports.__esModule = true;
+ });
+ /**
+ * @file stream.js
+ */
+
+ /**
+   * A lightweight readable stream implementation that handles event dispatching.
+ *
+ * @class Stream
+ */
+
+ var Stream = /*#__PURE__*/function () {
+ function Stream() {
+ this.listeners = {};
+ }
+ /**
+ * Add a listener for a specified event type.
+ *
+ * @param {string} type the event name
+ * @param {Function} listener the callback to be invoked when an event of
+ * the specified type occurs
+ */
+
+
+ var _proto = Stream.prototype;
+
+ _proto.on = function on(type, listener) {
+ if (!this.listeners[type]) {
+ this.listeners[type] = [];
+ }
+
+ this.listeners[type].push(listener);
+ }
+ /**
+ * Remove a listener for a specified event type.
+ *
+ * @param {string} type the event name
+ * @param {Function} listener a function previously registered for this
+ * type of event through `on`
+ * @return {boolean} if we could turn it off or not
+ */
+ ;
+
+ _proto.off = function off(type, listener) {
+ if (!this.listeners[type]) {
+ return false;
+ }
+
+ var index = this.listeners[type].indexOf(listener); // TODO: which is better?
+ // In Video.js we slice listener functions
+ // on trigger so that it does not mess up the order
+ // while we loop through.
+ //
+ // Here we slice on off so that the loop in trigger
+      // can continue using its old reference to loop without
+ // messing up the order.
+
+ this.listeners[type] = this.listeners[type].slice(0);
+ this.listeners[type].splice(index, 1);
+ return index > -1;
+ }
+ /**
+ * Trigger an event of the specified type on this stream. Any additional
+ * arguments to this function are passed as parameters to event listeners.
+ *
+ * @param {string} type the event name
+ */
+ ;
+
+ _proto.trigger = function trigger(type) {
+ var callbacks = this.listeners[type];
+
+ if (!callbacks) {
+ return;
+ } // Slicing the arguments on every invocation of this method
+ // can add a significant amount of overhead. Avoid the
+ // intermediate object creation for the common case of a
+ // single callback argument
+
+
+ if (arguments.length === 2) {
+ var length = callbacks.length;
+
+ for (var i = 0; i < length; ++i) {
+ callbacks[i].call(this, arguments[1]);
+ }
+ } else {
+ var args = Array.prototype.slice.call(arguments, 1);
+ var _length = callbacks.length;
+
+ for (var _i = 0; _i < _length; ++_i) {
+ callbacks[_i].apply(this, args);
+ }
+ }
+ }
+ /**
+ * Destroys the stream and cleans up.
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ this.listeners = {};
+ }
+ /**
+ * Forwards all `data` events on this stream to the destination stream. The
+ * destination stream should provide a method `push` to receive the data
+ * events as they arrive.
+ *
+ * @param {Stream} destination the stream that will receive all `data` events
+ * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
+ */
+ ;
+
+ _proto.pipe = function pipe(destination) {
+ this.on('data', function (data) {
+ destination.push(data);
+ });
+ };
+
+ return Stream;
+ }();
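+ // Illustrative usage of the Stream above (names here are examples only):
+ // listeners added with `on` receive every `trigger` of that type, and
+ // `pipe` forwards 'data' events to any object that exposes `push`, e.g.
+ //   var s = new Stream();
+ //   s.pipe({ push: function (data) { /* receives each 'data' payload */ } });
+ //   s.trigger('data', payload);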
+ /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
+
+ /**
+ * Returns the subarray of a Uint8Array without PKCS#7 padding.
+ *
+ * @param padded {Uint8Array} unencrypted bytes that have been padded
+ * @return {Uint8Array} the unpadded bytes
+ * @see http://tools.ietf.org/html/rfc5652
+ */
+
+
+ function unpad(padded) {
+ return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
+ }
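+ // PKCS#7 example: the final byte of a padded block records how many padding
+ // bytes were appended, so a 13-byte payload padded to 16 bytes ends with
+ // 0x03 0x03 0x03 and unpad() trims the last 3 bytes.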
+ /*! @name aes-decrypter @version 3.1.3 @license Apache-2.0 */
+
+ /**
+ * @file aes.js
+ *
+ * This file contains an adaptation of the AES decryption algorithm
+ * from the Stanford Javascript Cryptography Library. That work is
+ * covered by the following copyright and permissions notice:
+ *
+ * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation
+ * are those of the authors and should not be interpreted as representing
+ * official policies, either expressed or implied, of the authors.
+ */
+
+ /**
+ * Expand the S-box tables.
+ *
+ * @private
+ */
+
+
+ var precompute = function precompute() {
+ var tables = [[[], [], [], [], []], [[], [], [], [], []]];
+ var encTable = tables[0];
+ var decTable = tables[1];
+ var sbox = encTable[4];
+ var sboxInv = decTable[4];
+ var i;
+ var x;
+ var xInv;
+ var d = [];
+ var th = [];
+ var x2;
+ var x4;
+ var x8;
+ var s;
+ var tEnc;
+ var tDec; // Compute double and third tables
+
+ for (i = 0; i < 256; i++) {
+ th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
+ }
+
+ for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
+ // Compute sbox
+ s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
+ s = s >> 8 ^ s & 255 ^ 99;
+ sbox[x] = s;
+ sboxInv[s] = x; // Compute MixColumns
+
+ x8 = d[x4 = d[x2 = d[x]]];
+ tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
+ tEnc = d[s] * 0x101 ^ s * 0x1010100;
+
+ for (i = 0; i < 4; i++) {
+ encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
+ decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
+ }
+ } // Compactify. Considerable speedup on Firefox.
+
+
+ for (i = 0; i < 5; i++) {
+ encTable[i] = encTable[i].slice(0);
+ decTable[i] = decTable[i].slice(0);
+ }
+
+ return tables;
+ };
+
+ var aesTables = null;
+ /**
+ * Schedule out an AES key for both encryption and decryption. This
+ * is a low-level class. Use a cipher mode to do bulk encryption.
+ *
+ * @class AES
+ * @param key {Array} The key as an array of 4, 6 or 8 words.
+ */
+
+ var AES = /*#__PURE__*/function () {
+ function AES(key) {
+ /**
+ * The expanded S-box and inverse S-box tables. These will be computed
+ * on the client so that we don't have to send them down the wire.
+ *
+ * There are two tables, _tables[0] is for encryption and
+ * _tables[1] is for decryption.
+ *
+ * The first 4 sub-tables are the expanded S-box with MixColumns. The
+ * last (_tables[01][4]) is the S-box itself.
+ *
+ * @private
+ */
+ // if we have yet to precompute the S-box tables
+ // do so now
+ if (!aesTables) {
+ aesTables = precompute();
+ } // then make a copy of that object for use
+
+
+ this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
+ var i;
+ var j;
+ var tmp;
+ var sbox = this._tables[0][4];
+ var decTable = this._tables[1];
+ var keyLen = key.length;
+ var rcon = 1;
+
+ if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
+ throw new Error('Invalid aes key size');
+ }
+
+ var encKey = key.slice(0);
+ var decKey = [];
+ this._key = [encKey, decKey]; // schedule encryption keys
+
+ for (i = keyLen; i < 4 * keyLen + 28; i++) {
+ tmp = encKey[i - 1]; // apply sbox
+
+ if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
+ tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
+
+ if (i % keyLen === 0) {
+ tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
+ rcon = rcon << 1 ^ (rcon >> 7) * 283;
+ }
+ }
+
+ encKey[i] = encKey[i - keyLen] ^ tmp;
+ } // schedule decryption keys
+
+
+ for (j = 0; i; j++, i--) {
+ tmp = encKey[j & 3 ? i : i - 4];
+
+ if (i <= 4 || j < 4) {
+ decKey[j] = tmp;
+ } else {
+ decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
+ }
+ }
+ }
+ /**
+ * Decrypt 16 bytes, specified as four 32-bit words.
+ *
+ * @param {number} encrypted0 the first word to decrypt
+ * @param {number} encrypted1 the second word to decrypt
+ * @param {number} encrypted2 the third word to decrypt
+ * @param {number} encrypted3 the fourth word to decrypt
+ * @param {Int32Array} out the array to write the decrypted words
+ * into
+ * @param {number} offset the offset into the output array to start
+ * writing results
+ * @return {Array} The plaintext.
+ */
+
+
+ var _proto = AES.prototype;
+
+ _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
+ var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
+
+ var a = encrypted0 ^ key[0];
+ var b = encrypted3 ^ key[1];
+ var c = encrypted2 ^ key[2];
+ var d = encrypted1 ^ key[3];
+ var a2;
+ var b2;
+ var c2; // key.length === 2 ?
+
+ var nInnerRounds = key.length / 4 - 2;
+ var i;
+ var kIndex = 4;
+ var table = this._tables[1]; // load up the tables
+
+ var table0 = table[0];
+ var table1 = table[1];
+ var table2 = table[2];
+ var table3 = table[3];
+ var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
+
+ for (i = 0; i < nInnerRounds; i++) {
+ a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
+ b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
+ c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
+ d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
+ kIndex += 4;
+ a = a2;
+ b = b2;
+ c = c2;
+ } // Last round.
+
+
+ for (i = 0; i < 4; i++) {
+ out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
+ a2 = a;
+ a = b;
+ b = c;
+ c = d;
+ d = a2;
+ }
+ };
+
+ return AES;
+ }();
+ /**
+ * A wrapper around the Stream class to use setTimeout
+ * and run stream "jobs" asynchronously
+ *
+ * @class AsyncStream
+ * @extends Stream
+ */
+
+
+ var AsyncStream = /*#__PURE__*/function (_Stream) {
+ inheritsLoose(AsyncStream, _Stream);
+
+ function AsyncStream() {
+ var _this;
+
+ _this = _Stream.call(this, Stream) || this;
+ _this.jobs = [];
+ _this.delay = 1;
+ _this.timeout_ = null;
+ return _this;
+ }
+ /**
+ * process an async job
+ *
+ * @private
+ */
+
+
+ var _proto = AsyncStream.prototype;
+
+ _proto.processJob_ = function processJob_() {
+ this.jobs.shift()();
+
+ if (this.jobs.length) {
+ this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
+ } else {
+ this.timeout_ = null;
+ }
+ }
+ /**
+ * push a job into the stream
+ *
+ * @param {Function} job the job to push into the stream
+ */
+ ;
+
+ _proto.push = function push(job) {
+ this.jobs.push(job);
+
+ if (!this.timeout_) {
+ this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
+ }
+ };
+
+ return AsyncStream;
+ }(Stream);
+ /**
+ * Convert network-order (big-endian) bytes into their little-endian
+ * representation.
+ */
+
+
+ var ntoh = function ntoh(word) {
+ return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
+ };
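+ // For example, ntoh(0x11223344) === 0x44332211: the big-endian (network
+ // order) word read from the segment is byte-swapped into the little-endian
+ // representation used by the decryption loop below.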
+ /**
+ * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
+ *
+ * @param {Uint8Array} encrypted the encrypted bytes
+ * @param {Uint32Array} key the bytes of the decryption key
+ * @param {Uint32Array} initVector the initialization vector (IV) to
+ * use for the first round of CBC.
+ * @return {Uint8Array} the decrypted bytes
+ *
+ * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
+ * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
+ * @see https://tools.ietf.org/html/rfc2315
+ */
+
+
+ var decrypt = function decrypt(encrypted, key, initVector) {
+ // word-level access to the encrypted bytes
+ var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
+ var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
+
+ var decrypted = new Uint8Array(encrypted.byteLength);
+ var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
+ // decrypted data
+
+ var init0;
+ var init1;
+ var init2;
+ var init3;
+ var encrypted0;
+ var encrypted1;
+ var encrypted2;
+ var encrypted3; // iteration variable
+
+ var wordIx; // pull out the words of the IV to ensure we don't modify the
+ // passed-in reference and for easier access
+
+ init0 = initVector[0];
+ init1 = initVector[1];
+ init2 = initVector[2];
+ init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
+ // to each decrypted block
+
+ for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
+ // convert big-endian (network order) words into little-endian
+ // (javascript order)
+ encrypted0 = ntoh(encrypted32[wordIx]);
+ encrypted1 = ntoh(encrypted32[wordIx + 1]);
+ encrypted2 = ntoh(encrypted32[wordIx + 2]);
+ encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
+
+ decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
+ // plaintext
+
+ decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
+ decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
+ decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
+ decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
+
+ init0 = encrypted0;
+ init1 = encrypted1;
+ init2 = encrypted2;
+ init3 = encrypted3;
+ }
+
+ return decrypted;
+ };
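+ // The loop above is textbook CBC decryption: each plaintext block is the AES
+ // decryption of the current ciphertext block XORed with the previous
+ // ciphertext block (the IV for the very first block), i.e. P[i] = D(C[i]) ^ C[i-1].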
+ /**
+ * The `Decrypter` class that manages decryption of AES
+ * data through `AsyncStream` objects and the `decrypt`
+ * function
+ *
+ * @param {Uint8Array} encrypted the encrypted bytes
+ * @param {Uint32Array} key the bytes of the decryption key
+ * @param {Uint32Array} initVector the initialization vector (IV) to
+ * @param {Function} done the function to run when done
+ * @class Decrypter
+ */
+
+
+ var Decrypter = /*#__PURE__*/function () {
+ function Decrypter(encrypted, key, initVector, done) {
+ var step = Decrypter.STEP;
+ var encrypted32 = new Int32Array(encrypted.buffer);
+ var decrypted = new Uint8Array(encrypted.byteLength);
+ var i = 0;
+ this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously
+
+ this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
+
+ for (i = step; i < encrypted32.length; i += step) {
+ initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
+ this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
+ } // invoke the done() callback when everything is finished
+
+
+ this.asyncStream_.push(function () {
+ // remove pkcs#7 padding from the decrypted bytes
+ done(null, unpad(decrypted));
+ });
+ }
+ /**
+ * a getter for STEP, the maximum number of 32-bit words to decrypt in one chunk
+ *
+ * @return {number} the value of STEP, 32000
+ */
+
+
+ var _proto = Decrypter.prototype;
+ /**
+ * @private
+ */
+
+ _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
+ return function () {
+ var bytes = decrypt(encrypted, key, initVector);
+ decrypted.set(bytes, encrypted.byteOffset);
+ };
+ };
+
+ createClass(Decrypter, null, [{
+ key: "STEP",
+ get: function get() {
+ // 4 * 8000;
+ return 32000;
+ }
+ }]);
+ return Decrypter;
+ }();
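+ // Rough usage sketch for the Decrypter above (illustrative only): work is
+ // queued on an AsyncStream in STEP-sized chunks so decryption never blocks
+ // the thread for long.
+ //   new Decrypter(encryptedBytes, keyWords, ivWords, function (err, bytes) {
+ //     // `bytes` is the decrypted Uint8Array with PKCS#7 padding removed
+ //   });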
+
+ var win;
+
+ if (typeof window !== "undefined") {
+ win = window;
+ } else if (typeof commonjsGlobal !== "undefined") {
+ win = commonjsGlobal;
+ } else if (typeof self !== "undefined") {
+ win = self;
+ } else {
+ win = {};
+ }
+
+ var window_1 = win;
+
+ var isArrayBufferView = function isArrayBufferView(obj) {
+ if (typeof ArrayBuffer.isView === 'function') {
+ return ArrayBuffer.isView(obj);
+ }
+
+ return obj && obj.buffer instanceof ArrayBuffer;
+ };
+
+ var BigInt = window_1.BigInt || Number;
+ [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
+ /**
+ * Creates an object for sending to a web worker modifying properties that are TypedArrays
+ * into a new object with separated properties for the buffer, byteOffset, and byteLength.
+ *
+ * @param {Object} message
+ * Object of properties and values to send to the web worker
+ * @return {Object}
+ * Modified message with TypedArray values expanded
+ * @function createTransferableMessage
+ */
+
+
+ var createTransferableMessage = function createTransferableMessage(message) {
+ var transferable = {};
+ Object.keys(message).forEach(function (key) {
+ var value = message[key];
+
+ if (isArrayBufferView(value)) {
+ transferable[key] = {
+ bytes: value.buffer,
+ byteOffset: value.byteOffset,
+ byteLength: value.byteLength
+ };
+ } else {
+ transferable[key] = value;
+ }
+ });
+ return transferable;
+ };
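+ // For example, { encrypted: someUint8Array } becomes
+ // { encrypted: { bytes: <ArrayBuffer>, byteOffset: 0, byteLength: n } }, so the
+ // underlying ArrayBuffer can be listed as a transferable when posting the message.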
+ /* global self */
+
+ /**
+ * Our web worker interface so that things can talk to the aes-decrypter
+ * that will be running in a web worker. The scope is passed to this by
+ * webworkify.
+ */
+
+
+ self.onmessage = function (event) {
+ var data = event.data;
+ var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
+ var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
+ var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
+ /* eslint-disable no-new, handle-callback-err */
+
+ new Decrypter(encrypted, key, iv, function (err, bytes) {
+ self.postMessage(createTransferableMessage({
+ source: data.source,
+ decrypted: bytes
+ }), [bytes.buffer]);
+ });
+ /* eslint-enable */
+ };
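+ // The handler above expects a message shaped like the output of
+ // createTransferableMessage, roughly:
+ //   { source, encrypted: { bytes, byteOffset, byteLength },
+ //     key: { bytes, byteOffset, byteLength }, iv: { bytes, byteOffset, byteLength } }
+ // and replies with { source, decrypted }, transferring the decrypted buffer back.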
+}));
+var Decrypter = factory(workerCode);
+/* rollup-plugin-worker-factory end for worker!/Users/bclifford/Code/vhs-release-test/src/decrypter-worker.js */
+
+/**
+ * Convert the properties of an HLS track into an audioTrackKind.
+ *
+ * @private
+ */
+
+var audioTrackKind_ = function audioTrackKind_(properties) {
+ var kind = properties["default"] ? 'main' : 'alternative';
+
+ if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
+ kind = 'main-desc';
+ }
+
+ return kind;
+};
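+// For example, a rendition parsed with `default: true` maps to 'main', a
+// non-default rendition maps to 'alternative', and any rendition whose
+// CHARACTERISTICS include 'public.accessibility.describes-video' maps to 'main-desc'.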
+/**
+ * Pause provided segment loader and playlist loader if active
+ *
+ * @param {SegmentLoader} segmentLoader
+ * SegmentLoader to pause
+ * @param {Object} mediaType
+ * Active media type
+ * @function stopLoaders
+ */
+
+
+var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
+ segmentLoader.abort();
+ segmentLoader.pause();
+
+ if (mediaType && mediaType.activePlaylistLoader) {
+ mediaType.activePlaylistLoader.pause();
+ mediaType.activePlaylistLoader = null;
+ }
+};
+/**
+ * Start loading provided segment loader and playlist loader
+ *
+ * @param {PlaylistLoader} playlistLoader
+ * PlaylistLoader to start loading
+ * @param {Object} mediaType
+ * Active media type
+ * @function startLoaders
+ */
+
+
+var startLoaders = function startLoaders(playlistLoader, mediaType) {
+ // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
+ // playlist loader
+ mediaType.activePlaylistLoader = playlistLoader;
+ playlistLoader.load();
+};
+/**
+ * Returns a function to be called when the media group changes. It performs a
+ * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
+ * change of group is merely a rendition switch of the same content at another encoding,
+ * rather than a change of content, such as switching audio from English to Spanish.
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Handler for a non-destructive resync of SegmentLoader when the active media
+ * group changes.
+ * @function onGroupChanged
+ */
+
+
+var onGroupChanged = function onGroupChanged(type, settings) {
+ return function () {
+ var _settings$segmentLoad = settings.segmentLoaders,
+ segmentLoader = _settings$segmentLoad[type],
+ mainSegmentLoader = _settings$segmentLoad.main,
+ mediaType = settings.mediaTypes[type];
+ var activeTrack = mediaType.activeTrack();
+ var activeGroup = mediaType.getActiveGroup();
+ var previousActiveLoader = mediaType.activePlaylistLoader;
+ var lastGroup = mediaType.lastGroup_; // the group did not change, do nothing
+
+ if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
+ return;
+ }
+
+ mediaType.lastGroup_ = activeGroup;
+ mediaType.lastTrack_ = activeTrack;
+ stopLoaders(segmentLoader, mediaType);
+
+ if (!activeGroup || activeGroup.isMasterPlaylist) {
+ // there is no group active or active group is a main playlist and won't change
+ return;
+ }
+
+ if (!activeGroup.playlistLoader) {
+ if (previousActiveLoader) {
+ // The previous group had a playlist loader but the new active group does not
+ // this means we are switching from demuxed to muxed audio. In this case we want to
+ // do a destructive reset of the main segment loader and not restart the audio
+ // loaders.
+ mainSegmentLoader.resetEverything();
+ }
+
+ return;
+ } // Non-destructive resync
+
+
+ segmentLoader.resyncLoader();
+ startLoaders(activeGroup.playlistLoader, mediaType);
+ };
+};
+
+var onGroupChanging = function onGroupChanging(type, settings) {
+ return function () {
+ var segmentLoader = settings.segmentLoaders[type],
+ mediaType = settings.mediaTypes[type];
+ mediaType.lastGroup_ = null;
+ segmentLoader.abort();
+ segmentLoader.pause();
+ };
+};
+/**
+ * Returns a function to be called when the media track changes. It performs a
+ * destructive reset of the SegmentLoader to ensure we start loading as close to
+ * currentTime as possible.
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Handler for a destructive reset of SegmentLoader when the active media
+ * track changes.
+ * @function onTrackChanged
+ */
+
+
+var onTrackChanged = function onTrackChanged(type, settings) {
+ return function () {
+ var masterPlaylistLoader = settings.masterPlaylistLoader,
+ _settings$segmentLoad2 = settings.segmentLoaders,
+ segmentLoader = _settings$segmentLoad2[type],
+ mainSegmentLoader = _settings$segmentLoad2.main,
+ mediaType = settings.mediaTypes[type];
+ var activeTrack = mediaType.activeTrack();
+ var activeGroup = mediaType.getActiveGroup();
+ var previousActiveLoader = mediaType.activePlaylistLoader;
+ var lastTrack = mediaType.lastTrack_; // track did not change, do nothing
+
+ if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
+ return;
+ }
+
+ mediaType.lastGroup_ = activeGroup;
+ mediaType.lastTrack_ = activeTrack;
+ stopLoaders(segmentLoader, mediaType);
+
+ if (!activeGroup) {
+ // there is no group active so we do not want to restart loaders
+ return;
+ }
+
+ if (activeGroup.isMasterPlaylist) {
+ // track did not change, do nothing
+ if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
+ return;
+ }
+
+ var mpc = settings.vhs.masterPlaylistController_;
+ var newPlaylist = mpc.selectPlaylist(); // media will not change, do nothing
+
+ if (mpc.media() === newPlaylist) {
+ return;
+ }
+
+ mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
+ masterPlaylistLoader.pause();
+ mainSegmentLoader.resetEverything();
+ mpc.fastQualityChange_(newPlaylist);
+ return;
+ }
+
+ if (type === 'AUDIO') {
+ if (!activeGroup.playlistLoader) {
+ // when switching from demuxed audio/video to muxed audio/video (noted by no
+ // playlist loader for the audio group), we want to do a destructive reset of the
+ // main segment loader and not restart the audio loaders
+ mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
+ // it should be stopped
+
+ mainSegmentLoader.resetEverything();
+ return;
+ } // although the segment loader is an audio segment loader, call the setAudio
+ // function to ensure it is prepared to re-append the init segment (or handle other
+ // config changes)
+
+
+ segmentLoader.setAudio(true);
+ mainSegmentLoader.setAudio(false);
+ }
+
+ if (previousActiveLoader === activeGroup.playlistLoader) {
+ // Nothing has actually changed. This can happen because track change events can fire
+ // multiple times for a "single" change. One for enabling the new active track, and
+ // one for disabling the track that was active
+ startLoaders(activeGroup.playlistLoader, mediaType);
+ return;
+ }
+
+ if (segmentLoader.track) {
+ // For WebVTT, set the new text track in the segmentloader
+ segmentLoader.track(activeTrack);
+ } // destructive reset
+
+
+ segmentLoader.resetEverything();
+ startLoaders(activeGroup.playlistLoader, mediaType);
+ };
+};
+
+var onError = {
+ /**
+ * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
+ * an error.
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Error handler. Logs warning (or error if the playlist is blacklisted) to
+ * console and switches back to default audio track.
+ * @function onError.AUDIO
+ */
+ AUDIO: function AUDIO(type, settings) {
+ return function () {
+ var segmentLoader = settings.segmentLoaders[type],
+ mediaType = settings.mediaTypes[type],
+ blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
+ stopLoaders(segmentLoader, mediaType); // switch back to default audio track
+
+ var activeTrack = mediaType.activeTrack();
+ var activeGroup = mediaType.activeGroup();
+ var id = (activeGroup.filter(function (group) {
+ return group["default"];
+ })[0] || activeGroup[0]).id;
+ var defaultTrack = mediaType.tracks[id];
+
+ if (activeTrack === defaultTrack) {
+ // Default track encountered an error. All we can do now is blacklist the current
+ // rendition and hope another will switch audio groups
+ blacklistCurrentPlaylist({
+ message: 'Problem encountered loading the default audio track.'
+ });
+ return;
+ }
+
+ videojs.log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
+
+ for (var trackId in mediaType.tracks) {
+ mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
+ }
+
+ mediaType.onTrackChanged();
+ };
+ },
+
+ /**
+ * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
+ * an error.
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Error handler. Logs warning to console and disables the active subtitle track
+ * @function onError.SUBTITLES
+ */
+ SUBTITLES: function SUBTITLES(type, settings) {
+ return function () {
+ var segmentLoader = settings.segmentLoaders[type],
+ mediaType = settings.mediaTypes[type];
+ videojs.log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
+ stopLoaders(segmentLoader, mediaType);
+ var track = mediaType.activeTrack();
+
+ if (track) {
+ track.mode = 'disabled';
+ }
+
+ mediaType.onTrackChanged();
+ };
+ }
+};
+var setupListeners = {
+ /**
+ * Setup event listeners for audio playlist loader
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {PlaylistLoader|null} playlistLoader
+ * PlaylistLoader to register listeners on
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @function setupListeners.AUDIO
+ */
+ AUDIO: function AUDIO(type, playlistLoader, settings) {
+ if (!playlistLoader) {
+ // no playlist loader means audio will be muxed with the video
+ return;
+ }
+
+ var tech = settings.tech,
+ requestOptions = settings.requestOptions,
+ segmentLoader = settings.segmentLoaders[type];
+ playlistLoader.on('loadedmetadata', function () {
+ var media = playlistLoader.media();
+ segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
+ // permits, start downloading segments
+
+ if (!tech.paused() || media.endList && tech.preload() !== 'none') {
+ segmentLoader.load();
+ }
+ });
+ playlistLoader.on('loadedplaylist', function () {
+ segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
+
+ if (!tech.paused()) {
+ segmentLoader.load();
+ }
+ });
+ playlistLoader.on('error', onError[type](type, settings));
+ },
+
+ /**
+ * Setup event listeners for subtitle playlist loader
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {PlaylistLoader|null} playlistLoader
+ * PlaylistLoader to register listeners on
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @function setupListeners.SUBTITLES
+ */
+ SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
+ var tech = settings.tech,
+ requestOptions = settings.requestOptions,
+ segmentLoader = settings.segmentLoaders[type],
+ mediaType = settings.mediaTypes[type];
+ playlistLoader.on('loadedmetadata', function () {
+ var media = playlistLoader.media();
+ segmentLoader.playlist(media, requestOptions);
+ segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
+ // permits, start downloading segments
+
+ if (!tech.paused() || media.endList && tech.preload() !== 'none') {
+ segmentLoader.load();
+ }
+ });
+ playlistLoader.on('loadedplaylist', function () {
+ segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
+
+ if (!tech.paused()) {
+ segmentLoader.load();
+ }
+ });
+ playlistLoader.on('error', onError[type](type, settings));
+ }
+};
+var initialize = {
+ /**
+ * Setup PlaylistLoaders and AudioTracks for the audio groups
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @function initialize.AUDIO
+ */
+ 'AUDIO': function AUDIO(type, settings) {
+ var vhs = settings.vhs,
+ sourceType = settings.sourceType,
+ segmentLoader = settings.segmentLoaders[type],
+ requestOptions = settings.requestOptions,
+ mediaGroups = settings.master.mediaGroups,
+ _settings$mediaTypes$ = settings.mediaTypes[type],
+ groups = _settings$mediaTypes$.groups,
+ tracks = _settings$mediaTypes$.tracks,
+ logger_ = _settings$mediaTypes$.logger_,
+ masterPlaylistLoader = settings.masterPlaylistLoader;
+ var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none
+
+ if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
+ mediaGroups[type] = {
+ main: {
+ "default": {
+ "default": true
+ }
+ }
+ };
+
+ if (audioOnlyMaster) {
+ mediaGroups[type].main["default"].playlists = masterPlaylistLoader.master.playlists;
+ }
+ }
+
+ for (var groupId in mediaGroups[type]) {
+ if (!groups[groupId]) {
+ groups[groupId] = [];
+ }
+
+ for (var variantLabel in mediaGroups[type][groupId]) {
+ var properties = mediaGroups[type][groupId][variantLabel];
+ var playlistLoader = void 0;
+
+ if (audioOnlyMaster) {
+ logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
+ properties.isMasterPlaylist = true;
+ playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
+ // use the resolved media playlist object
+ } else if (sourceType === 'vhs-json' && properties.playlists) {
+ playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
+ } else if (properties.resolvedUri) {
+ playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists
+ // should we even have properties.playlists in this check?
+ } else if (properties.playlists && sourceType === 'dash') {
+ playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
+ } else {
+ // no resolvedUri means the audio is muxed with the video when using this
+ // audio track
+ playlistLoader = null;
+ }
+
+ properties = videojs.mergeOptions({
+ id: variantLabel,
+ playlistLoader: playlistLoader
+ }, properties);
+ setupListeners[type](type, properties.playlistLoader, settings);
+ groups[groupId].push(properties);
+
+ if (typeof tracks[variantLabel] === 'undefined') {
+ var track = new videojs.AudioTrack({
+ id: variantLabel,
+ kind: audioTrackKind_(properties),
+ enabled: false,
+ language: properties.language,
+ "default": properties["default"],
+ label: variantLabel
+ });
+ tracks[variantLabel] = track;
+ }
+ }
+ } // setup single error event handler for the segment loader
+
+
+ segmentLoader.on('error', onError[type](type, settings));
+ },
+
+ /**
+ * Setup PlaylistLoaders and TextTracks for the subtitle groups
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @function initialize.SUBTITLES
+ */
+ 'SUBTITLES': function SUBTITLES(type, settings) {
+ var tech = settings.tech,
+ vhs = settings.vhs,
+ sourceType = settings.sourceType,
+ segmentLoader = settings.segmentLoaders[type],
+ requestOptions = settings.requestOptions,
+ mediaGroups = settings.master.mediaGroups,
+ _settings$mediaTypes$2 = settings.mediaTypes[type],
+ groups = _settings$mediaTypes$2.groups,
+ tracks = _settings$mediaTypes$2.tracks,
+ masterPlaylistLoader = settings.masterPlaylistLoader;
+
+ for (var groupId in mediaGroups[type]) {
+ if (!groups[groupId]) {
+ groups[groupId] = [];
+ }
+
+ for (var variantLabel in mediaGroups[type][groupId]) {
+ if (mediaGroups[type][groupId][variantLabel].forced) {
+ // Subtitle playlists with the forced attribute are not selectable in Safari.
+ // According to Apple's HLS Authoring Specification:
+ // If content has forced subtitles and regular subtitles in a given language,
+ // the regular subtitles track in that language MUST contain both the forced
+ // subtitles and the regular subtitles for that language.
+ // Because of this requirement and that Safari does not add forced subtitles,
+ // forced subtitles are skipped here to maintain consistent experience across
+ // all platforms
+ continue;
+ }
+
+ var properties = mediaGroups[type][groupId][variantLabel];
+ var playlistLoader = void 0;
+
+ if (sourceType === 'hls') {
+ playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
+ } else if (sourceType === 'dash') {
+ var playlists = properties.playlists.filter(function (p) {
+ return p.excludeUntil !== Infinity;
+ });
+
+ if (!playlists.length) {
+ return;
+ }
+
+ playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
+ } else if (sourceType === 'vhs-json') {
+ playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
+ // as provided, otherwise use the resolved URI to load the playlist
+ properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
+ }
+
+ properties = videojs.mergeOptions({
+ id: variantLabel,
+ playlistLoader: playlistLoader
+ }, properties);
+ setupListeners[type](type, properties.playlistLoader, settings);
+ groups[groupId].push(properties);
+
+ if (typeof tracks[variantLabel] === 'undefined') {
+ var track = tech.addRemoteTextTrack({
+ id: variantLabel,
+ kind: 'subtitles',
+ "default": properties["default"] && properties.autoselect,
+ language: properties.language,
+ label: variantLabel
+ }, false).track;
+ tracks[variantLabel] = track;
+ }
+ }
+ } // setup single error event handler for the segment loader
+
+
+ segmentLoader.on('error', onError[type](type, settings));
+ },
+
+ /**
+ * Setup TextTracks for the closed-caption groups
+ *
+ * @param {String} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @function initialize['CLOSED-CAPTIONS']
+ */
+ 'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
+ var tech = settings.tech,
+ mediaGroups = settings.master.mediaGroups,
+ _settings$mediaTypes$3 = settings.mediaTypes[type],
+ groups = _settings$mediaTypes$3.groups,
+ tracks = _settings$mediaTypes$3.tracks;
+
+ for (var groupId in mediaGroups[type]) {
+ if (!groups[groupId]) {
+ groups[groupId] = [];
+ }
+
+ for (var variantLabel in mediaGroups[type][groupId]) {
+ var properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services
+
+ if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
+ continue;
+ }
+
+ var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
+ var newProps = {
+ label: variantLabel,
+ language: properties.language,
+ instreamId: properties.instreamId,
+ "default": properties["default"] && properties.autoselect
+ };
+
+ if (captionServices[newProps.instreamId]) {
+ newProps = videojs.mergeOptions(newProps, captionServices[newProps.instreamId]);
+ }
+
+ if (newProps["default"] === undefined) {
+ delete newProps["default"];
+ } // No PlaylistLoader is required for Closed-Captions because the captions are
+ // embedded within the video stream
+
+
+ groups[groupId].push(videojs.mergeOptions({
+ id: variantLabel
+ }, properties));
+
+ if (typeof tracks[variantLabel] === 'undefined') {
+ var track = tech.addRemoteTextTrack({
+ id: newProps.instreamId,
+ kind: 'captions',
+ "default": newProps["default"],
+ language: newProps.language,
+ label: newProps.label
+ }, false).track;
+ tracks[variantLabel] = track;
+ }
+ }
+ }
+ }
+};
+
+var groupMatch = function groupMatch(list, media) {
+ for (var i = 0; i < list.length; i++) {
+ if (playlistMatch(media, list[i])) {
+ return true;
+ }
+
+ if (list[i].playlists && groupMatch(list[i].playlists, media)) {
+ return true;
+ }
+ }
+
+ return false;
+};
+/**
+ * Returns a function used to get the active group of the provided type
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Function that returns the active media group for the provided type. Takes an
+ * optional parameter {TextTrack} track. If no track is provided, a list of all
+ * variants in the group, otherwise the variant corresponding to the provided
+ * track is returned.
+ * @function activeGroup
+ */
+
+
+var activeGroup = function activeGroup(type, settings) {
+ return function (track) {
+ var masterPlaylistLoader = settings.masterPlaylistLoader,
+ groups = settings.mediaTypes[type].groups;
+ var media = masterPlaylistLoader.media();
+
+ if (!media) {
+ return null;
+ }
+
+ var variants = null; // set variants to the main media active group
+
+ if (media.attributes[type]) {
+ variants = groups[media.attributes[type]];
+ }
+
+ var groupKeys = Object.keys(groups);
+
+ if (!variants) {
+ // find the masterPlaylistLoader media
+ // that is in a media group if we are dealing
+ // with audio only
+ if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.master)) {
+ for (var i = 0; i < groupKeys.length; i++) {
+ var groupPropertyList = groups[groupKeys[i]];
+
+ if (groupMatch(groupPropertyList, media)) {
+ variants = groupPropertyList;
+ break;
+ }
+ } // use the main group if it exists
+
+ } else if (groups.main) {
+ variants = groups.main; // only one group, use that one
+ } else if (groupKeys.length === 1) {
+ variants = groups[groupKeys[0]];
+ }
+ }
+
+ if (typeof track === 'undefined') {
+ return variants;
+ }
+
+ if (track === null || !variants) {
+ // An active track was specified so a corresponding group is expected. track === null
+ // means no track is currently active so there is no corresponding group
+ return null;
+ }
+
+ return variants.filter(function (props) {
+ return props.id === track.id;
+ })[0] || null;
+ };
+};
+
+var activeTrack = {
+ /**
+ * Returns a function used to get the active track of type provided
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Function that returns the active media track for the provided type. Returns
+ * null if no track is active
+ * @function activeTrack.AUDIO
+ */
+ AUDIO: function AUDIO(type, settings) {
+ return function () {
+ var tracks = settings.mediaTypes[type].tracks;
+
+ for (var id in tracks) {
+ if (tracks[id].enabled) {
+ return tracks[id];
+ }
+ }
+
+ return null;
+ };
+ },
+
+ /**
+ * Returns a function used to get the active track of type provided
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Function that returns the active media track for the provided type. Returns
+ * null if no track is active
+ * @function activeTrack.SUBTITLES
+ */
+ SUBTITLES: function SUBTITLES(type, settings) {
+ return function () {
+ var tracks = settings.mediaTypes[type].tracks;
+
+ for (var id in tracks) {
+ if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
+ return tracks[id];
+ }
+ }
+
+ return null;
+ };
+ }
+};
+
+var getActiveGroup = function getActiveGroup(type, _ref) {
+ var mediaTypes = _ref.mediaTypes;
+ return function () {
+ var activeTrack_ = mediaTypes[type].activeTrack();
+
+ if (!activeTrack_) {
+ return null;
+ }
+
+ return mediaTypes[type].activeGroup(activeTrack_);
+ };
+};
+/**
+ * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
+ * Closed-Captions) specified in the master manifest.
+ *
+ * @param {Object} settings
+ * Object containing required information for setting up the media groups
+ * @param {Tech} settings.tech
+ * The tech of the player
+ * @param {Object} settings.requestOptions
+ * XHR request options used by the segment loaders
+ * @param {PlaylistLoader} settings.masterPlaylistLoader
+ * PlaylistLoader for the master source
+ * @param {VhsHandler} settings.vhs
+ * VHS SourceHandler
+ * @param {Object} settings.master
+ * The parsed master manifest
+ * @param {Object} settings.mediaTypes
+ * Object to store the loaders, tracks, and utility methods for each media type
+ * @param {Function} settings.blacklistCurrentPlaylist
+ * Blacklists the current rendition and forces a rendition switch.
+ * @function setupMediaGroups
+ */
+
+
+var setupMediaGroups = function setupMediaGroups(settings) {
+ ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
+ initialize[type](type, settings);
+ });
+ var mediaTypes = settings.mediaTypes,
+ masterPlaylistLoader = settings.masterPlaylistLoader,
+ tech = settings.tech,
+ vhs = settings.vhs,
+ _settings$segmentLoad3 = settings.segmentLoaders,
+ audioSegmentLoader = _settings$segmentLoad3['AUDIO'],
+ mainSegmentLoader = _settings$segmentLoad3.main; // setup active group and track getters and change event handlers
+
+ ['AUDIO', 'SUBTITLES'].forEach(function (type) {
+ mediaTypes[type].activeGroup = activeGroup(type, settings);
+ mediaTypes[type].activeTrack = activeTrack[type](type, settings);
+ mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
+ mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
+ mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
+ mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
+ }); // DO NOT enable the default subtitle or caption track.
+ // DO enable the default audio track
+
+ var audioGroup = mediaTypes.AUDIO.activeGroup();
+
+ if (audioGroup) {
+ var groupId = (audioGroup.filter(function (group) {
+ return group["default"];
+ })[0] || audioGroup[0]).id;
+ mediaTypes.AUDIO.tracks[groupId].enabled = true;
+ mediaTypes.AUDIO.onGroupChanged();
+ mediaTypes.AUDIO.onTrackChanged();
+ var activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
+ // track is changed, but needs to be handled here since the track may not be considered
+ // changed on the first call to onTrackChanged
+
+ if (!activeAudioGroup.playlistLoader) {
+ // either audio is muxed with video or the stream is audio only
+ mainSegmentLoader.setAudio(true);
+ } else {
+ // audio is demuxed
+ mainSegmentLoader.setAudio(false);
+ audioSegmentLoader.setAudio(true);
+ }
+ }
+
+ masterPlaylistLoader.on('mediachange', function () {
+ ['AUDIO', 'SUBTITLES'].forEach(function (type) {
+ return mediaTypes[type].onGroupChanged();
+ });
+ });
+ masterPlaylistLoader.on('mediachanging', function () {
+ ['AUDIO', 'SUBTITLES'].forEach(function (type) {
+ return mediaTypes[type].onGroupChanging();
+ });
+ }); // custom audio track change event handler for usage event
+
+ var onAudioTrackChanged = function onAudioTrackChanged() {
+ mediaTypes.AUDIO.onTrackChanged();
+ tech.trigger({
+ type: 'usage',
+ name: 'vhs-audio-change'
+ });
+ tech.trigger({
+ type: 'usage',
+ name: 'hls-audio-change'
+ });
+ };
+
+ tech.audioTracks().addEventListener('change', onAudioTrackChanged);
+ tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
+ vhs.on('dispose', function () {
+ tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
+ tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
+ }); // clear existing audio tracks and add the ones we just created
+
+ tech.clearTracks('audio');
+
+ for (var id in mediaTypes.AUDIO.tracks) {
+ tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
+ }
+};
+/**
+ * Creates skeleton object used to store the loaders, tracks, and utility methods for each
+ * media type
+ *
+ * @return {Object}
+ * Object to store the loaders, tracks, and utility methods for each media type
+ * @function createMediaTypes
+ */
+
+
+var createMediaTypes = function createMediaTypes() {
+ var mediaTypes = {};
+ ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
+ mediaTypes[type] = {
+ groups: {},
+ tracks: {},
+ activePlaylistLoader: null,
+ activeGroup: noop,
+ activeTrack: noop,
+ getActiveGroup: noop,
+ onGroupChanged: noop,
+ onTrackChanged: noop,
+ lastTrack_: null,
+ logger_: logger("MediaGroups[" + type + "]")
+ };
+ });
+ return mediaTypes;
+};
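+// The resulting skeleton, per type, looks like:
+//   { groups: {}, tracks: {}, activePlaylistLoader: null, activeGroup: noop,
+//     activeTrack: noop, getActiveGroup: noop, onGroupChanged: noop,
+//     onTrackChanged: noop, lastTrack_: null, logger_: logger(...) }
+// setupMediaGroups() above swaps the noop placeholders for the real getters and handlers.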
+
+var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
+var Vhs$1; // SegmentLoader stats that need to have each loader's
+// values summed to calculate the final value
+
+var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];
+
+var sumLoaderStat = function sumLoaderStat(stat) {
+ return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
+};
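+// For example, once bound to a stat name in the MasterPlaylistController
+// constructor below, this.mediaBytesTransferred_() returns
+// audioSegmentLoader_.mediaBytesTransferred + mainSegmentLoader_.mediaBytesTransferred.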
+
+var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
+ var currentPlaylist = _ref.currentPlaylist,
+ buffered = _ref.buffered,
+ currentTime = _ref.currentTime,
+ nextPlaylist = _ref.nextPlaylist,
+ bufferLowWaterLine = _ref.bufferLowWaterLine,
+ bufferHighWaterLine = _ref.bufferHighWaterLine,
+ duration = _ref.duration,
+ experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
+ log = _ref.log; // we have no other playlist to switch to
+
+ if (!nextPlaylist) {
+ videojs.log.warn('We received no playlist to switch to. Please check your stream.');
+ return false;
+ }
+
+ var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id;
+
+ if (!currentPlaylist) {
+ log(sharedLogLine + " as current playlist is not set");
+ return true;
+ } // no need to switch if playlist is the same
+
+
+ if (nextPlaylist.id === currentPlaylist.id) {
+ return false;
+ } // determine if current time is in a buffered range.
+
+
+ var isBuffered = Boolean(findRange(buffered, currentTime).length); // If the playlist is live, then we want to not take low water line into account.
+ // This is because in LIVE, the player plays 3 segments from the end of the
+ // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
+ // in those segments, a viewer will never experience a rendition upswitch.
+
+ if (!currentPlaylist.endList) {
+ // For LLHLS live streams, don't switch renditions before playback has started, as it almost
+ // doubles the time to first playback.
+ if (!isBuffered && typeof currentPlaylist.partTargetDuration === 'number') {
+ log("not " + sharedLogLine + " as current playlist is live llhls, but currentTime isn't in buffered.");
+ return false;
+ }
+
+ log(sharedLogLine + " as current playlist is live");
+ return true;
+ }
+
+ var forwardBuffer = timeAheadOf(buffered, currentTime);
+ var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
+ // duration is below the max potential low water line
+
+ if (duration < maxBufferLowWaterLine) {
+ log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
+ return true;
+ }
+
+ var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
+ var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
+ // we can switch down
+
+ if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
+ var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";
+
+ if (experimentalBufferBasedABR) {
+ logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
+ }
+
+ log(logLine);
+ return true;
+ } // and if our buffer is higher than the low water line,
+ // we can switch up
+
+
+ if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
+ var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";
+
+ if (experimentalBufferBasedABR) {
+ _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
+ }
+
+ log(_logLine);
+ return true;
+ }
+
+ log("not " + sharedLogLine + " as no switching criteria met");
+ return false;
+};
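+// In short: switch when there is no current playlist, when the current playlist
+// is live (unless it is LLHLS and playback hasn't buffered yet), when the VOD
+// duration is under the max low water line, when stepping down in bandwidth
+// (gated on bufferHighWaterLine under experimentalBufferBasedABR), or when the
+// forward buffer has reached bufferLowWaterLine (and, with buffer-based ABR,
+// only when stepping up in bandwidth); otherwise stay on the current playlist.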
+/**
+ * The master playlist controller controls all interactions
+ * between playlists and segment loaders. At this time this mainly
+ * involves a master playlist and a series of audio playlists,
+ * if they are available.
+ *
+ * @class MasterPlaylistController
+ * @extends videojs.EventTarget
+ */
+
+
+var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
+ _inheritsLoose__default['default'](MasterPlaylistController, _videojs$EventTarget);
+
+ function MasterPlaylistController(options) {
+ var _this;
+
+ _this = _videojs$EventTarget.call(this) || this;
+ var src = options.src,
+ handleManifestRedirects = options.handleManifestRedirects,
+ withCredentials = options.withCredentials,
+ tech = options.tech,
+ bandwidth = options.bandwidth,
+ externVhs = options.externVhs,
+ useCueTags = options.useCueTags,
+ blacklistDuration = options.blacklistDuration,
+ enableLowInitialPlaylist = options.enableLowInitialPlaylist,
+ sourceType = options.sourceType,
+ cacheEncryptionKeys = options.cacheEncryptionKeys,
+ experimentalBufferBasedABR = options.experimentalBufferBasedABR,
+ experimentalLeastPixelDiffSelector = options.experimentalLeastPixelDiffSelector,
+ captionServices = options.captionServices;
+
+ if (!src) {
+ throw new Error('A non-empty playlist URL or JSON manifest string is required');
+ }
+
+ var maxPlaylistRetries = options.maxPlaylistRetries;
+
+ if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
+ maxPlaylistRetries = Infinity;
+ }
+
+ Vhs$1 = externVhs;
+ _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
+ _this.experimentalLeastPixelDiffSelector = Boolean(experimentalLeastPixelDiffSelector);
+ _this.withCredentials = withCredentials;
+ _this.tech_ = tech;
+ _this.vhs_ = tech.vhs;
+ _this.sourceType_ = sourceType;
+ _this.useCueTags_ = useCueTags;
+ _this.blacklistDuration = blacklistDuration;
+ _this.maxPlaylistRetries = maxPlaylistRetries;
+ _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
+
+ if (_this.useCueTags_) {
+ _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
+ _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
+ }
+
+ _this.requestOptions_ = {
+ withCredentials: withCredentials,
+ handleManifestRedirects: handleManifestRedirects,
+ maxPlaylistRetries: maxPlaylistRetries,
+ timeout: null
+ };
+
+ _this.on('error', _this.pauseLoading);
+
+ _this.mediaTypes_ = createMediaTypes();
+ _this.mediaSource = new window__default['default'].MediaSource();
+ _this.handleDurationChange_ = _this.handleDurationChange_.bind(_assertThisInitialized__default['default'](_this));
+ _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(_assertThisInitialized__default['default'](_this));
+ _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(_assertThisInitialized__default['default'](_this));
+
+ _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player
+
+
+ _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);
+
+ _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
+ // everything, and the MediaSource should not be detached without a proper disposal
+
+
+ _this.seekable_ = videojs.createTimeRanges();
+ _this.hasPlayed_ = false;
+ _this.syncController_ = new SyncController(options);
+ _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
+ kind: 'metadata',
+ label: 'segment-metadata'
+ }, false).track;
+ _this.decrypter_ = new Decrypter();
+ _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
+ _this.inbandTextTracks_ = {};
+ _this.timelineChangeController_ = new TimelineChangeController();
+ var segmentLoaderSettings = {
+ vhs: _this.vhs_,
+ parse708captions: options.parse708captions,
+ useDtsForTimestampOffset: options.useDtsForTimestampOffset,
+ captionServices: captionServices,
+ mediaSource: _this.mediaSource,
+ currentTime: _this.tech_.currentTime.bind(_this.tech_),
+ seekable: function seekable() {
+ return _this.seekable();
+ },
+ seeking: function seeking() {
+ return _this.tech_.seeking();
+ },
+ duration: function duration() {
+ return _this.duration();
+ },
+ hasPlayed: function hasPlayed() {
+ return _this.hasPlayed_;
+ },
+ goalBufferLength: function goalBufferLength() {
+ return _this.goalBufferLength();
+ },
+ bandwidth: bandwidth,
+ syncController: _this.syncController_,
+ decrypter: _this.decrypter_,
+ sourceType: _this.sourceType_,
+ inbandTextTracks: _this.inbandTextTracks_,
+ cacheEncryptionKeys: cacheEncryptionKeys,
+ sourceUpdater: _this.sourceUpdater_,
+ timelineChangeController: _this.timelineChangeController_,
+ experimentalExactManifestTimings: options.experimentalExactManifestTimings
+ }; // The source type check not only determines whether a special DASH playlist loader
+ // should be used, but also covers the case where the provided src is a vhs-json
+ // manifest object (instead of a URL). In the case of vhs-json, the default
+ // PlaylistLoader should be used.
+
+ _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);
+
+ _this.setupMasterPlaylistLoaderListeners_(); // setup segment loaders
+ // combined audio/video or just video when alternate audio track is selected
+
+
+ _this.mainSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
+ segmentMetadataTrack: _this.segmentMetadataTrack_,
+ loaderType: 'main'
+ }), options); // alternate audio track
+
+ _this.audioSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
+ loaderType: 'audio'
+ }), options);
+ _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
+ loaderType: 'vtt',
+ featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks
+ }), options);
+
+ _this.setupSegmentLoaderListeners_();
+
+ if (_this.experimentalBufferBasedABR) {
+ _this.masterPlaylistLoader_.one('loadedplaylist', function () {
+ return _this.startABRTimer_();
+ });
+
+ _this.tech_.on('pause', function () {
+ return _this.stopABRTimer_();
+ });
+
+ _this.tech_.on('play', function () {
+ return _this.startABRTimer_();
+ });
+ } // Create SegmentLoader stat-getters
+ // mediaRequests_
+ // mediaRequestsAborted_
+ // mediaRequestsTimedout_
+ // mediaRequestsErrored_
+ // mediaTransferDuration_
+ // mediaBytesTransferred_
+ // mediaAppends_
+
+
+ loaderStats.forEach(function (stat) {
+ _this[stat + '_'] = sumLoaderStat.bind(_assertThisInitialized__default['default'](_this), stat);
+ });
+ _this.logger_ = logger('MPC');
+ _this.triggeredFmp4Usage = false;
+
+ if (_this.tech_.preload() === 'none') {
+ _this.loadOnPlay_ = function () {
+ _this.loadOnPlay_ = null;
+
+ _this.masterPlaylistLoader_.load();
+ };
+
+ _this.tech_.one('play', _this.loadOnPlay_);
+ } else {
+ _this.masterPlaylistLoader_.load();
+ }
+
+ _this.timeToLoadedData__ = -1;
+ _this.mainAppendsToLoadedData__ = -1;
+ _this.audioAppendsToLoadedData__ = -1;
+ var event = _this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)
+
+ _this.tech_.one(event, function () {
+ var timeToLoadedDataStart = Date.now();
+
+ _this.tech_.one('loadeddata', function () {
+ _this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
+ _this.mainAppendsToLoadedData__ = _this.mainSegmentLoader_.mediaAppends;
+ _this.audioAppendsToLoadedData__ = _this.audioSegmentLoader_.mediaAppends;
+ });
+ });
+
+ return _this;
+ }
+
+ var _proto = MasterPlaylistController.prototype;
+
+ _proto.mainAppendsToLoadedData_ = function mainAppendsToLoadedData_() {
+ return this.mainAppendsToLoadedData__;
+ };
+
+ _proto.audioAppendsToLoadedData_ = function audioAppendsToLoadedData_() {
+ return this.audioAppendsToLoadedData__;
+ };
+
+ _proto.appendsToLoadedData_ = function appendsToLoadedData_() {
+ var main = this.mainAppendsToLoadedData_();
+ var audio = this.audioAppendsToLoadedData_();
+
+ if (main === -1 || audio === -1) {
+ return -1;
+ }
+
+ return main + audio;
+ };
+
+ _proto.timeToLoadedData_ = function timeToLoadedData_() {
+ return this.timeToLoadedData__;
+ }
+ /**
+ * Run selectPlaylist and switch to the new playlist if we should
+ *
+ * @private
+ *
+ */
+ ;
+
+ _proto.checkABR_ = function checkABR_() {
+ var nextPlaylist = this.selectPlaylist();
+
+ if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {
+ this.switchMedia_(nextPlaylist, 'abr');
+ }
+ };
+
+ _proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
+ var oldMedia = this.media();
+ var oldId = oldMedia && (oldMedia.id || oldMedia.uri);
+ var newId = playlist.id || playlist.uri;
+
+ if (oldId && oldId !== newId) {
+ this.logger_("switch media " + oldId + " -> " + newId + " from " + cause);
+ this.tech_.trigger({
+ type: 'usage',
+ name: "vhs-rendition-change-" + cause
+ });
+ }
+
+ this.masterPlaylistLoader_.media(playlist, delay);
+ }
+ /**
+ * Start a timer that periodically calls checkABR_
+ *
+ * @private
+ */
+ ;
+
+ _proto.startABRTimer_ = function startABRTimer_() {
+ var _this2 = this;
+
+ this.stopABRTimer_();
+ this.abrTimer_ = window__default['default'].setInterval(function () {
+ return _this2.checkABR_();
+ }, 250);
+ }
+ /**
+ * Stop the timer that periodically calls checkABR_
+ *
+ * @private
+ */
+ ;
+
+ _proto.stopABRTimer_ = function stopABRTimer_() {
+ // if we're scrubbing, we don't need to pause.
+ // This getter will be added to Video.js in version 7.11.
+ if (this.tech_.scrubbing && this.tech_.scrubbing()) {
+ return;
+ }
+
+ window__default['default'].clearInterval(this.abrTimer_);
+ this.abrTimer_ = null;
+ }
+ /**
+ * Get a list of playlists for the currently selected audio playlist
+ *
+ * @return {Array} the array of audio playlists
+ */
+ ;
+
+ _proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
+ var master = this.master();
+ var defaultPlaylists = master && master.playlists || []; // if we don't have any audio groups then we can only
+ // assume that the audio tracks are contained in the master's
+ // playlists array; use that or an empty array.
+
+ if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
+ return defaultPlaylists;
+ }
+
+ var AUDIO = master.mediaGroups.AUDIO;
+ var groupKeys = Object.keys(AUDIO);
+ var track; // get the current active track
+
+ if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
+ track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from master if mediaTypes_ isn't setup yet
+ } else {
+ // default group is `main` or just the first group.
+ var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
+
+ for (var label in defaultGroup) {
+ if (defaultGroup[label]["default"]) {
+ track = {
+ label: label
+ };
+ break;
+ }
+ }
+ } // no active track, no playlists.
+
+
+ if (!track) {
+ return defaultPlaylists;
+ }
+
+ var playlists = []; // get all of the playlists that are possible for the
+ // active track.
+
+ for (var group in AUDIO) {
+ if (AUDIO[group][track.label]) {
+ var properties = AUDIO[group][track.label];
+
+ if (properties.playlists && properties.playlists.length) {
+ playlists.push.apply(playlists, properties.playlists);
+ } else if (properties.uri) {
+ playlists.push(properties);
+ } else if (master.playlists.length) {
+ // if an audio group does not have a uri
+ // see if we have main playlists that use it as a group.
+ // if we do then add those to the playlists list.
+ for (var i = 0; i < master.playlists.length; i++) {
+ var playlist = master.playlists[i];
+
+ if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
+ playlists.push(playlist);
+ }
+ }
+ }
+ }
+ }
+
+ if (!playlists.length) {
+ return defaultPlaylists;
+ }
+
+ return playlists;
+ }
+ /**
+ * Register event handlers on the master playlist loader. A helper
+ * function for construction time.
+ *
+ * @private
+ */
+ ;
+
+ _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
+ var _this3 = this;
+
+ this.masterPlaylistLoader_.on('loadedmetadata', function () {
+ var media = _this3.masterPlaylistLoader_.media();
+
+ var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
+ // timeout the request.
+
+ if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
+ _this3.requestOptions_.timeout = 0;
+ } else {
+ _this3.requestOptions_.timeout = requestTimeout;
+ } // if this isn't a live video and preload permits, start
+ // downloading segments
+
+
+ if (media.endList && _this3.tech_.preload() !== 'none') {
+ _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
+
+ _this3.mainSegmentLoader_.load();
+ }
+
+ setupMediaGroups({
+ sourceType: _this3.sourceType_,
+ segmentLoaders: {
+ AUDIO: _this3.audioSegmentLoader_,
+ SUBTITLES: _this3.subtitleSegmentLoader_,
+ main: _this3.mainSegmentLoader_
+ },
+ tech: _this3.tech_,
+ requestOptions: _this3.requestOptions_,
+ masterPlaylistLoader: _this3.masterPlaylistLoader_,
+ vhs: _this3.vhs_,
+ master: _this3.master(),
+ mediaTypes: _this3.mediaTypes_,
+ blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
+ });
+
+ _this3.triggerPresenceUsage_(_this3.master(), media);
+
+ _this3.setupFirstPlay();
+
+ if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
+ _this3.trigger('selectedinitialmedia');
+ } else {
+ // We must wait for the active audio playlist loader to
+ // finish setting up before triggering this event so the
+ // representations API and EME setup is correct
+ _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
+ _this3.trigger('selectedinitialmedia');
+ });
+ }
+ });
+ this.masterPlaylistLoader_.on('loadedplaylist', function () {
+ if (_this3.loadOnPlay_) {
+ _this3.tech_.off('play', _this3.loadOnPlay_);
+ }
+
+ var updatedPlaylist = _this3.masterPlaylistLoader_.media();
+
+ if (!updatedPlaylist) {
+ // exclude any variants that are not supported by the browser before selecting
+ // an initial media as the playlist selectors do not consider browser support
+ _this3.excludeUnsupportedVariants_();
+
+ var selectedMedia;
+
+ if (_this3.enableLowInitialPlaylist) {
+ selectedMedia = _this3.selectInitialPlaylist();
+ }
+
+ if (!selectedMedia) {
+ selectedMedia = _this3.selectPlaylist();
+ }
+
+ if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
+ return;
+ }
+
+ _this3.initialMedia_ = selectedMedia;
+
+ _this3.switchMedia_(_this3.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
+ // fire again since the playlist will be requested. In the case of vhs-json
+ // (where the manifest object is provided as the source), when the media
+ // playlist's `segments` list is already available, a media playlist won't be
+ // requested, and loadedplaylist won't fire again, so the playlist handler must be
+ // called on its own here.
+
+
+ var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;
+
+ if (!haveJsonSource) {
+ return;
+ }
+
+ updatedPlaylist = _this3.initialMedia_;
+ }
+
+ _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
+ });
+ this.masterPlaylistLoader_.on('error', function () {
+ _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
+ });
+ this.masterPlaylistLoader_.on('mediachanging', function () {
+ _this3.mainSegmentLoader_.abort();
+
+ _this3.mainSegmentLoader_.pause();
+ });
+ this.masterPlaylistLoader_.on('mediachange', function () {
+ var media = _this3.masterPlaylistLoader_.media();
+
+ var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
+ // timeout the request.
+
+ if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
+ _this3.requestOptions_.timeout = 0;
+ } else {
+ _this3.requestOptions_.timeout = requestTimeout;
+ } // TODO: Create a new event on the PlaylistLoader that signals
+ // that the segments have changed in some way and use that to
+ // update the SegmentLoader instead of doing it twice here and
+ // on `loadedplaylist`
+
+
+ _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
+
+ _this3.mainSegmentLoader_.load();
+
+ _this3.tech_.trigger({
+ type: 'mediachange',
+ bubbles: true
+ });
+ });
+ this.masterPlaylistLoader_.on('playlistunchanged', function () {
+ var updatedPlaylist = _this3.masterPlaylistLoader_.media(); // ignore unchanged playlists that have already been
+ // excluded for not-changing. We likely just have a really slowly updating
+ // playlist.
+
+
+ if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
+ return;
+ }
+
+ var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);
+
+ if (playlistOutdated) {
+ // Playlist has stopped updating and we're stuck at its end. Try to
+ // blacklist it and switch to another playlist in the hope that that
+ // one is updating (and give the player a chance to re-adjust to the
+ // safe live point).
+ _this3.blacklistCurrentPlaylist({
+ message: 'Playlist no longer updating.',
+ reason: 'playlist-unchanged'
+ }); // useful for monitoring QoS
+
+
+ _this3.tech_.trigger('playliststuck');
+ }
+ });
+ this.masterPlaylistLoader_.on('renditiondisabled', function () {
+ _this3.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-rendition-disabled'
+ });
+
+ _this3.tech_.trigger({
+ type: 'usage',
+ name: 'hls-rendition-disabled'
+ });
+ });
+ this.masterPlaylistLoader_.on('renditionenabled', function () {
+ _this3.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-rendition-enabled'
+ });
+
+ _this3.tech_.trigger({
+ type: 'usage',
+ name: 'hls-rendition-enabled'
+ });
+ });
+ }
+ /**
+ * Given an updated media playlist (whether it was loaded for the first time, or
+ * refreshed for live playlists), update any relevant properties and state to reflect
+ * changes in the media that should be accounted for (e.g., cues and duration).
+ *
+ * @param {Object} updatedPlaylist the updated media playlist object
+ *
+ * @private
+ */
+ ;
+
+ _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
+ if (this.useCueTags_) {
+ this.updateAdCues_(updatedPlaylist);
+ } // TODO: Create a new event on the PlaylistLoader that signals
+ // that the segments have changed in some way and use that to
+ // update the SegmentLoader instead of doing it twice here and
+ // on `mediachange`
+
+
+ this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
+ this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
+ // as it is possible that it was temporarily stopped while waiting for
+ // a playlist (e.g., in case the playlist errored and we re-requested it).
+
+ if (!this.tech_.paused()) {
+ this.mainSegmentLoader_.load();
+
+ if (this.audioSegmentLoader_) {
+ this.audioSegmentLoader_.load();
+ }
+ }
+ }
+ /**
+ * A helper function for triggering presence usage events once per source
+ *
+ * @private
+ */
+ ;
+
+ _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
+ var mediaGroups = master.mediaGroups || {};
+ var defaultDemuxed = true;
+ var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
+
+ for (var mediaGroup in mediaGroups.AUDIO) {
+ for (var label in mediaGroups.AUDIO[mediaGroup]) {
+ var properties = mediaGroups.AUDIO[mediaGroup][label];
+
+ if (!properties.uri) {
+ defaultDemuxed = false;
+ }
+ }
+ }
+
+ if (defaultDemuxed) {
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-demuxed'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-demuxed'
+ });
+ }
+
+ if (Object.keys(mediaGroups.SUBTITLES).length) {
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-webvtt'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-webvtt'
+ });
+ }
+
+ if (Vhs$1.Playlist.isAes(media)) {
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-aes'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-aes'
+ });
+ }
+
+ if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-alternate-audio'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-alternate-audio'
+ });
+ }
+
+ if (this.useCueTags_) {
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-playlist-cue-tags'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-playlist-cue-tags'
+ });
+ }
+ };
+
+ _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
+ var currentPlaylist = this.masterPlaylistLoader_.media() || this.masterPlaylistLoader_.pendingMedia_;
+ var currentTime = this.tech_.currentTime();
+ var bufferLowWaterLine = this.bufferLowWaterLine();
+ var bufferHighWaterLine = this.bufferHighWaterLine();
+ var buffered = this.tech_.buffered();
+ return shouldSwitchToMedia({
+ buffered: buffered,
+ currentTime: currentTime,
+ currentPlaylist: currentPlaylist,
+ nextPlaylist: nextPlaylist,
+ bufferLowWaterLine: bufferLowWaterLine,
+ bufferHighWaterLine: bufferHighWaterLine,
+ duration: this.duration(),
+ experimentalBufferBasedABR: this.experimentalBufferBasedABR,
+ log: this.logger_
+ });
+ }
+ /**
+ * Register event handlers on the segment loaders. A helper function
+ * for construction time.
+ *
+ * @private
+ */
+ ;
+
+ _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
+ var _this4 = this;
+
+ if (!this.experimentalBufferBasedABR) {
+ this.mainSegmentLoader_.on('bandwidthupdate', function () {
+ var nextPlaylist = _this4.selectPlaylist();
+
+ if (_this4.shouldSwitchToMedia_(nextPlaylist)) {
+ _this4.switchMedia_(nextPlaylist, 'bandwidthupdate');
+ }
+
+ _this4.tech_.trigger('bandwidthupdate');
+ });
+ this.mainSegmentLoader_.on('progress', function () {
+ _this4.trigger('progress');
+ });
+ }
+
+ this.mainSegmentLoader_.on('error', function () {
+ _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
+ });
+ this.mainSegmentLoader_.on('appenderror', function () {
+ _this4.error = _this4.mainSegmentLoader_.error_;
+
+ _this4.trigger('error');
+ });
+ this.mainSegmentLoader_.on('syncinfoupdate', function () {
+ _this4.onSyncInfoUpdate_();
+ });
+ this.mainSegmentLoader_.on('timestampoffset', function () {
+ _this4.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-timestamp-offset'
+ });
+
+ _this4.tech_.trigger({
+ type: 'usage',
+ name: 'hls-timestamp-offset'
+ });
+ });
+ this.audioSegmentLoader_.on('syncinfoupdate', function () {
+ _this4.onSyncInfoUpdate_();
+ });
+ this.audioSegmentLoader_.on('appenderror', function () {
+ _this4.error = _this4.audioSegmentLoader_.error_;
+
+ _this4.trigger('error');
+ });
+ this.mainSegmentLoader_.on('ended', function () {
+ _this4.logger_('main segment loader ended');
+
+ _this4.onEndOfStream();
+ });
+ this.mainSegmentLoader_.on('earlyabort', function (event) {
+ // never try to early abort with the new ABR algorithm
+ if (_this4.experimentalBufferBasedABR) {
+ return;
+ }
+
+ _this4.delegateLoaders_('all', ['abort']);
+
+ _this4.blacklistCurrentPlaylist({
+ message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
+ }, ABORT_EARLY_BLACKLIST_SECONDS);
+ });
+
+ var updateCodecs = function updateCodecs() {
+ if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
+ return _this4.tryToCreateSourceBuffers_();
+ }
+
+ var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
+
+
+ if (!codecs) {
+ return;
+ }
+
+ _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
+ };
+
+ this.mainSegmentLoader_.on('trackinfo', updateCodecs);
+ this.audioSegmentLoader_.on('trackinfo', updateCodecs);
+ this.mainSegmentLoader_.on('fmp4', function () {
+ if (!_this4.triggeredFmp4Usage) {
+ _this4.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-fmp4'
+ });
+
+ _this4.tech_.trigger({
+ type: 'usage',
+ name: 'hls-fmp4'
+ });
+
+ _this4.triggeredFmp4Usage = true;
+ }
+ });
+ this.audioSegmentLoader_.on('fmp4', function () {
+ if (!_this4.triggeredFmp4Usage) {
+ _this4.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-fmp4'
+ });
+
+ _this4.tech_.trigger({
+ type: 'usage',
+ name: 'hls-fmp4'
+ });
+
+ _this4.triggeredFmp4Usage = true;
+ }
+ });
+ this.audioSegmentLoader_.on('ended', function () {
+ _this4.logger_('audioSegmentLoader ended');
+
+ _this4.onEndOfStream();
+ });
+ };
+
+ _proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
+ return Math.max(this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded);
+ }
+ /**
+ * Call load on our SegmentLoaders
+ */
+ ;
+
+ _proto.load = function load() {
+ this.mainSegmentLoader_.load();
+
+ if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
+ this.audioSegmentLoader_.load();
+ }
+
+ if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
+ this.subtitleSegmentLoader_.load();
+ }
+ }
+ /**
+ * Re-tune playback quality level for the current player
+ * conditions without performing destructive actions, like
+ * removing already buffered content
+ *
+ * @private
+ * @deprecated
+ */
+ ;
+
+ _proto.smoothQualityChange_ = function smoothQualityChange_(media) {
+ if (media === void 0) {
+ media = this.selectPlaylist();
+ }
+
+ this.fastQualityChange_(media);
+ }
+ /**
+ * Re-tune playback quality level for the current player
+ * conditions. This method will perform destructive actions like removing
+ * already buffered content in order to readjust the currently active
+ * playlist quickly. This is good for manual quality changes
+ *
+ * @private
+ */
+ ;
+
+ _proto.fastQualityChange_ = function fastQualityChange_(media) {
+ var _this5 = this;
+
+ if (media === void 0) {
+ media = this.selectPlaylist();
+ }
+
+ if (media === this.masterPlaylistLoader_.media()) {
+ this.logger_('skipping fastQualityChange because new media is same as old');
+ return;
+ }
+
+ this.switchMedia_(media, 'fast-quality'); // Delete all buffered data to allow an immediate quality switch, then seek to give
+ // the browser a kick to remove any cached frames from the previous rendition (.04 seconds
+ // ahead is roughly the minimum that will accomplish this across a variety of content
+ // in IE and Edge, but seeking in place is sufficient on all other browsers)
+ // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
+ // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
+
+ this.mainSegmentLoader_.resetEverything(function () {
+ // Since this is not a typical seek, we avoid the seekTo method which can cause segments
+ // from the previously enabled rendition to load before the new playlist has finished loading
+ if (videojs.browser.IE_VERSION || videojs.browser.IS_EDGE) {
+ _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
+ } else {
+ _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
+ }
+ }); // don't need to reset audio as it is reset when media changes
+ }
+ /**
+ * Begin playback.
+ */
+ ;
+
+ _proto.play = function play() {
+ if (this.setupFirstPlay()) {
+ return;
+ }
+
+ if (this.tech_.ended()) {
+ this.tech_.setCurrentTime(0);
+ }
+
+ if (this.hasPlayed_) {
+ this.load();
+ }
+
+ var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
+ // seek forward to the live point
+
+ if (this.tech_.duration() === Infinity) {
+ if (this.tech_.currentTime() < seekable.start(0)) {
+ return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
+ }
+ }
+ }
+ /**
+ * Seek to the latest media position if this is a live video and the
+ * player and video are loaded and initialized.
+ */
+ ;
+
+ _proto.setupFirstPlay = function setupFirstPlay() {
+ var _this6 = this;
+
+ var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
+ // If 1) there is no active media
+ // 2) the player is paused
+ // 3) the first play has already been setup
+ // then exit early
+
+ if (!media || this.tech_.paused() || this.hasPlayed_) {
+ return false;
+ } // when the video is a live stream
+
+
+ if (!media.endList) {
+ var seekable = this.seekable();
+
+ if (!seekable.length) {
+ // without a seekable range, the player cannot seek to begin buffering at the live
+ // point
+ return false;
+ }
+
+ if (videojs.browser.IE_VERSION && this.tech_.readyState() === 0) {
+ // IE11 throws an InvalidStateError if you try to set currentTime while the
+ // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
+ this.tech_.one('loadedmetadata', function () {
+ _this6.trigger('firstplay');
+
+ _this6.tech_.setCurrentTime(seekable.end(0));
+
+ _this6.hasPlayed_ = true;
+ });
+ return false;
+ } // trigger firstplay to inform the source handler to ignore the next seek event
+
+
+ this.trigger('firstplay'); // seek to the live point
+
+ this.tech_.setCurrentTime(seekable.end(0));
+ }
+
+ this.hasPlayed_ = true; // we can begin loading now that everything is ready
+
+ this.load();
+ return true;
+ }
+ /**
+ * handle the sourceopen event on the MediaSource
+ *
+ * @private
+ */
+ ;
+
+ _proto.handleSourceOpen_ = function handleSourceOpen_() {
+ // Only attempt to create the source buffer if none already exist.
+ // handleSourceOpen is also called when we are "re-opening" a source buffer
+ // after `endOfStream` has been called (in response to a seek for instance)
+ this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
+ // code in video.js but is required because play() must be invoked
+ // *after* the media source has opened.
+
+ if (this.tech_.autoplay()) {
+ var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
+ // on browsers which return a promise
+
+ if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
+ playPromise.then(null, function (e) {});
+ }
+ }
+
+ this.trigger('sourceopen');
+ }
+ /**
+ * handle the sourceended event on the MediaSource
+ *
+ * @private
+ */
+ ;
+
+ _proto.handleSourceEnded_ = function handleSourceEnded_() {
+ if (!this.inbandTextTracks_.metadataTrack_) {
+ return;
+ }
+
+ var cues = this.inbandTextTracks_.metadataTrack_.cues;
+
+ if (!cues || !cues.length) {
+ return;
+ }
+
+ var duration = this.duration();
+ cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
+ }
+ /**
+ * handle the durationchange event on the MediaSource
+ *
+ * @private
+ */
+ ;
+
+ _proto.handleDurationChange_ = function handleDurationChange_() {
+ this.tech_.trigger('durationchange');
+ }
+ /**
+ * Calls endOfStream on the media source when all active stream types have called
+ * endOfStream
+ *
+ * @param {string} streamType
+ * Stream type of the segment loader that called endOfStream
+ * @private
+ */
+ ;
+
+ _proto.onEndOfStream = function onEndOfStream() {
+ var isEndOfStream = this.mainSegmentLoader_.ended_;
+
+ if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
+ var mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is active
+
+ if (!mainMediaInfo || mainMediaInfo.hasVideo) {
+ // if we do not know if the main segment loader contains video yet or if we
+ // definitively know the main segment loader contains video, then we need to wait
+ // for both main and audio segment loaders to call endOfStream
+ isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
+ } else {
+ // otherwise just rely on the audio loader
+ isEndOfStream = this.audioSegmentLoader_.ended_;
+ }
+ }
+
+ if (!isEndOfStream) {
+ return;
+ }
+
+ this.stopABRTimer_();
+ this.sourceUpdater_.endOfStream();
+ }
+ /**
+ * Check if a playlist has stopped being updated
+ *
+ * @param {Object} playlist the media playlist object
+ * @return {boolean} whether the playlist has stopped being updated or not
+ */
+ ;
+
+ _proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
+ var seekable = this.seekable();
+
+ if (!seekable.length) {
+ // playlist doesn't have enough information to determine whether we are stuck
+ return false;
+ }
+
+ var expired = this.syncController_.getExpiredTime(playlist, this.duration());
+
+ if (expired === null) {
+ return false;
+ } // does not use the safe live end to calculate playlist end, since we
+ // don't want to say we are stuck while there is still content
+
+
+ var absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
+ var currentTime = this.tech_.currentTime();
+ var buffered = this.tech_.buffered();
+
+ if (!buffered.length) {
+ // return true if the playhead reached the absolute end of the playlist
+ return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
+ }
+
+ var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
+ // end of playlist
+
+ return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
+ }
+ /**
+ * Blacklists a playlist when an error occurs for a set amount of time
+ * making it unavailable for selection by the rendition selection algorithm
+ * and then forces a new playlist (rendition) selection.
+ *
+ * @param {Object=} error an optional error that may include the playlist
+ * to blacklist
+ * @param {number=} blacklistDuration an optional number of seconds to blacklist the
+ * playlist
+ */
+ ;
+
+ _proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
+ if (error === void 0) {
+ error = {};
+ } // If the `error` was generated by the playlist loader, it will contain
+ // the playlist we were trying to load (but failed) and that should be
+ // blacklisted instead of the currently selected playlist which is likely
+ // out-of-date in this scenario
+
+
+ var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
+ blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
+ // trying to load the master OR while we were disposing of the tech
+
+ if (!currentPlaylist) {
+ this.error = error;
+
+ if (this.mediaSource.readyState !== 'open') {
+ this.trigger('error');
+ } else {
+ this.sourceUpdater_.endOfStream('network');
+ }
+
+ return;
+ }
+
+ currentPlaylist.playlistErrors_++;
+ var playlists = this.masterPlaylistLoader_.master.playlists;
+ var enabledPlaylists = playlists.filter(isEnabled);
+ var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
+ // forever
+
+ if (playlists.length === 1 && blacklistDuration !== Infinity) {
+ videojs.log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
+ this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay
+
+ return this.masterPlaylistLoader_.load(isFinalRendition);
+ }
+
+ if (isFinalRendition) {
+ // Since we're on the final non-blacklisted playlist, and we're about to blacklist
+ // it, instead of erring the player or retrying this playlist, clear out the current
+ // blacklist. This allows other playlists to be attempted in case any have been
+ // fixed.
+ var reincluded = false;
+ playlists.forEach(function (playlist) {
+ // skip current playlist which is about to be blacklisted
+ if (playlist === currentPlaylist) {
+ return;
+ }
+
+ var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.
+
+ if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
+ reincluded = true;
+ delete playlist.excludeUntil;
+ }
+ });
+
+ if (reincluded) {
+ videojs.log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
+ // playlist. This is needed for users relying on the retryplaylist event to catch a
+ // case where the player might be stuck and looping through "dead" playlists.
+
+ this.tech_.trigger('retryplaylist');
+ }
+ } // Blacklist this playlist
+
+
+ var excludeUntil;
+
+ if (currentPlaylist.playlistErrors_ > this.maxPlaylistRetries) {
+ excludeUntil = Infinity;
+ } else {
+ excludeUntil = Date.now() + blacklistDuration * 1000;
+ }
+
+ currentPlaylist.excludeUntil = excludeUntil;
+
+ if (error.reason) {
+ currentPlaylist.lastExcludeReason_ = error.reason;
+ }
+
+ this.tech_.trigger('blacklistplaylist');
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-rendition-blacklisted'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-rendition-blacklisted'
+ }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
+ // Would be something like media().id !== currentPlaylist.id and we would need something
+ // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
+ // from loading a new playlist on any blacklist.
+ // Select a new playlist
+
+ var nextPlaylist = this.selectPlaylist();
+
+ if (!nextPlaylist) {
+ this.error = 'Playback cannot continue. No available working or supported playlists.';
+ this.trigger('error');
+ return;
+ }
+
+ var logFn = error.internal ? this.logger_ : videojs.log.warn;
+ var errorMessage = error.message ? ' ' + error.message : '';
+ logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders
+
+ if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
+ this.delegateLoaders_('audio', ['abort', 'pause']);
+ } // if subtitle group changed reset subtitle loaders
+
+
+ if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
+ this.delegateLoaders_('subtitle', ['abort', 'pause']);
+ }
+
+ this.delegateLoaders_('main', ['abort', 'pause']);
+ var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
+ var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last refresh is sooner than half targetDuration
+
+ return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
+ }
+ /**
+ * Pause all segment/playlist loaders
+ */
+ ;
+
+ _proto.pauseLoading = function pauseLoading() {
+ this.delegateLoaders_('all', ['abort', 'pause']);
+ this.stopABRTimer_();
+ }
+ /**
+ * Call a set of functions in order on playlist loaders, segment loaders,
+ * or both types of loaders.
+ *
+ * @param {string} filter
+ * Filter loaders that should call fnNames using a string. Can be:
+ * * all - run on all loaders
+ * * audio - run on all audio loaders
+ * * subtitle - run on all subtitle loaders
+ * * main - run on the main/master loaders
+ *
+ * @param {Array|string} fnNames
+ * A string or array of function names to call.
+ */
+ ;
+
+ _proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
+ var _this7 = this;
+
+ var loaders = [];
+ var dontFilterPlaylist = filter === 'all';
+
+ if (dontFilterPlaylist || filter === 'main') {
+ loaders.push(this.masterPlaylistLoader_);
+ }
+
+ var mediaTypes = [];
+
+ if (dontFilterPlaylist || filter === 'audio') {
+ mediaTypes.push('AUDIO');
+ }
+
+ if (dontFilterPlaylist || filter === 'subtitle') {
+ mediaTypes.push('CLOSED-CAPTIONS');
+ mediaTypes.push('SUBTITLES');
+ }
+
+ mediaTypes.forEach(function (mediaType) {
+ var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;
+
+ if (loader) {
+ loaders.push(loader);
+ }
+ });
+ ['main', 'audio', 'subtitle'].forEach(function (name) {
+ var loader = _this7[name + "SegmentLoader_"];
+
+ if (loader && (filter === name || filter === 'all')) {
+ loaders.push(loader);
+ }
+ });
+ loaders.forEach(function (loader) {
+ return fnNames.forEach(function (fnName) {
+ if (typeof loader[fnName] === 'function') {
+ loader[fnName]();
+ }
+ });
+ });
+ }
+ /**
+ * set the current time on all segment loaders
+ *
+ * @param {TimeRange} currentTime the current time to set
+ * @return {TimeRange} the current time
+ */
+ ;
+
+ _proto.setCurrentTime = function setCurrentTime(currentTime) {
+ var buffered = findRange(this.tech_.buffered(), currentTime);
+
+ if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
+ // return immediately if the metadata is not ready yet
+ return 0;
+ } // it's clearly an edge case, but don't throw an error if asked to
+ // seek within an empty playlist
+
+
+ if (!this.masterPlaylistLoader_.media().segments) {
+ return 0;
+ } // if the seek location is already buffered, continue buffering as usual
+
+
+ if (buffered && buffered.length) {
+ return currentTime;
+ } // cancel outstanding requests so we begin buffering at the new
+ // location
+
+
+ this.mainSegmentLoader_.resetEverything();
+ this.mainSegmentLoader_.abort();
+
+ if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
+ this.audioSegmentLoader_.resetEverything();
+ this.audioSegmentLoader_.abort();
+ }
+
+ if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
+ this.subtitleSegmentLoader_.resetEverything();
+ this.subtitleSegmentLoader_.abort();
+ } // start segment loader loading in case they are paused
+
+
+ this.load();
+ }
+ /**
+ * get the current duration
+ *
+ * @return {TimeRange} the duration
+ */
+ ;
+
+ _proto.duration = function duration() {
+ if (!this.masterPlaylistLoader_) {
+ return 0;
+ }
+
+ var media = this.masterPlaylistLoader_.media();
+
+ if (!media) {
+ // no playlists loaded yet, so can't determine a duration
+ return 0;
+ } // Don't rely on the media source for duration in the case of a live playlist since
+ // setting the native MediaSource's duration to infinity ends up with consequences to
+ // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
+ //
+ // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
+ // however, few browsers have support for setLiveSeekableRange()
+ // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
+ //
+ // Until a time when the duration of the media source can be set to infinity, and a
+ // seekable range specified across browsers, just return Infinity.
+
+
+ if (!media.endList) {
+ return Infinity;
+ } // Since this is a VOD video, it is safe to rely on the media source's duration (if
+ // available). If it's not available, fall back to a playlist-calculated estimate.
+
+
+ if (this.mediaSource) {
+ return this.mediaSource.duration;
+ }
+
+ return Vhs$1.Playlist.duration(media);
+ }
+ /**
+ * check the seekable range
+ *
+ * @return {TimeRange} the seekable range
+ */
+ ;
+
+ _proto.seekable = function seekable() {
+ return this.seekable_;
+ };
+
+ _proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
+ var audioSeekable; // TODO check for creation of both source buffers before updating seekable
+ //
+ // A fix was made to this function where a check for
+ // this.sourceUpdater_.hasCreatedSourceBuffers
+ // was added to ensure that both source buffers were created before seekable was
+ // updated. However, it originally had a bug where it was checking for a true and
+ // returning early instead of checking for false. Setting it to check for false to
+ // return early though created other issues. A call to play() would check for seekable
+ // end without verifying that a seekable range was present. In addition, even checking
+ // for that didn't solve some issues, as handleFirstPlay is sometimes worked around
+ // due to a media update calling load on the segment loaders, skipping a seek to live,
+ // thereby starting live streams at the beginning of the stream rather than at the end.
+ //
+ // This conditional should be fixed to wait for the creation of two source buffers at
+ // the same time as the other sections of code are fixed to properly seek to live and
+ // not throw an error due to checking for a seekable end when no seekable range exists.
+ //
+ // For now, fall back to the older behavior, with the understanding that the seekable
+ // range may not be completely correct, leading to a suboptimal initial live point.
+
+ if (!this.masterPlaylistLoader_) {
+ return;
+ }
+
+ var media = this.masterPlaylistLoader_.media();
+
+ if (!media) {
+ return;
+ }
+
+ var expired = this.syncController_.getExpiredTime(media, this.duration());
+
+ if (expired === null) {
+ // not enough information to update seekable
+ return;
+ }
+
+ var master = this.masterPlaylistLoader_.master;
+ var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
+
+ if (mainSeekable.length === 0) {
+ return;
+ }
+
+ if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
+ media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
+ expired = this.syncController_.getExpiredTime(media, this.duration());
+
+ if (expired === null) {
+ return;
+ }
+
+ audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
+
+ if (audioSeekable.length === 0) {
+ return;
+ }
+ }
+
+ var oldEnd;
+ var oldStart;
+
+ if (this.seekable_ && this.seekable_.length) {
+ oldEnd = this.seekable_.end(0);
+ oldStart = this.seekable_.start(0);
+ }
+
+ if (!audioSeekable) {
+ // seekable has been calculated based on buffering video data so it
+ // can be returned directly
+ this.seekable_ = mainSeekable;
+ } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
+ // seekables are pretty far off, rely on main
+ this.seekable_ = mainSeekable;
+ } else {
+ this.seekable_ = videojs.createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
+ } // seekable is the same as last time
+
+
+ if (this.seekable_ && this.seekable_.length) {
+ if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
+ return;
+ }
+ }
+
+ this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
+ this.tech_.trigger('seekablechanged');
+ }
+ /**
+ * Update the player duration
+ */
+ ;
+
+ _proto.updateDuration = function updateDuration(isLive) {
+ if (this.updateDuration_) {
+ this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
+ this.updateDuration_ = null;
+ }
+
+ if (this.mediaSource.readyState !== 'open') {
+ this.updateDuration_ = this.updateDuration.bind(this, isLive);
+ this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
+ return;
+ }
+
+ if (isLive) {
+ var seekable = this.seekable();
+
+ if (!seekable.length) {
+ return;
+ } // Even in the case of a live playlist, the native MediaSource's duration should not
+ // be set to Infinity (even though this would be expected for a live playlist), since
+ // setting the native MediaSource's duration to infinity ends up with consequences to
+ // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
+ //
+ // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
+ // however, few browsers have support for setLiveSeekableRange()
+ // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
+ //
+ // Until a time when the duration of the media source can be set to infinity, and a
+ // seekable range specified across browsers, the duration should be greater than or
+ // equal to the last possible seekable value.
+ // MediaSource duration starts as NaN
+ // It is possible (and probable) that this case will never be reached for many
+ // sources, since the MediaSource reports duration as the highest value without
+ // accounting for timestamp offset. For example, if the timestamp offset is -100 and
+ // we buffered times 0 to 100 with real times of 100 to 200, even though current
+ // time will be between 0 and 100, the native media source may report the duration
+ // as 200. However, since we report duration separate from the media source (as
+ // Infinity), and as long as the native media source duration value is greater than
+ // our reported seekable range, seeks will work as expected. The large number as
+ // duration for live is actually a strategy used by some players to work around the
+ // issue of live seekable ranges cited above.
+
+
+ if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
+ this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
+ }
+
+ return;
+ }
+
+ var buffered = this.tech_.buffered();
+ var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());
+
+ if (buffered.length > 0) {
+ duration = Math.max(duration, buffered.end(buffered.length - 1));
+ }
+
+ if (this.mediaSource.duration !== duration) {
+ this.sourceUpdater_.setDuration(duration);
+ }
+ }
+ /**
+ * dispose of the MasterPlaylistController and everything
+ * that it controls
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ var _this8 = this;
+
+ this.trigger('dispose');
+ this.decrypter_.terminate();
+ this.masterPlaylistLoader_.dispose();
+ this.mainSegmentLoader_.dispose();
+
+ if (this.loadOnPlay_) {
+ this.tech_.off('play', this.loadOnPlay_);
+ }
+
+ ['AUDIO', 'SUBTITLES'].forEach(function (type) {
+ var groups = _this8.mediaTypes_[type].groups;
+
+ for (var id in groups) {
+ groups[id].forEach(function (group) {
+ if (group.playlistLoader) {
+ group.playlistLoader.dispose();
+ }
+ });
+ }
+ });
+ this.audioSegmentLoader_.dispose();
+ this.subtitleSegmentLoader_.dispose();
+ this.sourceUpdater_.dispose();
+ this.timelineChangeController_.dispose();
+ this.stopABRTimer_();
+
+ if (this.updateDuration_) {
+ this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
+ }
+
+ this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_); // load the media source into the player
+
+ this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
+ this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
+ this.off();
+ }
+ /**
+ * return the master playlist object if we have one
+ *
+ * @return {Object} the master playlist object that we parsed
+ */
+ ;
+
+ _proto.master = function master() {
+ return this.masterPlaylistLoader_.master;
+ }
+ /**
+ * return the currently selected playlist
+ *
+ * @return {Object} the currently selected playlist object that we parsed
+ */
+ ;
+
+ _proto.media = function media() {
+ // playlist loader will not return media if it has not been fully loaded
+ return this.masterPlaylistLoader_.media() || this.initialMedia_;
+ };
+
+ _proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
+ var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
+ var hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_(); // if we are not using an audio loader, then we have audio media info
+ // otherwise check on the segment loader.
+
+ var hasAudioMediaInfo = !usingAudioLoader ? true : !!this.audioSegmentLoader_.getCurrentMediaInfo_(); // one or both loaders have not loaded sufficiently to get codecs
+
+ if (!hasMainMediaInfo || !hasAudioMediaInfo) {
+ return false;
+ }
+
+ return true;
+ };
+
+ _proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
+ var _this9 = this;
+
+ var media = {
+ main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
+ audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
+ }; // set "main" media equal to video
+
+ media.video = media.main;
+ var playlistCodecs = codecsForPlaylist(this.master(), this.media());
+ var codecs = {};
+ var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
+
+ if (media.main.hasVideo) {
+ codecs.video = playlistCodecs.video || media.main.videoCodec || codecs_js.DEFAULT_VIDEO_CODEC;
+ }
+
+ if (media.main.isMuxed) {
+ codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || codecs_js.DEFAULT_AUDIO_CODEC);
+ }
+
+ if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
+ codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || codecs_js.DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below
+
+ media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
+ } // no codecs, no playback.
+
+
+ if (!codecs.audio && !codecs.video) {
+ this.blacklistCurrentPlaylist({
+ playlist: this.media(),
+ message: 'Could not determine codecs for playlist.',
+ blacklistDuration: Infinity
+ });
+ return;
+ } // fmp4 relies on browser support, while ts relies on muxer support
+
+
+ var supportFunction = function supportFunction(isFmp4, codec) {
+ return isFmp4 ? codecs_js.browserSupportsCodec(codec) : codecs_js.muxerSupportsCodec(codec);
+ };
+
+ var unsupportedCodecs = {};
+ var unsupportedAudio;
+ ['video', 'audio'].forEach(function (type) {
+ if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
+ var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
+ unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
+ unsupportedCodecs[supporter].push(codecs[type]);
+
+ if (type === 'audio') {
+ unsupportedAudio = supporter;
+ }
+ }
+ });
+
+ if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
+ var audioGroup = this.media().attributes.AUDIO;
+ this.master().playlists.forEach(function (variant) {
+ var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;
+
+ if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
+ variant.excludeUntil = Infinity;
+ }
+ });
+ this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
+ } // if we have any unsupported codecs blacklist this playlist.
+
+
+ if (Object.keys(unsupportedCodecs).length) {
+ var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
+ if (acc) {
+ acc += ', ';
+ }
+
+ acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
+ return acc;
+ }, '') + '.';
+ this.blacklistCurrentPlaylist({
+ playlist: this.media(),
+ internal: true,
+ message: message,
+ blacklistDuration: Infinity
+ });
+ return;
+ } // check if codec switching is happening
+
+
+ if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
+ var switchMessages = [];
+ ['video', 'audio'].forEach(function (type) {
+ var newCodec = (codecs_js.parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
+ var oldCodec = (codecs_js.parseCodecs(codecs[type] || '')[0] || {}).type;
+
+ if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
+ switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
+ }
+ });
+
+ if (switchMessages.length) {
+ this.blacklistCurrentPlaylist({
+ playlist: this.media(),
+ message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
+ blacklistDuration: Infinity,
+ internal: true
+ });
+ return;
+ }
+ } // TODO: when using the muxer shouldn't we just return
+ // the codecs that the muxer outputs?
+
+
+ return codecs;
+ }
+ /**
+ * Create source buffers and exclude any incompatible renditions.
+ *
+ * @private
+ */
+ ;
+
+ _proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
+ // media source is not ready yet or sourceBuffers are already
+ // created.
+ if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
+ return;
+ }
+
+ if (!this.areMediaTypesKnown_()) {
+ return;
+ }
+
+ var codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
+
+ if (!codecs) {
+ return;
+ }
+
+ this.sourceUpdater_.createSourceBuffers(codecs);
+ var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
+ this.excludeIncompatibleVariants_(codecString);
+ }
+ /**
+ * Excludes playlists with codecs that are unsupported by the muxer and browser.
+ */
+ ;
+
+ _proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
+ var _this10 = this;
+
+ var playlists = this.master().playlists;
+ var ids = []; // TODO: why don't we have a property to loop through all
+ // playlists? Why did we ever mix indexes and keys?
+
+ Object.keys(playlists).forEach(function (key) {
+ var variant = playlists[key]; // check if we already processed this playlist.
+
+ if (ids.indexOf(variant.id) !== -1) {
+ return;
+ }
+
+ ids.push(variant.id);
+ var codecs = codecsForPlaylist(_this10.master, variant);
+ var unsupported = [];
+
+ if (codecs.audio && !codecs_js.muxerSupportsCodec(codecs.audio) && !codecs_js.browserSupportsCodec(codecs.audio)) {
+ unsupported.push("audio codec " + codecs.audio);
+ }
+
+ if (codecs.video && !codecs_js.muxerSupportsCodec(codecs.video) && !codecs_js.browserSupportsCodec(codecs.video)) {
+ unsupported.push("video codec " + codecs.video);
+ }
+
+ if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
+ unsupported.push("text codec " + codecs.text);
+ }
+
+ if (unsupported.length) {
+ variant.excludeUntil = Infinity;
+
+ _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
+ }
+ });
+ }
+ /**
+ * Blacklist playlists that are known to be codec or
+ * stream-incompatible with the SourceBuffer configuration. For
+ * instance, Media Source Extensions would cause the video element to
+ * stall waiting for video data if you switched from a variant with
+ * video and audio to an audio-only one.
+ *
+ * @param {Object} media a media playlist compatible with the current
+ * set of SourceBuffers. Variants in the current master playlist that
+ * do not appear to have compatible codec or stream configurations
+ * will be excluded from the default playlist selection algorithm
+ * indefinitely.
+ * @private
+ */
+ ;
+
+ _proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
+ var _this11 = this;
+
+ var ids = [];
+ var playlists = this.master().playlists;
+ var codecs = unwrapCodecList(codecs_js.parseCodecs(codecString));
+ var codecCount_ = codecCount(codecs);
+ var videoDetails = codecs.video && codecs_js.parseCodecs(codecs.video)[0] || null;
+ var audioDetails = codecs.audio && codecs_js.parseCodecs(codecs.audio)[0] || null;
+ Object.keys(playlists).forEach(function (key) {
+ var variant = playlists[key]; // check if we already processed this playlist.
+ // or if it is already excluded forever.
+
+ if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
+ return;
+ }
+
+ ids.push(variant.id);
+ var blacklistReasons = []; // get codecs from the playlist for this variant
+
+ var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
+ var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
+ // variant is incompatible. Wait for mux.js to probe
+
+ if (!variantCodecs.audio && !variantCodecs.video) {
+ return;
+ } // TODO: we can support this by removing the
+ // old media source and creating a new one, but it will take some work.
+ // The number of streams cannot change
+
+
+ if (variantCodecCount !== codecCount_) {
+ blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
+ } // only exclude playlists by codec change, if codecs cannot switch
+ // during playback.
+
+
+ if (!_this11.sourceUpdater_.canChangeType()) {
+ var variantVideoDetails = variantCodecs.video && codecs_js.parseCodecs(variantCodecs.video)[0] || null;
+ var variantAudioDetails = variantCodecs.audio && codecs_js.parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change
+
+ if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
+ blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
+ } // the audio codec cannot change
+
+
+ if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
+ blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
+ }
+ }
+
+ if (blacklistReasons.length) {
+ variant.excludeUntil = Infinity;
+
+ _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
+ }
+ });
+ };
+
+ _proto.updateAdCues_ = function updateAdCues_(media) {
+ var offset = 0;
+ var seekable = this.seekable();
+
+ if (seekable.length) {
+ offset = seekable.start(0);
+ }
+
+ updateAdCues(media, this.cueTagsTrack_, offset);
+ }
+ /**
+ * Calculates the desired forward buffer length based on current time
+ *
+ * @return {number} Desired forward buffer length in seconds
+ */
+ ;
+
+ _proto.goalBufferLength = function goalBufferLength() {
+ var currentTime = this.tech_.currentTime();
+ var initial = Config.GOAL_BUFFER_LENGTH;
+ var rate = Config.GOAL_BUFFER_LENGTH_RATE;
+ var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
+ return Math.min(initial + currentTime * rate, max);
+ }
+ /**
+ * Calculates the desired buffer low water line based on current time
+ *
+ * @return {number} Desired buffer low water line in seconds
+ */
+ ;
+
+ _proto.bufferLowWaterLine = function bufferLowWaterLine() {
+ var currentTime = this.tech_.currentTime();
+ var initial = Config.BUFFER_LOW_WATER_LINE;
+ var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
+ var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
+ var newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
+ return Math.min(initial + currentTime * rate, this.experimentalBufferBasedABR ? newMax : max);
+ };
+
+ _proto.bufferHighWaterLine = function bufferHighWaterLine() {
+ return Config.BUFFER_HIGH_WATER_LINE;
+ };
+
+ return MasterPlaylistController;
+}(videojs.EventTarget);
+/**
+ * Returns a function that acts as the enable/disable playlist function.
+ *
+ * @param {PlaylistLoader} loader - The master playlist loader
+ * @param {string} playlistID - id of the playlist
+ * @param {Function} changePlaylistFn - A function to be called after a
+ * playlist's enabled-state has been changed. Will NOT be called if a
+ * playlist's enabled-state is unchanged
+ * @param {boolean=} enable - Value to set the playlist enabled-state to
+ * or if undefined returns the current enabled-state for the playlist
+ * @return {Function} Function for setting/getting enabled
+ */
+
+
+var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
+ return function (enable) {
+ var playlist = loader.master.playlists[playlistID];
+ var incompatible = isIncompatible(playlist);
+ var currentlyEnabled = isEnabled(playlist);
+
+ if (typeof enable === 'undefined') {
+ return currentlyEnabled;
+ }
+
+ if (enable) {
+ delete playlist.disabled;
+ } else {
+ playlist.disabled = true;
+ }
+
+ if (enable !== currentlyEnabled && !incompatible) {
+ // Ensure the outside world knows about our changes
+ changePlaylistFn();
+
+ if (enable) {
+ loader.trigger('renditionenabled');
+ } else {
+ loader.trigger('renditiondisabled');
+ }
+ }
+
+ return enable;
+ };
+};
+/**
+ * The representation object encapsulates the publicly visible information
+ * in a media playlist along with a setter/getter-type function (enabled)
+ * for changing the enabled-state of a particular playlist entry
+ *
+ * @class Representation
+ */
+
+
+var Representation = function Representation(vhsHandler, playlist, id) {
+ var mpc = vhsHandler.masterPlaylistController_,
+ smoothQualityChange = vhsHandler.options_.smoothQualityChange; // Get a reference to a bound version of the quality change function
+
+ var changeType = smoothQualityChange ? 'smooth' : 'fast';
+ var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc); // some playlist attributes are optional
+
+ if (playlist.attributes) {
+ var resolution = playlist.attributes.RESOLUTION;
+ this.width = resolution && resolution.width;
+ this.height = resolution && resolution.height;
+ this.bandwidth = playlist.attributes.BANDWIDTH;
+ }
+
+ this.codecs = codecsForPlaylist(mpc.master(), playlist);
+ this.playlist = playlist; // The id is simply the ordinality of the media playlist
+ // within the master playlist
+
+ this.id = id; // Partially-apply the enableFunction to create a playlist-
+ // specific variant
+
+ this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
+};
+/**
+ * A mixin function that adds the `representations` api to an instance
+ * of the VhsHandler class
+ *
+ * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
+ * representation API into
+ */
+
+
+var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
+ // Add a single API-specific function to the VhsHandler instance
+ vhsHandler.representations = function () {
+ var master = vhsHandler.masterPlaylistController_.master();
+ var playlists = isAudioOnly(master) ? vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() : master.playlists;
+
+ if (!playlists) {
+ return [];
+ }
+
+ return playlists.filter(function (media) {
+ return !isIncompatible(media);
+ }).map(function (e, i) {
+ return new Representation(vhsHandler, e, e.id);
+ });
+ };
+};
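+// A minimal usage sketch of the API added by this mixin (player setup and the
+// 720p threshold are hypothetical):
+//
+//   var reps = player.tech({ IWillNotUseThisInPlugins: true }).vhs.representations();
+//
+//   reps.forEach(function (rep) {
+//     // each rep exposes width, height, bandwidth, codecs, playlist and id,
+//     // plus the enabled() getter/setter created by enableFunction above
+//     rep.enabled(!rep.height || rep.height <= 720);
+//   });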
+/**
+ * @file playback-watcher.js
+ *
+ * Playback starts, and now my watch begins. It shall not end until my death. I shall
+ * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
+ * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
+ * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
+ * my life and honor to the Playback Watch, for this Player and all the Players to come.
+ */
+
+
+var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
+/**
+ * @class PlaybackWatcher
+ */
+
+var PlaybackWatcher = /*#__PURE__*/function () {
+ /**
+   * Represents a PlaybackWatcher object.
+ *
+ * @class
+ * @param {Object} options an object that includes the tech and settings
+ */
+ function PlaybackWatcher(options) {
+ var _this = this;
+
+ this.masterPlaylistController_ = options.masterPlaylistController;
+ this.tech_ = options.tech;
+ this.seekable = options.seekable;
+ this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
+ this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
+ this.media = options.media;
+ this.consecutiveUpdates = 0;
+ this.lastRecordedTime = null;
+ this.timer_ = null;
+ this.checkCurrentTimeTimeout_ = null;
+ this.logger_ = logger('PlaybackWatcher');
+ this.logger_('initialize');
+
+ var playHandler = function playHandler() {
+ return _this.monitorCurrentTime_();
+ };
+
+ var canPlayHandler = function canPlayHandler() {
+ return _this.monitorCurrentTime_();
+ };
+
+ var waitingHandler = function waitingHandler() {
+ return _this.techWaiting_();
+ };
+
+ var cancelTimerHandler = function cancelTimerHandler() {
+ return _this.cancelTimer_();
+ };
+
+ var mpc = this.masterPlaylistController_;
+ var loaderTypes = ['main', 'subtitle', 'audio'];
+ var loaderChecks = {};
+ loaderTypes.forEach(function (type) {
+ loaderChecks[type] = {
+ reset: function reset() {
+ return _this.resetSegmentDownloads_(type);
+ },
+ updateend: function updateend() {
+ return _this.checkSegmentDownloads_(type);
+ }
+ };
+ mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
+      // isn't changing, we want to reset. We cannot assume that the new rendition
+      // will also be stalled until after new appends.
+
+ mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
+      // This prevents one-segment playlists (single vtt or single segment content)
+      // from being detected as stalling, since the buffer will not change in those
+      // cases (the buffer already covers the entire video duration).
+
+ _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
+ });
+ /**
+ * We check if a seek was into a gap through the following steps:
+ * 1. We get a seeking event and we do not get a seeked event. This means that
+ * a seek was attempted but not completed.
+ * 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
+ * removed everything from our buffer and appended a segment, and should be ready
+ * to check for gaps.
+ */
+
+ var setSeekingHandlers = function setSeekingHandlers(fn) {
+ ['main', 'audio'].forEach(function (type) {
+ mpc[type + "SegmentLoader_"][fn]('appended', _this.seekingAppendCheck_);
+ });
+ };
+
+ this.seekingAppendCheck_ = function () {
+ if (_this.fixesBadSeeks_()) {
+ _this.consecutiveUpdates = 0;
+ _this.lastRecordedTime = _this.tech_.currentTime();
+ setSeekingHandlers('off');
+ }
+ };
+
+ this.clearSeekingAppendCheck_ = function () {
+ return setSeekingHandlers('off');
+ };
+
+ this.watchForBadSeeking_ = function () {
+ _this.clearSeekingAppendCheck_();
+
+ setSeekingHandlers('on');
+ };
+
+ this.tech_.on('seeked', this.clearSeekingAppendCheck_);
+ this.tech_.on('seeking', this.watchForBadSeeking_);
+ this.tech_.on('waiting', waitingHandler);
+ this.tech_.on(timerCancelEvents, cancelTimerHandler);
+ this.tech_.on('canplay', canPlayHandler);
+ /*
+ An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
+ is surfaced in one of two ways:
+ 1) The `waiting` event is fired before the player has buffered content, making it impossible
+ to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
+ we can check if playback is stalled due to a gap, and skip the gap if necessary.
+   2) A source with a gap at the beginning of the stream is loaded programmatically while the player
+     is in a playing state. To catch this case, it's important that our one-time play listener is set up
+     even if the player is in a playing state.
+ */
+
+ this.tech_.one('play', playHandler); // Define the dispose function to clean up our events
+
+ this.dispose = function () {
+ _this.clearSeekingAppendCheck_();
+
+ _this.logger_('dispose');
+
+ _this.tech_.off('waiting', waitingHandler);
+
+ _this.tech_.off(timerCancelEvents, cancelTimerHandler);
+
+ _this.tech_.off('canplay', canPlayHandler);
+
+ _this.tech_.off('play', playHandler);
+
+ _this.tech_.off('seeking', _this.watchForBadSeeking_);
+
+ _this.tech_.off('seeked', _this.clearSeekingAppendCheck_);
+
+ loaderTypes.forEach(function (type) {
+ mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
+ mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);
+
+ _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
+ });
+
+ if (_this.checkCurrentTimeTimeout_) {
+ window__default['default'].clearTimeout(_this.checkCurrentTimeTimeout_);
+ }
+
+ _this.cancelTimer_();
+ };
+ }
+ /**
+ * Periodically check current time to see if playback stopped
+ *
+ * @private
+ */
+
+
+ var _proto = PlaybackWatcher.prototype;
+
+ _proto.monitorCurrentTime_ = function monitorCurrentTime_() {
+ this.checkCurrentTime_();
+
+ if (this.checkCurrentTimeTimeout_) {
+ window__default['default'].clearTimeout(this.checkCurrentTimeTimeout_);
+ } // 42 = 24 fps // 250 is what Webkit uses // FF uses 15
+
+
+ this.checkCurrentTimeTimeout_ = window__default['default'].setTimeout(this.monitorCurrentTime_.bind(this), 250);
+ }
+ /**
+ * Reset stalled download stats for a specific type of loader
+ *
+ * @param {string} type
+ * The segment loader type to check.
+ *
+ * @listens SegmentLoader#playlistupdate
+ * @listens Tech#seeking
+ * @listens Tech#seeked
+ */
+ ;
+
+ _proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
+ var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
+
+ if (this[type + "StalledDownloads_"] > 0) {
+ this.logger_("resetting possible stalled download count for " + type + " loader");
+ }
+
+ this[type + "StalledDownloads_"] = 0;
+ this[type + "Buffered_"] = loader.buffered_();
+ }
+ /**
+ * Checks on every segment `appendsdone` to see
+ * if segment appends are making progress. If they are not
+   * and we are still downloading bytes, we blacklist the playlist.
+ *
+ * @param {string} type
+ * The segment loader type to check.
+ *
+ * @listens SegmentLoader#appendsdone
+ */
+ ;
+
+ _proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
+ var mpc = this.masterPlaylistController_;
+ var loader = mpc[type + "SegmentLoader_"];
+ var buffered = loader.buffered_();
+ var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
+ this[type + "Buffered_"] = buffered; // if another watcher is going to fix the issue or
+ // the buffered value for this loader changed
+ // appends are working
+
+ if (isBufferedDifferent) {
+ this.resetSegmentDownloads_(type);
+ return;
+ }
+
+ this[type + "StalledDownloads_"]++;
+ this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
+ playlistId: loader.playlist_ && loader.playlist_.id,
+ buffered: timeRangesToArray(buffered)
+ }); // after 10 possibly stalled appends with no reset, exclude
+
+ if (this[type + "StalledDownloads_"] < 10) {
+ return;
+ }
+
+ this.logger_(type + " loader stalled download exclusion");
+ this.resetSegmentDownloads_(type);
+ this.tech_.trigger({
+ type: 'usage',
+ name: "vhs-" + type + "-download-exclusion"
+ });
+
+ if (type === 'subtitle') {
+ return;
+ } // TODO: should we exclude audio tracks rather than main tracks
+ // when type is audio?
+
+
+ mpc.blacklistCurrentPlaylist({
+ message: "Excessive " + type + " segment downloading detected."
+ }, Infinity);
+ }
+ /**
+ * The purpose of this function is to emulate the "waiting" event on
+ * browsers that do not emit it when they are waiting for more
+ * data to continue playback
+ *
+ * @private
+ */
+ ;
+
+ _proto.checkCurrentTime_ = function checkCurrentTime_() {
+ if (this.tech_.paused() || this.tech_.seeking()) {
+ return;
+ }
+
+ var currentTime = this.tech_.currentTime();
+ var buffered = this.tech_.buffered();
+
+ if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
+ // If current time is at the end of the final buffered region, then any playback
+ // stall is most likely caused by buffering in a low bandwidth environment. The tech
+ // should fire a `waiting` event in this scenario, but due to browser and tech
+ // inconsistencies. Calling `techWaiting_` here allows us to simulate
+ // responding to a native `waiting` event when the tech fails to emit one.
+ return this.techWaiting_();
+ }
+
+ if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
+ this.consecutiveUpdates++;
+ this.waiting_();
+ } else if (currentTime === this.lastRecordedTime) {
+ this.consecutiveUpdates++;
+ } else {
+ this.consecutiveUpdates = 0;
+ this.lastRecordedTime = currentTime;
+ }
+ }
+ /**
+ * Cancels any pending timers and resets the 'timeupdate' mechanism
+ * designed to detect that we are stalled
+ *
+ * @private
+ */
+ ;
+
+ _proto.cancelTimer_ = function cancelTimer_() {
+ this.consecutiveUpdates = 0;
+
+ if (this.timer_) {
+ this.logger_('cancelTimer_');
+ clearTimeout(this.timer_);
+ }
+
+ this.timer_ = null;
+ }
+ /**
+ * Fixes situations where there's a bad seek
+ *
+ * @return {boolean} whether an action was taken to fix the seek
+ * @private
+ */
+ ;
+
+ _proto.fixesBadSeeks_ = function fixesBadSeeks_() {
+ var seeking = this.tech_.seeking();
+
+ if (!seeking) {
+ return false;
+ } // TODO: It's possible that these seekable checks should be moved out of this function
+ // and into a function that runs on seekablechange. It's also possible that we only need
+ // afterSeekableWindow as the buffered check at the bottom is good enough to handle before
+ // seekable range.
+
+
+ var seekable = this.seekable();
+ var currentTime = this.tech_.currentTime();
+ var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
+ var seekTo;
+
+ if (isAfterSeekableRange) {
+ var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
+
+ seekTo = seekableEnd;
+ }
+
+ if (this.beforeSeekableWindow_(seekable, currentTime)) {
+ var seekableStart = seekable.start(0); // sync to the beginning of the live window
+ // provide a buffer of .1 seconds to handle rounding/imprecise numbers
+
+ seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
+ // happen in live with a 3 segment playlist), then don't use a time delta
+ seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
+ }
+
+ if (typeof seekTo !== 'undefined') {
+ this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
+ this.tech_.setCurrentTime(seekTo);
+ return true;
+ }
+
+ var sourceUpdater = this.masterPlaylistController_.sourceUpdater_;
+ var buffered = this.tech_.buffered();
+ var audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
+ var videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null;
+ var media = this.media(); // verify that at least two segment durations or one part duration have been
+ // appended before checking for a gap.
+
+    var minAppendedDuration = media.partTargetDuration ? media.partTargetDuration : (media.targetDuration - TIME_FUDGE_FACTOR) * 2;
+
+ var bufferedToCheck = [audioBuffered, videoBuffered];
+
+ for (var i = 0; i < bufferedToCheck.length; i++) {
+ // skip null buffered
+ if (!bufferedToCheck[i]) {
+ continue;
+ }
+
+ var timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if we are less than two video/audio segment durations or one part
+ // duration behind we haven't appended enough to call this a bad seek.
+
+ if (timeAhead < minAppendedDuration) {
+ return false;
+ }
+ }
+
+ var nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
+ // to seek over the gap
+
+ if (nextRange.length === 0) {
+ return false;
+ }
+
+ seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
+ this.logger_("Buffered region starts (" + nextRange.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
+ this.tech_.setCurrentTime(seekTo);
+ return true;
+ }
+ /**
+ * Handler for situations when we determine the player is waiting.
+ *
+ * @private
+ */
+ ;
+
+ _proto.waiting_ = function waiting_() {
+ if (this.techWaiting_()) {
+ return;
+ } // All tech waiting checks failed. Use last resort correction
+
+
+ var currentTime = this.tech_.currentTime();
+ var buffered = this.tech_.buffered();
+ var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
+ // region with no indication that anything is amiss (seen in Firefox). Seeking to
+ // currentTime is usually enough to kickstart the player. This checks that the player
+ // is currently within a buffered region before attempting a corrective seek.
+ // Chrome does not appear to continue `timeupdate` events after a `waiting` event
+ // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
+ // make sure there is ~3 seconds of forward buffer before taking any corrective action
+ // to avoid triggering an `unknownwaiting` event when the network is slow.
+
+ if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
+ this.cancelTimer_();
+ this.tech_.setCurrentTime(currentTime);
+ this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
+
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-unknown-waiting'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-unknown-waiting'
+ });
+ return;
+ }
+ }
+ /**
+ * Handler for situations when the tech fires a `waiting` event
+ *
+ * @return {boolean}
+   *         True if an action was taken (or none was needed) to correct the waiting.
+   *         False if no checks passed.
+ * @private
+ */
+ ;
+
+ _proto.techWaiting_ = function techWaiting_() {
+ var seekable = this.seekable();
+ var currentTime = this.tech_.currentTime();
+
+ if (this.tech_.seeking() || this.timer_ !== null) {
+ // Tech is seeking or already waiting on another action, no action needed
+ return true;
+ }
+
+ if (this.beforeSeekableWindow_(seekable, currentTime)) {
+ var livePoint = seekable.end(seekable.length - 1);
+ this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
+ this.cancelTimer_();
+ this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS
+
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-live-resync'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-live-resync'
+ });
+ return true;
+ }
+
+ var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
+ var buffered = this.tech_.buffered();
+ var videoUnderflow = this.videoUnderflow_({
+ audioBuffered: sourceUpdater.audioBuffered(),
+ videoBuffered: sourceUpdater.videoBuffered(),
+ currentTime: currentTime
+ });
+
+ if (videoUnderflow) {
+ // Even though the video underflowed and was stuck in a gap, the audio overplayed
+ // the gap, leading currentTime into a buffered range. Seeking to currentTime
+ // allows the video to catch up to the audio position without losing any audio
+ // (only suffering ~3 seconds of frozen video and a pause in audio playback).
+ this.cancelTimer_();
+ this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS
+
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-video-underflow'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-video-underflow'
+ });
+ return true;
+ }
+
+ var nextRange = findNextRange(buffered, currentTime); // check for gap
+
+ if (nextRange.length > 0) {
+ var difference = nextRange.start(0) - currentTime;
+ this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
+ this.cancelTimer_();
+ this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
+ return true;
+ } // All checks failed. Returning false to indicate failure to correct waiting
+
+
+ return false;
+ };
+
+ _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
+ if (allowSeeksWithinUnsafeLiveWindow === void 0) {
+ allowSeeksWithinUnsafeLiveWindow = false;
+ }
+
+ if (!seekable.length) {
+ // we can't make a solid case if there's no seekable, default to false
+ return false;
+ }
+
+ var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
+ var isLive = !playlist.endList;
+
+ if (isLive && allowSeeksWithinUnsafeLiveWindow) {
+ allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
+ }
+
+ if (currentTime > allowedEnd) {
+ return true;
+ }
+
+ return false;
+ };
+
+ _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
+ if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
+ seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
+ return true;
+ }
+
+ return false;
+ };
+
+ _proto.videoUnderflow_ = function videoUnderflow_(_ref) {
+ var videoBuffered = _ref.videoBuffered,
+ audioBuffered = _ref.audioBuffered,
+ currentTime = _ref.currentTime; // audio only content will not have video underflow :)
+
+ if (!videoBuffered) {
+ return;
+ }
+
+ var gap; // find a gap in demuxed content.
+
+ if (videoBuffered.length && audioBuffered.length) {
+ // in Chrome audio will continue to play for ~3s when we run out of video
+ // so we have to check that the video buffer did have some buffer in the
+ // past.
+ var lastVideoRange = findRange(videoBuffered, currentTime - 3);
+ var videoRange = findRange(videoBuffered, currentTime);
+ var audioRange = findRange(audioBuffered, currentTime);
+
+ if (audioRange.length && !videoRange.length && lastVideoRange.length) {
+ gap = {
+ start: lastVideoRange.end(0),
+ end: audioRange.end(0)
+ };
+ } // find a gap in muxed content.
+
+ } else {
+ var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
+ // stuck in a gap due to video underflow.
+
+ if (!nextRange.length) {
+ gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
+ }
+ }
+
+ if (gap) {
+ this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
+ return true;
+ }
+
+ return false;
+ }
+ /**
+ * Timer callback. If playback still has not proceeded, then we seek
+ * to the start of the next buffered region.
+ *
+ * @private
+ */
+ ;
+
+ _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
+ var buffered = this.tech_.buffered();
+ var currentTime = this.tech_.currentTime();
+ var nextRange = findNextRange(buffered, currentTime);
+ this.cancelTimer_();
+
+ if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
+ return;
+ }
+
+ this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
+
+ this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-gap-skip'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-gap-skip'
+ });
+ };
+
+ _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
+ // At least in Chrome, if there is a gap in the video buffer, the audio will continue
+ // playing for ~3 seconds after the video gap starts. This is done to account for
+ // video buffer underflow/underrun (note that this is not done when there is audio
+ // buffer underflow/underrun -- in that case the video will stop as soon as it
+ // encounters the gap, as audio stalls are more noticeable/jarring to a user than
+ // video stalls). The player's time will reflect the playthrough of audio, so the
+ // time will appear as if we are in a buffered region, even if we are stuck in a
+ // "gap."
+ //
+ // Example:
+ // video buffer: 0 => 10.1, 10.2 => 20
+ // audio buffer: 0 => 20
+ // overall buffer: 0 => 10.1, 10.2 => 20
+ // current time: 13
+ //
+ // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
+ // however, the audio continued playing until it reached ~3 seconds past the gap
+ // (13 seconds), at which point it stops as well. Since current time is past the
+ // gap, findNextRange will return no ranges.
+ //
+ // To check for this issue, we see if there is a gap that starts somewhere within
+ // a 3 second range (3 seconds +/- 1 second) back from our current time.
+ var gaps = findGaps(buffered);
+
+ for (var i = 0; i < gaps.length; i++) {
+ var start = gaps.start(i);
+      var end = gaps.end(i); // gap starts no more than 4 seconds back
+
+ if (currentTime - start < 4 && currentTime - start > 2) {
+ return {
+ start: start,
+ end: end
+ };
+ }
+ }
+
+ return null;
+ };
+
+ return PlaybackWatcher;
+}();
+
+var defaultOptions = {
+ errorInterval: 30,
+ getSource: function getSource(next) {
+ var tech = this.tech({
+ IWillNotUseThisInPlugins: true
+ });
+ var sourceObj = tech.currentSource_ || this.currentSource();
+ return next(sourceObj);
+ }
+};
+/**
+ * Main entry point for the plugin
+ *
+ * @param {Player} player a reference to a videojs Player instance
+ * @param {Object} [options] an object with plugin options
+ * @private
+ */
+
+var initPlugin = function initPlugin(player, options) {
+ var lastCalled = 0;
+ var seekTo = 0;
+ var localOptions = videojs.mergeOptions(defaultOptions, options);
+ player.ready(function () {
+ player.trigger({
+ type: 'usage',
+ name: 'vhs-error-reload-initialized'
+ });
+ player.trigger({
+ type: 'usage',
+ name: 'hls-error-reload-initialized'
+ });
+ });
+ /**
+ * Player modifications to perform that must wait until `loadedmetadata`
+ * has been triggered
+ *
+ * @private
+ */
+
+ var loadedMetadataHandler = function loadedMetadataHandler() {
+ if (seekTo) {
+ player.currentTime(seekTo);
+ }
+ };
+ /**
+ * Set the source on the player element, play, and seek if necessary
+ *
+ * @param {Object} sourceObj An object specifying the source url and mime-type to play
+ * @private
+ */
+
+
+ var setSource = function setSource(sourceObj) {
+ if (sourceObj === null || sourceObj === undefined) {
+ return;
+ }
+
+ seekTo = player.duration() !== Infinity && player.currentTime() || 0;
+ player.one('loadedmetadata', loadedMetadataHandler);
+ player.src(sourceObj);
+ player.trigger({
+ type: 'usage',
+ name: 'vhs-error-reload'
+ });
+ player.trigger({
+ type: 'usage',
+ name: 'hls-error-reload'
+ });
+ player.play();
+ };
+ /**
+ * Attempt to get a source from either the built-in getSource function
+ * or a custom function provided via the options
+ *
+ * @private
+ */
+
+
+ var errorHandler = function errorHandler() {
+ // Do not attempt to reload the source if a source-reload occurred before
+ // 'errorInterval' time has elapsed since the last source-reload
+ if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
+ player.trigger({
+ type: 'usage',
+ name: 'vhs-error-reload-canceled'
+ });
+ player.trigger({
+ type: 'usage',
+ name: 'hls-error-reload-canceled'
+ });
+ return;
+ }
+
+ if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
+ videojs.log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
+ return;
+ }
+
+ lastCalled = Date.now();
+ return localOptions.getSource.call(player, setSource);
+ };
+ /**
+ * Unbind any event handlers that were bound by the plugin
+ *
+ * @private
+ */
+
+
+ var cleanupEvents = function cleanupEvents() {
+ player.off('loadedmetadata', loadedMetadataHandler);
+ player.off('error', errorHandler);
+ player.off('dispose', cleanupEvents);
+ };
+ /**
+ * Cleanup before re-initializing the plugin
+ *
+ * @param {Object} [newOptions] an object with plugin options
+ * @private
+ */
+
+
+ var reinitPlugin = function reinitPlugin(newOptions) {
+ cleanupEvents();
+ initPlugin(player, newOptions);
+ };
+
+ player.on('error', errorHandler);
+  player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly clean up before
+ // initializing the plugin
+
+ player.reloadSourceOnError = reinitPlugin;
+};
+/**
+ * Reload the source when an error is detected as long as there
+ * wasn't an error previously within the last 30 seconds
+ *
+ * @param {Object} [options] an object with plugin options
+ */
+
+
+var reloadSourceOnError = function reloadSourceOnError(options) {
+ initPlugin(this, options);
+};
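+// A usage sketch for the plugin defined above (the stream URL is hypothetical;
+// option names mirror defaultOptions and initPlugin):
+//
+//   player.reloadSourceOnError({
+//     // wait at least 10 seconds between automatic source reloads
+//     errorInterval: 10,
+//     // optional custom source lookup; called with the setSource callback
+//     getSource: function (reload) {
+//       reload({ src: 'https://example.com/stream.m3u8', type: 'application/x-mpegURL' });
+//     }
+//   });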
+
+var version$4 = "2.14.2";
+var version$3 = "6.0.1";
+var version$2 = "0.21.1";
+var version$1 = "4.7.1";
+var version = "3.1.3";
+var Vhs = {
+ PlaylistLoader: PlaylistLoader,
+ Playlist: Playlist,
+ utils: utils,
+ STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
+ INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
+ lastBandwidthSelector: lastBandwidthSelector,
+ movingAverageBandwidthSelector: movingAverageBandwidthSelector,
+ comparePlaylistBandwidth: comparePlaylistBandwidth,
+ comparePlaylistResolution: comparePlaylistResolution,
+ xhr: xhrFactory()
+}; // Define getter/setters for config properties
+
+Object.keys(Config).forEach(function (prop) {
+ Object.defineProperty(Vhs, prop, {
+ get: function get() {
+ videojs.log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
+ return Config[prop];
+ },
+ set: function set(value) {
+ videojs.log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
+
+ if (typeof value !== 'number' || value < 0) {
+ videojs.log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
+ return;
+ }
+
+ Config[prop] = value;
+ }
+ });
+});
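+// For example, these proxied properties let Config be tuned at runtime (each
+// access logs the warning above); GOAL_BUFFER_LENGTH is one of the Config keys
+// referenced earlier in this bundle:
+//
+//   Vhs.GOAL_BUFFER_LENGTH = 60; // warns, then updates Config.GOAL_BUFFER_LENGTH
+//   Vhs.GOAL_BUFFER_LENGTH = -1; // warns and is ignored (must be a number >= 0)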
+var LOCAL_STORAGE_KEY = 'videojs-vhs';
+/**
+ * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
+ *
+ * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
+ * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
+ * @function handleVhsMediaChange
+ */
+
+var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
+ var newPlaylist = playlistLoader.media();
+ var selectedIndex = -1;
+
+ for (var i = 0; i < qualityLevels.length; i++) {
+ if (qualityLevels[i].id === newPlaylist.id) {
+ selectedIndex = i;
+ break;
+ }
+ }
+
+ qualityLevels.selectedIndex_ = selectedIndex;
+ qualityLevels.trigger({
+ selectedIndex: selectedIndex,
+ type: 'change'
+ });
+};
+/**
+ * Adds quality levels to list once playlist metadata is available
+ *
+ * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
+ * @param {Object} vhs Vhs object to listen to for media events.
+ * @function handleVhsLoadedMetadata
+ */
+
+
+var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
+ vhs.representations().forEach(function (rep) {
+ qualityLevels.addQualityLevel(rep);
+ });
+ handleVhsMediaChange(qualityLevels, vhs.playlists);
+}; // HLS is a source handler, not a tech. Make sure attempts to use it
+// as one do not cause exceptions.
+
+
+Vhs.canPlaySource = function () {
+ return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
+};
+
+var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
+ if (!keySystemOptions) {
+ return keySystemOptions;
+ }
+
+ var codecs = {};
+
+ if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
+ codecs = unwrapCodecList(codecs_js.parseCodecs(mainPlaylist.attributes.CODECS));
+ }
+
+ if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
+ codecs.audio = audioPlaylist.attributes.CODECS;
+ }
+
+ var videoContentType = codecs_js.getMimeForCodec(codecs.video);
+ var audioContentType = codecs_js.getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist
+
+ var keySystemContentTypes = {};
+
+ for (var keySystem in keySystemOptions) {
+ keySystemContentTypes[keySystem] = {};
+
+ if (audioContentType) {
+ keySystemContentTypes[keySystem].audioContentType = audioContentType;
+ }
+
+ if (videoContentType) {
+ keySystemContentTypes[keySystem].videoContentType = videoContentType;
+ } // Default to using the video playlist's PSSH even though they may be different, as
+ // videojs-contrib-eme will only accept one in the options.
+ //
+    // This shouldn't be an issue for most cases as early initialization will handle all
+ // unique PSSH values, and if they aren't, then encrypted events should have the
+ // specific information needed for the unique license.
+
+
+ if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
+ keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
+ } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
+ // so we need to prevent overwriting the URL entirely
+
+
+ if (typeof keySystemOptions[keySystem] === 'string') {
+ keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
+ }
+ }
+
+ return videojs.mergeOptions(keySystemOptions, keySystemContentTypes);
+};
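+// For example (hypothetical inputs): given
+//   keySystemOptions = { 'com.widevine.alpha': 'https://license.example.com' }
+// and a main playlist advertising CODECS="avc1.4d401f,mp4a.40.2", the merged
+// result keeps the license URL under `url` and adds videoContentType and
+// audioContentType strings derived via getMimeForCodec, plus any pssh found on
+// the main playlist's contentProtection entry for that key system.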
+/**
+ * @typedef {Object} KeySystems
+ *
+ * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
+ * Note: not all options are listed here.
+ *
+ * @property {Uint8Array} [pssh]
+ * Protection System Specific Header
+ */
+
+/**
+ * Goes through all the playlists and collects an array of KeySystems options objects
+ * containing each playlist's keySystems and their pssh values, if available.
+ *
+ * @param {Object[]} playlists
+ * The playlists to look through
+ * @param {string[]} keySystems
+ * The keySystems to collect pssh values for
+ *
+ * @return {KeySystems[]}
+ * An array of KeySystems objects containing available key systems and their
+ * pssh values
+ */
+
+
+var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
+ return playlists.reduce(function (keySystemsArr, playlist) {
+ if (!playlist.contentProtection) {
+ return keySystemsArr;
+ }
+
+ var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
+ var keySystemOptions = playlist.contentProtection[keySystem];
+
+ if (keySystemOptions && keySystemOptions.pssh) {
+ keySystemsObj[keySystem] = {
+ pssh: keySystemOptions.pssh
+ };
+ }
+
+ return keySystemsObj;
+ }, {});
+
+ if (Object.keys(keySystemsOptions).length) {
+ keySystemsArr.push(keySystemsOptions);
+ }
+
+ return keySystemsArr;
+ }, []);
+};
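+// For example (hypothetical data): for playlists whose contentProtection looks
+// like { 'com.widevine.alpha': { pssh: <Uint8Array>, ... } }, calling
+//   getAllPsshKeySystemsOptions(playlists, ['com.widevine.alpha'])
+// returns one { 'com.widevine.alpha': { pssh: <Uint8Array> } } entry per
+// playlist that actually carries a pssh for a requested key system.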
+/**
+ * Returns a promise that waits for the
+ * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
+ *
+ * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
+ * browsers.
+ *
+ * As per the above ticket, this is particularly important for Chrome, where, if
+ * unencrypted content is appended before encrypted content and the key session has not
+ * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
+ * during playback.
+ *
+ * @param {Object} player
+ * The player instance
+ * @param {Object[]} sourceKeySystems
+ * The key systems options from the player source
+ * @param {Object} [audioMedia]
+ * The active audio media playlist (optional)
+ * @param {Object[]} mainPlaylists
+ * The playlists found on the master playlist object
+ *
+ * @return {Object}
+ * Promise that resolves when the key session has been created
+ */
+
+
+var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
+ var player = _ref.player,
+ sourceKeySystems = _ref.sourceKeySystems,
+ audioMedia = _ref.audioMedia,
+ mainPlaylists = _ref.mainPlaylists;
+
+ if (!player.eme.initializeMediaKeys) {
+ return Promise.resolve();
+ } // TODO should all audio PSSH values be initialized for DRM?
+ //
+ // All unique video rendition pssh values are initialized for DRM, but here only
+ // the initial audio playlist license is initialized. In theory, an encrypted
+ // event should be fired if the user switches to an alternative audio playlist
+ // where a license is required, but this case hasn't yet been tested. In addition, there
+ // may be many alternate audio playlists unlikely to be used (e.g., multiple different
+ // languages).
+
+
+ var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
+ var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
+ var initializationFinishedPromises = [];
+ var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
+ // only place where it should not be deduped is for ms-prefixed APIs, but the early
+ // return for IE11 above, and the existence of modern EME APIs in addition to
+ // ms-prefixed APIs on Edge should prevent this from being a concern.
+ // initializeMediaKeys also won't use the webkit-prefixed APIs.
+
+ keySystemsOptionsArr.forEach(function (keySystemsOptions) {
+ keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
+ player.tech_.one('keysessioncreated', resolve);
+ }));
+ initializationFinishedPromises.push(new Promise(function (resolve, reject) {
+ player.eme.initializeMediaKeys({
+ keySystems: keySystemsOptions
+ }, function (err) {
+ if (err) {
+ reject(err);
+ return;
+ }
+
+ resolve();
+ });
+ }));
+ }); // The reasons Promise.race is chosen over Promise.any:
+ //
+ // * Promise.any is only available in Safari 14+.
+ // * None of these promises are expected to reject. If they do reject, it might be
+ // better here for the race to surface the rejection, rather than mask it by using
+ // Promise.any.
+
+ return Promise.race([// If a session was previously created, these will all finish resolving without
+ // creating a new session, otherwise it will take until the end of all license
+ // requests, which is why the key session check is used (to make setup much faster).
+ Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
+ Promise.race(keySessionCreatedPromises)]);
+};
+/**
+ * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
+ * there are keySystems on the source, sets up source options to prepare the source for
+ * eme.
+ *
+ * @param {Object} player
+ * The player instance
+ * @param {Object[]} sourceKeySystems
+ * The key systems options from the player source
+ * @param {Object} media
+ * The active media playlist
+ * @param {Object} [audioMedia]
+ * The active audio media playlist (optional)
+ *
+ * @return {boolean}
+ * Whether or not options were configured and EME is available
+ */
+
+
+var setupEmeOptions = function setupEmeOptions(_ref2) {
+ var player = _ref2.player,
+ sourceKeySystems = _ref2.sourceKeySystems,
+ media = _ref2.media,
+ audioMedia = _ref2.audioMedia;
+ var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
+
+ if (!sourceOptions) {
+ return false;
+ }
+
+ player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
+ // do nothing.
+
+ if (sourceOptions && !player.eme) {
+ videojs.log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
+ return false;
+ }
+
+ return true;
+};
+
+var getVhsLocalStorage = function getVhsLocalStorage() {
+ if (!window__default['default'].localStorage) {
+ return null;
+ }
+
+ var storedObject = window__default['default'].localStorage.getItem(LOCAL_STORAGE_KEY);
+
+ if (!storedObject) {
+ return null;
+ }
+
+ try {
+ return JSON.parse(storedObject);
+ } catch (e) {
+ // someone may have tampered with the value
+ return null;
+ }
+};
+
+var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
+ if (!window__default['default'].localStorage) {
+ return false;
+ }
+
+ var objectToStore = getVhsLocalStorage();
+ objectToStore = objectToStore ? videojs.mergeOptions(objectToStore, options) : options;
+
+ try {
+ window__default['default'].localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
+ } catch (e) {
+ // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
+ // storage is set to 0).
+ // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
+ // No need to perform any operation.
+ return false;
+ }
+
+ return objectToStore;
+};
+/**
+ * Parses VHS-supported media types from data URIs. See
+ * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
+ * for information on data URIs.
+ *
+ * @param {string} dataUri
+ * The data URI
+ *
+ * @return {string|Object}
+ * The parsed object/string, or the original string if no supported media type
+ * was found
+ */
+
+
+var expandDataUri = function expandDataUri(dataUri) {
+ if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
+ return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
+ } // no known case for this data URI, return the string as-is
+
+
+ return dataUri;
+};
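+// For example (the JSON payload here is hypothetical), an inline manifest can
+// be passed as a data URI:
+//
+//   expandDataUri('data:application/vnd.videojs.vhs+json,{"playlists":[]}')
+//
+// returns the parsed object, while any other string is returned unchanged.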
+/**
+ * Whether the browser has built-in HLS support.
+ */
+
+
+Vhs.supportsNativeHls = function () {
+ if (!document__default['default'] || !document__default['default'].createElement) {
+ return false;
+ }
+
+ var video = document__default['default'].createElement('video'); // native HLS is definitely not supported if HTML5 video isn't
+
+ if (!videojs.getTech('Html5').isSupported()) {
+ return false;
+ } // HLS manifests can go by many mime-types
+
+
+  var canPlay = [// Apple sanctioned
+ 'application/vnd.apple.mpegurl', // Apple sanctioned for backwards compatibility
+ 'audio/mpegurl', // Very common
+ 'audio/x-mpegurl', // Very common
+ 'application/x-mpegurl', // Included for completeness
+ 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
+ return canPlay.some(function (canItPlay) {
+ return /maybe|probably/i.test(video.canPlayType(canItPlay));
+ });
+}();
+
+Vhs.supportsNativeDash = function () {
+ if (!document__default['default'] || !document__default['default'].createElement || !videojs.getTech('Html5').isSupported()) {
+ return false;
+ }
+
+ return /maybe|probably/i.test(document__default['default'].createElement('video').canPlayType('application/dash+xml'));
+}();
+
+Vhs.supportsTypeNatively = function (type) {
+ if (type === 'hls') {
+ return Vhs.supportsNativeHls;
+ }
+
+ if (type === 'dash') {
+ return Vhs.supportsNativeDash;
+ }
+
+ return false;
+};
+/**
+ * HLS is a source handler, not a tech. Make sure attempts to use it
+ * as one do not cause exceptions.
+ */
+
+
+Vhs.isSupported = function () {
+ return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
+};
+
+var Component = videojs.getComponent('Component');
+/**
+ * The Vhs Handler object, where we orchestrate all of the parts
+ * of HLS to interact with video.js
+ *
+ * @class VhsHandler
+ * @extends videojs.Component
+ * @param {Object} source the source object
+ * @param {Tech} tech the parent tech object
+ * @param {Object} options optional and required options
+ */
+
+var VhsHandler = /*#__PURE__*/function (_Component) {
+ _inheritsLoose__default['default'](VhsHandler, _Component);
+
+ function VhsHandler(source, tech, options) {
+ var _this;
+
+ _this = _Component.call(this, tech, videojs.mergeOptions(options.hls, options.vhs)) || this;
+
+ if (options.hls && Object.keys(options.hls).length) {
+ videojs.log.warn('Using hls options is deprecated. Please rename `hls` to `vhs` in your options object.');
+ } // if a tech level `initialBandwidth` option was passed
+ // use that over the VHS level `bandwidth` option
+
+
+ if (typeof options.initialBandwidth === 'number') {
+ _this.options_.bandwidth = options.initialBandwidth;
+ }
+
+    _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated but set up a reference to HLS for
+ // backwards-compatibility
+
+ if (tech.options_ && tech.options_.playerId) {
+ var _player = videojs(tech.options_.playerId);
+
+ if (!_player.hasOwnProperty('hls')) {
+ Object.defineProperty(_player, 'hls', {
+ get: function get() {
+ videojs.log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
+ tech.trigger({
+ type: 'usage',
+ name: 'hls-player-access'
+ });
+ return _assertThisInitialized__default['default'](_this);
+ },
+ configurable: true
+ });
+ }
+
+ if (!_player.hasOwnProperty('vhs')) {
+ Object.defineProperty(_player, 'vhs', {
+ get: function get() {
+ videojs.log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
+ tech.trigger({
+ type: 'usage',
+ name: 'vhs-player-access'
+ });
+ return _assertThisInitialized__default['default'](_this);
+ },
+ configurable: true
+ });
+ }
+
+ if (!_player.hasOwnProperty('dash')) {
+ Object.defineProperty(_player, 'dash', {
+ get: function get() {
+ videojs.log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
+ return _assertThisInitialized__default['default'](_this);
+ },
+ configurable: true
+ });
+ }
+
+ _this.player_ = _player;
+ }
+
+ _this.tech_ = tech;
+ _this.source_ = source;
+ _this.stats = {};
+ _this.ignoreNextSeekingEvent_ = false;
+
+ _this.setOptions_();
+
+ if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
+ tech.overrideNativeAudioTracks(true);
+ tech.overrideNativeVideoTracks(true);
+ } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
+ // overriding native HLS only works if audio tracks have been emulated
+ // error early if we're misconfigured
+ throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
+ } // listen for fullscreenchange events for this player so that we
+ // can adjust our quality selection quickly
+
+
+ _this.on(document__default['default'], ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
+ var fullscreenElement = document__default['default'].fullscreenElement || document__default['default'].webkitFullscreenElement || document__default['default'].mozFullScreenElement || document__default['default'].msFullscreenElement;
+
+ if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
+ _this.masterPlaylistController_.fastQualityChange_();
+ } else {
+ // When leaving fullscreen, since the in page pixel dimensions should be smaller
+ // than full screen, see if there should be a rendition switch down to preserve
+ // bandwidth.
+ _this.masterPlaylistController_.checkABR_();
+ }
+ });
+
+ _this.on(_this.tech_, 'seeking', function () {
+ if (this.ignoreNextSeekingEvent_) {
+ this.ignoreNextSeekingEvent_ = false;
+ return;
+ }
+
+ this.setCurrentTime(this.tech_.currentTime());
+ });
+
+ _this.on(_this.tech_, 'error', function () {
+ // verify that the error was real and we are loaded
+ // enough to have mpc loaded.
+ if (this.tech_.error() && this.masterPlaylistController_) {
+ this.masterPlaylistController_.pauseLoading();
+ }
+ });
+
+ _this.on(_this.tech_, 'play', _this.play);
+
+ return _this;
+ }
+
+ var _proto = VhsHandler.prototype;
+
+ _proto.setOptions_ = function setOptions_() {
+ var _this2 = this; // defaults
+
+
+ this.options_.withCredentials = this.options_.withCredentials || false;
+ this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
+ this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
+ this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
+ this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
+ this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
+ this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;
+ this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;
+ this.options_.customTagParsers = this.options_.customTagParsers || [];
+ this.options_.customTagMappers = this.options_.customTagMappers || [];
+ this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
+
+ if (typeof this.options_.blacklistDuration !== 'number') {
+ this.options_.blacklistDuration = 5 * 60;
+ }
+
+ if (typeof this.options_.bandwidth !== 'number') {
+ if (this.options_.useBandwidthFromLocalStorage) {
+ var storedObject = getVhsLocalStorage();
+
+ if (storedObject && storedObject.bandwidth) {
+ this.options_.bandwidth = storedObject.bandwidth;
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-bandwidth-from-local-storage'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-bandwidth-from-local-storage'
+ });
+ }
+
+ if (storedObject && storedObject.throughput) {
+ this.options_.throughput = storedObject.throughput;
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-throughput-from-local-storage'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-throughput-from-local-storage'
+ });
+ }
+ }
+ } // if bandwidth was not set by options or pulled from local storage, start playlist
+ // selection at a reasonable bandwidth
+
+
+ if (typeof this.options_.bandwidth !== 'number') {
+ this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
+ } // If the bandwidth number is unchanged from the initial setting
+ // then this takes precedence over the enableLowInitialPlaylist option
+
+
+ this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
+
+ ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS', 'useNetworkInformationApi', 'useDtsForTimestampOffset', 'experimentalExactManifestTimings', 'experimentalLeastPixelDiffSelector'].forEach(function (option) {
+ if (typeof _this2.source_[option] !== 'undefined') {
+ _this2.options_[option] = _this2.source_[option];
+ }
+ });
+ this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
+ this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
+ }
+ /**
+ * called when player.src gets called, handle a new source
+ *
+ * @param {Object} src the source object to handle
+ */
+ ;
+
+ _proto.src = function src(_src, type) {
+ var _this3 = this; // do nothing if the src is falsey
+
+
+ if (!_src) {
+ return;
+ }
+
+ this.setOptions_(); // add master playlist controller options
+
+ this.options_.src = expandDataUri(this.source_.src);
+ this.options_.tech = this.tech_;
+ this.options_.externVhs = Vhs;
+ this.options_.sourceType = mediaTypes_js.simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
+
+ this.options_.seekTo = function (time) {
+ _this3.tech_.setCurrentTime(time);
+ };
+
+ if (this.options_.smoothQualityChange) {
+ videojs.log.warn('smoothQualityChange is deprecated and will be removed in the next major version');
+ }
+
+ this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
+ var playbackWatcherOptions = videojs.mergeOptions({
+ liveRangeSafeTimeDelta: SAFE_TIME_DELTA
+ }, this.options_, {
+ seekable: function seekable() {
+ return _this3.seekable();
+ },
+ media: function media() {
+ return _this3.masterPlaylistController_.media();
+ },
+ masterPlaylistController: this.masterPlaylistController_
+ });
+ this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
+ this.masterPlaylistController_.on('error', function () {
+ var player = videojs.players[_this3.tech_.options_.playerId];
+ var error = _this3.masterPlaylistController_.error;
+
+ if (typeof error === 'object' && !error.code) {
+ error.code = 3;
+ } else if (typeof error === 'string') {
+ error = {
+ message: error,
+ code: 3
+ };
+ }
+
+ player.error(error);
+ });
+ var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
+ // compatibility with < v2
+
+ this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
+ this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
+
+ this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
+ this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
+ // controller. Using a custom property for backwards compatibility
+ // with < v2
+
+ Object.defineProperties(this, {
+ selectPlaylist: {
+ get: function get() {
+ return this.masterPlaylistController_.selectPlaylist;
+ },
+ set: function set(selectPlaylist) {
+ this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
+ }
+ },
+ throughput: {
+ get: function get() {
+ return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
+ },
+ set: function set(throughput) {
+ this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
+ // for the cumulative average
+
+ this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
+ }
+ },
+ bandwidth: {
+ get: function get() {
+ var playerBandwidthEst = this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
+ var networkInformation = window__default['default'].navigator.connection || window__default['default'].navigator.mozConnection || window__default['default'].navigator.webkitConnection;
+ var tenMbpsAsBitsPerSecond = 10e6;
+
+ if (this.options_.useNetworkInformationApi && networkInformation) {
+ // downlink returns Mbps
+ // https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlink
+ var networkInfoBandwidthEstBitsPerSec = networkInformation.downlink * 1000 * 1000; // downlink maxes out at 10 Mbps. In the event that both networkInformationApi and the player
+ // estimate a bandwidth greater than 10 Mbps, use the larger of the two estimates to ensure that
+ // high quality streams are not filtered out.
+
+ if (networkInfoBandwidthEstBitsPerSec >= tenMbpsAsBitsPerSecond && playerBandwidthEst >= tenMbpsAsBitsPerSecond) {
+ playerBandwidthEst = Math.max(playerBandwidthEst, networkInfoBandwidthEstBitsPerSec);
+ } else {
+ playerBandwidthEst = networkInfoBandwidthEstBitsPerSec;
+ }
+ }
+
+ return playerBandwidthEst;
+ },
+ set: function set(bandwidth) {
+ this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
+          // `count` is set to zero so that the current value of `rate` isn't included
+ // in the cumulative average
+
+ this.masterPlaylistController_.mainSegmentLoader_.throughput = {
+ rate: 0,
+ count: 0
+ };
+ }
+ },
+
+ /**
+ * `systemBandwidth` is a combination of two serial processes bit-rates. The first
+ * is the network bitrate provided by `bandwidth` and the second is the bitrate of
+ * the entire process after that - decryption, transmuxing, and appending - provided
+ * by `throughput`.
+ *
+ * Since the two process are serial, the overall system bandwidth is given by:
+ * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
+ */
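+      // For example, with bandwidth = 2e6 bits/s and throughput = 8e6 bits/s:
+      //   systemBandwidth = Math.floor(1 / (1 / 2e6 + 1 / 8e6)) = 1.6e6 bits/s,
+      // i.e. the slower stage dominates the combined pipeline rate.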
+ systemBandwidth: {
+ get: function get() {
+ var invBandwidth = 1 / (this.bandwidth || 1);
+ var invThroughput;
+
+ if (this.throughput > 0) {
+ invThroughput = 1 / this.throughput;
+ } else {
+ invThroughput = 0;
+ }
+
+ var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
+ return systemBitrate;
+ },
+ set: function set() {
+ videojs.log.error('The "systemBandwidth" property is read-only');
+ }
+ }
+ });
+
+ if (this.options_.bandwidth) {
+ this.bandwidth = this.options_.bandwidth;
+ }
+
+ if (this.options_.throughput) {
+ this.throughput = this.options_.throughput;
+ }
+
+ Object.defineProperties(this.stats, {
+ bandwidth: {
+ get: function get() {
+ return _this3.bandwidth || 0;
+ },
+ enumerable: true
+ },
+ mediaRequests: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaRequests_() || 0;
+ },
+ enumerable: true
+ },
+ mediaRequestsAborted: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
+ },
+ enumerable: true
+ },
+ mediaRequestsTimedout: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
+ },
+ enumerable: true
+ },
+ mediaRequestsErrored: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
+ },
+ enumerable: true
+ },
+ mediaTransferDuration: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
+ },
+ enumerable: true
+ },
+ mediaBytesTransferred: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
+ },
+ enumerable: true
+ },
+ mediaSecondsLoaded: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
+ },
+ enumerable: true
+ },
+ mediaAppends: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaAppends_() || 0;
+ },
+ enumerable: true
+ },
+ mainAppendsToLoadedData: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mainAppendsToLoadedData_() || 0;
+ },
+ enumerable: true
+ },
+ audioAppendsToLoadedData: {
+ get: function get() {
+ return _this3.masterPlaylistController_.audioAppendsToLoadedData_() || 0;
+ },
+ enumerable: true
+ },
+ appendsToLoadedData: {
+ get: function get() {
+ return _this3.masterPlaylistController_.appendsToLoadedData_() || 0;
+ },
+ enumerable: true
+ },
+ timeToLoadedData: {
+ get: function get() {
+ return _this3.masterPlaylistController_.timeToLoadedData_() || 0;
+ },
+ enumerable: true
+ },
+ buffered: {
+ get: function get() {
+ return timeRangesToArray(_this3.tech_.buffered());
+ },
+ enumerable: true
+ },
+ currentTime: {
+ get: function get() {
+ return _this3.tech_.currentTime();
+ },
+ enumerable: true
+ },
+ currentSource: {
+ get: function get() {
+ return _this3.tech_.currentSource_;
+ },
+ enumerable: true
+ },
+ currentTech: {
+ get: function get() {
+ return _this3.tech_.name_;
+ },
+ enumerable: true
+ },
+ duration: {
+ get: function get() {
+ return _this3.tech_.duration();
+ },
+ enumerable: true
+ },
+ master: {
+ get: function get() {
+ return _this3.playlists.master;
+ },
+ enumerable: true
+ },
+ playerDimensions: {
+ get: function get() {
+ return _this3.tech_.currentDimensions();
+ },
+ enumerable: true
+ },
+ seekable: {
+ get: function get() {
+ return timeRangesToArray(_this3.tech_.seekable());
+ },
+ enumerable: true
+ },
+ timestamp: {
+ get: function get() {
+ return Date.now();
+ },
+ enumerable: true
+ },
+ videoPlaybackQuality: {
+ get: function get() {
+ return _this3.tech_.getVideoPlaybackQuality();
+ },
+ enumerable: true
+ }
+ });
+ this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
+ this.tech_.on('bandwidthupdate', function () {
+ if (_this3.options_.useBandwidthFromLocalStorage) {
+ updateVhsLocalStorage({
+ bandwidth: _this3.bandwidth,
+ throughput: Math.round(_this3.throughput)
+ });
+ }
+ });
+ this.masterPlaylistController_.on('selectedinitialmedia', function () {
+ // Add the manual rendition mix-in to VhsHandler
+ renditionSelectionMixin(_this3);
+ });
+ this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
+ _this3.setupEme_();
+ }); // the bandwidth of the primary segment loader is our best
+ // estimate of overall bandwidth
+
+ this.on(this.masterPlaylistController_, 'progress', function () {
+ this.tech_.trigger('progress');
+ }); // In the live case, we need to ignore the very first `seeking` event since
+ // that will be the result of the seek-to-live behavior
+
+ this.on(this.masterPlaylistController_, 'firstplay', function () {
+ this.ignoreNextSeekingEvent_ = true;
+ });
+ this.setupQualityLevels_(); // do nothing if the tech has been disposed already
+ // this can occur if someone sets the src in player.ready(), for instance
+
+ if (!this.tech_.el()) {
+ return;
+ }
+
+ this.mediaSourceUrl_ = window__default['default'].URL.createObjectURL(this.masterPlaylistController_.mediaSource);
+ this.tech_.src(this.mediaSourceUrl_);
+ };
+
+ _proto.createKeySessions_ = function createKeySessions_() {
+ var _this4 = this;
+
+ var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
+ this.logger_('waiting for EME key session creation');
+ waitForKeySessionCreation({
+ player: this.player_,
+ sourceKeySystems: this.source_.keySystems,
+ audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
+ mainPlaylists: this.playlists.master.playlists
+ }).then(function () {
+ _this4.logger_('created EME key session');
+
+ _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
+ })["catch"](function (err) {
+ _this4.logger_('error while creating EME key session', err);
+
+ _this4.player_.error({
+ message: 'Failed to initialize media keys for EME',
+ code: 3
+ });
+ });
+ };
+
+ _proto.handleWaitingForKey_ = function handleWaitingForKey_() {
+ // If waitingforkey is fired, it's possible that the data that's necessary to retrieve
+ // the key is in the manifest. While this should've happened on initial source load, it
+ // may happen again in live streams where the keys change, and the manifest info
+ // reflects the update.
+ //
+ // Because videojs-contrib-eme compares the PSSH data we send to that of PSSH data it's
+ // already requested keys for, we don't have to worry about this generating extraneous
+ // requests.
+ this.logger_('waitingforkey fired, attempting to create any new key sessions');
+ this.createKeySessions_();
+ }
+ /**
+ * If necessary and EME is available, sets up EME options and waits for key session
+ * creation.
+ *
+ * This function also updates the source updater so that it can be used, as for some
+ * browsers, EME must be configured before content is appended (if appending unencrypted
+ * content before encrypted content).
+ */
+ ;
+
+ _proto.setupEme_ = function setupEme_() {
+ var _this5 = this;
+
+ var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
+ var didSetupEmeOptions = setupEmeOptions({
+ player: this.player_,
+ sourceKeySystems: this.source_.keySystems,
+ media: this.playlists.media(),
+ audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
+ });
+ this.player_.tech_.on('keystatuschange', function (e) {
+ if (e.status === 'output-restricted') {
+ _this5.masterPlaylistController_.blacklistCurrentPlaylist({
+ playlist: _this5.masterPlaylistController_.media(),
+ message: "DRM keystatus changed to " + e.status + ". Playlist will fail to play. Check for HDCP content.",
+ blacklistDuration: Infinity
+ });
+ }
+ });
+ this.handleWaitingForKey_ = this.handleWaitingForKey_.bind(this);
+ this.player_.tech_.on('waitingforkey', this.handleWaitingForKey_); // In IE11 this is too early to initialize media keys, and IE11 does not support
+ // promises.
+
+ if (videojs.browser.IE_VERSION === 11 || !didSetupEmeOptions) {
+ // If EME options were not set up, we've done all we could to initialize EME.
+ this.masterPlaylistController_.sourceUpdater_.initializedEme();
+ return;
+ }
+
+ this.createKeySessions_();
+ }
+ /**
+ * Initializes the quality levels and sets listeners to update them.
+ *
+ * @method setupQualityLevels_
+ * @private
+ */
+ ;
+
+ _proto.setupQualityLevels_ = function setupQualityLevels_() {
+ var _this6 = this;
+
+ var player = videojs.players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
+ // or qualityLevels_ listeners have already been setup, do nothing.
+
+ if (!player || !player.qualityLevels || this.qualityLevels_) {
+ return;
+ }
+
+ this.qualityLevels_ = player.qualityLevels();
+ this.masterPlaylistController_.on('selectedinitialmedia', function () {
+ handleVhsLoadedMetadata(_this6.qualityLevels_, _this6);
+ });
+ this.playlists.on('mediachange', function () {
+ handleVhsMediaChange(_this6.qualityLevels_, _this6.playlists);
+ });
+ }
+ /**
+ * return the version
+ */
+ ;
+
+ VhsHandler.version = function version$5() {
+ return {
+ '@videojs/http-streaming': version$4,
+ 'mux.js': version$3,
+ 'mpd-parser': version$2,
+ 'm3u8-parser': version$1,
+ 'aes-decrypter': version
+ };
+ }
+ /**
+ * return the version
+ */
+ ;
+
+ _proto.version = function version() {
+ return this.constructor.version();
+ };
+
+ _proto.canChangeType = function canChangeType() {
+ return SourceUpdater.canChangeType();
+ }
+ /**
+ * Begin playing the video.
+ */
+ ;
+
+ _proto.play = function play() {
+ this.masterPlaylistController_.play();
+ }
+ /**
+ * a wrapper around the function in MasterPlaylistController
+ */
+ ;
+
+ _proto.setCurrentTime = function setCurrentTime(currentTime) {
+ this.masterPlaylistController_.setCurrentTime(currentTime);
+ }
+ /**
+ * a wrapper around the function in MasterPlaylistController
+ */
+ ;
+
+ _proto.duration = function duration() {
+ return this.masterPlaylistController_.duration();
+ }
+ /**
+ * a wrapper around the function in MasterPlaylistController
+ */
+ ;
+
+ _proto.seekable = function seekable() {
+ return this.masterPlaylistController_.seekable();
+ }
+ /**
+ * Abort all outstanding work and cleanup.
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ if (this.playbackWatcher_) {
+ this.playbackWatcher_.dispose();
+ }
+
+ if (this.masterPlaylistController_) {
+ this.masterPlaylistController_.dispose();
+ }
+
+ if (this.qualityLevels_) {
+ this.qualityLevels_.dispose();
+ }
+
+ if (this.player_) {
+ delete this.player_.vhs;
+ delete this.player_.dash;
+ delete this.player_.hls;
+ }
+
+ if (this.tech_ && this.tech_.vhs) {
+ delete this.tech_.vhs;
+ } // don't check this.tech_.hls as it will log a deprecated warning
+
+
+ if (this.tech_) {
+ delete this.tech_.hls;
+ }
+
+ if (this.mediaSourceUrl_ && window__default['default'].URL.revokeObjectURL) {
+ window__default['default'].URL.revokeObjectURL(this.mediaSourceUrl_);
+ this.mediaSourceUrl_ = null;
+ }
+
+ if (this.tech_) {
+ this.tech_.off('waitingforkey', this.handleWaitingForKey_);
+ }
+
+ _Component.prototype.dispose.call(this);
+ };
+
+ _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
+ return getProgramTime({
+ playlist: this.masterPlaylistController_.media(),
+ time: time,
+ callback: callback
+ });
+ } // the player must be playing before calling this
+ ;
+
+ _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
+ if (pauseAfterSeek === void 0) {
+ pauseAfterSeek = true;
+ }
+
+ if (retryCount === void 0) {
+ retryCount = 2;
+ }
+
+ return seekToProgramTime({
+ programTime: programTime,
+ playlist: this.masterPlaylistController_.media(),
+ retryCount: retryCount,
+ pauseAfterSeek: pauseAfterSeek,
+ seekTo: this.options_.seekTo,
+ tech: this.options_.tech,
+ callback: callback
+ });
+ };
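+ // Illustrative usage of the wrappers above (an assumption, not part of this file;
+ // the callback shape is assumed and, per the note on convertToProgramTime, the
+ // player must already be playing):
+ //
+ //   player.vhs.seekToProgramTime('2015-08-25T16:00:00.000Z', function (err, seekedTime) {
+ //     if (!err) { videojs.log('seeked to program time', seekedTime); }
+ //   });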
+
+ return VhsHandler;
+}(Component);
+/**
+ * The Source Handler object, which informs video.js what additional
+ * MIME types are supported and sets up playback. It is registered
+ * automatically to the appropriate tech based on the capabilities of
+ * the browser it is running in. It is not necessary to use or modify
+ * this object in normal usage.
+ */
+
+
+var VhsSourceHandler = {
+ name: 'videojs-http-streaming',
+ VERSION: version$4,
+ canHandleSource: function canHandleSource(srcObj, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ var localOptions = videojs.mergeOptions(videojs.options, options);
+ return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
+ },
+ handleSource: function handleSource(source, tech, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ var localOptions = videojs.mergeOptions(videojs.options, options);
+ tech.vhs = new VhsHandler(source, tech, localOptions);
+
+ if (!videojs.hasOwnProperty('hls')) {
+ Object.defineProperty(tech, 'hls', {
+ get: function get() {
+ videojs.log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
+ return tech.vhs;
+ },
+ configurable: true
+ });
+ }
+
+ tech.vhs.xhr = xhrFactory();
+ tech.vhs.src(source.src, source.type);
+ return tech.vhs;
+ },
+ canPlayType: function canPlayType(type, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ var _videojs$mergeOptions = videojs.mergeOptions(videojs.options, options),
+ _videojs$mergeOptions2 = _videojs$mergeOptions.vhs;
+
+ _videojs$mergeOptions2 = _videojs$mergeOptions2 === void 0 ? {} : _videojs$mergeOptions2;
+ var _videojs$mergeOptions3 = _videojs$mergeOptions2.overrideNative,
+ overrideNative = _videojs$mergeOptions3 === void 0 ? !videojs.browser.IS_ANY_SAFARI : _videojs$mergeOptions3,
+ _videojs$mergeOptions4 = _videojs$mergeOptions.hls;
+ _videojs$mergeOptions4 = _videojs$mergeOptions4 === void 0 ? {} : _videojs$mergeOptions4;
+ var _videojs$mergeOptions5 = _videojs$mergeOptions4.overrideNative,
+ legacyOverrideNative = _videojs$mergeOptions5 === void 0 ? false : _videojs$mergeOptions5;
+ var supportedType = mediaTypes_js.simpleTypeFromSourceType(type);
+ var canUseMsePlayback = supportedType && (!Vhs.supportsTypeNatively(supportedType) || legacyOverrideNative || overrideNative);
+ return canUseMsePlayback ? 'maybe' : '';
+ }
+};
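+// Illustrative configuration (an assumption based on the options read above, not part
+// of this file): MSE-based playback can be forced even where native HLS support exists
+// by setting `overrideNative`, e.g.
+//
+//   videojs('my-video', {
+//     html5: { vhs: { overrideNative: !videojs.browser.IS_ANY_SAFARI } }
+//   });
+//
+// which canPlayType() above picks up through the merged `vhs.overrideNative` option.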
+/**
+ * Check to see if the native MediaSource object exists and supports
+ * an MP4 container with both H.264 video and AAC-LC audio.
+ *
+ * @return {boolean} if native media sources are supported
+ */
+
+var supportsNativeMediaSources = function supportsNativeMediaSources() {
+ return codecs_js.browserSupportsCodec('avc1.4d400d,mp4a.40.2');
+}; // register source handlers with the appropriate techs
+
+
+if (supportsNativeMediaSources()) {
+ videojs.getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
+}
+
+videojs.VhsHandler = VhsHandler;
+Object.defineProperty(videojs, 'HlsHandler', {
+ get: function get() {
+ videojs.log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
+ return VhsHandler;
+ },
+ configurable: true
+});
+videojs.VhsSourceHandler = VhsSourceHandler;
+Object.defineProperty(videojs, 'HlsSourceHandler', {
+ get: function get() {
+ videojs.log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
+ return VhsSourceHandler;
+ },
+ configurable: true
+});
+videojs.Vhs = Vhs;
+Object.defineProperty(videojs, 'Hls', {
+ get: function get() {
+ videojs.log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
+ return Vhs;
+ },
+ configurable: true
+});
+
+if (!videojs.use) {
+ videojs.registerComponent('Hls', Vhs);
+ videojs.registerComponent('Vhs', Vhs);
+}
+
+videojs.options.vhs = videojs.options.vhs || {};
+videojs.options.hls = videojs.options.hls || {};
+
+if (!videojs.getPlugin || !videojs.getPlugin('reloadSourceOnError')) {
+ var registerPlugin = videojs.registerPlugin || videojs.plugin;
+ registerPlugin('reloadSourceOnError', reloadSourceOnError);
+}
+
+module.exports = videojs;
diff --git a/frontend/src/static/lib/video-js/7.20.2/video.es.js b/frontend/src/static/lib/video-js/7.20.2/video.es.js
new file mode 100644
index 0000000..6c182d9
--- /dev/null
+++ b/frontend/src/static/lib/video-js/7.20.2/video.es.js
@@ -0,0 +1,55198 @@
+/**
+ * @license
+ * Video.js 7.20.2
+ * Copyright Brightcove, Inc.
+ * Available under Apache License Version 2.0
+ *
+ *
+ * Includes vtt.js
+ * Available under Apache License Version 2.0
+ *
+ */
+
+import window$1 from 'global/window';
+import document from 'global/document';
+import _extends from '@babel/runtime/helpers/extends';
+import keycode from 'keycode';
+import _assertThisInitialized from '@babel/runtime/helpers/assertThisInitialized';
+import _inheritsLoose from '@babel/runtime/helpers/inheritsLoose';
+import safeParseTuple from 'safe-json-parse/tuple';
+import XHR from '@videojs/xhr';
+import vtt from 'videojs-vtt.js';
+import _construct from '@babel/runtime/helpers/construct';
+import _inherits from '@babel/runtime/helpers/inherits';
+import _resolveUrl from '@videojs/vhs-utils/es/resolve-url.js';
+import { Parser } from 'm3u8-parser';
+import { browserSupportsCodec, DEFAULT_VIDEO_CODEC, DEFAULT_AUDIO_CODEC, muxerSupportsCodec, parseCodecs, translateLegacyCodec, codecsFromDefault, getMimeForCodec, isAudioCodec } from '@videojs/vhs-utils/es/codecs.js';
+import { simpleTypeFromSourceType } from '@videojs/vhs-utils/es/media-types.js';
+import { isArrayBufferView, concatTypedArrays, stringToBytes, toUint8 } from '@videojs/vhs-utils/es/byte-helpers';
+import { generateSidxKey, parseUTCTiming, parse, addSidxSegmentsToPlaylist } from 'mpd-parser';
+import parseSidx from 'mux.js/lib/tools/parse-sidx';
+import { getId3Offset } from '@videojs/vhs-utils/es/id3-helpers';
+import { detectContainerForBytes, isLikelyFmp4MediaSegment } from '@videojs/vhs-utils/es/containers';
+import { ONE_SECOND_IN_TS } from 'mux.js/lib/utils/clock';
+
+var version$5 = "7.20.2";
+
+/**
+ * An Object that contains lifecycle hooks as keys which point to an array
+ * of functions that are run when a lifecycle is triggered
+ *
+ * @private
+ */
+var hooks_ = {};
+/**
+ * Get a list of hooks for a specific lifecycle
+ *
+ * @param {string} type
+ * the lifecycle to get hooks from
+ *
+ * @param {Function|Function[]} [fn]
+ * Optionally add a hook (or hooks) to the lifecycle that you are getting.
+ *
+ * @return {Array}
+ * an array of hooks, or an empty array if there are none.
+ */
+
+var hooks = function hooks(type, fn) {
+ hooks_[type] = hooks_[type] || [];
+
+ if (fn) {
+ hooks_[type] = hooks_[type].concat(fn);
+ }
+
+ return hooks_[type];
+};
+/**
+ * Add a function hook to a specific videojs lifecycle.
+ *
+ * @param {string} type
+ * the lifecycle to hook the function to.
+ *
+ * @param {Function|Function[]}
+ * The function or array of functions to attach.
+ */
+
+
+var hook = function hook(type, fn) {
+ hooks(type, fn);
+};
+/**
+ * Remove a hook from a specific videojs lifecycle.
+ *
+ * @param {string} type
+ * the lifecycle that the function hooked to
+ *
+ * @param {Function} fn
+ * The hooked function to remove
+ *
+ * @return {boolean}
+ * Whether the hook was found and removed (`true`) or not (`false`)
+ */
+
+
+var removeHook = function removeHook(type, fn) {
+ var index = hooks(type).indexOf(fn);
+
+ if (index <= -1) {
+ return false;
+ }
+
+ hooks_[type] = hooks_[type].slice();
+ hooks_[type].splice(index, 1);
+ return true;
+};
+/**
+ * Add a function hook that will only run once to a specific videojs lifecycle.
+ *
+ * @param {string} type
+ * the lifecycle to hook the function to.
+ *
+ * @param {Function|Function[]}
+ * The function or array of functions to attach.
+ */
+
+
+var hookOnce = function hookOnce(type, fn) {
+ hooks(type, [].concat(fn).map(function (original) {
+ var wrapper = function wrapper() {
+ removeHook(type, wrapper);
+ return original.apply(void 0, arguments);
+ };
+
+ return wrapper;
+ }));
+};
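+// Illustrative usage of the lifecycle hooks above (assumed typical calls through the
+// public `videojs.hook`/`videojs.hookOnce` exports, not part of this file):
+//
+//   videojs.hook('beforesetup', function (videoEl, options) {
+//     // return partial options to be merged into the player options
+//     return { autoplay: false };
+//   });
+//
+//   videojs.hookOnce('setup', function (player) {
+//     videojs.log('player created:', player.id());
+//   });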
+
+/**
+ * @file fullscreen-api.js
+ * @module fullscreen-api
+ * @private
+ */
+/**
+ * Store the browser-specific methods for the fullscreen API.
+ *
+ * @type {Object}
+ * @see [Specification]{@link https://fullscreen.spec.whatwg.org}
+ * @see [Map Approach From Screenfull.js]{@link https://github.com/sindresorhus/screenfull.js}
+ */
+
+var FullscreenApi = {
+ prefixed: true
+}; // browser API methods
+
+var apiMap = [['requestFullscreen', 'exitFullscreen', 'fullscreenElement', 'fullscreenEnabled', 'fullscreenchange', 'fullscreenerror', 'fullscreen'], // WebKit
+['webkitRequestFullscreen', 'webkitExitFullscreen', 'webkitFullscreenElement', 'webkitFullscreenEnabled', 'webkitfullscreenchange', 'webkitfullscreenerror', '-webkit-full-screen'], // Mozilla
+['mozRequestFullScreen', 'mozCancelFullScreen', 'mozFullScreenElement', 'mozFullScreenEnabled', 'mozfullscreenchange', 'mozfullscreenerror', '-moz-full-screen'], // Microsoft
+['msRequestFullscreen', 'msExitFullscreen', 'msFullscreenElement', 'msFullscreenEnabled', 'MSFullscreenChange', 'MSFullscreenError', '-ms-fullscreen']];
+var specApi = apiMap[0];
+var browserApi; // determine the supported set of functions
+
+for (var i = 0; i < apiMap.length; i++) {
+ // check for exitFullscreen function
+ if (apiMap[i][1] in document) {
+ browserApi = apiMap[i];
+ break;
+ }
+} // map the browser API names to the spec API names
+
+
+if (browserApi) {
+ for (var _i = 0; _i < browserApi.length; _i++) {
+ FullscreenApi[specApi[_i]] = browserApi[_i];
+ }
+
+ FullscreenApi.prefixed = browserApi[0] !== specApi[0];
+}
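+// Illustrative outcome (derived from the mapping above): in a browser that only exposes
+// the WebKit-prefixed API, FullscreenApi.requestFullscreen === 'webkitRequestFullscreen'
+// and FullscreenApi.prefixed === true; when the unprefixed spec API exists, each name
+// maps to itself and `prefixed` stays false.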
+
+/**
+ * @file create-logger.js
+ * @module create-logger
+ */
+
+var history = [];
+/**
+ * Log messages to the console and history based on the type of message
+ *
+ * @private
+ * @param {string} type
+ * The name of the console method to use.
+ *
+ * @param {Array} args
+ * The arguments to be passed to the matching console method.
+ */
+
+var LogByTypeFactory = function LogByTypeFactory(name, log) {
+ return function (type, level, args) {
+ var lvl = log.levels[level];
+ var lvlRegExp = new RegExp("^(" + lvl + ")$");
+
+ if (type !== 'log') {
+ // Add the type to the front of the message when it's not "log".
+ args.unshift(type.toUpperCase() + ':');
+ } // Add console prefix after adding to history.
+
+
+ args.unshift(name + ':'); // Add a clone of the args at this point to history.
+
+ if (history) {
+ history.push([].concat(args)); // only store 1000 history entries
+
+ var splice = history.length - 1000;
+ history.splice(0, splice > 0 ? splice : 0);
+ } // If there's no console then don't try to output messages, but they will
+ // still be stored in history.
+
+
+ if (!window$1.console) {
+ return;
+ } // Was setting these once outside of this function, but containing them
+ // in the function makes it easier to test cases where console doesn't exist
+ // when the module is executed.
+
+
+ var fn = window$1.console[type];
+
+ if (!fn && type === 'debug') {
+ // Certain browsers don't have support for console.debug. For those, we
+ // should default to the closest comparable log.
+ fn = window$1.console.info || window$1.console.log;
+ } // Bail out if there's no console or if this type is not allowed by the
+ // current logging level.
+
+
+ if (!fn || !lvl || !lvlRegExp.test(type)) {
+ return;
+ }
+
+ fn[Array.isArray(args) ? 'apply' : 'call'](window$1.console, args);
+ };
+};
+
+function createLogger$1(name) {
+ // This is the private tracking variable for logging level.
+ var level = 'info'; // the curried logByType bound to the specific log and history
+
+ var logByType;
+ /**
+ * Logs plain debug messages. Similar to `console.log`.
+ *
+ * Due to [limitations](https://github.com/jsdoc3/jsdoc/issues/955#issuecomment-313829149)
+ * of our JSDoc template, we cannot properly document this as both a function
+ * and a namespace, so its function signature is documented here.
+ *
+ * #### Arguments
+ * ##### *args
+ * Mixed[]
+ *
+ * Any combination of values that could be passed to `console.log()`.
+ *
+ * #### Return Value
+ *
+ * `undefined`
+ *
+ * @namespace
+ * @param {Mixed[]} args
+ * One or more messages or objects that should be logged.
+ */
+
+ var log = function log() {
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ logByType('log', level, args);
+ }; // This is the logByType helper that the logging methods below use
+
+
+ logByType = LogByTypeFactory(name, log);
+ /**
+ * Create a new sublogger which chains the old name to the new name.
+ *
+ * For example, doing `videojs.log.createLogger('player')` and then using that logger will log the following:
+ * ```js
+ * mylogger('foo');
+ * // > VIDEOJS: player: foo
+ * ```
+ *
+ * @param {string} name
+ * The name to append to the current logger's name when creating the new sublogger
+ * @return {Object}
+ */
+
+ log.createLogger = function (subname) {
+ return createLogger$1(name + ': ' + subname);
+ };
+ /**
+ * Enumeration of available logging levels, where the keys are the level names
+ * and the values are `|`-separated strings containing logging methods allowed
+ * in that logging level. These strings are used to create a regular expression
+ * matching the function name being called.
+ *
+ * Levels provided by Video.js are:
+ *
+ * - `off`: Matches no calls. Any value that can be cast to `false` will have
+ * this effect. The most restrictive.
+ * - `all`: Matches only Video.js-provided functions (`debug`, `log`,
+ * `log.warn`, and `log.error`).
+ * - `debug`: Matches `log.debug`, `log`, `log.warn`, and `log.error` calls.
+ * - `info` (default): Matches `log`, `log.warn`, and `log.error` calls.
+ * - `warn`: Matches `log.warn` and `log.error` calls.
+ * - `error`: Matches only `log.error` calls.
+ *
+ * @type {Object}
+ */
+
+
+ log.levels = {
+ all: 'debug|log|warn|error',
+ off: '',
+ debug: 'debug|log|warn|error',
+ info: 'log|warn|error',
+ warn: 'warn|error',
+ error: 'error',
+ DEFAULT: level
+ };
+ /**
+ * Get or set the current logging level.
+ *
+ * If a string matching a key from {@link module:log.levels} is provided, acts
+ * as a setter.
+ *
+ * @param {string} [lvl]
+ * Pass a valid level to set a new logging level.
+ *
+ * @return {string}
+ * The current logging level.
+ */
+
+ log.level = function (lvl) {
+ if (typeof lvl === 'string') {
+ if (!log.levels.hasOwnProperty(lvl)) {
+ throw new Error("\"" + lvl + "\" is not a valid log level");
+ }
+
+ level = lvl;
+ }
+
+ return level;
+ };
+ /**
+ * Returns an array containing everything that has been logged to the history.
+ *
+ * This array is a shallow clone of the internal history record. However, its
+ * contents are _not_ cloned; so, mutating objects inside this array will
+ * mutate them in history.
+ *
+ * @return {Array}
+ */
+
+
+ log.history = function () {
+ return history ? [].concat(history) : [];
+ };
+ /**
+ * Allows you to filter the history by the given logger name
+ *
+ * @param {string} fname
+ * The name to filter by
+ *
+ * @return {Array}
+ * The filtered list to return
+ */
+
+
+ log.history.filter = function (fname) {
+ return (history || []).filter(function (historyItem) {
+ // if the first item in each historyItem includes `fname`, then it's a match
+ return new RegExp(".*" + fname + ".*").test(historyItem[0]);
+ });
+ };
+ /**
+ * Clears the internal history tracking, but does not prevent further history
+ * tracking.
+ */
+
+
+ log.history.clear = function () {
+ if (history) {
+ history.length = 0;
+ }
+ };
+ /**
+ * Disable history tracking if it is currently enabled.
+ */
+
+
+ log.history.disable = function () {
+ if (history !== null) {
+ history.length = 0;
+ history = null;
+ }
+ };
+ /**
+ * Enable history tracking if it is currently disabled.
+ */
+
+
+ log.history.enable = function () {
+ if (history === null) {
+ history = [];
+ }
+ };
+ /**
+ * Logs error messages. Similar to `console.error`.
+ *
+ * @param {Mixed[]} args
+ * One or more messages or objects that should be logged as an error
+ */
+
+
+ log.error = function () {
+ for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
+ args[_key2] = arguments[_key2];
+ }
+
+ return logByType('error', level, args);
+ };
+ /**
+ * Logs warning messages. Similar to `console.warn`.
+ *
+ * @param {Mixed[]} args
+ * One or more messages or objects that should be logged as a warning.
+ */
+
+
+ log.warn = function () {
+ for (var _len3 = arguments.length, args = new Array(_len3), _key3 = 0; _key3 < _len3; _key3++) {
+ args[_key3] = arguments[_key3];
+ }
+
+ return logByType('warn', level, args);
+ };
+ /**
+ * Logs debug messages. Similar to `console.debug`, but may also act as a comparable
+ * log if `console.debug` is not available
+ *
+ * @param {Mixed[]} args
+ * One or more messages or objects that should be logged as debug.
+ */
+
+
+ log.debug = function () {
+ for (var _len4 = arguments.length, args = new Array(_len4), _key4 = 0; _key4 < _len4; _key4++) {
+ args[_key4] = arguments[_key4];
+ }
+
+ return logByType('debug', level, args);
+ };
+
+ return log;
+}
+
+/**
+ * @file log.js
+ * @module log
+ */
+var log$1 = createLogger$1('VIDEOJS');
+var createLogger = log$1.createLogger;
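+// Illustrative usage (assumed; `log$1` is exposed as `videojs.log` later in this bundle):
+//
+//   videojs.log.level('debug');                       // let log.debug calls through
+//   var playerLog = videojs.log.createLogger('player');
+//   playerLog('ready');                               // > VIDEOJS: player: ready
+//   videojs.log.history.filter('player');             // history entries from the sublogger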
+
+/**
+ * @file obj.js
+ * @module obj
+ */
+
+/**
+ * @callback obj:EachCallback
+ *
+ * @param {Mixed} value
+ * The current value of the object that is being iterated over.
+ *
+ * @param {string} key
+ * The current key of the object that is being iterated over
+ */
+
+/**
+ * @callback obj:ReduceCallback
+ *
+ * @param {Mixed} accum
+ * The value that is accumulating over the reduce loop.
+ *
+ * @param {Mixed} value
+ * The current value of the object that is being iterated over.
+ *
+ * @param {string} key
+ * The current key of the object that is being iterated over
+ *
+ * @return {Mixed}
+ * The new accumulated value.
+ */
+var toString = Object.prototype.toString;
+/**
+ * Get the keys of an Object
+ *
+ * @param {Object}
+ * The Object to get the keys from
+ *
+ * @return {string[]}
+ * An array of the keys from the object. Returns an empty array if the
+ * object passed in was invalid or had no keys.
+ *
+ * @private
+ */
+
+var keys = function keys(object) {
+ return isObject(object) ? Object.keys(object) : [];
+};
+/**
+ * Array-like iteration for objects.
+ *
+ * @param {Object} object
+ * The object to iterate over
+ *
+ * @param {obj:EachCallback} fn
+ * The callback function which is called for each key in the object.
+ */
+
+
+function each(object, fn) {
+ keys(object).forEach(function (key) {
+ return fn(object[key], key);
+ });
+}
+/**
+ * Array-like reduce for objects.
+ *
+ * @param {Object} object
+ * The Object that you want to reduce.
+ *
+ * @param {Function} fn
+ * A callback function which is called for each key in the object. It
+ * receives the accumulated value and the per-iteration value and key
+ * as arguments.
+ *
+ * @param {Mixed} [initial = 0]
+ * Starting value
+ *
+ * @return {Mixed}
+ * The final accumulated value.
+ */
+
+function reduce(object, fn, initial) {
+ if (initial === void 0) {
+ initial = 0;
+ }
+
+ return keys(object).reduce(function (accum, key) {
+ return fn(accum, object[key], key);
+ }, initial);
+}
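+// Quick illustration (assumed values):
+//   each({ a: 1, b: 2 }, function (value, key) { log$1(key, value); });     // logs "a 1", then "b 2"
+//   reduce({ a: 1, b: 2 }, function (sum, value) { return sum + value; });  // 3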
+/**
+ * Object.assign-style object shallow merge/extend.
+ *
+ * @param {Object} target
+ * @param {Object} ...sources
+ * @return {Object}
+ */
+
+function assign(target) {
+ for (var _len = arguments.length, sources = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
+ sources[_key - 1] = arguments[_key];
+ }
+
+ if (Object.assign) {
+ return _extends.apply(void 0, [target].concat(sources));
+ }
+
+ sources.forEach(function (source) {
+ if (!source) {
+ return;
+ }
+
+ each(source, function (value, key) {
+ target[key] = value;
+ });
+ });
+ return target;
+}
+/**
+ * Returns whether a value is an object of any kind - including DOM nodes,
+ * arrays, regular expressions, etc. Not functions, though.
+ *
+ * This avoids the gotcha where using `typeof` on a `null` value
+ * results in `'object'`.
+ *
+ * @param {Object} value
+ * @return {boolean}
+ */
+
+function isObject(value) {
+ return !!value && typeof value === 'object';
+}
+/**
+ * Returns whether an object appears to be a "plain" object - that is, a
+ * direct instance of `Object`.
+ *
+ * @param {Object} value
+ * @return {boolean}
+ */
+
+function isPlain(value) {
+ return isObject(value) && toString.call(value) === '[object Object]' && value.constructor === Object;
+}
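+// Quick illustration of the distinction above (assumed values):
+//   isObject([]);         // true  - arrays, DOM nodes, regexps all count
+//   isObject(null);       // false - avoids the `typeof null === 'object'` gotcha
+//   isPlain({ a: 1 });    // true  - a direct Object instance
+//   isPlain(new Date());  // false - not a "plain" object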
+
+/**
+ * @file computed-style.js
+ * @module computed-style
+ */
+/**
+ * A safe getComputedStyle.
+ *
+ * This is needed because in Firefox, if the player is loaded in an iframe with
+ * `display:none`, then `getComputedStyle` returns `null`, so, we do a
+ * null-check to make sure that the player doesn't break in these cases.
+ *
+ * @function
+ * @param {Element} el
+ * The element you want the computed style of
+ *
+ * @param {string} prop
+ * The property name you want
+ *
+ * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
+ */
+
+function computedStyle(el, prop) {
+ if (!el || !prop) {
+ return '';
+ }
+
+ if (typeof window$1.getComputedStyle === 'function') {
+ var computedStyleValue;
+
+ try {
+ computedStyleValue = window$1.getComputedStyle(el);
+ } catch (e) {
+ return '';
+ }
+
+ return computedStyleValue ? computedStyleValue.getPropertyValue(prop) || computedStyleValue[prop] : '';
+ }
+
+ return '';
+}
+
+/**
+ * @file browser.js
+ * @module browser
+ */
+var USER_AGENT = window$1.navigator && window$1.navigator.userAgent || '';
+var webkitVersionMap = /AppleWebKit\/([\d.]+)/i.exec(USER_AGENT);
+var appleWebkitVersion = webkitVersionMap ? parseFloat(webkitVersionMap.pop()) : null;
+/**
+ * Whether or not this device is an iPod.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_IPOD = /iPod/i.test(USER_AGENT);
+/**
+ * The detected iOS version - or `null`.
+ *
+ * @static
+ * @const
+ * @type {string|null}
+ */
+
+var IOS_VERSION = function () {
+ var match = USER_AGENT.match(/OS (\d+)_/i);
+
+ if (match && match[1]) {
+ return match[1];
+ }
+
+ return null;
+}();
+/**
+ * Whether or not this is an Android device.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_ANDROID = /Android/i.test(USER_AGENT);
+/**
+ * The detected Android version - or `null`.
+ *
+ * @static
+ * @const
+ * @type {number|string|null}
+ */
+
+var ANDROID_VERSION = function () {
+ // This matches Android Major.Minor.Patch versions
+ // ANDROID_VERSION is Major.Minor as a Number, if Minor isn't available, then only Major is returned
+ var match = USER_AGENT.match(/Android (\d+)(?:\.(\d+))?(?:\.(\d+))*/i);
+
+ if (!match) {
+ return null;
+ }
+
+ var major = match[1] && parseFloat(match[1]);
+ var minor = match[2] && parseFloat(match[2]);
+
+ if (major && minor) {
+ return parseFloat(match[1] + '.' + match[2]);
+ } else if (major) {
+ return major;
+ }
+
+ return null;
+}();
+/**
+ * Whether or not this is a native Android browser.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_NATIVE_ANDROID = IS_ANDROID && ANDROID_VERSION < 5 && appleWebkitVersion < 537;
+/**
+ * Whether or not this is Mozilla Firefox.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_FIREFOX = /Firefox/i.test(USER_AGENT);
+/**
+ * Whether or not this is Microsoft Edge.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_EDGE = /Edg/i.test(USER_AGENT);
+/**
+ * Whether or not this is Google Chrome.
+ *
+ * This will also be `true` for Chrome on iOS, which will have different support
+ * as it is actually Safari under the hood.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_CHROME = !IS_EDGE && (/Chrome/i.test(USER_AGENT) || /CriOS/i.test(USER_AGENT));
+/**
+ * The detected Google Chrome version - or `null`.
+ *
+ * @static
+ * @const
+ * @type {number|null}
+ */
+
+var CHROME_VERSION = function () {
+ var match = USER_AGENT.match(/(Chrome|CriOS)\/(\d+)/);
+
+ if (match && match[2]) {
+ return parseFloat(match[2]);
+ }
+
+ return null;
+}();
+/**
+ * The detected Internet Explorer version - or `null`.
+ *
+ * @static
+ * @const
+ * @type {number|null}
+ */
+
+var IE_VERSION = function () {
+ var result = /MSIE\s(\d+)\.\d/.exec(USER_AGENT);
+ var version = result && parseFloat(result[1]);
+
+ if (!version && /Trident\/7.0/i.test(USER_AGENT) && /rv:11.0/.test(USER_AGENT)) {
+ // IE 11 has a different user agent string than other IE versions
+ version = 11.0;
+ }
+
+ return version;
+}();
+/**
+ * Whether or not this is desktop Safari.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_SAFARI = /Safari/i.test(USER_AGENT) && !IS_CHROME && !IS_ANDROID && !IS_EDGE;
+/**
+ * Whether or not this is a Windows machine.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_WINDOWS = /Windows/i.test(USER_AGENT);
+/**
+ * Whether or not this device is touch-enabled.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var TOUCH_ENABLED = Boolean(isReal() && ('ontouchstart' in window$1 || window$1.navigator.maxTouchPoints || window$1.DocumentTouch && window$1.document instanceof window$1.DocumentTouch));
+/**
+ * Whether or not this device is an iPad.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_IPAD = /iPad/i.test(USER_AGENT) || IS_SAFARI && TOUCH_ENABLED && !/iPhone/i.test(USER_AGENT);
+/**
+ * Whether or not this device is an iPhone.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+// The Facebook app's UIWebView identifies as both an iPhone and iPad, so
+// to identify iPhones, we need to exclude iPads.
+// http://artsy.github.io/blog/2012/10/18/the-perils-of-ios-user-agent-sniffing/
+
+var IS_IPHONE = /iPhone/i.test(USER_AGENT) && !IS_IPAD;
+/**
+ * Whether or not this is an iOS device.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_IOS = IS_IPHONE || IS_IPAD || IS_IPOD;
+/**
+ * Whether or not this is any flavor of Safari - including iOS.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+var IS_ANY_SAFARI = (IS_SAFARI || IS_IOS) && !IS_CHROME;
+
+var browser = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ IS_IPOD: IS_IPOD,
+ IOS_VERSION: IOS_VERSION,
+ IS_ANDROID: IS_ANDROID,
+ ANDROID_VERSION: ANDROID_VERSION,
+ IS_NATIVE_ANDROID: IS_NATIVE_ANDROID,
+ IS_FIREFOX: IS_FIREFOX,
+ IS_EDGE: IS_EDGE,
+ IS_CHROME: IS_CHROME,
+ CHROME_VERSION: CHROME_VERSION,
+ IE_VERSION: IE_VERSION,
+ IS_SAFARI: IS_SAFARI,
+ IS_WINDOWS: IS_WINDOWS,
+ TOUCH_ENABLED: TOUCH_ENABLED,
+ IS_IPAD: IS_IPAD,
+ IS_IPHONE: IS_IPHONE,
+ IS_IOS: IS_IOS,
+ IS_ANY_SAFARI: IS_ANY_SAFARI
+});
+
+/**
+ * @file dom.js
+ * @module dom
+ */
+/**
+ * Detect if a value is a string with any non-whitespace characters.
+ *
+ * @private
+ * @param {string} str
+ * The string to check
+ *
+ * @return {boolean}
+ * Will be `true` if the string is non-blank, `false` otherwise.
+ *
+ */
+
+function isNonBlankString(str) {
+ // we use str.trim as it will trim any whitespace characters
+ // from the front or back of non-whitespace characters. aka
+ // Any string that contains non-whitespace characters will
+ // still contain them after `trim` but whitespace only strings
+ // will have a length of 0, failing this check.
+ return typeof str === 'string' && Boolean(str.trim());
+}
+/**
+ * Throws an error if the passed string has whitespace. This is used by
+ * class methods to be relatively consistent with the classList API.
+ *
+ * @private
+ * @param {string} str
+ * The string to check for whitespace.
+ *
+ * @throws {Error}
+ * Throws an error if there is whitespace in the string.
+ */
+
+
+function throwIfWhitespace(str) {
+ // str.indexOf instead of regex because str.indexOf is faster performance wise.
+ if (str.indexOf(' ') >= 0) {
+ throw new Error('class has illegal whitespace characters');
+ }
+}
+/**
+ * Produce a regular expression for matching a className within an elements className.
+ *
+ * @private
+ * @param {string} className
+ * The className to generate the RegExp for.
+ *
+ * @return {RegExp}
+ * The RegExp that will check for a specific `className` in an elements
+ * className.
+ */
+
+
+function classRegExp(className) {
+ return new RegExp('(^|\\s)' + className + '($|\\s)');
+}
+/**
+ * Whether the current DOM interface appears to be real (i.e. not simulated).
+ *
+ * @return {boolean}
+ * Will be `true` if the DOM appears to be real, `false` otherwise.
+ */
+
+
+function isReal() {
+ // Both document and window will never be undefined thanks to `global`.
+ return document === window$1.document;
+}
+/**
+ * Determines, via duck typing, whether or not a value is a DOM element.
+ *
+ * @param {Mixed} value
+ * The value to check.
+ *
+ * @return {boolean}
+ * Will be `true` if the value is a DOM element, `false` otherwise.
+ */
+
+function isEl(value) {
+ return isObject(value) && value.nodeType === 1;
+}
+/**
+ * Determines if the current DOM is embedded in an iframe.
+ *
+ * @return {boolean}
+ * Will be `true` if the DOM is embedded in an iframe, `false`
+ * otherwise.
+ */
+
+function isInFrame() {
+ // We need a try/catch here because Safari will throw errors when attempting
+ // to get either `parent` or `self`
+ try {
+ return window$1.parent !== window$1.self;
+ } catch (x) {
+ return true;
+ }
+}
+/**
+ * Creates functions to query the DOM using a given method.
+ *
+ * @private
+ * @param {string} method
+ * The method to create the query with.
+ *
+ * @return {Function}
+ * The query method
+ */
+
+function createQuerier(method) {
+ return function (selector, context) {
+ if (!isNonBlankString(selector)) {
+ return document[method](null);
+ }
+
+ if (isNonBlankString(context)) {
+ context = document.querySelector(context);
+ }
+
+ var ctx = isEl(context) ? context : document;
+ return ctx[method] && ctx[method](selector);
+ };
+}
+/**
+ * Creates an element and applies properties, attributes, and inserts content.
+ *
+ * @param {string} [tagName='div']
+ * Name of tag to be created.
+ *
+ * @param {Object} [properties={}]
+ * Element properties to be applied.
+ *
+ * @param {Object} [attributes={}]
+ * Element attributes to be applied.
+ *
+ * @param {module:dom~ContentDescriptor} content
+ * A content descriptor object.
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+function createEl(tagName, properties, attributes, content) {
+ if (tagName === void 0) {
+ tagName = 'div';
+ }
+
+ if (properties === void 0) {
+ properties = {};
+ }
+
+ if (attributes === void 0) {
+ attributes = {};
+ }
+
+ var el = document.createElement(tagName);
+ Object.getOwnPropertyNames(properties).forEach(function (propName) {
+ var val = properties[propName]; // See #2176
+ // We originally were accepting both properties and attributes in the
+ // same object, but that doesn't work so well.
+
+ if (propName.indexOf('aria-') !== -1 || propName === 'role' || propName === 'type') {
+ log$1.warn('Setting attributes in the second argument of createEl()\n' + 'has been deprecated. Use the third argument instead.\n' + ("createEl(type, properties, attributes). Attempting to set " + propName + " to " + val + "."));
+ el.setAttribute(propName, val); // Handle textContent since it's not supported everywhere and we have a
+ // method for it.
+ } else if (propName === 'textContent') {
+ textContent(el, val);
+ } else if (el[propName] !== val || propName === 'tabIndex') {
+ el[propName] = val;
+ }
+ });
+ Object.getOwnPropertyNames(attributes).forEach(function (attrName) {
+ el.setAttribute(attrName, attributes[attrName]);
+ });
+
+ if (content) {
+ appendContent(el, content);
+ }
+
+ return el;
+}
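+// Illustrative usage (assumed element and class names): per the deprecation warning
+// above, properties go in the second argument and attributes in the third, e.g.
+//
+//   var button = createEl('button',
+//     { className: 'vjs-big-play-button', textContent: 'Play' },
+//     { type: 'button', 'aria-label': 'Play Video' });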
+/**
+ * Injects text into an element, replacing any existing contents entirely.
+ *
+ * @param {Element} el
+ * The element to add text content into
+ *
+ * @param {string} text
+ * The text content to add.
+ *
+ * @return {Element}
+ * The element with added text content.
+ */
+
+function textContent(el, text) {
+ if (typeof el.textContent === 'undefined') {
+ el.innerText = text;
+ } else {
+ el.textContent = text;
+ }
+
+ return el;
+}
+/**
+ * Insert an element as the first child node of another
+ *
+ * @param {Element} child
+ * Element to insert
+ *
+ * @param {Element} parent
+ * Element to insert child into
+ */
+
+function prependTo(child, parent) {
+ if (parent.firstChild) {
+ parent.insertBefore(child, parent.firstChild);
+ } else {
+ parent.appendChild(child);
+ }
+}
+/**
+ * Check if an element has a class name.
+ *
+ * @param {Element} element
+ * Element to check
+ *
+ * @param {string} classToCheck
+ * Class name to check for
+ *
+ * @return {boolean}
+ * Will be `true` if the element has a class, `false` otherwise.
+ *
+ * @throws {Error}
+ * Throws an error if `classToCheck` has white space.
+ */
+
+function hasClass(element, classToCheck) {
+ throwIfWhitespace(classToCheck);
+
+ if (element.classList) {
+ return element.classList.contains(classToCheck);
+ }
+
+ return classRegExp(classToCheck).test(element.className);
+}
+/**
+ * Add a class name to an element.
+ *
+ * @param {Element} element
+ * Element to add class name to.
+ *
+ * @param {string} classToAdd
+ * Class name to add.
+ *
+ * @return {Element}
+ * The DOM element with the added class name.
+ */
+
+function addClass(element, classToAdd) {
+ if (element.classList) {
+ element.classList.add(classToAdd); // Don't need to `throwIfWhitespace` here because `hasClass` will do it
+ // in the case of classList not being supported.
+ } else if (!hasClass(element, classToAdd)) {
+ element.className = (element.className + ' ' + classToAdd).trim();
+ }
+
+ return element;
+}
+/**
+ * Remove a class name from an element.
+ *
+ * @param {Element} element
+ * Element to remove a class name from.
+ *
+ * @param {string} classToRemove
+ * Class name to remove
+ *
+ * @return {Element}
+ * The DOM element with class name removed.
+ */
+
+function removeClass(element, classToRemove) {
+ // Protect in case the player gets disposed
+ if (!element) {
+ log$1.warn("removeClass was called with an element that doesn't exist");
+ return null;
+ }
+
+ if (element.classList) {
+ element.classList.remove(classToRemove);
+ } else {
+ throwIfWhitespace(classToRemove);
+ element.className = element.className.split(/\s+/).filter(function (c) {
+ return c !== classToRemove;
+ }).join(' ');
+ }
+
+ return element;
+}
+/**
+ * The callback definition for toggleClass.
+ *
+ * @callback module:dom~PredicateCallback
+ * @param {Element} element
+ * The DOM element of the Component.
+ *
+ * @param {string} classToToggle
+ * The `className` that wants to be toggled
+ *
+ * @return {boolean|undefined}
+ * If `true` is returned, the `classToToggle` will be added to the
+ * `element`. If `false`, the `classToToggle` will be removed from
+ * the `element`. If `undefined`, the callback will be ignored.
+ */
+
+/**
+ * Adds or removes a class name to/from an element depending on an optional
+ * condition or the presence/absence of the class name.
+ *
+ * @param {Element} element
+ * The element to toggle a class name on.
+ *
+ * @param {string} classToToggle
+ * The class that should be toggled.
+ *
+ * @param {boolean|module:dom~PredicateCallback} [predicate]
+ * See the return value for {@link module:dom~PredicateCallback}
+ *
+ * @return {Element}
+ * The element with a class that has been toggled.
+ */
+
+function toggleClass(element, classToToggle, predicate) {
+ // This CANNOT use `classList` internally because IE11 does not support the
+ // second parameter to the `classList.toggle()` method! Which is fine because
+ // `classList` will be used by the add/remove functions.
+ var has = hasClass(element, classToToggle);
+
+ if (typeof predicate === 'function') {
+ predicate = predicate(element, classToToggle);
+ }
+
+ if (typeof predicate !== 'boolean') {
+ predicate = !has;
+ } // If the necessary class operation matches the current state of the
+ // element, no action is required.
+
+
+ if (predicate === has) {
+ return;
+ }
+
+ if (predicate) {
+ addClass(element, classToToggle);
+ } else {
+ removeClass(element, classToToggle);
+ }
+
+ return element;
+}
+/**
+ * Apply attributes to an HTML element.
+ *
+ * @param {Element} el
+ * Element to add attributes to.
+ *
+ * @param {Object} [attributes]
+ * Attributes to be applied.
+ */
+
+function setAttributes(el, attributes) {
+ Object.getOwnPropertyNames(attributes).forEach(function (attrName) {
+ var attrValue = attributes[attrName];
+
+ if (attrValue === null || typeof attrValue === 'undefined' || attrValue === false) {
+ el.removeAttribute(attrName);
+ } else {
+ el.setAttribute(attrName, attrValue === true ? '' : attrValue);
+ }
+ });
+}
+/**
+ * Get an element's attribute values, as defined on the HTML tag.
+ *
+ * Attributes are not the same as properties. They're defined on the tag
+ * or with setAttribute.
+ *
+ * @param {Element} tag
+ * Element from which to get tag attributes.
+ *
+ * @return {Object}
+ * All attributes of the element. Boolean attributes will be `true` or
+ * `false`, others will be strings.
+ */
+
+function getAttributes(tag) {
+ var obj = {}; // known boolean attributes
+ // we can check for matching boolean properties, but not all browsers
+ // and not all tags know about these attributes, so, we still want to check them manually
+
+ var knownBooleans = ',' + 'autoplay,controls,playsinline,loop,muted,default,defaultMuted' + ',';
+
+ if (tag && tag.attributes && tag.attributes.length > 0) {
+ var attrs = tag.attributes;
+
+ for (var i = attrs.length - 1; i >= 0; i--) {
+ var attrName = attrs[i].name;
+ var attrVal = attrs[i].value; // check for known booleans
+ // the matching element property will return a value for typeof
+
+ if (typeof tag[attrName] === 'boolean' || knownBooleans.indexOf(',' + attrName + ',') !== -1) {
+ // the value of an included boolean attribute is typically an empty
+ // string ('') which would equal false if we just check for a false value.
+ // we also don't want to support bad code like autoplay='false'
+ attrVal = attrVal !== null ? true : false;
+ }
+
+ obj[attrName] = attrVal;
+ }
+ }
+
+ return obj;
+}
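+// Illustrative result (assumed markup): for <video controls muted src="movie.mp4">,
+// getAttributes(videoEl) returns { controls: true, muted: true, src: 'movie.mp4' },
+// because `controls` and `muted` are treated as known boolean attributes above.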
+/**
+ * Get the value of an element's attribute.
+ *
+ * @param {Element} el
+ * A DOM element.
+ *
+ * @param {string} attribute
+ * Attribute to get the value of.
+ *
+ * @return {string}
+ * The value of the attribute.
+ */
+
+function getAttribute(el, attribute) {
+ return el.getAttribute(attribute);
+}
+/**
+ * Set the value of an element's attribute.
+ *
+ * @param {Element} el
+ * A DOM element.
+ *
+ * @param {string} attribute
+ * Attribute to set.
+ *
+ * @param {string} value
+ * Value to set the attribute to.
+ */
+
+function setAttribute(el, attribute, value) {
+ el.setAttribute(attribute, value);
+}
+/**
+ * Remove an element's attribute.
+ *
+ * @param {Element} el
+ * A DOM element.
+ *
+ * @param {string} attribute
+ * Attribute to remove.
+ */
+
+function removeAttribute(el, attribute) {
+ el.removeAttribute(attribute);
+}
+/**
+ * Attempt to block the ability to select text.
+ */
+
+function blockTextSelection() {
+ document.body.focus();
+
+ document.onselectstart = function () {
+ return false;
+ };
+}
+/**
+ * Turn off text selection blocking.
+ */
+
+function unblockTextSelection() {
+ document.onselectstart = function () {
+ return true;
+ };
+}
+/**
+ * Identical to the native `getBoundingClientRect` function, but ensures that
+ * the method is supported at all (it is in all browsers we claim to support)
+ * and that the element is in the DOM before continuing.
+ *
+ * This wrapper function also shims properties which are not provided by some
+ * older browsers (namely, IE8).
+ *
+ * Additionally, some browsers do not support adding properties to a
+ * `ClientRect`/`DOMRect` object; so, we shallow-copy it with the standard
+ * properties (except `x` and `y` which are not widely supported). This helps
+ * avoid implementations where keys are non-enumerable.
+ *
+ * @param {Element} el
+ * Element whose `ClientRect` we want to calculate.
+ *
+ * @return {Object|undefined}
+ * Always returns a plain object - or `undefined` if it cannot.
+ */
+
+function getBoundingClientRect(el) {
+ if (el && el.getBoundingClientRect && el.parentNode) {
+ var rect = el.getBoundingClientRect();
+ var result = {};
+ ['bottom', 'height', 'left', 'right', 'top', 'width'].forEach(function (k) {
+ if (rect[k] !== undefined) {
+ result[k] = rect[k];
+ }
+ });
+
+ if (!result.height) {
+ result.height = parseFloat(computedStyle(el, 'height'));
+ }
+
+ if (!result.width) {
+ result.width = parseFloat(computedStyle(el, 'width'));
+ }
+
+ return result;
+ }
+}
+/**
+ * Represents the position of a DOM element on the page.
+ *
+ * @typedef {Object} module:dom~Position
+ *
+ * @property {number} left
+ * Pixels to the left.
+ *
+ * @property {number} top
+ * Pixels from the top.
+ */
+
+/**
+ * Get the position of an element in the DOM.
+ *
+ * Uses `getBoundingClientRect` technique from John Resig.
+ *
+ * @see http://ejohn.org/blog/getboundingclientrect-is-awesome/
+ *
+ * @param {Element} el
+ * Element from which to get offset.
+ *
+ * @return {module:dom~Position}
+ * The position of the element that was passed in.
+ */
+
+function findPosition(el) {
+ if (!el || el && !el.offsetParent) {
+ return {
+ left: 0,
+ top: 0,
+ width: 0,
+ height: 0
+ };
+ }
+
+ var width = el.offsetWidth;
+ var height = el.offsetHeight;
+ var left = 0;
+ var top = 0;
+
+ while (el.offsetParent && el !== document[FullscreenApi.fullscreenElement]) {
+ left += el.offsetLeft;
+ top += el.offsetTop;
+ el = el.offsetParent;
+ }
+
+ return {
+ left: left,
+ top: top,
+ width: width,
+ height: height
+ };
+}
+/**
+ * Represents x and y coordinates for a DOM element or mouse pointer.
+ *
+ * @typedef {Object} module:dom~Coordinates
+ *
+ * @property {number} x
+ * x coordinate in pixels
+ *
+ * @property {number} y
+ * y coordinate in pixels
+ */
+
+/**
+ * Get the pointer position within an element.
+ *
+ * The base of the coordinates is the bottom left of the element.
+ *
+ * @param {Element} el
+ * Element on which to get the pointer position on.
+ *
+ * @param {EventTarget~Event} event
+ * Event object.
+ *
+ * @return {module:dom~Coordinates}
+ * A coordinates object corresponding to the mouse position.
+ *
+ */
+
+function getPointerPosition(el, event) {
+ var translated = {
+ x: 0,
+ y: 0
+ };
+
+ if (IS_IOS) {
+ var item = el;
+
+ while (item && item.nodeName.toLowerCase() !== 'html') {
+ var transform = computedStyle(item, 'transform');
+
+ if (/^matrix/.test(transform)) {
+ var values = transform.slice(7, -1).split(/,\s/).map(Number);
+ translated.x += values[4];
+ translated.y += values[5];
+ } else if (/^matrix3d/.test(transform)) {
+ var _values = transform.slice(9, -1).split(/,\s/).map(Number);
+
+ translated.x += _values[12];
+ translated.y += _values[13];
+ }
+
+ item = item.parentNode;
+ }
+ }
+
+ var position = {};
+ var boxTarget = findPosition(event.target);
+ var box = findPosition(el);
+ var boxW = box.width;
+ var boxH = box.height;
+ var offsetY = event.offsetY - (box.top - boxTarget.top);
+ var offsetX = event.offsetX - (box.left - boxTarget.left);
+
+ if (event.changedTouches) {
+ offsetX = event.changedTouches[0].pageX - box.left;
+ offsetY = event.changedTouches[0].pageY + box.top;
+
+ if (IS_IOS) {
+ offsetX -= translated.x;
+ offsetY -= translated.y;
+ }
+ }
+
+ position.y = 1 - Math.max(0, Math.min(1, offsetY / boxH));
+ position.x = Math.max(0, Math.min(1, offsetX / boxW));
+ return position;
+}
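+// Illustrative reading of the result (assumed dimensions): for a 200px-wide, 10px-tall
+// element, a click 150px from its left edge at its vertical midpoint yields roughly
+// { x: 0.75, y: 0.5 }, since both values are clamped fractions of the element size and
+// y is measured up from the bottom edge.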
+/**
+ * Determines, via duck typing, whether or not a value is a text node.
+ *
+ * @param {Mixed} value
+ * Check if this value is a text node.
+ *
+ * @return {boolean}
+ * Will be `true` if the value is a text node, `false` otherwise.
+ */
+
+function isTextNode(value) {
+ return isObject(value) && value.nodeType === 3;
+}
+/**
+ * Empties the contents of an element.
+ *
+ * @param {Element} el
+ * The element to empty children from
+ *
+ * @return {Element}
+ * The element with no children
+ */
+
+function emptyEl(el) {
+ while (el.firstChild) {
+ el.removeChild(el.firstChild);
+ }
+
+ return el;
+}
+/**
+ * This is a mixed value that describes content to be injected into the DOM
+ * via some method. It can be of the following types:
+ *
+ * Type | Description
+ * -----------|-------------
+ * `string` | The value will be normalized into a text node.
+ * `Element` | The value will be accepted as-is.
+ * `TextNode` | The value will be accepted as-is.
+ * `Array` | A one-dimensional array of strings, elements, text nodes, or functions. These functions should return a string, element, or text node (any other return value, like an array, will be ignored).
+ * `Function` | A function, which is expected to return a string, element, text node, or array - any of the other possible values described above. This means that a content descriptor could be a function that returns an array of functions, but those second-level functions must return strings, elements, or text nodes.
+ *
+ * @typedef {string|Element|TextNode|Array|Function} module:dom~ContentDescriptor
+ */
+
+/**
+ * Normalizes content for eventual insertion into the DOM.
+ *
+ * This allows a wide range of content definition methods, but helps protect
+ * from falling into the trap of simply writing to `innerHTML`, which could
+ * be an XSS concern.
+ *
+ * The content for an element can be passed in multiple types and
+ * combinations, whose behavior is as follows:
+ *
+ * @param {module:dom~ContentDescriptor} content
+ * A content descriptor value.
+ *
+ * @return {Array}
+ * All of the content that was passed in, normalized to an array of
+ * elements or text nodes.
+ */
+
+function normalizeContent(content) {
+ // First, invoke content if it is a function. If it produces an array,
+ // that needs to happen before normalization.
+ if (typeof content === 'function') {
+ content = content();
+ } // Next up, normalize to an array, so one or many items can be normalized,
+ // filtered, and returned.
+
+
+ return (Array.isArray(content) ? content : [content]).map(function (value) {
+ // First, invoke value if it is a function to produce a new value,
+ // which will be subsequently normalized to a Node of some kind.
+ if (typeof value === 'function') {
+ value = value();
+ }
+
+ if (isEl(value) || isTextNode(value)) {
+ return value;
+ }
+
+ if (typeof value === 'string' && /\S/.test(value)) {
+ return document.createTextNode(value);
+ }
+ }).filter(function (value) {
+ return value;
+ });
+}
+/**
+ * Normalizes and appends content to an element.
+ *
+ * @param {Element} el
+ * Element to append normalized content to.
+ *
+ * @param {module:dom~ContentDescriptor} content
+ * A content descriptor value.
+ *
+ * @return {Element}
+ * The element with appended normalized content.
+ */
+
+function appendContent(el, content) {
+ normalizeContent(content).forEach(function (node) {
+ return el.appendChild(node);
+ });
+ return el;
+}
+/**
+ * Normalizes and inserts content into an element; this is identical to
+ * `appendContent()`, except it empties the element first.
+ *
+ * @param {Element} el
+ * Element to insert normalized content into.
+ *
+ * @param {module:dom~ContentDescriptor} content
+ * A content descriptor value.
+ *
+ * @return {Element}
+ * The element with inserted normalized content.
+ */
+
+function insertContent(el, content) {
+ return appendContent(emptyEl(el), content);
+}
+/**
+ * Check if an event was a single left click.
+ *
+ * @param {EventTarget~Event} event
+ * Event object.
+ *
+ * @return {boolean}
+ * Will be `true` if a single left click, `false` otherwise.
+ */
+
+function isSingleLeftClick(event) {
+ // Note: if you create something draggable, be sure to
+ // call it on both `mousedown` and `mousemove` event,
+ // otherwise `mousedown` should be enough for a button
+ if (event.button === undefined && event.buttons === undefined) {
+    // Why do we need `buttons`?
+    // Because a middle-button press sometimes reports
+    // e.button === 0 and e.buttons === 4.
+    // We also want to reject combination clicks, such as
+    // holding the middle button while left-clicking, which reports
+    // e.button === 0, e.buttons === 5,
+    // so checking `button` alone is not enough.
+    // So what does this block handle?
+    // Chrome's "simulate mobile devices" mode, where neither property is set,
+    // which we want to support as well.
+ return true;
+ }
+
+ if (event.button === 0 && event.buttons === undefined) {
+ // Touch screen, sometimes on some specific device, `buttons`
+ // doesn't have anything (safari on ios, blackberry...)
+ return true;
+ } // `mouseup` event on a single left click has
+ // `button` and `buttons` equal to 0
+
+
+ if (event.type === 'mouseup' && event.button === 0 && event.buttons === 0) {
+ return true;
+ }
+
+ if (event.button !== 0 || event.buttons !== 1) {
+    // This is the reason for the if/else blocks above: any special case we
+    // can catch and let slide is handled there. By the time we get here,
+    // this is definitely not a left click.
+ return false;
+ }
+
+ return true;
+}
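+// Illustrative usage sketch (not part of the video.js source): guard drag handling so
+// it only starts on a plain left click. `startDrag` is a hypothetical handler.
+//
+//   el.addEventListener('mousedown', function (event) {
+//     if (isSingleLeftClick(event)) {
+//       startDrag(event);
+//     }
+//   });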
+/**
+ * Finds a single DOM element matching `selector` within the optional
+ * `context` of another DOM element (defaulting to `document`).
+ *
+ * @param {string} selector
+ * A valid CSS selector, which will be passed to `querySelector`.
+ *
+ * @param {Element|String} [context=document]
+ * A DOM element within which to query. Can also be a selector
+ * string in which case the first matching element will be used
+ * as context. If missing (or no element matches selector), falls
+ * back to `document`.
+ *
+ * @return {Element|null}
+ * The element that was found or null.
+ */
+
+var $ = createQuerier('querySelector');
+/**
+ * Finds all DOM elements matching `selector` within the optional
+ * `context` of another DOM element (defaulting to `document`).
+ *
+ * @param {string} selector
+ * A valid CSS selector, which will be passed to `querySelectorAll`.
+ *
+ * @param {Element|String} [context=document]
+ * A DOM element within which to query. Can also be a selector
+ * string in which case the first matching element will be used
+ * as context. If missing (or no element matches selector), falls
+ * back to `document`.
+ *
+ * @return {NodeList}
+ *         A list of the elements that were found. Will be empty if none
+ * were found.
+ *
+ */
+
+var $$ = createQuerier('querySelectorAll');
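+// Illustrative usage sketch (not part of the video.js source); the class names are
+// examples only:
+//
+//   var bar = $('.vjs-control-bar');                    // first match in `document`, or null
+//   var buttons = $$('.vjs-button', bar || document);   // NodeList scoped to `bar`
+//   var same = $$('.vjs-button', '.vjs-control-bar');   // context may also be a selector string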
+
+var Dom = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ isReal: isReal,
+ isEl: isEl,
+ isInFrame: isInFrame,
+ createEl: createEl,
+ textContent: textContent,
+ prependTo: prependTo,
+ hasClass: hasClass,
+ addClass: addClass,
+ removeClass: removeClass,
+ toggleClass: toggleClass,
+ setAttributes: setAttributes,
+ getAttributes: getAttributes,
+ getAttribute: getAttribute,
+ setAttribute: setAttribute,
+ removeAttribute: removeAttribute,
+ blockTextSelection: blockTextSelection,
+ unblockTextSelection: unblockTextSelection,
+ getBoundingClientRect: getBoundingClientRect,
+ findPosition: findPosition,
+ getPointerPosition: getPointerPosition,
+ isTextNode: isTextNode,
+ emptyEl: emptyEl,
+ normalizeContent: normalizeContent,
+ appendContent: appendContent,
+ insertContent: insertContent,
+ isSingleLeftClick: isSingleLeftClick,
+ $: $,
+ $$: $$
+});
+
+/**
+ * @file setup.js - Functions for setting up a player without
+ * user interaction based on the data-setup `attribute` of the video tag.
+ *
+ * @module setup
+ */
+var _windowLoaded = false;
+var videojs$1;
+/**
+ * Set up any tags that have a data-setup `attribute` when the player is started.
+ */
+
+var autoSetup = function autoSetup() {
+ if (videojs$1.options.autoSetup === false) {
+ return;
+ }
+
+ var vids = Array.prototype.slice.call(document.getElementsByTagName('video'));
+ var audios = Array.prototype.slice.call(document.getElementsByTagName('audio'));
+ var divs = Array.prototype.slice.call(document.getElementsByTagName('video-js'));
+ var mediaEls = vids.concat(audios, divs); // Check if any media elements exist
+
+ if (mediaEls && mediaEls.length > 0) {
+ for (var i = 0, e = mediaEls.length; i < e; i++) {
+ var mediaEl = mediaEls[i]; // Check if element exists, has getAttribute func.
+
+ if (mediaEl && mediaEl.getAttribute) {
+ // Make sure this player hasn't already been set up.
+ if (mediaEl.player === undefined) {
+ var options = mediaEl.getAttribute('data-setup'); // Check if data-setup attr exists.
+ // We only auto-setup if they've added the data-setup attr.
+
+ if (options !== null) {
+ // Create new video.js instance.
+ videojs$1(mediaEl);
+ }
+ } // If getAttribute isn't defined, we need to wait for the DOM.
+
+ } else {
+ autoSetupTimeout(1);
+ break;
+ }
+ } // No videos were found, so keep looping unless page is finished loading.
+
+ } else if (!_windowLoaded) {
+ autoSetupTimeout(1);
+ }
+};
+/**
+ * Wait until the page is loaded before running autoSetup. This will be called
+ * in autoSetup if the window has not finished loading yet.
+ *
+ * @param {number} wait
+ * How long to wait in ms
+ *
+ * @param {module:videojs} [vjs]
+ * The videojs library function
+ */
+
+
+function autoSetupTimeout(wait, vjs) {
+ // Protect against breakage in non-browser environments
+ if (!isReal()) {
+ return;
+ }
+
+ if (vjs) {
+ videojs$1 = vjs;
+ }
+
+ window$1.setTimeout(autoSetup, wait);
+}
+/**
+ * Used to set the internal tracking of window loaded state to true.
+ *
+ * @private
+ */
+
+
+function setWindowLoaded() {
+ _windowLoaded = true;
+ window$1.removeEventListener('load', setWindowLoaded);
+}
+
+if (isReal()) {
+ if (document.readyState === 'complete') {
+ setWindowLoaded();
+ } else {
+ /**
+ * Listen for the load event on window, and set _windowLoaded to true.
+ *
+ * We use a standard event listener here to avoid incrementing the GUID
+ * before any players are created.
+ *
+ * @listens load
+ */
+ window$1.addEventListener('load', setWindowLoaded);
+ }
+}
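+// Illustrative markup sketch (not part of the video.js source): autoSetup only
+// initializes media elements that carry a data-setup attribute, e.g.
+//
+//   <video class="video-js" data-setup='{"controls": true}'>
+//     <source src="example.mp4" type="video/mp4">
+//   </video>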
+
+/**
+ * @file stylesheet.js
+ * @module stylesheet
+ */
+/**
+ * Create a DOM style element given a className for it.
+ *
+ * @param {string} className
+ * The className to add to the created style element.
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+var createStyleElement = function createStyleElement(className) {
+ var style = document.createElement('style');
+ style.className = className;
+ return style;
+};
+/**
+ * Add text to a DOM element.
+ *
+ * @param {Element} el
+ * The Element to add text content to.
+ *
+ * @param {string} content
+ * The text to add to the element.
+ */
+
+var setTextContent = function setTextContent(el, content) {
+ if (el.styleSheet) {
+ el.styleSheet.cssText = content;
+ } else {
+ el.textContent = content;
+ }
+};
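+// Illustrative usage sketch (not part of the video.js source); the class name and CSS
+// are examples only:
+//
+//   var style = createStyleElement('vjs-styles-example');
+//   setTextContent(style, '.video-js { width: 640px; height: 360px; }');
+//   document.head.appendChild(style);
+//   // The `styleSheet` branch above covers legacy IE, which exposes
+//   // style.styleSheet.cssText instead of textContent.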
+
+/**
+ * @file guid.js
+ * @module guid
+ */
+// Default value for GUIDs. This allows us to reset the GUID counter in tests.
+//
+// The initial GUID is 3 because some users have come to rely on the first
+// default player ID ending up as `vjs_video_3`.
+//
+// See: https://github.com/videojs/video.js/pull/6216
+var _initialGuid = 3;
+/**
+ * Unique ID for an element or function
+ *
+ * @type {Number}
+ */
+
+var _guid = _initialGuid;
+/**
+ * Get a unique auto-incrementing ID by number that has not been returned before.
+ *
+ * @return {number}
+ * A new unique ID.
+ */
+
+function newGUID() {
+ return _guid++;
+}
+
+/**
+ * @file dom-data.js
+ * @module dom-data
+ */
+var FakeWeakMap;
+
+if (!window$1.WeakMap) {
+ FakeWeakMap = /*#__PURE__*/function () {
+ function FakeWeakMap() {
+ this.vdata = 'vdata' + Math.floor(window$1.performance && window$1.performance.now() || Date.now());
+ this.data = {};
+ }
+
+ var _proto = FakeWeakMap.prototype;
+
+ _proto.set = function set(key, value) {
+ var access = key[this.vdata] || newGUID();
+
+ if (!key[this.vdata]) {
+ key[this.vdata] = access;
+ }
+
+ this.data[access] = value;
+ return this;
+ };
+
+ _proto.get = function get(key) {
+ var access = key[this.vdata]; // we have data, return it
+
+ if (access) {
+ return this.data[access];
+ } // we don't have data, return nothing.
+ // return undefined explicitly as that's the contract for this method
+
+
+ log$1('We have no data for this element', key);
+ return undefined;
+ };
+
+ _proto.has = function has(key) {
+ var access = key[this.vdata];
+ return access in this.data;
+ };
+
+ _proto["delete"] = function _delete(key) {
+ var access = key[this.vdata];
+
+ if (access) {
+ delete this.data[access];
+ delete key[this.vdata];
+ }
+ };
+
+ return FakeWeakMap;
+ }();
+}
+/**
+ * Element Data Store.
+ *
+ * Allows for binding data to an element without putting it directly on the
+ * element. Ex. Event listeners are stored here.
+ * (also from jsninja.com, slightly modified and updated for closure compiler)
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+var DomData = window$1.WeakMap ? new WeakMap() : new FakeWeakMap();
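+// Illustrative usage sketch (not part of the video.js source): data is keyed by the
+// element (or object) itself, so nothing extra is written to the DOM node when a
+// native WeakMap is available.
+//
+//   DomData.set(el, { handlers: {} });
+//   DomData.get(el).handlers;   // -> {}
+//   DomData["delete"](el);      // drop the entry entirely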
+
+/**
+ * @file events.js
+ *
+ * An event system (John Resig - Secrets of a JS Ninja, http://jsninja.com/).
+ * The original book version wasn't completely usable, so some things were fixed
+ * and made Closure Compiler compatible. This should work very similarly to
+ * jQuery's events; however, it's based off the book version, which isn't as
+ * robust as jQuery's, so there are probably some differences.
+ *
+ * @module events
+ */
+/**
+ * Clean up the listener cache and dispatchers
+ *
+ * @param {Element|Object} elem
+ * Element to clean up
+ *
+ * @param {string} type
+ * Type of event to clean up
+ */
+
+function _cleanUpEvents(elem, type) {
+ if (!DomData.has(elem)) {
+ return;
+ }
+
+ var data = DomData.get(elem); // Remove the events of a particular type if there are none left
+
+ if (data.handlers[type].length === 0) {
+ delete data.handlers[type]; // data.handlers[type] = null;
+ // Setting to null was causing an error with data.handlers
+ // Remove the meta-handler from the element
+
+ if (elem.removeEventListener) {
+ elem.removeEventListener(type, data.dispatcher, false);
+ } else if (elem.detachEvent) {
+ elem.detachEvent('on' + type, data.dispatcher);
+ }
+ } // Remove the events object if there are no types left
+
+
+ if (Object.getOwnPropertyNames(data.handlers).length <= 0) {
+ delete data.handlers;
+ delete data.dispatcher;
+ delete data.disabled;
+ } // Finally remove the element data if there is no data left
+
+
+ if (Object.getOwnPropertyNames(data).length === 0) {
+ DomData["delete"](elem);
+ }
+}
+/**
+ * Loops through an array of event types and calls the requested method for each type.
+ *
+ * @param {Function} fn
+ * The event method we want to use.
+ *
+ * @param {Element|Object} elem
+ * Element or object to bind listeners to
+ *
+ * @param {string[]} types
+ *        The types of events to bind to.
+ *
+ * @param {EventTarget~EventListener} callback
+ * Event listener.
+ */
+
+
+function _handleMultipleEvents(fn, elem, types, callback) {
+ types.forEach(function (type) {
+ // Call the event method for each one of the types
+ fn(elem, type, callback);
+ });
+}
+/**
+ * Fix a native event to have standard property values
+ *
+ * @param {Object} event
+ * Event object to fix.
+ *
+ * @return {Object}
+ * Fixed event object.
+ */
+
+
+function fixEvent(event) {
+ if (event.fixed_) {
+ return event;
+ }
+
+ function returnTrue() {
+ return true;
+ }
+
+ function returnFalse() {
+ return false;
+ } // Test if fixing up is needed
+ // Used to check if !event.stopPropagation instead of isPropagationStopped
+ // But native events return true for stopPropagation, but don't have
+ // other expected methods like isPropagationStopped. Seems to be a problem
+ // with the Javascript Ninja code. So we're just overriding all events now.
+
+
+ if (!event || !event.isPropagationStopped || !event.isImmediatePropagationStopped) {
+ var old = event || window$1.event;
+ event = {}; // Clone the old object so that we can modify the values event = {};
+ // IE8 Doesn't like when you mess with native event properties
+ // Firefox returns false for event.hasOwnProperty('type') and other props
+ // which makes copying more difficult.
+ // TODO: Probably best to create a whitelist of event props
+
+ for (var key in old) {
+ // Safari 6.0.3 warns you if you try to copy deprecated layerX/Y
+ // Chrome warns you if you try to copy deprecated keyboardEvent.keyLocation
+ // and webkitMovementX/Y
+ // Lighthouse complains if Event.path is copied
+ if (key !== 'layerX' && key !== 'layerY' && key !== 'keyLocation' && key !== 'webkitMovementX' && key !== 'webkitMovementY' && key !== 'path') {
+ // Chrome 32+ warns if you try to copy deprecated returnValue, but
+ // we still want to if preventDefault isn't supported (IE8).
+ if (!(key === 'returnValue' && old.preventDefault)) {
+ event[key] = old[key];
+ }
+ }
+ } // The event occurred on this element
+
+
+ if (!event.target) {
+ event.target = event.srcElement || document;
+ } // Handle which other element the event is related to
+
+
+ if (!event.relatedTarget) {
+ event.relatedTarget = event.fromElement === event.target ? event.toElement : event.fromElement;
+ } // Stop the default browser action
+
+
+ event.preventDefault = function () {
+ if (old.preventDefault) {
+ old.preventDefault();
+ }
+
+ event.returnValue = false;
+ old.returnValue = false;
+ event.defaultPrevented = true;
+ };
+
+ event.defaultPrevented = false; // Stop the event from bubbling
+
+ event.stopPropagation = function () {
+ if (old.stopPropagation) {
+ old.stopPropagation();
+ }
+
+ event.cancelBubble = true;
+ old.cancelBubble = true;
+ event.isPropagationStopped = returnTrue;
+ };
+
+ event.isPropagationStopped = returnFalse; // Stop the event from bubbling and executing other handlers
+
+ event.stopImmediatePropagation = function () {
+ if (old.stopImmediatePropagation) {
+ old.stopImmediatePropagation();
+ }
+
+ event.isImmediatePropagationStopped = returnTrue;
+ event.stopPropagation();
+ };
+
+ event.isImmediatePropagationStopped = returnFalse; // Handle mouse position
+
+ if (event.clientX !== null && event.clientX !== undefined) {
+ var doc = document.documentElement;
+ var body = document.body;
+ event.pageX = event.clientX + (doc && doc.scrollLeft || body && body.scrollLeft || 0) - (doc && doc.clientLeft || body && body.clientLeft || 0);
+ event.pageY = event.clientY + (doc && doc.scrollTop || body && body.scrollTop || 0) - (doc && doc.clientTop || body && body.clientTop || 0);
+ } // Handle key presses
+
+
+ event.which = event.charCode || event.keyCode; // Fix button for mouse clicks:
+ // 0 == left; 1 == middle; 2 == right
+
+ if (event.button !== null && event.button !== undefined) {
+ // The following is disabled because it does not pass videojs-standard
+ // and... yikes.
+
+ /* eslint-disable */
+ event.button = event.button & 1 ? 0 : event.button & 4 ? 1 : event.button & 2 ? 2 : 0;
+ /* eslint-enable */
+ }
+ }
+
+ event.fixed_ = true; // Returns fixed-up instance
+
+ return event;
+}
+/**
+ * Whether passive event listeners are supported
+ */
+
+var _supportsPassive;
+
+var supportsPassive = function supportsPassive() {
+ if (typeof _supportsPassive !== 'boolean') {
+ _supportsPassive = false;
+
+ try {
+ var opts = Object.defineProperty({}, 'passive', {
+ get: function get() {
+ _supportsPassive = true;
+ }
+ });
+ window$1.addEventListener('test', null, opts);
+ window$1.removeEventListener('test', null, opts);
+ } catch (e) {// disregard
+ }
+ }
+
+ return _supportsPassive;
+};
+/**
+ * Touch events Chrome expects to be passive
+ */
+
+
+var passiveEvents = ['touchstart', 'touchmove'];
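+// Illustrative usage sketch (not part of the video.js source): the same feature test
+// can be applied when attaching listeners directly, mirroring what `on()` does below.
+// `handleTouchStart` is a hypothetical handler.
+//
+//   var opts = supportsPassive() && passiveEvents.indexOf('touchstart') > -1 ?
+//     { passive: true } : false;
+//   el.addEventListener('touchstart', handleTouchStart, opts);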
+/**
+ * Add an event listener to element
+ * It stores the handler function in a separate cache object
+ * and adds a generic handler to the element's event,
+ * along with a unique id (guid) to the element.
+ *
+ * @param {Element|Object} elem
+ * Element or object to bind listeners to
+ *
+ * @param {string|string[]} type
+ * Type of event to bind to.
+ *
+ * @param {EventTarget~EventListener} fn
+ * Event listener.
+ */
+
+function on(elem, type, fn) {
+ if (Array.isArray(type)) {
+ return _handleMultipleEvents(on, elem, type, fn);
+ }
+
+ if (!DomData.has(elem)) {
+ DomData.set(elem, {});
+ }
+
+ var data = DomData.get(elem); // We need a place to store all our handler data
+
+ if (!data.handlers) {
+ data.handlers = {};
+ }
+
+ if (!data.handlers[type]) {
+ data.handlers[type] = [];
+ }
+
+ if (!fn.guid) {
+ fn.guid = newGUID();
+ }
+
+ data.handlers[type].push(fn);
+
+ if (!data.dispatcher) {
+ data.disabled = false;
+
+ data.dispatcher = function (event, hash) {
+ if (data.disabled) {
+ return;
+ }
+
+ event = fixEvent(event);
+ var handlers = data.handlers[event.type];
+
+ if (handlers) {
+ // Copy handlers so if handlers are added/removed during the process it doesn't throw everything off.
+ var handlersCopy = handlers.slice(0);
+
+ for (var m = 0, n = handlersCopy.length; m < n; m++) {
+ if (event.isImmediatePropagationStopped()) {
+ break;
+ } else {
+ try {
+ handlersCopy[m].call(elem, event, hash);
+ } catch (e) {
+ log$1.error(e);
+ }
+ }
+ }
+ }
+ };
+ }
+
+ if (data.handlers[type].length === 1) {
+ if (elem.addEventListener) {
+ var options = false;
+
+ if (supportsPassive() && passiveEvents.indexOf(type) > -1) {
+ options = {
+ passive: true
+ };
+ }
+
+ elem.addEventListener(type, data.dispatcher, options);
+ } else if (elem.attachEvent) {
+ elem.attachEvent('on' + type, data.dispatcher);
+ }
+ }
+}
+/**
+ * Removes event listeners from an element
+ *
+ * @param {Element|Object} elem
+ * Object to remove listeners from.
+ *
+ * @param {string|string[]} [type]
+ *        Type of listener to remove. If omitted, all listeners are removed from
+ *        the element.
+ *
+ * @param {EventTarget~EventListener} [fn]
+ *        Specific listener to remove. If omitted, all listeners for the event
+ *        type are removed.
+ */
+
+function off(elem, type, fn) {
+ // Don't want to add a cache object through getElData if not needed
+ if (!DomData.has(elem)) {
+ return;
+ }
+
+ var data = DomData.get(elem); // If no events exist, nothing to unbind
+
+ if (!data.handlers) {
+ return;
+ }
+
+ if (Array.isArray(type)) {
+ return _handleMultipleEvents(off, elem, type, fn);
+ } // Utility function
+
+
+ var removeType = function removeType(el, t) {
+ data.handlers[t] = [];
+
+ _cleanUpEvents(el, t);
+ }; // Are we removing all bound events?
+
+
+ if (type === undefined) {
+ for (var t in data.handlers) {
+ if (Object.prototype.hasOwnProperty.call(data.handlers || {}, t)) {
+ removeType(elem, t);
+ }
+ }
+
+ return;
+ }
+
+ var handlers = data.handlers[type]; // If no handlers exist, nothing to unbind
+
+ if (!handlers) {
+ return;
+ } // If no listener was provided, remove all listeners for type
+
+
+ if (!fn) {
+ removeType(elem, type);
+ return;
+ } // We're only removing a single handler
+
+
+ if (fn.guid) {
+ for (var n = 0; n < handlers.length; n++) {
+ if (handlers[n].guid === fn.guid) {
+ handlers.splice(n--, 1);
+ }
+ }
+ }
+
+ _cleanUpEvents(elem, type);
+}
+/**
+ * Trigger an event for an element
+ *
+ * @param {Element|Object} elem
+ * Element to trigger an event on
+ *
+ * @param {EventTarget~Event|string} event
+ * A string (the type) or an event object with a type attribute
+ *
+ * @param {Object} [hash]
+ * data hash to pass along with the event
+ *
+ * @return {boolean}
+ *         The opposite of `defaultPrevented`: `false` if the default action
+ *         was prevented and `true` otherwise.
+ */
+
+function trigger(elem, event, hash) {
+ // Fetches element data and a reference to the parent (for bubbling).
+ // Don't want to add a data object to cache for every parent,
+ // so checking hasElData first.
+ var elemData = DomData.has(elem) ? DomData.get(elem) : {};
+ var parent = elem.parentNode || elem.ownerDocument; // type = event.type || event,
+ // handler;
+ // If an event name was passed as a string, creates an event out of it
+
+ if (typeof event === 'string') {
+ event = {
+ type: event,
+ target: elem
+ };
+ } else if (!event.target) {
+ event.target = elem;
+ } // Normalizes the event properties.
+
+
+ event = fixEvent(event); // If the passed element has a dispatcher, executes the established handlers.
+
+ if (elemData.dispatcher) {
+ elemData.dispatcher.call(elem, event, hash);
+ } // Unless explicitly stopped or the event does not bubble (e.g. media events)
+ // recursively calls this function to bubble the event up the DOM.
+
+
+ if (parent && !event.isPropagationStopped() && event.bubbles === true) {
+ trigger.call(null, parent, event, hash); // If at the top of the DOM, triggers the default action unless disabled.
+ } else if (!parent && !event.defaultPrevented && event.target && event.target[event.type]) {
+ if (!DomData.has(event.target)) {
+ DomData.set(event.target, {});
+ }
+
+ var targetData = DomData.get(event.target); // Checks if the target has a default action for this event.
+
+ if (event.target[event.type]) {
+ // Temporarily disables event dispatching on the target as we have already executed the handler.
+ targetData.disabled = true; // Executes the default action.
+
+ if (typeof event.target[event.type] === 'function') {
+ event.target[event.type]();
+ } // Re-enables event dispatching.
+
+
+ targetData.disabled = false;
+ }
+ } // Inform the triggerer if the default was prevented by returning false
+
+
+ return !event.defaultPrevented;
+}
+/**
+ * Trigger a listener only once for an event.
+ *
+ * @param {Element|Object} elem
+ * Element or object to bind to.
+ *
+ * @param {string|string[]} type
+ * Name/type of event
+ *
+ * @param {Event~EventListener} fn
+ * Event listener function
+ */
+
+function one(elem, type, fn) {
+ if (Array.isArray(type)) {
+ return _handleMultipleEvents(one, elem, type, fn);
+ }
+
+ var func = function func() {
+ off(elem, type, func);
+ fn.apply(this, arguments);
+  }; // copy the guid to the new function so it can be removed using the original function's ID
+
+
+ func.guid = fn.guid = fn.guid || newGUID();
+ on(elem, type, func);
+}
+/**
+ * Trigger a listener only once and then turn it off for all
+ * configured events
+ *
+ * @param {Element|Object} elem
+ * Element or object to bind to.
+ *
+ * @param {string|string[]} type
+ * Name/type of event
+ *
+ * @param {Event~EventListener} fn
+ * Event listener function
+ */
+
+function any(elem, type, fn) {
+ var func = function func() {
+ off(elem, type, func);
+ fn.apply(this, arguments);
+  }; // copy the guid to the new function so it can be removed using the original function's ID
+
+
+ func.guid = fn.guid = fn.guid || newGUID(); // multiple ons, but one off for everything
+
+ on(elem, type, func);
+}
+
+var Events = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ fixEvent: fixEvent,
+ on: on,
+ off: off,
+ trigger: trigger,
+ one: one,
+ any: any
+});
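+// Illustrative usage sketch of the module-level helpers (not part of the video.js
+// source); `el` is any DOM element:
+//
+//   var onPlay = function (event) { log$1('got', event.type); };
+//   on(el, 'play', onPlay);                // attach via the cached dispatcher
+//   trigger(el, 'play');                   // runs onPlay, returns !defaultPrevented
+//   off(el, 'play', onPlay);               // detach by matching guid
+//   one(el, ['pause', 'ended'], onPlay);   // fires once per listed type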
+
+/**
+ * @file fn.js
+ * @module fn
+ */
+var UPDATE_REFRESH_INTERVAL = 30;
+/**
+ * Bind (a.k.a proxy or context). A simple method for changing the context of
+ * a function.
+ *
+ * It also stores a unique id on the function so it can be easily removed from
+ * events.
+ *
+ * @function
+ * @param {Mixed} context
+ * The object to bind as scope.
+ *
+ * @param {Function} fn
+ * The function to be bound to a scope.
+ *
+ * @param {number} [uid]
+ * An optional unique ID for the function to be set
+ *
+ * @return {Function}
+ * The new function that will be bound into the context given
+ */
+
+var bind = function bind(context, fn, uid) {
+ // Make sure the function has a unique ID
+ if (!fn.guid) {
+ fn.guid = newGUID();
+ } // Create the new function that changes the context
+
+
+ var bound = fn.bind(context); // Allow for the ability to individualize this function
+ // Needed in the case where multiple objects might share the same prototype
+ // IF both items add an event listener with the same function, then you try to remove just one
+ // it will remove both because they both have the same guid.
+ // when using this, you need to use the bind method when you remove the listener as well.
+ // currently used in text tracks
+
+ bound.guid = uid ? uid + '_' + fn.guid : fn.guid;
+ return bound;
+};
+/**
+ * Wraps the given function, `fn`, with a new function that only invokes `fn`
+ * at most once per every `wait` milliseconds.
+ *
+ * @function
+ * @param {Function} fn
+ * The function to be throttled.
+ *
+ * @param {number} wait
+ * The number of milliseconds by which to throttle.
+ *
+ * @return {Function}
+ */
+
+var throttle = function throttle(fn, wait) {
+ var last = window$1.performance.now();
+
+ var throttled = function throttled() {
+ var now = window$1.performance.now();
+
+ if (now - last >= wait) {
+ fn.apply(void 0, arguments);
+ last = now;
+ }
+ };
+
+ return throttled;
+};
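+// Illustrative usage sketch (not part of the video.js source); `updatePosition` is a
+// hypothetical handler:
+//
+//   var onMouseMove = throttle(function () { updatePosition(); }, UPDATE_REFRESH_INTERVAL);
+//   on(document, 'mousemove', onMouseMove);   // runs at most once every 30ms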
+/**
+ * Creates a debounced function that delays invoking `func` until after `wait`
+ * milliseconds have elapsed since the last time the debounced function was
+ * invoked.
+ *
+ * Inspired by lodash and underscore implementations.
+ *
+ * @function
+ * @param {Function} func
+ * The function to wrap with debounce behavior.
+ *
+ * @param {number} wait
+ * The number of milliseconds to wait after the last invocation.
+ *
+ * @param {boolean} [immediate]
+ *        Whether or not to invoke the function on the leading edge of the wait
+ *        interval (immediately on the first call) instead of the trailing edge.
+ *
+ * @param {Object} [context=window]
+ * The "context" in which the debounced function should debounce. For
+ * example, if this function should be tied to a Video.js player,
+ * the player can be passed here. Alternatively, defaults to the
+ * global `window` object.
+ *
+ * @return {Function}
+ * A debounced function.
+ */
+
+var debounce = function debounce(func, wait, immediate, context) {
+ if (context === void 0) {
+ context = window$1;
+ }
+
+ var timeout;
+
+ var cancel = function cancel() {
+ context.clearTimeout(timeout);
+ timeout = null;
+ };
+ /* eslint-disable consistent-this */
+
+
+ var debounced = function debounced() {
+ var self = this;
+ var args = arguments;
+
+ var _later = function later() {
+ timeout = null;
+ _later = null;
+
+ if (!immediate) {
+ func.apply(self, args);
+ }
+ };
+
+ if (!timeout && immediate) {
+ func.apply(self, args);
+ }
+
+ context.clearTimeout(timeout);
+ timeout = context.setTimeout(_later, wait);
+ };
+ /* eslint-enable consistent-this */
+
+
+ debounced.cancel = cancel;
+ return debounced;
+};
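+// Illustrative usage sketch (not part of the video.js source); `handleResize` is a
+// hypothetical handler:
+//
+//   var onResize = debounce(handleResize, 100);   // trailing-edge call, 100ms after the last resize
+//   on(window$1, 'resize', onResize);
+//   // onResize.cancel() clears any invocation that is still pending.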
+
+/**
+ * @file src/js/event-target.js
+ */
+/**
+ * `EventTarget` is a class that can have the same API as the DOM `EventTarget`. It
+ * adds shorthand functions that wrap around lengthy functions. For example:
+ * the `on` function is a wrapper around `addEventListener`.
+ *
+ * @see [EventTarget Spec]{@link https://www.w3.org/TR/DOM-Level-2-Events/events.html#Events-EventTarget}
+ * @class EventTarget
+ */
+
+var EventTarget$2 = function EventTarget() {};
+/**
+ * A Custom DOM event.
+ *
+ * @typedef {Object} EventTarget~Event
+ * @see [Properties]{@link https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent}
+ */
+
+/**
+ * All event listeners should follow the following format.
+ *
+ * @callback EventTarget~EventListener
+ * @this {EventTarget}
+ *
+ * @param {EventTarget~Event} event
+ * the event that triggered this function
+ *
+ * @param {Object} [hash]
+ * hash of data sent during the event
+ */
+
+/**
+ * An object containing event names as keys and booleans as values.
+ *
+ * > NOTE: If an event name is set to a true value here {@link EventTarget#trigger}
+ * will have extra functionality. See that function for more information.
+ *
+ * @property EventTarget.prototype.allowedEvents_
+ * @private
+ */
+
+
+EventTarget$2.prototype.allowedEvents_ = {};
+/**
+ * Adds an `event listener` to an instance of an `EventTarget`. An `event listener` is a
+ * function that will get called when an event with a certain name gets triggered.
+ *
+ * @param {string|string[]} type
+ * An event name or an array of event names.
+ *
+ * @param {EventTarget~EventListener} fn
+ * The function to call with `EventTarget`s
+ */
+
+EventTarget$2.prototype.on = function (type, fn) {
+ // Remove the addEventListener alias before calling Events.on
+ // so we don't get into an infinite type loop
+ var ael = this.addEventListener;
+
+ this.addEventListener = function () {};
+
+ on(this, type, fn);
+ this.addEventListener = ael;
+};
+/**
+ * An alias of {@link EventTarget#on}. Allows `EventTarget` to mimic
+ * the standard DOM API.
+ *
+ * @function
+ * @see {@link EventTarget#on}
+ */
+
+
+EventTarget$2.prototype.addEventListener = EventTarget$2.prototype.on;
+/**
+ * Removes an `event listener` for a specific event from an instance of `EventTarget`.
+ * This makes it so that the `event listener` will no longer get called when the
+ * named event happens.
+ *
+ * @param {string|string[]} type
+ * An event name or an array of event names.
+ *
+ * @param {EventTarget~EventListener} fn
+ * The function to remove.
+ */
+
+EventTarget$2.prototype.off = function (type, fn) {
+ off(this, type, fn);
+};
+/**
+ * An alias of {@link EventTarget#off}. Allows `EventTarget` to mimic
+ * the standard DOM API.
+ *
+ * @function
+ * @see {@link EventTarget#off}
+ */
+
+
+EventTarget$2.prototype.removeEventListener = EventTarget$2.prototype.off;
+/**
+ * This function will add an `event listener` that gets triggered only once. After the
+ * first trigger it will get removed. This is like adding an `event listener`
+ * with {@link EventTarget#on} that calls {@link EventTarget#off} on itself.
+ *
+ * @param {string|string[]} type
+ * An event name or an array of event names.
+ *
+ * @param {EventTarget~EventListener} fn
+ * The function to be called once for each event name.
+ */
+
+EventTarget$2.prototype.one = function (type, fn) {
+ // Remove the addEventListener aliasing Events.on
+ // so we don't get into an infinite type loop
+ var ael = this.addEventListener;
+
+ this.addEventListener = function () {};
+
+ one(this, type, fn);
+ this.addEventListener = ael;
+};
+
+EventTarget$2.prototype.any = function (type, fn) {
+ // Remove the addEventListener aliasing Events.on
+ // so we don't get into an infinite type loop
+ var ael = this.addEventListener;
+
+ this.addEventListener = function () {};
+
+ any(this, type, fn);
+ this.addEventListener = ael;
+};
+/**
+ * This function causes an event to happen. This will then cause any `event listeners`
+ * that are waiting for that event, to get called. If there are no `event listeners`
+ * for an event then nothing will happen.
+ *
+ * If the name of the `Event` that is being triggered is in `EventTarget.allowedEvents_`,
+ * trigger will also call the `on` + `uppercaseEventName` function.
+ *
+ * Example:
+ * 'click' is in `EventTarget.allowedEvents_`, so, trigger will attempt to call
+ * `onClick` if it exists.
+ *
+ * @param {string|EventTarget~Event|Object} event
+ * The name of the event, an `Event`, or an object with a key of type set to
+ * an event name.
+ */
+
+
+EventTarget$2.prototype.trigger = function (event) {
+ var type = event.type || event; // deprecation
+ // In a future version we should default target to `this`
+ // similar to how we default the target to `elem` in
+ // `Events.trigger`. Right now the default `target` will be
+ // `document` due to the `Event.fixEvent` call.
+
+ if (typeof event === 'string') {
+ event = {
+ type: type
+ };
+ }
+
+ event = fixEvent(event);
+
+ if (this.allowedEvents_[type] && this['on' + type]) {
+ this['on' + type](event);
+ }
+
+ trigger(this, event);
+};
+/**
+ * An alias of {@link EventTarget#trigger}. Allows `EventTarget` to mimic
+ * the standard DOM API.
+ *
+ * @function
+ * @see {@link EventTarget#trigger}
+ */
+
+
+EventTarget$2.prototype.dispatchEvent = EventTarget$2.prototype.trigger;
+var EVENT_MAP;
+
+EventTarget$2.prototype.queueTrigger = function (event) {
+ var _this = this;
+
+ // only set up EVENT_MAP if it'll be used
+ if (!EVENT_MAP) {
+ EVENT_MAP = new Map();
+ }
+
+ var type = event.type || event;
+ var map = EVENT_MAP.get(this);
+
+ if (!map) {
+ map = new Map();
+ EVENT_MAP.set(this, map);
+ }
+
+ var oldTimeout = map.get(type);
+ map["delete"](type);
+ window$1.clearTimeout(oldTimeout);
+ var timeout = window$1.setTimeout(function () {
+ // if we cleared out all timeouts for the current target, delete its map
+ if (map.size === 0) {
+ map = null;
+ EVENT_MAP["delete"](_this);
+ }
+
+ _this.trigger(event);
+ }, 0);
+ map.set(type, timeout);
+};
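+// Illustrative usage sketch (not part of the video.js source): any plain object-like
+// event bus can be built on this class.
+//
+//   var bus = new EventTarget$2();
+//   bus.on('ready', function () { log$1('ready fired'); });
+//   bus.trigger('ready');        // synchronous dispatch
+//   bus.queueTrigger('ready');   // coalesces repeated triggers into one async dispatch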
+
+/**
+ * @file mixins/evented.js
+ * @module evented
+ */
+
+var objName = function objName(obj) {
+ if (typeof obj.name === 'function') {
+ return obj.name();
+ }
+
+ if (typeof obj.name === 'string') {
+ return obj.name;
+ }
+
+ if (obj.name_) {
+ return obj.name_;
+ }
+
+ if (obj.constructor && obj.constructor.name) {
+ return obj.constructor.name;
+ }
+
+ return typeof obj;
+};
+/**
+ * Returns whether or not an object has had the evented mixin applied.
+ *
+ * @param {Object} object
+ * An object to test.
+ *
+ * @return {boolean}
+ * Whether or not the object appears to be evented.
+ */
+
+
+var isEvented = function isEvented(object) {
+ return object instanceof EventTarget$2 || !!object.eventBusEl_ && ['on', 'one', 'off', 'trigger'].every(function (k) {
+ return typeof object[k] === 'function';
+ });
+};
+/**
+ * Adds a callback to run after the evented mixin is applied.
+ *
+ * @param {Object} target
+ *        The object to add the callback to.
+ * @param {Function} callback
+ * The callback to run.
+ */
+
+
+var addEventedCallback = function addEventedCallback(target, callback) {
+ if (isEvented(target)) {
+ callback();
+ } else {
+ if (!target.eventedCallbacks) {
+ target.eventedCallbacks = [];
+ }
+
+ target.eventedCallbacks.push(callback);
+ }
+};
+/**
+ * Whether a value is a valid event type - non-empty string or array.
+ *
+ * @private
+ * @param {string|Array} type
+ * The type value to test.
+ *
+ * @return {boolean}
+ * Whether or not the type is a valid event type.
+ */
+
+
+var isValidEventType = function isValidEventType(type) {
+ return (// The regex here verifies that the `type` contains at least one non-
+ // whitespace character.
+ typeof type === 'string' && /\S/.test(type) || Array.isArray(type) && !!type.length
+ );
+};
+/**
+ * Validates a value to determine if it is a valid event target. Throws if not.
+ *
+ * @private
+ * @throws {Error}
+ * If the target does not appear to be a valid event target.
+ *
+ * @param {Object} target
+ * The object to test.
+ *
+ * @param {Object} obj
+ * The evented object we are validating for
+ *
+ * @param {string} fnName
+ * The name of the evented mixin function that called this.
+ */
+
+
+var validateTarget = function validateTarget(target, obj, fnName) {
+ if (!target || !target.nodeName && !isEvented(target)) {
+ throw new Error("Invalid target for " + objName(obj) + "#" + fnName + "; must be a DOM node or evented object.");
+ }
+};
+/**
+ * Validates a value to determine if it is a valid event target. Throws if not.
+ *
+ * @private
+ * @throws {Error}
+ * If the type does not appear to be a valid event type.
+ *
+ * @param {string|Array} type
+ * The type to test.
+ *
+ * @param {Object} obj
+ *        The evented object we are validating for
+ *
+ * @param {string} fnName
+ * The name of the evented mixin function that called this.
+ */
+
+
+var validateEventType = function validateEventType(type, obj, fnName) {
+ if (!isValidEventType(type)) {
+ throw new Error("Invalid event type for " + objName(obj) + "#" + fnName + "; must be a non-empty string or array.");
+ }
+};
+/**
+ * Validates a value to determine if it is a valid listener. Throws if not.
+ *
+ * @private
+ * @throws {Error}
+ * If the listener is not a function.
+ *
+ * @param {Function} listener
+ * The listener to test.
+ *
+ * @param {Object} obj
+ * The evented object we are validating for
+ *
+ * @param {string} fnName
+ * The name of the evented mixin function that called this.
+ */
+
+
+var validateListener = function validateListener(listener, obj, fnName) {
+ if (typeof listener !== 'function') {
+ throw new Error("Invalid listener for " + objName(obj) + "#" + fnName + "; must be a function.");
+ }
+};
+/**
+ * Takes an array of arguments given to `on()` or `one()`, validates them, and
+ * normalizes them into an object.
+ *
+ * @private
+ * @param {Object} self
+ * The evented object on which `on()` or `one()` was called. This
+ * object will be bound as the `this` value for the listener.
+ *
+ * @param {Array} args
+ * An array of arguments passed to `on()` or `one()`.
+ *
+ * @param {string} fnName
+ * The name of the evented mixin function that called this.
+ *
+ * @return {Object}
+ * An object containing useful values for `on()` or `one()` calls.
+ */
+
+
+var normalizeListenArgs = function normalizeListenArgs(self, args, fnName) {
+ // If the number of arguments is less than 3, the target is always the
+ // evented object itself.
+ var isTargetingSelf = args.length < 3 || args[0] === self || args[0] === self.eventBusEl_;
+ var target;
+ var type;
+ var listener;
+
+ if (isTargetingSelf) {
+ target = self.eventBusEl_; // Deal with cases where we got 3 arguments, but we are still listening to
+ // the evented object itself.
+
+ if (args.length >= 3) {
+ args.shift();
+ }
+
+ type = args[0];
+ listener = args[1];
+ } else {
+ target = args[0];
+ type = args[1];
+ listener = args[2];
+ }
+
+ validateTarget(target, self, fnName);
+ validateEventType(type, self, fnName);
+ validateListener(listener, self, fnName);
+ listener = bind(self, listener);
+ return {
+ isTargetingSelf: isTargetingSelf,
+ target: target,
+ type: type,
+ listener: listener
+ };
+};
+/**
+ * Adds the listener to the event type(s) on the target, normalizing for
+ * the type of target.
+ *
+ * @private
+ * @param {Element|Object} target
+ * A DOM node or evented object.
+ *
+ * @param {string} method
+ * The event binding method to use ("on" or "one").
+ *
+ * @param {string|Array} type
+ * One or more event type(s).
+ *
+ * @param {Function} listener
+ * A listener function.
+ */
+
+
+var listen = function listen(target, method, type, listener) {
+ validateTarget(target, target, method);
+
+ if (target.nodeName) {
+ Events[method](target, type, listener);
+ } else {
+ target[method](type, listener);
+ }
+};
+/**
+ * Contains methods that provide event capabilities to an object which is passed
+ * to {@link module:evented|evented}.
+ *
+ * @mixin EventedMixin
+ */
+
+
+var EventedMixin = {
+ /**
+ * Add a listener to an event (or events) on this object or another evented
+ * object.
+ *
+ * @param {string|Array|Element|Object} targetOrType
+ * If this is a string or array, it represents the event type(s)
+ * that will trigger the listener.
+ *
+ * Another evented object can be passed here instead, which will
+ * cause the listener to listen for events on _that_ object.
+ *
+ * In either case, the listener's `this` value will be bound to
+ * this object.
+ *
+ * @param {string|Array|Function} typeOrListener
+ * If the first argument was a string or array, this should be the
+ * listener function. Otherwise, this is a string or array of event
+ * type(s).
+ *
+ * @param {Function} [listener]
+ * If the first argument was another evented object, this will be
+ * the listener function.
+ */
+ on: function on() {
+ var _this = this;
+
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ var _normalizeListenArgs = normalizeListenArgs(this, args, 'on'),
+ isTargetingSelf = _normalizeListenArgs.isTargetingSelf,
+ target = _normalizeListenArgs.target,
+ type = _normalizeListenArgs.type,
+ listener = _normalizeListenArgs.listener;
+
+ listen(target, 'on', type, listener); // If this object is listening to another evented object.
+
+ if (!isTargetingSelf) {
+ // If this object is disposed, remove the listener.
+ var removeListenerOnDispose = function removeListenerOnDispose() {
+ return _this.off(target, type, listener);
+      }; // Use the same function ID as the listener so we can remove it later
+ // using the ID of the original listener.
+
+
+ removeListenerOnDispose.guid = listener.guid; // Add a listener to the target's dispose event as well. This ensures
+ // that if the target is disposed BEFORE this object, we remove the
+ // removal listener that was just added. Otherwise, we create a memory leak.
+
+ var removeRemoverOnTargetDispose = function removeRemoverOnTargetDispose() {
+ return _this.off('dispose', removeListenerOnDispose);
+ }; // Use the same function ID as the listener so we can remove it later
+      // using the ID of the original listener.
+
+
+ removeRemoverOnTargetDispose.guid = listener.guid;
+ listen(this, 'on', 'dispose', removeListenerOnDispose);
+ listen(target, 'on', 'dispose', removeRemoverOnTargetDispose);
+ }
+ },
+
+ /**
+ * Add a listener to an event (or events) on this object or another evented
+ * object. The listener will be called once per event and then removed.
+ *
+ * @param {string|Array|Element|Object} targetOrType
+ * If this is a string or array, it represents the event type(s)
+ * that will trigger the listener.
+ *
+ * Another evented object can be passed here instead, which will
+ * cause the listener to listen for events on _that_ object.
+ *
+ * In either case, the listener's `this` value will be bound to
+ * this object.
+ *
+ * @param {string|Array|Function} typeOrListener
+ * If the first argument was a string or array, this should be the
+ * listener function. Otherwise, this is a string or array of event
+ * type(s).
+ *
+ * @param {Function} [listener]
+ * If the first argument was another evented object, this will be
+ * the listener function.
+ */
+ one: function one() {
+ var _this2 = this;
+
+ for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
+ args[_key2] = arguments[_key2];
+ }
+
+ var _normalizeListenArgs2 = normalizeListenArgs(this, args, 'one'),
+ isTargetingSelf = _normalizeListenArgs2.isTargetingSelf,
+ target = _normalizeListenArgs2.target,
+ type = _normalizeListenArgs2.type,
+ listener = _normalizeListenArgs2.listener; // Targeting this evented object.
+
+
+ if (isTargetingSelf) {
+ listen(target, 'one', type, listener); // Targeting another evented object.
+ } else {
+ // TODO: This wrapper is incorrect! It should only
+ // remove the wrapper for the event type that called it.
+      // Instead all listeners are removed on the first trigger!
+ // see https://github.com/videojs/video.js/issues/5962
+ var wrapper = function wrapper() {
+ _this2.off(target, type, wrapper);
+
+ for (var _len3 = arguments.length, largs = new Array(_len3), _key3 = 0; _key3 < _len3; _key3++) {
+ largs[_key3] = arguments[_key3];
+ }
+
+ listener.apply(null, largs);
+ }; // Use the same function ID as the listener so we can remove it later
+      // using the ID of the original listener.
+
+
+ wrapper.guid = listener.guid;
+ listen(target, 'one', type, wrapper);
+ }
+ },
+
+ /**
+ * Add a listener to an event (or events) on this object or another evented
+   * object. The listener will only be called once, for the first of the events
+   * that is triggered, and then removed.
+ *
+ * @param {string|Array|Element|Object} targetOrType
+ * If this is a string or array, it represents the event type(s)
+ * that will trigger the listener.
+ *
+ * Another evented object can be passed here instead, which will
+ * cause the listener to listen for events on _that_ object.
+ *
+ * In either case, the listener's `this` value will be bound to
+ * this object.
+ *
+ * @param {string|Array|Function} typeOrListener
+ * If the first argument was a string or array, this should be the
+ * listener function. Otherwise, this is a string or array of event
+ * type(s).
+ *
+ * @param {Function} [listener]
+ * If the first argument was another evented object, this will be
+ * the listener function.
+ */
+ any: function any() {
+ var _this3 = this;
+
+ for (var _len4 = arguments.length, args = new Array(_len4), _key4 = 0; _key4 < _len4; _key4++) {
+ args[_key4] = arguments[_key4];
+ }
+
+ var _normalizeListenArgs3 = normalizeListenArgs(this, args, 'any'),
+ isTargetingSelf = _normalizeListenArgs3.isTargetingSelf,
+ target = _normalizeListenArgs3.target,
+ type = _normalizeListenArgs3.type,
+ listener = _normalizeListenArgs3.listener; // Targeting this evented object.
+
+
+ if (isTargetingSelf) {
+ listen(target, 'any', type, listener); // Targeting another evented object.
+ } else {
+ var wrapper = function wrapper() {
+ _this3.off(target, type, wrapper);
+
+ for (var _len5 = arguments.length, largs = new Array(_len5), _key5 = 0; _key5 < _len5; _key5++) {
+ largs[_key5] = arguments[_key5];
+ }
+
+ listener.apply(null, largs);
+ }; // Use the same function ID as the listener so we can remove it later
+      // using the ID of the original listener.
+
+
+ wrapper.guid = listener.guid;
+ listen(target, 'any', type, wrapper);
+ }
+ },
+
+ /**
+ * Removes listener(s) from event(s) on an evented object.
+ *
+ * @param {string|Array|Element|Object} [targetOrType]
+ * If this is a string or array, it represents the event type(s).
+ *
+ * Another evented object can be passed here instead, in which case
+ * ALL 3 arguments are _required_.
+ *
+ * @param {string|Array|Function} [typeOrListener]
+ * If the first argument was a string or array, this may be the
+ * listener function. Otherwise, this is a string or array of event
+ * type(s).
+ *
+ * @param {Function} [listener]
+ * If the first argument was another evented object, this will be
+ * the listener function; otherwise, _all_ listeners bound to the
+ * event type(s) will be removed.
+ */
+ off: function off$1(targetOrType, typeOrListener, listener) {
+ // Targeting this evented object.
+ if (!targetOrType || isValidEventType(targetOrType)) {
+ off(this.eventBusEl_, targetOrType, typeOrListener); // Targeting another evented object.
+ } else {
+ var target = targetOrType;
+ var type = typeOrListener; // Fail fast and in a meaningful way!
+
+ validateTarget(target, this, 'off');
+ validateEventType(type, this, 'off');
+ validateListener(listener, this, 'off'); // Ensure there's at least a guid, even if the function hasn't been used
+
+ listener = bind(this, listener); // Remove the dispose listener on this evented object, which was given
+ // the same guid as the event listener in on().
+
+ this.off('dispose', listener);
+
+ if (target.nodeName) {
+ off(target, type, listener);
+ off(target, 'dispose', listener);
+ } else if (isEvented(target)) {
+ target.off(type, listener);
+ target.off('dispose', listener);
+ }
+ }
+ },
+
+ /**
+ * Fire an event on this evented object, causing its listeners to be called.
+ *
+ * @param {string|Object} event
+ * An event type or an object with a type property.
+ *
+ * @param {Object} [hash]
+ * An additional object to pass along to listeners.
+ *
+ * @return {boolean}
+ * Whether or not the default behavior was prevented.
+ */
+ trigger: function trigger$1(event, hash) {
+ validateTarget(this.eventBusEl_, this, 'trigger');
+ var type = event && typeof event !== 'string' ? event.type : event;
+
+ if (!isValidEventType(type)) {
+ var error = "Invalid event type for " + objName(this) + "#trigger; " + 'must be a non-empty string or object with a type key that has a non-empty value.';
+
+ if (event) {
+ (this.log || log$1).error(error);
+ } else {
+ throw new Error(error);
+ }
+ }
+
+ return trigger(this.eventBusEl_, event, hash);
+ }
+};
+/**
+ * Applies {@link module:evented~EventedMixin|EventedMixin} to a target object.
+ *
+ * @param {Object} target
+ * The object to which to add event methods.
+ *
+ * @param {Object} [options={}]
+ * Options for customizing the mixin behavior.
+ *
+ * @param {string} [options.eventBusKey]
+ *        By default, adds an `eventBusEl_` DOM element to the target object,
+ * which is used as an event bus. If the target object already has a
+ * DOM element that should be used, pass its key here.
+ *
+ * @return {Object}
+ * The target object.
+ */
+
+function evented(target, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ var _options = options,
+ eventBusKey = _options.eventBusKey; // Set or create the eventBusEl_.
+
+ if (eventBusKey) {
+ if (!target[eventBusKey].nodeName) {
+ throw new Error("The eventBusKey \"" + eventBusKey + "\" does not refer to an element.");
+ }
+
+ target.eventBusEl_ = target[eventBusKey];
+ } else {
+ target.eventBusEl_ = createEl('span', {
+ className: 'vjs-event-bus'
+ });
+ }
+
+ assign(target, EventedMixin);
+
+ if (target.eventedCallbacks) {
+ target.eventedCallbacks.forEach(function (callback) {
+ callback();
+ });
+ } // When any evented object is disposed, it removes all its listeners.
+
+
+ target.on('dispose', function () {
+ target.off();
+ [target, target.el_, target.eventBusEl_].forEach(function (val) {
+ if (val && DomData.has(val)) {
+ DomData["delete"](val);
+ }
+ });
+ window$1.setTimeout(function () {
+ target.eventBusEl_ = null;
+ }, 0);
+ });
+ return target;
+}
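+// Illustrative usage sketch (not part of the video.js source): mixing evented into a
+// plain object gives it on/one/any/off/trigger backed by a detached `eventBusEl_`.
+//
+//   var target = evented({});
+//   target.on('example', function () { log$1('example event'); });
+//   target.trigger('example');
+//   target.trigger('dispose');   // removes all listeners and cleans up DomData entries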
+
+/**
+ * @file mixins/stateful.js
+ * @module stateful
+ */
+/**
+ * Contains methods that provide statefulness to an object which is passed
+ * to {@link module:stateful}.
+ *
+ * @mixin StatefulMixin
+ */
+
+var StatefulMixin = {
+ /**
+ * A hash containing arbitrary keys and values representing the state of
+ * the object.
+ *
+ * @type {Object}
+ */
+ state: {},
+
+ /**
+ * Set the state of an object by mutating its
+ * {@link module:stateful~StatefulMixin.state|state} object in place.
+ *
+ * @fires module:stateful~StatefulMixin#statechanged
+ * @param {Object|Function} stateUpdates
+   *        A new set of properties to shallow-merge into the object's state.
+ * Can be a plain object or a function returning a plain object.
+ *
+ * @return {Object|undefined}
+ * An object containing changes that occurred. If no changes
+ * occurred, returns `undefined`.
+ */
+ setState: function setState(stateUpdates) {
+ var _this = this;
+
+ // Support providing the `stateUpdates` state as a function.
+ if (typeof stateUpdates === 'function') {
+ stateUpdates = stateUpdates();
+ }
+
+ var changes;
+ each(stateUpdates, function (value, key) {
+ // Record the change if the value is different from what's in the
+ // current state.
+ if (_this.state[key] !== value) {
+ changes = changes || {};
+ changes[key] = {
+ from: _this.state[key],
+ to: value
+ };
+ }
+
+ _this.state[key] = value;
+    }); // Only trigger "statechanged" if there were changes AND we have a trigger
+ // function. This allows us to not require that the target object be an
+ // evented object.
+
+ if (changes && isEvented(this)) {
+ /**
+ * An event triggered on an object that is both
+ * {@link module:stateful|stateful} and {@link module:evented|evented}
+ * indicating that its state has changed.
+ *
+ * @event module:stateful~StatefulMixin#statechanged
+ * @type {Object}
+ * @property {Object} changes
+ * A hash containing the properties that were changed and
+ * the values they were changed `from` and `to`.
+ */
+ this.trigger({
+ changes: changes,
+ type: 'statechanged'
+ });
+ }
+
+ return changes;
+ }
+};
+/**
+ * Applies {@link module:stateful~StatefulMixin|StatefulMixin} to a target
+ * object.
+ *
+ * If the target object is {@link module:evented|evented} and has a
+ * `handleStateChanged` method, that method will be automatically bound to the
+ * `statechanged` event on itself.
+ *
+ * @param {Object} target
+ * The object to be made stateful.
+ *
+ * @param {Object} [defaultState]
+ * A default set of properties to populate the newly-stateful object's
+ * `state` property.
+ *
+ * @return {Object}
+ * Returns the `target`.
+ */
+
+function stateful(target, defaultState) {
+ assign(target, StatefulMixin); // This happens after the mixing-in because we need to replace the `state`
+ // added in that step.
+
+ target.state = assign({}, target.state, defaultState); // Auto-bind the `handleStateChanged` method of the target object if it exists.
+
+ if (typeof target.handleStateChanged === 'function' && isEvented(target)) {
+ target.on('statechanged', target.handleStateChanged);
+ }
+
+ return target;
+}
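+// Illustrative usage sketch (not part of the video.js source): combining the evented
+// and stateful mixins on a plain object.
+//
+//   var obj = stateful(evented({}), { playing: false });
+//   obj.on('statechanged', function (event) { log$1(event.changes); });
+//   obj.setState({ playing: true });   // -> { playing: { from: false, to: true } }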
+
+/**
+ * @file string-cases.js
+ * @module to-lower-case
+ */
+
+/**
+ * Lowercase the first letter of a string.
+ *
+ * @param {string} string
+ * String to be lowercased
+ *
+ * @return {string}
+ * The string with a lowercased first letter
+ */
+var toLowerCase = function toLowerCase(string) {
+ if (typeof string !== 'string') {
+ return string;
+ }
+
+ return string.replace(/./, function (w) {
+ return w.toLowerCase();
+ });
+};
+/**
+ * Uppercase the first letter of a string.
+ *
+ * @param {string} string
+ * String to be uppercased
+ *
+ * @return {string}
+ * The string with an uppercased first letter
+ */
+
+var toTitleCase$1 = function toTitleCase(string) {
+ if (typeof string !== 'string') {
+ return string;
+ }
+
+ return string.replace(/./, function (w) {
+ return w.toUpperCase();
+ });
+};
+/**
+ * Compares the TitleCase versions of the two strings for equality.
+ *
+ * @param {string} str1
+ * The first string to compare
+ *
+ * @param {string} str2
+ * The second string to compare
+ *
+ * @return {boolean}
+ * Whether the TitleCase versions of the strings are equal
+ */
+
+var titleCaseEquals = function titleCaseEquals(str1, str2) {
+ return toTitleCase$1(str1) === toTitleCase$1(str2);
+};
+
+/**
+ * @file merge-options.js
+ * @module merge-options
+ */
+/**
+ * Merge two objects recursively.
+ *
+ * Performs a deep merge like
+ * {@link https://lodash.com/docs/4.17.10#merge|lodash.merge}, but only merges
+ * plain objects (not arrays, elements, or anything else).
+ *
+ * Non-plain object values will be copied directly from the right-most
+ * argument.
+ *
+ * @static
+ * @param {Object[]} sources
+ * One or more objects to merge into a new object.
+ *
+ * @return {Object}
+ * A new object that is the merged result of all sources.
+ */
+
+function mergeOptions$3() {
+ var result = {};
+
+ for (var _len = arguments.length, sources = new Array(_len), _key = 0; _key < _len; _key++) {
+ sources[_key] = arguments[_key];
+ }
+
+ sources.forEach(function (source) {
+ if (!source) {
+ return;
+ }
+
+ each(source, function (value, key) {
+ if (!isPlain(value)) {
+ result[key] = value;
+ return;
+ }
+
+ if (!isPlain(result[key])) {
+ result[key] = {};
+ }
+
+ result[key] = mergeOptions$3(result[key], value);
+ });
+ });
+ return result;
+}
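+// Illustrative usage sketch (not part of the video.js source): plain objects are
+// merged recursively, while arrays and other values are copied from the right-most
+// source that provides them.
+//
+//   mergeOptions$3({ a: { x: 1 } }, { a: { y: 2 }, b: [3] });
+//   // -> { a: { x: 1, y: 2 }, b: [3] }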
+
+var MapSham = /*#__PURE__*/function () {
+ function MapSham() {
+ this.map_ = {};
+ }
+
+ var _proto = MapSham.prototype;
+
+ _proto.has = function has(key) {
+ return key in this.map_;
+ };
+
+ _proto["delete"] = function _delete(key) {
+ var has = this.has(key);
+ delete this.map_[key];
+ return has;
+ };
+
+ _proto.set = function set(key, value) {
+ this.map_[key] = value;
+ return this;
+ };
+
+ _proto.forEach = function forEach(callback, thisArg) {
+ for (var key in this.map_) {
+ callback.call(thisArg, this.map_[key], key, this);
+ }
+ };
+
+ return MapSham;
+}();
+
+var Map$1 = window$1.Map ? window$1.Map : MapSham;
+
+var SetSham = /*#__PURE__*/function () {
+ function SetSham() {
+ this.set_ = {};
+ }
+
+ var _proto = SetSham.prototype;
+
+ _proto.has = function has(key) {
+ return key in this.set_;
+ };
+
+ _proto["delete"] = function _delete(key) {
+ var has = this.has(key);
+ delete this.set_[key];
+ return has;
+ };
+
+ _proto.add = function add(key) {
+ this.set_[key] = 1;
+ return this;
+ };
+
+ _proto.forEach = function forEach(callback, thisArg) {
+ for (var key in this.set_) {
+ callback.call(thisArg, key, key, this);
+ }
+ };
+
+ return SetSham;
+}();
+
+var Set$1 = window$1.Set ? window$1.Set : SetSham;
+
+/**
+ * Player Component - Base class for all UI objects
+ *
+ * @file component.js
+ */
+/**
+ * Base class for all UI Components.
+ * Components are UI objects which represent both a javascript object and an element
+ * in the DOM. They can be children of other components, and can have
+ * children themselves.
+ *
+ * Components can also use methods from {@link EventTarget}
+ */
+
+var Component$1 = /*#__PURE__*/function () {
+ /**
+ * A callback that is called when a component is ready. Does not have any
+   * parameters and any return value will be ignored.
+ *
+ * @callback Component~ReadyCallback
+ * @this Component
+ */
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of component options.
+ *
+ * @param {Object[]} [options.children]
+   *        An array of children objects to initialize this component with. Children objects have
+ * a name property that will be used if more than one component of the same type needs to be
+ * added.
+ *
+ * @param {string} [options.className]
+   *        A class or space separated list of classes to add to the component
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * Function that gets called when the `Component` is ready.
+ */
+ function Component(player, options, ready) {
+ var _this = this;
+
+ // The component might be the player itself and we can't pass `this` to super
+ if (!player && this.play) {
+ this.player_ = player = this; // eslint-disable-line
+ } else {
+ this.player_ = player;
+ }
+
+ this.isDisposed_ = false; // Hold the reference to the parent component via `addChild` method
+
+ this.parentComponent_ = null; // Make a copy of prototype.options_ to protect against overriding defaults
+
+ this.options_ = mergeOptions$3({}, this.options_); // Updated options with supplied options
+
+ options = this.options_ = mergeOptions$3(this.options_, options); // Get ID from options or options element if one is supplied
+
+ this.id_ = options.id || options.el && options.el.id; // If there was no ID from the options, generate one
+
+ if (!this.id_) {
+ // Don't require the player ID function in the case of mock players
+ var id = player && player.id && player.id() || 'no_player';
+ this.id_ = id + "_component_" + newGUID();
+ }
+
+ this.name_ = options.name || null; // Create element if one wasn't provided in options
+
+ if (options.el) {
+ this.el_ = options.el;
+ } else if (options.createEl !== false) {
+ this.el_ = this.createEl();
+ }
+
+ if (options.className && this.el_) {
+ options.className.split(' ').forEach(function (c) {
+ return _this.addClass(c);
+ });
+    } // if evented is anything except false, we want to mix in evented
+
+
+ if (options.evented !== false) {
+ // Make this an evented object and use `el_`, if available, as its event bus
+ evented(this, {
+ eventBusKey: this.el_ ? 'el_' : null
+ });
+ this.handleLanguagechange = this.handleLanguagechange.bind(this);
+ this.on(this.player_, 'languagechange', this.handleLanguagechange);
+ }
+
+ stateful(this, this.constructor.defaultState);
+ this.children_ = [];
+ this.childIndex_ = {};
+ this.childNameIndex_ = {};
+ this.setTimeoutIds_ = new Set$1();
+ this.setIntervalIds_ = new Set$1();
+ this.rafIds_ = new Set$1();
+ this.namedRafs_ = new Map$1();
+ this.clearingTimersOnDispose_ = false; // Add any child components in options
+
+ if (options.initChildren !== false) {
+ this.initChildren();
+ } // Don't want to trigger ready here or it will go before init is actually
+ // finished for all children that run this constructor
+
+
+ this.ready(ready);
+
+ if (options.reportTouchActivity !== false) {
+ this.enableTouchActivity();
+ }
+ }
+ /**
+ * Dispose of the `Component` and all child components.
+ *
+ * @fires Component#dispose
+ *
+ * @param {Object} options
+ * @param {Element} options.originalEl element with which to replace player element
+ */
+
+
+ var _proto = Component.prototype;
+
+ _proto.dispose = function dispose(options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ // Bail out if the component has already been disposed.
+ if (this.isDisposed_) {
+ return;
+ }
+
+ if (this.readyQueue_) {
+ this.readyQueue_.length = 0;
+ }
+ /**
+ * Triggered when a `Component` is disposed.
+ *
+ * @event Component#dispose
+ * @type {EventTarget~Event}
+ *
+ * @property {boolean} [bubbles=false]
+ * set to false so that the dispose event does not
+ * bubble up
+ */
+
+
+ this.trigger({
+ type: 'dispose',
+ bubbles: false
+ });
+ this.isDisposed_ = true; // Dispose all children.
+
+ if (this.children_) {
+ for (var i = this.children_.length - 1; i >= 0; i--) {
+ if (this.children_[i].dispose) {
+ this.children_[i].dispose();
+ }
+ }
+ } // Delete child references
+
+
+ this.children_ = null;
+ this.childIndex_ = null;
+ this.childNameIndex_ = null;
+ this.parentComponent_ = null;
+
+ if (this.el_) {
+ // Remove element from DOM
+ if (this.el_.parentNode) {
+ if (options.restoreEl) {
+ this.el_.parentNode.replaceChild(options.restoreEl, this.el_);
+ } else {
+ this.el_.parentNode.removeChild(this.el_);
+ }
+ }
+
+ this.el_ = null;
+ } // remove reference to the player after disposing of the element
+
+
+ this.player_ = null;
+ }
+ /**
+ * Determine whether or not this component has been disposed.
+ *
+ * @return {boolean}
+ * If the component has been disposed, will be `true`. Otherwise, `false`.
+ */
+ ;
+
+ _proto.isDisposed = function isDisposed() {
+ return Boolean(this.isDisposed_);
+ }
+ /**
+ * Return the {@link Player} that the `Component` has attached to.
+ *
+ * @return {Player}
+ * The player that this `Component` has attached to.
+ */
+ ;
+
+ _proto.player = function player() {
+ return this.player_;
+ }
+ /**
+ * Deep merge of options objects with new options.
+   * > Note: When both `obj` and `this.options_` contain properties whose values are objects,
+   * the two properties get merged using {@link module:mergeOptions}.
+ *
+ * @param {Object} obj
+ * The object that contains new options.
+ *
+ * @return {Object}
+ * A new object of `this.options_` and `obj` merged together.
+ */
+ ;
+
+ _proto.options = function options(obj) {
+ if (!obj) {
+ return this.options_;
+ }
+
+ this.options_ = mergeOptions$3(this.options_, obj);
+ return this.options_;
+ }
+ /**
+ * Get the `Component`s DOM element
+ *
+ * @return {Element}
+ * The DOM element for this `Component`.
+ */
+ ;
+
+ _proto.el = function el() {
+ return this.el_;
+ }
+ /**
+ * Create the `Component`s DOM element.
+ *
+ * @param {string} [tagName]
+ * Element's DOM node type. e.g. 'div'
+ *
+ * @param {Object} [properties]
+ * An object of properties that should be set.
+ *
+ * @param {Object} [attributes]
+ * An object of attributes that should be set.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+ ;
+
+ _proto.createEl = function createEl$1(tagName, properties, attributes) {
+ return createEl(tagName, properties, attributes);
+ }
+ /**
+   * Localize a string given the string in English.
+ *
+ * If tokens are provided, it'll try and run a simple token replacement on the provided string.
+ * The tokens it looks for look like `{1}` with the index being 1-indexed into the tokens array.
+ *
+ * If a `defaultValue` is provided, it'll use that over `string`,
+ * if a value isn't found in provided language files.
+ * This is useful if you want to have a descriptive key for token replacement
+ * but have a succinct localized string and not require `en.json` to be included.
+ *
+ * Currently, it is used for the progress bar timing.
+ * ```js
+ * {
+ * "progress bar timing: currentTime={1} duration={2}": "{1} of {2}"
+ * }
+ * ```
+ * It is then used like so:
+ * ```js
+   * this.localize('progress bar timing: currentTime={1} duration={2}',
+ * [this.player_.currentTime(), this.player_.duration()],
+ * '{1} of {2}');
+ * ```
+ *
+ * Which outputs something like: `01:23 of 24:56`.
+ *
+ *
+ * @param {string} string
+ * The string to localize and the key to lookup in the language files.
+ * @param {string[]} [tokens]
+ * If the current item has token replacements, provide the tokens here.
+ * @param {string} [defaultValue]
+ * Defaults to `string`. Can be a default value to use for token replacement
+   *        if the lookup key needs to be separate.
+ *
+ * @return {string}
+   *         The localized string or, if no localization exists, the English string.
+ */
+ ;
+
+ _proto.localize = function localize(string, tokens, defaultValue) {
+ if (defaultValue === void 0) {
+ defaultValue = string;
+ }
+
+ var code = this.player_.language && this.player_.language();
+ var languages = this.player_.languages && this.player_.languages();
+ var language = languages && languages[code];
+ var primaryCode = code && code.split('-')[0];
+ var primaryLang = languages && languages[primaryCode];
+ var localizedString = defaultValue;
+
+ if (language && language[string]) {
+ localizedString = language[string];
+ } else if (primaryLang && primaryLang[string]) {
+ localizedString = primaryLang[string];
+ }
+
+ if (tokens) {
+ localizedString = localizedString.replace(/\{(\d+)\}/g, function (match, index) {
+ var value = tokens[index - 1];
+ var ret = value;
+
+ if (typeof value === 'undefined') {
+ ret = match;
+ }
+
+ return ret;
+ });
+ }
+
+ return localizedString;
+ }
+ /**
+   * Handles language change for the player in components. Should be overridden by sub-components.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.handleLanguagechange = function handleLanguagechange() {}
+ /**
+ * Return the `Component`s DOM element. This is where children get inserted.
+   * This will usually be the same as the element returned in {@link Component#el}.
+ *
+ * @return {Element}
+ * The content element for this `Component`.
+ */
+ ;
+
+ _proto.contentEl = function contentEl() {
+ return this.contentEl_ || this.el_;
+ }
+ /**
+ * Get this `Component`s ID
+ *
+ * @return {string}
+ * The id of this `Component`
+ */
+ ;
+
+ _proto.id = function id() {
+ return this.id_;
+ }
+ /**
+ * Get the `Component`s name. The name gets used to reference the `Component`
+ * and is set during registration.
+ *
+ * @return {string}
+ * The name of this `Component`.
+ */
+ ;
+
+ _proto.name = function name() {
+ return this.name_;
+ }
+ /**
+ * Get an array of all child components
+ *
+ * @return {Array}
+ * The children
+ */
+ ;
+
+ _proto.children = function children() {
+ return this.children_;
+ }
+ /**
+ * Returns the child `Component` with the given `id`.
+ *
+ * @param {string} id
+ * The id of the child `Component` to get.
+ *
+ * @return {Component|undefined}
+ * The child `Component` with the given `id` or undefined.
+ */
+ ;
+
+ _proto.getChildById = function getChildById(id) {
+ return this.childIndex_[id];
+ }
+ /**
+ * Returns the child `Component` with the given `name`.
+ *
+ * @param {string} name
+ * The name of the child `Component` to get.
+ *
+ * @return {Component|undefined}
+ * The child `Component` with the given `name` or undefined.
+ */
+ ;
+
+ _proto.getChild = function getChild(name) {
+ if (!name) {
+ return;
+ }
+
+ return this.childNameIndex_[name];
+ }
+ /**
+   * Returns the descendant `Component` following the given
+ * descendant `names`. For instance ['foo', 'bar', 'baz'] would
+ * try to get 'foo' on the current component, 'bar' on the 'foo'
+ * component and 'baz' on the 'bar' component and return undefined
+ * if any of those don't exist.
+ *
+ * @param {...string[]|...string} names
+ * The name of the child `Component` to get.
+ *
+ * @return {Component|undefined}
+ * The descendant `Component` following the given descendant
+ * `names` or undefined.
+ */
+ ;
+
+ _proto.getDescendant = function getDescendant() {
+ for (var _len = arguments.length, names = new Array(_len), _key = 0; _key < _len; _key++) {
+ names[_key] = arguments[_key];
+ }
+
+ // flatten array argument into the main array
+ names = names.reduce(function (acc, n) {
+ return acc.concat(n);
+ }, []);
+ var currentChild = this;
+
+ for (var i = 0; i < names.length; i++) {
+ currentChild = currentChild.getChild(names[i]);
+
+ if (!currentChild || !currentChild.getChild) {
+ return;
+ }
+ }
+
+ return currentChild;
+ }
+ /**
+ * Add a child `Component` inside the current `Component`.
+ *
+ *
+ * @param {string|Component} child
+ * The name or instance of a child to add.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of options that will get passed to children of
+ * the child.
+ *
+ * @param {number} [index=this.children_.length]
+ * The index to attempt to add a child into.
+ *
+ * @return {Component}
+ * The `Component` that gets added as a child. When using a string the
+ * `Component` will get created by this process.
+ */
+ ;
+
+ _proto.addChild = function addChild(child, options, index) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ if (index === void 0) {
+ index = this.children_.length;
+ }
+
+ var component;
+ var componentName; // If child is a string, create component with options
+
+ if (typeof child === 'string') {
+ componentName = toTitleCase$1(child);
+ var componentClassName = options.componentClass || componentName; // Set name through options
+
+ options.name = componentName; // Create a new object & element for this controls set
+ // If there's no .player_, this is a player
+
+ var ComponentClass = Component.getComponent(componentClassName);
+
+ if (!ComponentClass) {
+ throw new Error("Component " + componentClassName + " does not exist");
+ } // data stored directly on the videojs object may be
+ // misidentified as a component to retain
+      // backwards-compatibility with 4.x. Check to make sure the
+ // component class can be instantiated.
+
+
+ if (typeof ComponentClass !== 'function') {
+ return null;
+ }
+
+ component = new ComponentClass(this.player_ || this, options); // child is a component instance
+ } else {
+ component = child;
+ }
+
+ if (component.parentComponent_) {
+ component.parentComponent_.removeChild(component);
+ }
+
+ this.children_.splice(index, 0, component);
+ component.parentComponent_ = this;
+
+ if (typeof component.id === 'function') {
+ this.childIndex_[component.id()] = component;
+ } // If a name wasn't used to create the component, check if we can use the
+ // name function of the component
+
+
+ componentName = componentName || component.name && toTitleCase$1(component.name());
+
+ if (componentName) {
+ this.childNameIndex_[componentName] = component;
+ this.childNameIndex_[toLowerCase(componentName)] = component;
+ } // Add the UI object's element to the container div (box)
+ // Having an element is not required
+
+
+ if (typeof component.el === 'function' && component.el()) {
+ // If inserting before a component, insert before that component's element
+ var refNode = null;
+
+ if (this.children_[index + 1]) {
+ // Most children are components, but the video tech is an HTML element
+ if (this.children_[index + 1].el_) {
+ refNode = this.children_[index + 1].el_;
+ } else if (isEl(this.children_[index + 1])) {
+ refNode = this.children_[index + 1];
+ }
+ }
+
+ this.contentEl().insertBefore(component.el(), refNode);
+    } // Return so it can be stored on the parent object if desired.
+
+
+ return component;
+ }
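+  // Editor's note (illustrative; not part of the upstream video.js source): a quick
+  // sketch of how addChild is typically called from a parent component. The name
+  // 'ExamplePanel' is hypothetical and would need to be registered first.
+  //
+  //   var panel = parent.addChild('ExamplePanel', { className: 'vjs-example' });
+  //   // ...or pass an existing Component instance, optionally at a specific index:
+  //   parent.addChild(existingChild, {}, 0);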
+ /**
+ * Remove a child `Component` from this `Component`s list of children. Also removes
+ * the child `Component`s element from this `Component`s element.
+ *
+ * @param {Component} component
+ * The child `Component` to remove.
+ */
+ ;
+
+ _proto.removeChild = function removeChild(component) {
+ if (typeof component === 'string') {
+ component = this.getChild(component);
+ }
+
+ if (!component || !this.children_) {
+ return;
+ }
+
+ var childFound = false;
+
+ for (var i = this.children_.length - 1; i >= 0; i--) {
+ if (this.children_[i] === component) {
+ childFound = true;
+ this.children_.splice(i, 1);
+ break;
+ }
+ }
+
+ if (!childFound) {
+ return;
+ }
+
+ component.parentComponent_ = null;
+ this.childIndex_[component.id()] = null;
+ this.childNameIndex_[toTitleCase$1(component.name())] = null;
+ this.childNameIndex_[toLowerCase(component.name())] = null;
+ var compEl = component.el();
+
+ if (compEl && compEl.parentNode === this.contentEl()) {
+ this.contentEl().removeChild(component.el());
+ }
+ }
+ /**
+ * Add and initialize default child `Component`s based upon options.
+ */
+ ;
+
+ _proto.initChildren = function initChildren() {
+ var _this2 = this;
+
+ var children = this.options_.children;
+
+ if (children) {
+ // `this` is `parent`
+ var parentOptions = this.options_;
+
+ var handleAdd = function handleAdd(child) {
+ var name = child.name;
+ var opts = child.opts; // Allow options for children to be set at the parent options
+ // e.g. videojs(id, { controlBar: false });
+        // instead of videojs(id, { children: { controlBar: false } });
+
+ if (parentOptions[name] !== undefined) {
+ opts = parentOptions[name];
+ } // Allow for disabling default components
+ // e.g. options['children']['posterImage'] = false
+
+
+ if (opts === false) {
+ return;
+ } // Allow options to be passed as a simple boolean if no configuration
+ // is necessary.
+
+
+ if (opts === true) {
+ opts = {};
+ } // We also want to pass the original player options
+ // to each component as well so they don't need to
+ // reach back into the player for options later.
+
+
+ opts.playerOptions = _this2.options_.playerOptions; // Create and add the child component.
+ // Add a direct reference to the child by name on the parent instance.
+ // If two of the same component are used, different names should be supplied
+ // for each
+
+ var newChild = _this2.addChild(name, opts);
+
+ if (newChild) {
+ _this2[name] = newChild;
+ }
+      }; // Allow for an array of children details to be passed in the options
+
+
+ var workingChildren;
+ var Tech = Component.getComponent('Tech');
+
+ if (Array.isArray(children)) {
+ workingChildren = children;
+ } else {
+ workingChildren = Object.keys(children);
+ }
+
+ workingChildren // children that are in this.options_ but also in workingChildren would
+ // give us extra children we do not want. So, we want to filter them out.
+ .concat(Object.keys(this.options_).filter(function (child) {
+ return !workingChildren.some(function (wchild) {
+ if (typeof wchild === 'string') {
+ return child === wchild;
+ }
+
+ return child === wchild.name;
+ });
+ })).map(function (child) {
+ var name;
+ var opts;
+
+ if (typeof child === 'string') {
+ name = child;
+ opts = children[name] || _this2.options_[name] || {};
+ } else {
+ name = child.name;
+ opts = child;
+ }
+
+ return {
+ name: name,
+ opts: opts
+ };
+ }).filter(function (child) {
+ // we have to make sure that child.name isn't in the techOrder since
+        // techs are registered as Components but aren't compatible
+ // See https://github.com/videojs/video.js/issues/2772
+ var c = Component.getComponent(child.opts.componentClass || toTitleCase$1(child.name));
+ return c && !Tech.isTech(c);
+ }).forEach(handleAdd);
+ }
+ }
+ /**
+   * Builds the default DOM class name. Should be overridden by sub-components.
+ *
+ * @return {string}
+ * The DOM class name for this object.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ // Child classes can include a function that does:
+ // return 'CLASS NAME' + this._super();
+ return '';
+ }
+ /**
+ * Bind a listener to the component's ready state.
+ * Different from event listeners in that if the ready event has already happened
+ * it will trigger the function immediately.
+ *
+ * @return {Component}
+ * Returns itself; method can be chained.
+ */
+ ;
+
+ _proto.ready = function ready(fn, sync) {
+ if (sync === void 0) {
+ sync = false;
+ }
+
+ if (!fn) {
+ return;
+ }
+
+ if (!this.isReady_) {
+ this.readyQueue_ = this.readyQueue_ || [];
+ this.readyQueue_.push(fn);
+ return;
+ }
+
+ if (sync) {
+ fn.call(this);
+ } else {
+ // Call the function asynchronously by default for consistency
+ this.setTimeout(fn, 1);
+ }
+ }
+ /**
+ * Trigger all the ready listeners for this `Component`.
+ *
+ * @fires Component#ready
+ */
+ ;
+
+ _proto.triggerReady = function triggerReady() {
+ this.isReady_ = true; // Ensure ready is triggered asynchronously
+
+ this.setTimeout(function () {
+ var readyQueue = this.readyQueue_; // Reset Ready Queue
+
+ this.readyQueue_ = [];
+
+ if (readyQueue && readyQueue.length > 0) {
+ readyQueue.forEach(function (fn) {
+ fn.call(this);
+ }, this);
+ } // Allow for using event listeners also
+
+ /**
+ * Triggered when a `Component` is ready.
+ *
+ * @event Component#ready
+ * @type {EventTarget~Event}
+ */
+
+
+ this.trigger('ready');
+ }, 1);
+ }
+ /**
+ * Find a single DOM element matching a `selector`. This can be within the `Component`s
+ * `contentEl()` or another custom context.
+ *
+ * @param {string} selector
+ * A valid CSS selector, which will be passed to `querySelector`.
+ *
+ * @param {Element|string} [context=this.contentEl()]
+ * A DOM element within which to query. Can also be a selector string in
+ * which case the first matching element will get used as context. If
+ * missing `this.contentEl()` gets used. If `this.contentEl()` returns
+ * nothing it falls back to `document`.
+ *
+ * @return {Element|null}
+ * the dom element that was found, or null
+ *
+ * @see [Information on CSS Selectors](https://developer.mozilla.org/en-US/docs/Web/Guide/CSS/Getting_Started/Selectors)
+ */
+ ;
+
+ _proto.$ = function $$1(selector, context) {
+ return $(selector, context || this.contentEl());
+ }
+ /**
+   * Finds all DOM elements matching a `selector`. This can be within the `Component`s
+ * `contentEl()` or another custom context.
+ *
+ * @param {string} selector
+ * A valid CSS selector, which will be passed to `querySelectorAll`.
+ *
+ * @param {Element|string} [context=this.contentEl()]
+ * A DOM element within which to query. Can also be a selector string in
+ * which case the first matching element will get used as context. If
+ * missing `this.contentEl()` gets used. If `this.contentEl()` returns
+ * nothing it falls back to `document`.
+ *
+ * @return {NodeList}
+ * a list of dom elements that were found
+ *
+ * @see [Information on CSS Selectors](https://developer.mozilla.org/en-US/docs/Web/Guide/CSS/Getting_Started/Selectors)
+ */
+ ;
+
+ _proto.$$ = function $$$1(selector, context) {
+ return $$(selector, context || this.contentEl());
+ }
+ /**
+ * Check if a component's element has a CSS class name.
+ *
+ * @param {string} classToCheck
+ * CSS class name to check.
+ *
+ * @return {boolean}
+ * - True if the `Component` has the class.
+   *         - False if the `Component` does not have the class.
+ */
+ ;
+
+ _proto.hasClass = function hasClass$1(classToCheck) {
+ return hasClass(this.el_, classToCheck);
+ }
+ /**
+ * Add a CSS class name to the `Component`s element.
+ *
+ * @param {string} classToAdd
+ * CSS class name to add
+ */
+ ;
+
+ _proto.addClass = function addClass$1(classToAdd) {
+ addClass(this.el_, classToAdd);
+ }
+ /**
+ * Remove a CSS class name from the `Component`s element.
+ *
+ * @param {string} classToRemove
+ * CSS class name to remove
+ */
+ ;
+
+ _proto.removeClass = function removeClass$1(classToRemove) {
+ removeClass(this.el_, classToRemove);
+ }
+ /**
+ * Add or remove a CSS class name from the component's element.
+ * - `classToToggle` gets added when {@link Component#hasClass} would return false.
+ * - `classToToggle` gets removed when {@link Component#hasClass} would return true.
+ *
+ * @param {string} classToToggle
+   *        The class to add or remove, based on {@link Component#hasClass}
+ *
+ * @param {boolean|Dom~predicate} [predicate]
+ * An {@link Dom~predicate} function or a boolean
+ */
+ ;
+
+ _proto.toggleClass = function toggleClass$1(classToToggle, predicate) {
+ toggleClass(this.el_, classToToggle, predicate);
+ }
+ /**
+ * Show the `Component`s element if it is hidden by removing the
+ * 'vjs-hidden' class name from it.
+ */
+ ;
+
+ _proto.show = function show() {
+ this.removeClass('vjs-hidden');
+ }
+ /**
+ * Hide the `Component`s element if it is currently showing by adding the
+   * 'vjs-hidden' class name to it.
+ */
+ ;
+
+ _proto.hide = function hide() {
+ this.addClass('vjs-hidden');
+ }
+ /**
+ * Lock a `Component`s element in its visible state by adding the 'vjs-lock-showing'
+ * class name to it. Used during fadeIn/fadeOut.
+ *
+ * @private
+ */
+ ;
+
+ _proto.lockShowing = function lockShowing() {
+ this.addClass('vjs-lock-showing');
+ }
+ /**
+ * Unlock a `Component`s element from its visible state by removing the 'vjs-lock-showing'
+ * class name from it. Used during fadeIn/fadeOut.
+ *
+ * @private
+ */
+ ;
+
+ _proto.unlockShowing = function unlockShowing() {
+ this.removeClass('vjs-lock-showing');
+ }
+ /**
+ * Get the value of an attribute on the `Component`s element.
+ *
+ * @param {string} attribute
+ * Name of the attribute to get the value from.
+ *
+ * @return {string|null}
+ * - The value of the attribute that was asked for.
+ * - Can be an empty string on some browsers if the attribute does not exist
+ * or has no value
+   *         - Most browsers will return null if the attribute does not exist or has
+ * no value.
+ *
+ * @see [DOM API]{@link https://developer.mozilla.org/en-US/docs/Web/API/Element/getAttribute}
+ */
+ ;
+
+ _proto.getAttribute = function getAttribute$1(attribute) {
+ return getAttribute(this.el_, attribute);
+ }
+ /**
+ * Set the value of an attribute on the `Component`'s element
+ *
+ * @param {string} attribute
+ * Name of the attribute to set.
+ *
+ * @param {string} value
+ * Value to set the attribute to.
+ *
+ * @see [DOM API]{@link https://developer.mozilla.org/en-US/docs/Web/API/Element/setAttribute}
+ */
+ ;
+
+ _proto.setAttribute = function setAttribute$1(attribute, value) {
+ setAttribute(this.el_, attribute, value);
+ }
+ /**
+ * Remove an attribute from the `Component`s element.
+ *
+ * @param {string} attribute
+ * Name of the attribute to remove.
+ *
+ * @see [DOM API]{@link https://developer.mozilla.org/en-US/docs/Web/API/Element/removeAttribute}
+ */
+ ;
+
+ _proto.removeAttribute = function removeAttribute$1(attribute) {
+ removeAttribute(this.el_, attribute);
+ }
+ /**
+ * Get or set the width of the component based upon the CSS styles.
+ * See {@link Component#dimension} for more detailed information.
+ *
+ * @param {number|string} [num]
+ * The width that you want to set postfixed with '%', 'px' or nothing.
+ *
+ * @param {boolean} [skipListeners]
+ * Skip the componentresize event trigger
+ *
+ * @return {number|string}
+   *         The width when getting, zero if there is no width. Can be a string
+   *         postfixed with '%' or 'px'.
+ */
+ ;
+
+ _proto.width = function width(num, skipListeners) {
+ return this.dimension('width', num, skipListeners);
+ }
+ /**
+ * Get or set the height of the component based upon the CSS styles.
+ * See {@link Component#dimension} for more detailed information.
+ *
+ * @param {number|string} [num]
+ * The height that you want to set postfixed with '%', 'px' or nothing.
+ *
+ * @param {boolean} [skipListeners]
+ * Skip the componentresize event trigger
+ *
+ * @return {number|string}
+   *         The height when getting, zero if there is no height. Can be a string
+   *         postfixed with '%' or 'px'.
+ */
+ ;
+
+ _proto.height = function height(num, skipListeners) {
+ return this.dimension('height', num, skipListeners);
+ }
+ /**
+ * Set both the width and height of the `Component` element at the same time.
+ *
+ * @param {number|string} width
+ * Width to set the `Component`s element to.
+ *
+ * @param {number|string} height
+ * Height to set the `Component`s element to.
+ */
+ ;
+
+ _proto.dimensions = function dimensions(width, height) {
+ // Skip componentresize listeners on width for optimization
+ this.width(width, true);
+ this.height(height);
+ }
+ /**
+ * Get or set width or height of the `Component` element. This is the shared code
+ * for the {@link Component#width} and {@link Component#height}.
+ *
+ * Things to know:
+   * - If the width or height is a number this will return the number postfixed with 'px'.
+ * - If the width/height is a percent this will return the percent postfixed with '%'
+ * - Hidden elements have a width of 0 with `window.getComputedStyle`. This function
+ * defaults to the `Component`s `style.width` and falls back to `window.getComputedStyle`.
+ * See [this]{@link http://www.foliotek.com/devblog/getting-the-width-of-a-hidden-element-with-jquery-using-width/}
+ * for more information
+ * - If you want the computed style of the component, use {@link Component#currentWidth}
+   *   and {@link Component#currentHeight}
+ *
+ * @fires Component#componentresize
+ *
+   * @param {string} widthOrHeight
+   *        'width' or 'height'
+   *
+   * @param {number|string} [num]
+   *        New dimension
+ *
+ * @param {boolean} [skipListeners]
+ * Skip componentresize event trigger
+ *
+ * @return {number}
+ * The dimension when getting or 0 if unset
+ */
+ ;
+
+ _proto.dimension = function dimension(widthOrHeight, num, skipListeners) {
+ if (num !== undefined) {
+ // Set to zero if null or literally NaN (NaN !== NaN)
+ if (num === null || num !== num) {
+ num = 0;
+ } // Check if using css width/height (% or px) and adjust
+
+
+ if (('' + num).indexOf('%') !== -1 || ('' + num).indexOf('px') !== -1) {
+ this.el_.style[widthOrHeight] = num;
+ } else if (num === 'auto') {
+ this.el_.style[widthOrHeight] = '';
+ } else {
+ this.el_.style[widthOrHeight] = num + 'px';
+ } // skipListeners allows us to avoid triggering the resize event when setting both width and height
+
+
+ if (!skipListeners) {
+ /**
+ * Triggered when a component is resized.
+ *
+ * @event Component#componentresize
+ * @type {EventTarget~Event}
+ */
+ this.trigger('componentresize');
+ }
+
+ return;
+ } // Not setting a value, so getting it
+ // Make sure element exists
+
+
+ if (!this.el_) {
+ return 0;
+ } // Get dimension value from style
+
+
+ var val = this.el_.style[widthOrHeight];
+ var pxIndex = val.indexOf('px');
+
+ if (pxIndex !== -1) {
+ // Return the pixel value with no 'px'
+ return parseInt(val.slice(0, pxIndex), 10);
+ } // No px so using % or no style was set, so falling back to offsetWidth/height
+ // If component has display:none, offset will return 0
+ // TODO: handle display:none and no dimension style using px
+
+
+ return parseInt(this.el_['offset' + toTitleCase$1(widthOrHeight)], 10);
+ }
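+  // Editor's note (illustrative; not part of the upstream video.js source): dimension()
+  // both sets and gets depending on whether num is supplied.
+  //
+  //   comp.dimension('width', 300);    // sets el.style.width = '300px'
+  //   comp.dimension('width');         // returns 300 (parsed from the 'px' style)
+  //   comp.dimension('width', '50%');  // '%' and 'px' strings are passed through as-is
+  //   comp.dimension('width');         // no 'px' in the style, so falls back to offsetWidth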
+ /**
+ * Get the computed width or the height of the component's element.
+ *
+ * Uses `window.getComputedStyle`.
+ *
+ * @param {string} widthOrHeight
+ * A string containing 'width' or 'height'. Whichever one you want to get.
+ *
+ * @return {number}
+ * The dimension that gets asked for or 0 if nothing was set
+ * for that dimension.
+ */
+ ;
+
+ _proto.currentDimension = function currentDimension(widthOrHeight) {
+ var computedWidthOrHeight = 0;
+
+ if (widthOrHeight !== 'width' && widthOrHeight !== 'height') {
+ throw new Error('currentDimension only accepts width or height value');
+ }
+
+ computedWidthOrHeight = computedStyle(this.el_, widthOrHeight); // remove 'px' from variable and parse as integer
+
+ computedWidthOrHeight = parseFloat(computedWidthOrHeight); // if the computed value is still 0, it's possible that the browser is lying
+ // and we want to check the offset values.
+ // This code also runs wherever getComputedStyle doesn't exist.
+
+ if (computedWidthOrHeight === 0 || isNaN(computedWidthOrHeight)) {
+ var rule = "offset" + toTitleCase$1(widthOrHeight);
+ computedWidthOrHeight = this.el_[rule];
+ }
+
+ return computedWidthOrHeight;
+ }
+ /**
+ * An object that contains width and height values of the `Component`s
+ * computed style. Uses `window.getComputedStyle`.
+ *
+ * @typedef {Object} Component~DimensionObject
+ *
+ * @property {number} width
+ * The width of the `Component`s computed style.
+ *
+ * @property {number} height
+ * The height of the `Component`s computed style.
+ */
+
+ /**
+ * Get an object that contains computed width and height values of the
+ * component's element.
+ *
+ * Uses `window.getComputedStyle`.
+ *
+ * @return {Component~DimensionObject}
+ * The computed dimensions of the component's element.
+ */
+ ;
+
+ _proto.currentDimensions = function currentDimensions() {
+ return {
+ width: this.currentDimension('width'),
+ height: this.currentDimension('height')
+ };
+ }
+ /**
+ * Get the computed width of the component's element.
+ *
+ * Uses `window.getComputedStyle`.
+ *
+ * @return {number}
+ * The computed width of the component's element.
+ */
+ ;
+
+ _proto.currentWidth = function currentWidth() {
+ return this.currentDimension('width');
+ }
+ /**
+ * Get the computed height of the component's element.
+ *
+ * Uses `window.getComputedStyle`.
+ *
+ * @return {number}
+ * The computed height of the component's element.
+ */
+ ;
+
+ _proto.currentHeight = function currentHeight() {
+ return this.currentDimension('height');
+ }
+ /**
+ * Set the focus to this component
+ */
+ ;
+
+ _proto.focus = function focus() {
+ this.el_.focus();
+ }
+ /**
+ * Remove the focus from this component
+ */
+ ;
+
+ _proto.blur = function blur() {
+ this.el_.blur();
+ }
+ /**
+ * When this Component receives a `keydown` event which it does not process,
+ * it passes the event to the Player for handling.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ if (this.player_) {
+ // We only stop propagation here because we want unhandled events to fall
+ // back to the browser. Exclude Tab for focus trapping.
+ if (!keycode.isEventKey(event, 'Tab')) {
+ event.stopPropagation();
+ }
+
+ this.player_.handleKeyDown(event);
+ }
+ }
+ /**
+ * Many components used to have a `handleKeyPress` method, which was poorly
+ * named because it listened to a `keydown` event. This method name now
+ * delegates to `handleKeyDown`. This means anyone calling `handleKeyPress`
+ * will not see their method calls stop working.
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to be called.
+ */
+ ;
+
+ _proto.handleKeyPress = function handleKeyPress(event) {
+ this.handleKeyDown(event);
+ }
+ /**
+   * Emit 'tap' events when touch event support gets detected. This gets used to
+   * support toggling the controls through a tap on the video. Tap events only get
+   * enabled on demand, because emitting them from every sub-component would add
+   * extra overhead otherwise.
+ *
+ * @private
+ * @fires Component#tap
+ * @listens Component#touchstart
+ * @listens Component#touchmove
+ * @listens Component#touchleave
+ * @listens Component#touchcancel
+ * @listens Component#touchend
+ */
+ ;
+
+ _proto.emitTapEvents = function emitTapEvents() {
+ // Track the start time so we can determine how long the touch lasted
+ var touchStart = 0;
+ var firstTouch = null; // Maximum movement allowed during a touch event to still be considered a tap
+ // Other popular libs use anywhere from 2 (hammer.js) to 15,
+ // so 10 seems like a nice, round number.
+
+ var tapMovementThreshold = 10; // The maximum length a touch can be while still being considered a tap
+
+ var touchTimeThreshold = 200;
+ var couldBeTap;
+ this.on('touchstart', function (event) {
+ // If more than one finger, don't consider treating this as a click
+ if (event.touches.length === 1) {
+ // Copy pageX/pageY from the object
+ firstTouch = {
+ pageX: event.touches[0].pageX,
+ pageY: event.touches[0].pageY
+ }; // Record start time so we can detect a tap vs. "touch and hold"
+
+ touchStart = window$1.performance.now(); // Reset couldBeTap tracking
+
+ couldBeTap = true;
+ }
+ });
+ this.on('touchmove', function (event) {
+ // If more than one finger, don't consider treating this as a click
+ if (event.touches.length > 1) {
+ couldBeTap = false;
+ } else if (firstTouch) {
+ // Some devices will throw touchmoves for all but the slightest of taps.
+ // So, if we moved only a small distance, this could still be a tap
+ var xdiff = event.touches[0].pageX - firstTouch.pageX;
+ var ydiff = event.touches[0].pageY - firstTouch.pageY;
+ var touchDistance = Math.sqrt(xdiff * xdiff + ydiff * ydiff);
+
+ if (touchDistance > tapMovementThreshold) {
+ couldBeTap = false;
+ }
+ }
+ });
+
+ var noTap = function noTap() {
+ couldBeTap = false;
+ }; // TODO: Listen to the original target. http://youtu.be/DujfpXOKUp8?t=13m8s
+
+
+ this.on('touchleave', noTap);
+ this.on('touchcancel', noTap); // When the touch ends, measure how long it took and trigger the appropriate
+ // event
+
+ this.on('touchend', function (event) {
+ firstTouch = null; // Proceed only if the touchmove/leave/cancel event didn't happen
+
+ if (couldBeTap === true) {
+ // Measure how long the touch lasted
+ var touchTime = window$1.performance.now() - touchStart; // Make sure the touch was less than the threshold to be considered a tap
+
+ if (touchTime < touchTimeThreshold) {
+ // Don't let browser turn this into a click
+ event.preventDefault();
+ /**
+ * Triggered when a `Component` is tapped.
+ *
+ * @event Component#tap
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('tap'); // It may be good to copy the touchend event object and change the
+ // type to tap, if the other event properties aren't exact after
+ // Events.fixEvent runs (e.g. event.target)
+ }
+ }
+ });
+ }
+ /**
+ * This function reports user activity whenever touch events happen. This can get
+   * turned off by any sub-components that want touch events to act another way.
+ *
+ * Report user touch activity when touch events occur. User activity gets used to
+ * determine when controls should show/hide. It is simple when it comes to mouse
+ * events, because any mouse event should show the controls. So we capture mouse
+ * events that bubble up to the player and report activity when that happens.
+   * With touch events it isn't as easy, because `touchstart` and `touchend` toggle the
+   * player controls. So touch events can't help us at the player level either.
+   *
+   * User activity gets checked asynchronously. So what could happen is a tap event
+   * on the video turns the controls off. Then the `touchend` event bubbles up to
+   * the player, which, if it reported user activity, would turn the controls right
+   * back on. We also don't want to completely block touch events from bubbling up.
+   * Furthermore, a `touchmove` event or anything other than a tap should not turn
+   * the controls back on.
+ *
+ * @listens Component#touchstart
+ * @listens Component#touchmove
+ * @listens Component#touchend
+ * @listens Component#touchcancel
+ */
+ ;
+
+ _proto.enableTouchActivity = function enableTouchActivity() {
+ // Don't continue if the root player doesn't support reporting user activity
+ if (!this.player() || !this.player().reportUserActivity) {
+ return;
+ } // listener for reporting that the user is active
+
+
+ var report = bind(this.player(), this.player().reportUserActivity);
+ var touchHolding;
+ this.on('touchstart', function () {
+      report(); // For as long as they are touching the device or have their mouse down,
+ // we consider them active even if they're not moving their finger or mouse.
+ // So we want to continue to update that they are active
+
+ this.clearInterval(touchHolding); // report at the same interval as activityCheck
+
+ touchHolding = this.setInterval(report, 250);
+ });
+
+ var touchEnd = function touchEnd(event) {
+ report(); // stop the interval that maintains activity if the touch is holding
+
+ this.clearInterval(touchHolding);
+ };
+
+ this.on('touchmove', report);
+ this.on('touchend', touchEnd);
+ this.on('touchcancel', touchEnd);
+ }
+ /**
+ * A callback that has no parameters and is bound into `Component`s context.
+ *
+ * @callback Component~GenericCallback
+ * @this Component
+ */
+
+ /**
+ * Creates a function that runs after an `x` millisecond timeout. This function is a
+ * wrapper around `window.setTimeout`. There are a few reasons to use this one
+ * instead though:
+ * 1. It gets cleared via {@link Component#clearTimeout} when
+ * {@link Component#dispose} gets called.
+   * 2. The function callback will get turned into a {@link Component~GenericCallback}
+ *
+ * > Note: You can't use `window.clearTimeout` on the id returned by this function. This
+ * will cause its dispose listener not to get cleaned up! Please use
+ * {@link Component#clearTimeout} or {@link Component#dispose} instead.
+ *
+ * @param {Component~GenericCallback} fn
+ * The function that will be run after `timeout`.
+ *
+ * @param {number} timeout
+ * Timeout in milliseconds to delay before executing the specified function.
+ *
+ * @return {number}
+ * Returns a timeout ID that gets used to identify the timeout. It can also
+ * get used in {@link Component#clearTimeout} to clear the timeout that
+ * was set.
+ *
+ * @listens Component#dispose
+ * @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/setTimeout}
+ */
+ ;
+
+ _proto.setTimeout = function setTimeout(fn, timeout) {
+ var _this3 = this;
+
+ // declare as variables so they are properly available in timeout function
+ // eslint-disable-next-line
+ var timeoutId;
+ fn = bind(this, fn);
+ this.clearTimersOnDispose_();
+ timeoutId = window$1.setTimeout(function () {
+ if (_this3.setTimeoutIds_.has(timeoutId)) {
+ _this3.setTimeoutIds_["delete"](timeoutId);
+ }
+
+ fn();
+ }, timeout);
+ this.setTimeoutIds_.add(timeoutId);
+ return timeoutId;
+ }
+ /**
+ * Clears a timeout that gets created via `window.setTimeout` or
+   * {@link Component#setTimeout}. If you set a timeout via {@link Component#setTimeout},
+   * use this function instead of `window.clearTimeout`. If you don't, your dispose
+ * listener will not get cleaned up until {@link Component#dispose}!
+ *
+ * @param {number} timeoutId
+ * The id of the timeout to clear. The return value of
+ * {@link Component#setTimeout} or `window.setTimeout`.
+ *
+ * @return {number}
+ * Returns the timeout id that was cleared.
+ *
+ * @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/clearTimeout}
+ */
+ ;
+
+ _proto.clearTimeout = function clearTimeout(timeoutId) {
+ if (this.setTimeoutIds_.has(timeoutId)) {
+ this.setTimeoutIds_["delete"](timeoutId);
+ window$1.clearTimeout(timeoutId);
+ }
+
+ return timeoutId;
+ }
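+  // Editor's note (illustrative; not part of the upstream video.js source): always pair
+  // the wrappers so the dispose bookkeeping stays in sync.
+  //
+  //   var id = comp.setTimeout(function () { this.addClass('vjs-waiting'); }, 500);
+  //   comp.clearTimeout(id); // not window.clearTimeout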
+ /**
+ * Creates a function that gets run every `x` milliseconds. This function is a wrapper
+ * around `window.setInterval`. There are a few reasons to use this one instead though.
+ * 1. It gets cleared via {@link Component#clearInterval} when
+ * {@link Component#dispose} gets called.
+ * 2. The function callback will be a {@link Component~GenericCallback}
+ *
+ * @param {Component~GenericCallback} fn
+ * The function to run every `x` seconds.
+ *
+ * @param {number} interval
+ * Execute the specified function every `x` milliseconds.
+ *
+ * @return {number}
+   *         Returns an id that can be used to identify the interval. It can also be used in
+ * {@link Component#clearInterval} to clear the interval.
+ *
+ * @listens Component#dispose
+ * @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/setInterval}
+ */
+ ;
+
+ _proto.setInterval = function setInterval(fn, interval) {
+ fn = bind(this, fn);
+ this.clearTimersOnDispose_();
+ var intervalId = window$1.setInterval(fn, interval);
+ this.setIntervalIds_.add(intervalId);
+ return intervalId;
+ }
+ /**
+ * Clears an interval that gets created via `window.setInterval` or
+   * {@link Component#setInterval}. If you set an interval via {@link Component#setInterval},
+   * use this function instead of `window.clearInterval`. If you don't, your dispose
+ * listener will not get cleaned up until {@link Component#dispose}!
+ *
+ * @param {number} intervalId
+ * The id of the interval to clear. The return value of
+ * {@link Component#setInterval} or `window.setInterval`.
+ *
+ * @return {number}
+ * Returns the interval id that was cleared.
+ *
+ * @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/clearInterval}
+ */
+ ;
+
+ _proto.clearInterval = function clearInterval(intervalId) {
+ if (this.setIntervalIds_.has(intervalId)) {
+ this.setIntervalIds_["delete"](intervalId);
+ window$1.clearInterval(intervalId);
+ }
+
+ return intervalId;
+ }
+ /**
+ * Queues up a callback to be passed to requestAnimationFrame (rAF), but
+ * with a few extra bonuses:
+ *
+ * - Supports browsers that do not support rAF by falling back to
+ * {@link Component#setTimeout}.
+ *
+ * - The callback is turned into a {@link Component~GenericCallback} (i.e.
+ * bound to the component).
+ *
+ * - Automatic cancellation of the rAF callback is handled if the component
+ * is disposed before it is called.
+ *
+ * @param {Component~GenericCallback} fn
+ * A function that will be bound to this component and executed just
+ * before the browser's next repaint.
+ *
+ * @return {number}
+ * Returns an rAF ID that gets used to identify the timeout. It can
+ * also be used in {@link Component#cancelAnimationFrame} to cancel
+ * the animation frame callback.
+ *
+ * @listens Component#dispose
+ * @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/window/requestAnimationFrame}
+ */
+ ;
+
+ _proto.requestAnimationFrame = function requestAnimationFrame(fn) {
+ var _this4 = this;
+
+ // Fall back to using a timer.
+ if (!this.supportsRaf_) {
+ return this.setTimeout(fn, 1000 / 60);
+ }
+
+ this.clearTimersOnDispose_(); // declare as variables so they are properly available in rAF function
+ // eslint-disable-next-line
+
+ var id;
+ fn = bind(this, fn);
+ id = window$1.requestAnimationFrame(function () {
+ if (_this4.rafIds_.has(id)) {
+ _this4.rafIds_["delete"](id);
+ }
+
+ fn();
+ });
+ this.rafIds_.add(id);
+ return id;
+ }
+ /**
+ * Request an animation frame, but only one named animation
+ * frame will be queued. Another will never be added until
+ * the previous one finishes.
+ *
+ * @param {string} name
+ * The name to give this requestAnimationFrame
+ *
+ * @param {Component~GenericCallback} fn
+ * A function that will be bound to this component and executed just
+ * before the browser's next repaint.
+ */
+ ;
+
+ _proto.requestNamedAnimationFrame = function requestNamedAnimationFrame(name, fn) {
+ var _this5 = this;
+
+ if (this.namedRafs_.has(name)) {
+ return;
+ }
+
+ this.clearTimersOnDispose_();
+ fn = bind(this, fn);
+ var id = this.requestAnimationFrame(function () {
+ fn();
+
+ if (_this5.namedRafs_.has(name)) {
+ _this5.namedRafs_["delete"](name);
+ }
+ });
+ this.namedRafs_.set(name, id);
+ return name;
+ }
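+  // Editor's note (illustrative; not part of the upstream video.js source): because the
+  // frame is keyed by name, repeated calls collapse into at most one pending callback.
+  //
+  //   comp.requestNamedAnimationFrame('update-bar', function () { /* repaint work */ });
+  //   comp.requestNamedAnimationFrame('update-bar', function () { /* ignored while pending */ });
+  //   comp.cancelNamedAnimationFrame('update-bar'); // drops the pending frame, if any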
+ /**
+ * Cancels a current named animation frame if it exists.
+ *
+ * @param {string} name
+ * The name of the requestAnimationFrame to cancel.
+ */
+ ;
+
+ _proto.cancelNamedAnimationFrame = function cancelNamedAnimationFrame(name) {
+ if (!this.namedRafs_.has(name)) {
+ return;
+ }
+
+ this.cancelAnimationFrame(this.namedRafs_.get(name));
+ this.namedRafs_["delete"](name);
+ }
+ /**
+ * Cancels a queued callback passed to {@link Component#requestAnimationFrame}
+ * (rAF).
+ *
+ * If you queue an rAF callback via {@link Component#requestAnimationFrame},
+ * use this function instead of `window.cancelAnimationFrame`. If you don't,
+ * your dispose listener will not get cleaned up until {@link Component#dispose}!
+ *
+ * @param {number} id
+ * The rAF ID to clear. The return value of {@link Component#requestAnimationFrame}.
+ *
+ * @return {number}
+ * Returns the rAF ID that was cleared.
+ *
+ * @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/window/cancelAnimationFrame}
+ */
+ ;
+
+ _proto.cancelAnimationFrame = function cancelAnimationFrame(id) {
+ // Fall back to using a timer.
+ if (!this.supportsRaf_) {
+ return this.clearTimeout(id);
+ }
+
+ if (this.rafIds_.has(id)) {
+ this.rafIds_["delete"](id);
+ window$1.cancelAnimationFrame(id);
+ }
+
+ return id;
+ }
+ /**
+ * A function to setup `requestAnimationFrame`, `setTimeout`,
+ * and `setInterval`, clearing on dispose.
+ *
+   * > Previously each timer added and removed dispose listeners on its own.
+ * For better performance it was decided to batch them all, and use `Set`s
+ * to track outstanding timer ids.
+ *
+ * @private
+ */
+ ;
+
+ _proto.clearTimersOnDispose_ = function clearTimersOnDispose_() {
+ var _this6 = this;
+
+ if (this.clearingTimersOnDispose_) {
+ return;
+ }
+
+ this.clearingTimersOnDispose_ = true;
+ this.one('dispose', function () {
+ [['namedRafs_', 'cancelNamedAnimationFrame'], ['rafIds_', 'cancelAnimationFrame'], ['setTimeoutIds_', 'clearTimeout'], ['setIntervalIds_', 'clearInterval']].forEach(function (_ref) {
+ var idName = _ref[0],
+ cancelName = _ref[1];
+
+        // For a `Set`, the key will actually be the value again
+        // (i.e. `forEach((val, val) => ...)`), but for Maps we want to use
+        // the key.
+ _this6[idName].forEach(function (val, key) {
+ return _this6[cancelName](key);
+ });
+ });
+ _this6.clearingTimersOnDispose_ = false;
+ });
+ }
+ /**
+ * Register a `Component` with `videojs` given the name and the component.
+ *
+ * > NOTE: {@link Tech}s should not be registered as a `Component`. {@link Tech}s
+ * should be registered using {@link Tech.registerTech} or
+ * {@link videojs:videojs.registerTech}.
+ *
+ * > NOTE: This function can also be seen on videojs as
+ * {@link videojs:videojs.registerComponent}.
+ *
+ * @param {string} name
+ * The name of the `Component` to register.
+ *
+ * @param {Component} ComponentToRegister
+ * The `Component` class to register.
+ *
+ * @return {Component}
+ * The `Component` that was registered.
+ */
+ ;
+
+ Component.registerComponent = function registerComponent(name, ComponentToRegister) {
+ if (typeof name !== 'string' || !name) {
+ throw new Error("Illegal component name, \"" + name + "\"; must be a non-empty string.");
+ }
+
+ var Tech = Component.getComponent('Tech'); // We need to make sure this check is only done if Tech has been registered.
+
+ var isTech = Tech && Tech.isTech(ComponentToRegister);
+ var isComp = Component === ComponentToRegister || Component.prototype.isPrototypeOf(ComponentToRegister.prototype);
+
+ if (isTech || !isComp) {
+ var reason;
+
+ if (isTech) {
+ reason = 'techs must be registered using Tech.registerTech()';
+ } else {
+ reason = 'must be a Component subclass';
+ }
+
+ throw new Error("Illegal component, \"" + name + "\"; " + reason + ".");
+ }
+
+ name = toTitleCase$1(name);
+
+ if (!Component.components_) {
+ Component.components_ = {};
+ }
+
+ var Player = Component.getComponent('Player');
+
+ if (name === 'Player' && Player && Player.players) {
+ var players = Player.players;
+ var playerNames = Object.keys(players); // If we have players that were disposed, then their name will still be
+ // in Players.players. So, we must loop through and verify that the value
+ // for each item is not null. This allows registration of the Player component
+ // after all players have been disposed or before any were created.
+
+ if (players && playerNames.length > 0 && playerNames.map(function (pname) {
+ return players[pname];
+ }).every(Boolean)) {
+ throw new Error('Can not register Player component after player has been created.');
+ }
+ }
+
+ Component.components_[name] = ComponentToRegister;
+ Component.components_[toLowerCase(name)] = ComponentToRegister;
+ return ComponentToRegister;
+ }
+ /**
+ * Get a `Component` based on the name it was registered with.
+ *
+ * @param {string} name
+ * The Name of the component to get.
+ *
+ * @return {Component}
+ * The `Component` that got registered under the given name.
+ */
+ ;
+
+ Component.getComponent = function getComponent(name) {
+ if (!name || !Component.components_) {
+ return;
+ }
+
+ return Component.components_[name];
+ };
+
+ return Component;
+}();
+/**
+ * Whether or not this component supports `requestAnimationFrame`.
+ *
+ * This is exposed primarily for testing purposes.
+ *
+ * @private
+ * @type {Boolean}
+ */
+
+
+Component$1.prototype.supportsRaf_ = typeof window$1.requestAnimationFrame === 'function' && typeof window$1.cancelAnimationFrame === 'function';
+Component$1.registerComponent('Component', Component$1);
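+
+// Editor's sketch (illustrative; not part of the upstream video.js source). It shows the
+// ES5-style subclass-and-register pattern this file itself uses, with a made-up
+// 'EditorExampleBar' component. Wrapped in a function that is never invoked, so the
+// component registry of this bundle is left untouched.
+// eslint-disable-next-line no-unused-vars
+function editorExampleRegisterComponent() {
+  function EditorExampleBar(player, options, ready) {
+    Component$1.call(this, player, options, ready);
+  }
+
+  EditorExampleBar.prototype = Object.create(Component$1.prototype);
+  EditorExampleBar.prototype.constructor = EditorExampleBar;
+
+  EditorExampleBar.prototype.createEl = function () {
+    return Component$1.prototype.createEl.call(this, 'div', { className: 'vjs-editor-example-bar' });
+  };
+
+  // Registration would make the name usable with addChild('EditorExampleBar') and
+  // Component.getComponent('editorexamplebar') (names are stored in TitleCase and lowercase).
+  return Component$1.registerComponent('EditorExampleBar', EditorExampleBar);
+}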
+
+/**
+ * @file time-ranges.js
+ * @module time-ranges
+ */
+/**
+ * Returns the time for the specified index at the start or end
+ * of a TimeRange object.
+ *
+ * @typedef {Function} TimeRangeIndex
+ *
+ * @param {number} [index=0]
+ * The range number to return the time for.
+ *
+ * @return {number}
+ * The time offset at the specified index.
+ *
+ * @deprecated The index argument must be provided.
+ * In the future, leaving it out will throw an error.
+ */
+
+/**
+ * An object that contains ranges of time.
+ *
+ * @typedef {Object} TimeRange
+ *
+ * @property {number} length
+ * The number of time ranges represented by this object.
+ *
+ * @property {module:time-ranges~TimeRangeIndex} start
+ * Returns the time offset at which a specified time range begins.
+ *
+ * @property {module:time-ranges~TimeRangeIndex} end
+ * Returns the time offset at which a specified time range ends.
+ *
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/TimeRanges
+ */
+
+/**
+ * Check if any of the time ranges are over the maximum index.
+ *
+ * @private
+ * @param {string} fnName
+ * The function name to use for logging
+ *
+ * @param {number} index
+ * The index to check
+ *
+ * @param {number} maxIndex
+ * The maximum possible index
+ *
+ * @throws {Error} if the timeRanges provided are over the maxIndex
+ */
+
+function rangeCheck(fnName, index, maxIndex) {
+ if (typeof index !== 'number' || index < 0 || index > maxIndex) {
+ throw new Error("Failed to execute '" + fnName + "' on 'TimeRanges': The index provided (" + index + ") is non-numeric or out of bounds (0-" + maxIndex + ").");
+ }
+}
+/**
+ * Get the time for the specified index at the start or end
+ * of a TimeRange object.
+ *
+ * @private
+ * @param {string} fnName
+ * The function name to use for logging
+ *
+ * @param {string} valueIndex
+ *        The property that should be used to get the time. Should be
+ *        'start' or 'end'.
+ *
+ * @param {Array} ranges
+ * An array of time ranges
+ *
+ * @param {number} [rangeIndex=0]
+ * The index to start the search at
+ *
+ * @return {number}
+ *         The time offset at the specified index.
+ *
+ * @deprecated rangeIndex must be set to a value; in the future this will throw an error.
+ * @throws {Error} if rangeIndex is more than the length of ranges
+ */
+
+
+function getRange(fnName, valueIndex, ranges, rangeIndex) {
+ rangeCheck(fnName, rangeIndex, ranges.length - 1);
+ return ranges[rangeIndex][valueIndex];
+}
+/**
+ * Create a time range object given ranges of time.
+ *
+ * @private
+ * @param {Array} [ranges]
+ * An array of time ranges.
+ */
+
+
+function createTimeRangesObj(ranges) {
+ var timeRangesObj;
+
+ if (ranges === undefined || ranges.length === 0) {
+ timeRangesObj = {
+ length: 0,
+ start: function start() {
+ throw new Error('This TimeRanges object is empty');
+ },
+ end: function end() {
+ throw new Error('This TimeRanges object is empty');
+ }
+ };
+ } else {
+ timeRangesObj = {
+ length: ranges.length,
+ start: getRange.bind(null, 'start', 0, ranges),
+ end: getRange.bind(null, 'end', 1, ranges)
+ };
+ }
+
+ if (window$1.Symbol && window$1.Symbol.iterator) {
+ timeRangesObj[window$1.Symbol.iterator] = function () {
+ return (ranges || []).values();
+ };
+ }
+
+ return timeRangesObj;
+}
+/**
+ * Create a `TimeRange` object which mimics an
+ * {@link https://developer.mozilla.org/en-US/docs/Web/API/TimeRanges|HTML5 TimeRanges instance}.
+ *
+ * @param {number|Array[]} start
+ * The start of a single range (a number) or an array of ranges (an
+ * array of arrays of two numbers each).
+ *
+ * @param {number} end
+ * The end of a single range. Cannot be used with the array form of
+ * the `start` argument.
+ */
+
+
+function createTimeRanges(start, end) {
+ if (Array.isArray(start)) {
+ return createTimeRangesObj(start);
+ } else if (start === undefined || end === undefined) {
+ return createTimeRangesObj();
+ }
+
+ return createTimeRangesObj([[start, end]]);
+}
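+
+// Editor's sketch (illustrative; not part of the upstream video.js source). It shows the two
+// supported call shapes of createTimeRanges. Never invoked.
+// eslint-disable-next-line no-unused-vars
+function editorExampleCreateTimeRanges() {
+  // A single range from 0s to 10s.
+  var single = createTimeRanges(0, 10);
+  // single.length === 1; single.start(0) === 0; single.end(0) === 10
+
+  // Multiple ranges as an array of [start, end] pairs.
+  var multiple = createTimeRanges([[0, 10], [15, 20]]);
+  // multiple.length === 2; multiple.end(1) === 20
+
+  return { single: single, multiple: multiple };
+}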
+
+/**
+ * @file buffer.js
+ * @module buffer
+ */
+/**
+ * Compute the percentage of the media that has been buffered.
+ *
+ * @param {TimeRange} buffered
+ * The current `TimeRange` object representing buffered time ranges
+ *
+ * @param {number} duration
+ * Total duration of the media
+ *
+ * @return {number}
+ * Percent buffered of the total duration in decimal form.
+ */
+
+function bufferedPercent(buffered, duration) {
+ var bufferedDuration = 0;
+ var start;
+ var end;
+
+ if (!duration) {
+ return 0;
+ }
+
+ if (!buffered || !buffered.length) {
+ buffered = createTimeRanges(0, 0);
+ }
+
+ for (var i = 0; i < buffered.length; i++) {
+ start = buffered.start(i);
+ end = buffered.end(i); // buffered end can be bigger than duration by a very small fraction
+
+ if (end > duration) {
+ end = duration;
+ }
+
+ bufferedDuration += end - start;
+ }
+
+ return bufferedDuration / duration;
+}
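+
+// Editor's sketch (illustrative; not part of the upstream video.js source). It feeds a
+// hand-built TimeRange into bufferedPercent; a real caller would pass player.buffered()
+// and player.duration() instead. Never invoked.
+// eslint-disable-next-line no-unused-vars
+function editorExampleBufferedPercent() {
+  // 0-10s and 20-30s buffered out of a 40s duration: 20 / 40 = 0.5
+  var buffered = createTimeRanges([[0, 10], [20, 30]]);
+  return bufferedPercent(buffered, 40);
+}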
+
+/**
+ * @file media-error.js
+ */
+/**
+ * A Custom `MediaError` class which mimics the standard HTML5 `MediaError` class.
+ *
+ * @param {number|string|Object|MediaError} value
+ * This can be of multiple types:
+ * - number: should be a standard error code
+ * - string: an error message (the code will be 0)
+ * - Object: arbitrary properties
+ * - `MediaError` (native): used to populate a video.js `MediaError` object
+ * - `MediaError` (video.js): will return itself if it's already a
+ * video.js `MediaError` object.
+ *
+ * @see [MediaError Spec]{@link https://dev.w3.org/html5/spec-author-view/video.html#mediaerror}
+ * @see [Encrypted MediaError Spec]{@link https://www.w3.org/TR/2013/WD-encrypted-media-20130510/#error-codes}
+ *
+ * @class MediaError
+ */
+
+function MediaError(value) {
+ // Allow redundant calls to this constructor to avoid having `instanceof`
+ // checks peppered around the code.
+ if (value instanceof MediaError) {
+ return value;
+ }
+
+ if (typeof value === 'number') {
+ this.code = value;
+ } else if (typeof value === 'string') {
+ // default code is zero, so this is a custom error
+ this.message = value;
+ } else if (isObject(value)) {
+ // We assign the `code` property manually because native `MediaError` objects
+ // do not expose it as an own/enumerable property of the object.
+ if (typeof value.code === 'number') {
+ this.code = value.code;
+ }
+
+ assign(this, value);
+ }
+
+ if (!this.message) {
+ this.message = MediaError.defaultMessages[this.code] || '';
+ }
+}
+/**
+ * The error code that refers to one of the defined `MediaError` types
+ *
+ * @type {Number}
+ */
+
+
+MediaError.prototype.code = 0;
+/**
+ * An optional message to show with the error. Message is not part of the HTML5
+ * video spec but allows for more informative custom errors.
+ *
+ * @type {String}
+ */
+
+MediaError.prototype.message = '';
+/**
+ * An optional status code that can be set by plugins to allow even more detail about
+ * the error. For example a plugin might provide a specific HTTP status code and an
+ * error message for that code. Then when the plugin gets that error this class will
+ * know how to display an error message for it. This allows a custom message to show
+ * up on the `Player` error overlay.
+ *
+ * @type {Array}
+ */
+
+MediaError.prototype.status = null;
+/**
+ * Errors indexed by the W3C standard. The order **CANNOT CHANGE**! See the
+ * specification listed under {@link MediaError} for more information.
+ *
+ * @enum {array}
+ * @readonly
+ * @property {string} 0 - MEDIA_ERR_CUSTOM
+ * @property {string} 1 - MEDIA_ERR_ABORTED
+ * @property {string} 2 - MEDIA_ERR_NETWORK
+ * @property {string} 3 - MEDIA_ERR_DECODE
+ * @property {string} 4 - MEDIA_ERR_SRC_NOT_SUPPORTED
+ * @property {string} 5 - MEDIA_ERR_ENCRYPTED
+ */
+
+MediaError.errorTypes = ['MEDIA_ERR_CUSTOM', 'MEDIA_ERR_ABORTED', 'MEDIA_ERR_NETWORK', 'MEDIA_ERR_DECODE', 'MEDIA_ERR_SRC_NOT_SUPPORTED', 'MEDIA_ERR_ENCRYPTED'];
+/**
+ * The default `MediaError` messages based on the {@link MediaError.errorTypes}.
+ *
+ * @type {Object}
+ * @constant
+ */
+
+MediaError.defaultMessages = {
+ 1: 'You aborted the media playback',
+ 2: 'A network error caused the media download to fail part-way.',
+ 3: 'The media playback was aborted due to a corruption problem or because the media used features your browser did not support.',
+ 4: 'The media could not be loaded, either because the server or network failed or because the format is not supported.',
+ 5: 'The media is encrypted and we do not have the keys to decrypt it.'
+}; // Add types as properties on MediaError
+// e.g. MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED = 4;
+
+for (var errNum = 0; errNum < MediaError.errorTypes.length; errNum++) {
+ MediaError[MediaError.errorTypes[errNum]] = errNum; // values should be accessible on both the class and instance
+
+ MediaError.prototype[MediaError.errorTypes[errNum]] = errNum;
+} // jsdocs for instance/static members added above
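+
+// Illustrative usage sketch (sample values only):
+//   var err = new MediaError(MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED);
+//   err.code;    // 4
+//   err.message; // MediaError.defaultMessages[4]
+//   new MediaError('Something broke').code; // 0 (custom error)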
+
+/**
+ * Returns whether an object is `Promise`-like (i.e. has a `then` method).
+ *
+ * @param {Object} value
+ * An object that may or may not be `Promise`-like.
+ *
+ * @return {boolean}
+ * Whether or not the object is `Promise`-like.
+ */
+function isPromise(value) {
+ return value !== undefined && value !== null && typeof value.then === 'function';
+}
+/**
+ * Silence a Promise-like object.
+ *
+ * This is useful for avoiding non-harmful, but potentially confusing "uncaught
+ * play promise" rejection error messages.
+ *
+ * @param {Object} value
+ * An object that may or may not be `Promise`-like.
+ */
+
+function silencePromise(value) {
+ if (isPromise(value)) {
+ value.then(null, function (e) {});
+ }
+}
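+
+// Illustrative usage sketch (assumes a video.js Player instance named `player`):
+//   silencePromise(player.play()); // ignore a possible "uncaught play promise" rejection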
+
+/**
+ * @file text-track-list-converter.js Utilities for capturing text track state and
+ * re-creating tracks based on a capture.
+ *
+ * @module text-track-list-converter
+ */
+
+/**
+ * Examine a single {@link TextTrack} and return a JSON-compatible javascript object that
+ * represents the {@link TextTrack}'s state.
+ *
+ * @param {TextTrack} track
+ * The text track to query.
+ *
+ * @return {Object}
+ * A serializable javascript representation of the TextTrack.
+ * @private
+ */
+var trackToJson_ = function trackToJson_(track) {
+ var ret = ['kind', 'label', 'language', 'id', 'inBandMetadataTrackDispatchType', 'mode', 'src'].reduce(function (acc, prop, i) {
+ if (track[prop]) {
+ acc[prop] = track[prop];
+ }
+
+ return acc;
+ }, {
+ cues: track.cues && Array.prototype.map.call(track.cues, function (cue) {
+ return {
+ startTime: cue.startTime,
+ endTime: cue.endTime,
+ text: cue.text,
+ id: cue.id
+ };
+ })
+ });
+ return ret;
+};
+/**
+ * Examine a {@link Tech} and return a JSON-compatible javascript array that represents the
+ * state of all {@link TextTrack}s currently configured. The return array is compatible with
+ * {@link text-track-list-converter:jsonToTextTracks}.
+ *
+ * @param {Tech} tech
+ * The tech object to query
+ *
+ * @return {Array}
+ * A serializable javascript representation of the {@link Tech}s
+ * {@link TextTrackList}.
+ */
+
+
+var textTracksToJson = function textTracksToJson(tech) {
+ var trackEls = tech.$$('track');
+ var trackObjs = Array.prototype.map.call(trackEls, function (t) {
+ return t.track;
+ });
+ var tracks = Array.prototype.map.call(trackEls, function (trackEl) {
+ var json = trackToJson_(trackEl.track);
+
+ if (trackEl.src) {
+ json.src = trackEl.src;
+ }
+
+ return json;
+ });
+ return tracks.concat(Array.prototype.filter.call(tech.textTracks(), function (track) {
+ return trackObjs.indexOf(track) === -1;
+ }).map(trackToJson_));
+};
+/**
+ * Create a set of remote {@link TextTrack}s on a {@link Tech} based on an array of javascript
+ * object {@link TextTrack} representations.
+ *
+ * @param {Array} json
+ * An array of `TextTrack` representation objects, like those that would be
+ * produced by `textTracksToJson`.
+ *
+ * @param {Tech} tech
+ * The `Tech` to create the `TextTrack`s on.
+ */
+
+
+var jsonToTextTracks = function jsonToTextTracks(json, tech) {
+ json.forEach(function (track) {
+ var addedTrack = tech.addRemoteTextTrack(track).track;
+
+ if (!track.src && track.cues) {
+ track.cues.forEach(function (cue) {
+ return addedTrack.addCue(cue);
+ });
+ }
+ });
+ return tech.textTracks();
+};
+
+var textTrackConverter = {
+ textTracksToJson: textTracksToJson,
+ jsonToTextTracks: jsonToTextTracks,
+ trackToJson_: trackToJson_
+};
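+
+// Illustrative usage sketch (assumes Tech instances named `oldTech` and `newTech`):
+//   var captured = textTrackConverter.textTracksToJson(oldTech); // snapshot current text track state
+//   textTrackConverter.jsonToTextTracks(captured, newTech);      // re-create the tracks on another tech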
+
+var MODAL_CLASS_NAME = 'vjs-modal-dialog';
+/**
+ * The `ModalDialog` displays over the video and its controls, which blocks
+ * interaction with the player until it is closed.
+ *
+ * Modal dialogs include a "Close" button and will close when that button
+ * is activated - or when ESC is pressed anywhere.
+ *
+ * @extends Component
+ */
+
+var ModalDialog = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(ModalDialog, _Component);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Mixed} [options.content=undefined]
+ * Provide customized content for this modal.
+ *
+ * @param {string} [options.description]
+ * A text description for the modal, primarily for accessibility.
+ *
+ * @param {boolean} [options.fillAlways=false]
+ * Normally, modals are automatically filled only the first time
+ * they open. This tells the modal to refresh its content
+ * every time it opens.
+ *
+ * @param {string} [options.label]
+ * A text label for the modal, primarily for accessibility.
+ *
+ * @param {boolean} [options.pauseOnOpen=true]
+   * If `true`, playback will be paused if playing when
+ * the modal opens, and resumed when it closes.
+ *
+ * @param {boolean} [options.temporary=true]
+ * If `true`, the modal can only be opened once; it will be
+ * disposed as soon as it's closed.
+ *
+ * @param {boolean} [options.uncloseable=false]
+ * If `true`, the user will not be able to close the modal
+ * through the UI in the normal ways. Programmatic closing is
+ * still possible.
+ */
+ function ModalDialog(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+
+ _this.handleKeyDown_ = function (e) {
+ return _this.handleKeyDown(e);
+ };
+
+ _this.close_ = function (e) {
+ return _this.close(e);
+ };
+
+ _this.opened_ = _this.hasBeenOpened_ = _this.hasBeenFilled_ = false;
+
+ _this.closeable(!_this.options_.uncloseable);
+
+ _this.content(_this.options_.content); // Make sure the contentEl is defined AFTER any children are initialized
+ // because we only want the contents of the modal in the contentEl
+ // (not the UI elements like the close button).
+
+
+ _this.contentEl_ = createEl('div', {
+ className: MODAL_CLASS_NAME + "-content"
+ }, {
+ role: 'document'
+ });
+ _this.descEl_ = createEl('p', {
+ className: MODAL_CLASS_NAME + "-description vjs-control-text",
+ id: _this.el().getAttribute('aria-describedby')
+ });
+ textContent(_this.descEl_, _this.description());
+
+ _this.el_.appendChild(_this.descEl_);
+
+ _this.el_.appendChild(_this.contentEl_);
+
+ return _this;
+ }
+ /**
+ * Create the `ModalDialog`'s DOM element
+ *
+ * @return {Element}
+ * The DOM element that gets created.
+ */
+
+
+ var _proto = ModalDialog.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: this.buildCSSClass(),
+ tabIndex: -1
+ }, {
+ 'aria-describedby': this.id() + "_description",
+ 'aria-hidden': 'true',
+ 'aria-label': this.label(),
+ 'role': 'dialog'
+ });
+ };
+
+ _proto.dispose = function dispose() {
+ this.contentEl_ = null;
+ this.descEl_ = null;
+ this.previouslyActiveEl_ = null;
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ ;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return MODAL_CLASS_NAME + " vjs-hidden " + _Component.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Returns the label string for this modal. Primarily used for accessibility.
+ *
+ * @return {string}
+ * the localized or raw label of this modal.
+ */
+ ;
+
+ _proto.label = function label() {
+ return this.localize(this.options_.label || 'Modal Window');
+ }
+ /**
+ * Returns the description string for this modal. Primarily used for
+ * accessibility.
+ *
+ * @return {string}
+ * The localized or raw description of this modal.
+ */
+ ;
+
+ _proto.description = function description() {
+ var desc = this.options_.description || this.localize('This is a modal window.'); // Append a universal closeability message if the modal is closeable.
+
+ if (this.closeable()) {
+ desc += ' ' + this.localize('This modal can be closed by pressing the Escape key or activating the close button.');
+ }
+
+ return desc;
+ }
+ /**
+ * Opens the modal.
+ *
+ * @fires ModalDialog#beforemodalopen
+ * @fires ModalDialog#modalopen
+ */
+ ;
+
+ _proto.open = function open() {
+ if (!this.opened_) {
+ var player = this.player();
+ /**
+ * Fired just before a `ModalDialog` is opened.
+ *
+ * @event ModalDialog#beforemodalopen
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('beforemodalopen');
+ this.opened_ = true; // Fill content if the modal has never opened before and
+ // never been filled.
+
+ if (this.options_.fillAlways || !this.hasBeenOpened_ && !this.hasBeenFilled_) {
+ this.fill();
+ } // If the player was playing, pause it and take note of its previously
+ // playing state.
+
+
+ this.wasPlaying_ = !player.paused();
+
+ if (this.options_.pauseOnOpen && this.wasPlaying_) {
+ player.pause();
+ }
+
+ this.on('keydown', this.handleKeyDown_); // Hide controls and note if they were enabled.
+
+ this.hadControls_ = player.controls();
+ player.controls(false);
+ this.show();
+ this.conditionalFocus_();
+ this.el().setAttribute('aria-hidden', 'false');
+ /**
+ * Fired just after a `ModalDialog` is opened.
+ *
+ * @event ModalDialog#modalopen
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('modalopen');
+ this.hasBeenOpened_ = true;
+ }
+ }
+ /**
+ * If the `ModalDialog` is currently open or closed.
+ *
+ * @param {boolean} [value]
+ * If given, it will open (`true`) or close (`false`) the modal.
+ *
+ * @return {boolean}
+   * The current open state of the `ModalDialog`.
+ */
+ ;
+
+ _proto.opened = function opened(value) {
+ if (typeof value === 'boolean') {
+ this[value ? 'open' : 'close']();
+ }
+
+ return this.opened_;
+ }
+ /**
+ * Closes the modal, does nothing if the `ModalDialog` is
+ * not open.
+ *
+ * @fires ModalDialog#beforemodalclose
+ * @fires ModalDialog#modalclose
+ */
+ ;
+
+ _proto.close = function close() {
+ if (!this.opened_) {
+ return;
+ }
+
+ var player = this.player();
+ /**
+ * Fired just before a `ModalDialog` is closed.
+ *
+ * @event ModalDialog#beforemodalclose
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('beforemodalclose');
+ this.opened_ = false;
+
+ if (this.wasPlaying_ && this.options_.pauseOnOpen) {
+ player.play();
+ }
+
+ this.off('keydown', this.handleKeyDown_);
+
+ if (this.hadControls_) {
+ player.controls(true);
+ }
+
+ this.hide();
+ this.el().setAttribute('aria-hidden', 'true');
+ /**
+ * Fired just after a `ModalDialog` is closed.
+ *
+ * @event ModalDialog#modalclose
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('modalclose');
+ this.conditionalBlur_();
+
+ if (this.options_.temporary) {
+ this.dispose();
+ }
+ }
+ /**
+ * Check to see if the `ModalDialog` is closeable via the UI.
+ *
+ * @param {boolean} [value]
+ * If given as a boolean, it will set the `closeable` option.
+ *
+ * @return {boolean}
+   * Returns the final value of the closeable option.
+ */
+ ;
+
+ _proto.closeable = function closeable(value) {
+ if (typeof value === 'boolean') {
+ var closeable = this.closeable_ = !!value;
+ var close = this.getChild('closeButton'); // If this is being made closeable and has no close button, add one.
+
+ if (closeable && !close) {
+ // The close button should be a child of the modal - not its
+ // content element, so temporarily change the content element.
+ var temp = this.contentEl_;
+ this.contentEl_ = this.el_;
+ close = this.addChild('closeButton', {
+ controlText: 'Close Modal Dialog'
+ });
+ this.contentEl_ = temp;
+ this.on(close, 'close', this.close_);
+ } // If this is being made uncloseable and has a close button, remove it.
+
+
+ if (!closeable && close) {
+ this.off(close, 'close', this.close_);
+ this.removeChild(close);
+ close.dispose();
+ }
+ }
+
+ return this.closeable_;
+ }
+ /**
+ * Fill the modal's content element with the modal's "content" option.
+ * The content element will be emptied before this change takes place.
+ */
+ ;
+
+ _proto.fill = function fill() {
+ this.fillWith(this.content());
+ }
+ /**
+ * Fill the modal's content element with arbitrary content.
+ * The content element will be emptied before this change takes place.
+ *
+ * @fires ModalDialog#beforemodalfill
+ * @fires ModalDialog#modalfill
+ *
+ * @param {Mixed} [content]
+ * The same rules apply to this as apply to the `content` option.
+ */
+ ;
+
+ _proto.fillWith = function fillWith(content) {
+ var contentEl = this.contentEl();
+ var parentEl = contentEl.parentNode;
+ var nextSiblingEl = contentEl.nextSibling;
+ /**
+ * Fired just before a `ModalDialog` is filled with content.
+ *
+ * @event ModalDialog#beforemodalfill
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('beforemodalfill');
+ this.hasBeenFilled_ = true; // Detach the content element from the DOM before performing
+ // manipulation to avoid modifying the live DOM multiple times.
+
+ parentEl.removeChild(contentEl);
+ this.empty();
+ insertContent(contentEl, content);
+ /**
+ * Fired just after a `ModalDialog` is filled with content.
+ *
+ * @event ModalDialog#modalfill
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('modalfill'); // Re-inject the re-filled content element.
+
+ if (nextSiblingEl) {
+ parentEl.insertBefore(contentEl, nextSiblingEl);
+ } else {
+ parentEl.appendChild(contentEl);
+ } // make sure that the close button is last in the dialog DOM
+
+
+ var closeButton = this.getChild('closeButton');
+
+ if (closeButton) {
+ parentEl.appendChild(closeButton.el_);
+ }
+ }
+ /**
+ * Empties the content element. This happens anytime the modal is filled.
+ *
+ * @fires ModalDialog#beforemodalempty
+ * @fires ModalDialog#modalempty
+ */
+ ;
+
+ _proto.empty = function empty() {
+ /**
+ * Fired just before a `ModalDialog` is emptied.
+ *
+ * @event ModalDialog#beforemodalempty
+ * @type {EventTarget~Event}
+ */
+ this.trigger('beforemodalempty');
+ emptyEl(this.contentEl());
+ /**
+ * Fired just after a `ModalDialog` is emptied.
+ *
+ * @event ModalDialog#modalempty
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('modalempty');
+ }
+ /**
+ * Gets or sets the modal content, which gets normalized before being
+ * rendered into the DOM.
+ *
+ * This does not update the DOM or fill the modal, but it is called during
+ * that process.
+ *
+ * @param {Mixed} [value]
+ * If defined, sets the internal content value to be used on the
+ * next call(s) to `fill`. This value is normalized before being
+ * inserted. To "clear" the internal content value, pass `null`.
+ *
+ * @return {Mixed}
+ * The current content of the modal dialog
+ */
+ ;
+
+ _proto.content = function content(value) {
+ if (typeof value !== 'undefined') {
+ this.content_ = value;
+ }
+
+ return this.content_;
+ }
+ /**
+ * conditionally focus the modal dialog if focus was previously on the player.
+ *
+ * @private
+ */
+ ;
+
+ _proto.conditionalFocus_ = function conditionalFocus_() {
+ var activeEl = document.activeElement;
+ var playerEl = this.player_.el_;
+ this.previouslyActiveEl_ = null;
+
+ if (playerEl.contains(activeEl) || playerEl === activeEl) {
+ this.previouslyActiveEl_ = activeEl;
+ this.focus();
+ }
+ }
+ /**
+ * conditionally blur the element and refocus the last focused element
+ *
+ * @private
+ */
+ ;
+
+ _proto.conditionalBlur_ = function conditionalBlur_() {
+ if (this.previouslyActiveEl_) {
+ this.previouslyActiveEl_.focus();
+ this.previouslyActiveEl_ = null;
+ }
+ }
+ /**
+ * Keydown handler. Attached when modal is focused.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Do not allow keydowns to reach out of the modal dialog.
+ event.stopPropagation();
+
+ if (keycode.isEventKey(event, 'Escape') && this.closeable()) {
+ event.preventDefault();
+ this.close();
+ return;
+ } // exit early if it isn't a tab key
+
+
+ if (!keycode.isEventKey(event, 'Tab')) {
+ return;
+ }
+
+ var focusableEls = this.focusableEls_();
+ var activeEl = this.el_.querySelector(':focus');
+ var focusIndex;
+
+ for (var i = 0; i < focusableEls.length; i++) {
+ if (activeEl === focusableEls[i]) {
+ focusIndex = i;
+ break;
+ }
+ }
+
+ if (document.activeElement === this.el_) {
+ focusIndex = 0;
+ }
+
+ if (event.shiftKey && focusIndex === 0) {
+ focusableEls[focusableEls.length - 1].focus();
+ event.preventDefault();
+ } else if (!event.shiftKey && focusIndex === focusableEls.length - 1) {
+ focusableEls[0].focus();
+ event.preventDefault();
+ }
+ }
+ /**
+ * get all focusable elements
+ *
+ * @private
+ */
+ ;
+
+ _proto.focusableEls_ = function focusableEls_() {
+ var allChildren = this.el_.querySelectorAll('*');
+ return Array.prototype.filter.call(allChildren, function (child) {
+ return (child instanceof window$1.HTMLAnchorElement || child instanceof window$1.HTMLAreaElement) && child.hasAttribute('href') || (child instanceof window$1.HTMLInputElement || child instanceof window$1.HTMLSelectElement || child instanceof window$1.HTMLTextAreaElement || child instanceof window$1.HTMLButtonElement) && !child.hasAttribute('disabled') || child instanceof window$1.HTMLIFrameElement || child instanceof window$1.HTMLObjectElement || child instanceof window$1.HTMLEmbedElement || child.hasAttribute('tabindex') && child.getAttribute('tabindex') !== -1 || child.hasAttribute('contenteditable');
+ });
+ };
+
+ return ModalDialog;
+}(Component$1);
+/**
+ * Default options for `ModalDialog`.
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+ModalDialog.prototype.options_ = {
+ pauseOnOpen: true,
+ temporary: true
+};
+Component$1.registerComponent('ModalDialog', ModalDialog);
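+
+// Illustrative usage sketch (assumes a video.js Player instance named `player`):
+//   var modal = new ModalDialog(player, { content: 'Hello', temporary: false });
+//   modal.open();  // fires 'beforemodalopen' then 'modalopen'
+//   modal.close(); // fires 'beforemodalclose' then 'modalclose'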
+
+/**
+ * Common functionality between {@link TextTrackList}, {@link AudioTrackList}, and
+ * {@link VideoTrackList}
+ *
+ * @extends EventTarget
+ */
+
+var TrackList = /*#__PURE__*/function (_EventTarget) {
+ _inheritsLoose(TrackList, _EventTarget);
+
+ /**
+ * Create an instance of this class
+ *
+ * @param {Track[]} tracks
+ * A list of tracks to initialize the list with.
+ *
+ * @abstract
+ */
+ function TrackList(tracks) {
+ var _this;
+
+ if (tracks === void 0) {
+ tracks = [];
+ }
+
+ _this = _EventTarget.call(this) || this;
+ _this.tracks_ = [];
+ /**
+ * @memberof TrackList
+ * @member {number} length
+     * The current number of `Track`s in this `TrackList`.
+ * @instance
+ */
+
+ Object.defineProperty(_assertThisInitialized(_this), 'length', {
+ get: function get() {
+ return this.tracks_.length;
+ }
+ });
+
+ for (var i = 0; i < tracks.length; i++) {
+ _this.addTrack(tracks[i]);
+ }
+
+ return _this;
+ }
+ /**
+ * Add a {@link Track} to the `TrackList`
+ *
+ * @param {Track} track
+ * The audio, video, or text track to add to the list.
+ *
+ * @fires TrackList#addtrack
+ */
+
+
+ var _proto = TrackList.prototype;
+
+ _proto.addTrack = function addTrack(track) {
+ var _this2 = this;
+
+ var index = this.tracks_.length;
+
+ if (!('' + index in this)) {
+ Object.defineProperty(this, index, {
+ get: function get() {
+ return this.tracks_[index];
+ }
+ });
+ } // Do not add duplicate tracks
+
+
+ if (this.tracks_.indexOf(track) === -1) {
+ this.tracks_.push(track);
+ /**
+ * Triggered when a track is added to a track list.
+ *
+ * @event TrackList#addtrack
+ * @type {EventTarget~Event}
+ * @property {Track} track
+ * A reference to track that was added.
+ */
+
+ this.trigger({
+ track: track,
+ type: 'addtrack',
+ target: this
+ });
+ }
+ /**
+ * Triggered when a track label is changed.
+ *
+     * @event TrackList#labelchange
+     * @type {EventTarget~Event}
+     * @property {Track} track
+     * A reference to the track whose label changed.
+ */
+
+
+ track.labelchange_ = function () {
+ _this2.trigger({
+ track: track,
+ type: 'labelchange',
+ target: _this2
+ });
+ };
+
+ if (isEvented(track)) {
+ track.addEventListener('labelchange', track.labelchange_);
+ }
+ }
+ /**
+ * Remove a {@link Track} from the `TrackList`
+ *
+ * @param {Track} rtrack
+ * The audio, video, or text track to remove from the list.
+ *
+ * @fires TrackList#removetrack
+ */
+ ;
+
+ _proto.removeTrack = function removeTrack(rtrack) {
+ var track;
+
+ for (var i = 0, l = this.length; i < l; i++) {
+ if (this[i] === rtrack) {
+ track = this[i];
+
+ if (track.off) {
+ track.off();
+ }
+
+ this.tracks_.splice(i, 1);
+ break;
+ }
+ }
+
+ if (!track) {
+ return;
+ }
+ /**
+ * Triggered when a track is removed from track list.
+ *
+ * @event TrackList#removetrack
+ * @type {EventTarget~Event}
+ * @property {Track} track
+ * A reference to track that was removed.
+ */
+
+
+ this.trigger({
+ track: track,
+ type: 'removetrack',
+ target: this
+ });
+ }
+ /**
+   * Get a Track from the TrackList by a track's id
+ *
+ * @param {string} id - the id of the track to get
+ * @method getTrackById
+ * @return {Track}
+ * @private
+ */
+ ;
+
+ _proto.getTrackById = function getTrackById(id) {
+ var result = null;
+
+ for (var i = 0, l = this.length; i < l; i++) {
+ var track = this[i];
+
+ if (track.id === id) {
+ result = track;
+ break;
+ }
+ }
+
+ return result;
+ };
+
+ return TrackList;
+}(EventTarget$2);
+/**
+ * Triggered when a different track is selected/enabled.
+ *
+ * @event TrackList#change
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Events that can be called with on + eventName. See {@link EventHandler}.
+ *
+ * @property {Object} TrackList#allowedEvents_
+ * @private
+ */
+
+
+TrackList.prototype.allowedEvents_ = {
+ change: 'change',
+ addtrack: 'addtrack',
+ removetrack: 'removetrack',
+ labelchange: 'labelchange'
+}; // emulate attribute EventHandler support to allow for feature detection
+
+for (var event in TrackList.prototype.allowedEvents_) {
+ TrackList.prototype['on' + event] = null;
+}
+
+/**
+ * Anywhere we call this function we diverge from the spec
+ * as we only support one enabled audiotrack at a time
+ *
+ * @param {AudioTrackList} list
+ * list to work on
+ *
+ * @param {AudioTrack} track
+ * The track to skip
+ *
+ * @private
+ */
+
+var disableOthers$1 = function disableOthers(list, track) {
+ for (var i = 0; i < list.length; i++) {
+ if (!Object.keys(list[i]).length || track.id === list[i].id) {
+ continue;
+ } // another audio track is enabled, disable it
+
+
+ list[i].enabled = false;
+ }
+};
+/**
+ * The current list of {@link AudioTrack} for a media file.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#audiotracklist}
+ * @extends TrackList
+ */
+
+
+var AudioTrackList = /*#__PURE__*/function (_TrackList) {
+ _inheritsLoose(AudioTrackList, _TrackList);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {AudioTrack[]} [tracks=[]]
+ * A list of `AudioTrack` to instantiate the list with.
+ */
+ function AudioTrackList(tracks) {
+ var _this;
+
+ if (tracks === void 0) {
+ tracks = [];
+ }
+
+ // make sure only 1 track is enabled
+ // sorted from last index to first index
+ for (var i = tracks.length - 1; i >= 0; i--) {
+ if (tracks[i].enabled) {
+ disableOthers$1(tracks, tracks[i]);
+ break;
+ }
+ }
+
+ _this = _TrackList.call(this, tracks) || this;
+ _this.changing_ = false;
+ return _this;
+ }
+ /**
+ * Add an {@link AudioTrack} to the `AudioTrackList`.
+ *
+ * @param {AudioTrack} track
+ * The AudioTrack to add to the list
+ *
+ * @fires TrackList#addtrack
+ */
+
+
+ var _proto = AudioTrackList.prototype;
+
+ _proto.addTrack = function addTrack(track) {
+ var _this2 = this;
+
+ if (track.enabled) {
+ disableOthers$1(this, track);
+ }
+
+ _TrackList.prototype.addTrack.call(this, track); // native tracks don't have this
+
+
+ if (!track.addEventListener) {
+ return;
+ }
+
+ track.enabledChange_ = function () {
+ // when we are disabling other tracks (since we don't support
+ // more than one track at a time) we will set changing_
+ // to true so that we don't trigger additional change events
+ if (_this2.changing_) {
+ return;
+ }
+
+ _this2.changing_ = true;
+ disableOthers$1(_this2, track);
+ _this2.changing_ = false;
+
+ _this2.trigger('change');
+ };
+ /**
+ * @listens AudioTrack#enabledchange
+ * @fires TrackList#change
+ */
+
+
+ track.addEventListener('enabledchange', track.enabledChange_);
+ };
+
+ _proto.removeTrack = function removeTrack(rtrack) {
+ _TrackList.prototype.removeTrack.call(this, rtrack);
+
+ if (rtrack.removeEventListener && rtrack.enabledChange_) {
+ rtrack.removeEventListener('enabledchange', rtrack.enabledChange_);
+ rtrack.enabledChange_ = null;
+ }
+ };
+
+ return AudioTrackList;
+}(TrackList);
+
+/**
+ * Un-select all other {@link VideoTrack}s that are selected.
+ *
+ * @param {VideoTrackList} list
+ * list to work on
+ *
+ * @param {VideoTrack} track
+ * The track to skip
+ *
+ * @private
+ */
+
+var disableOthers = function disableOthers(list, track) {
+ for (var i = 0; i < list.length; i++) {
+ if (!Object.keys(list[i]).length || track.id === list[i].id) {
+ continue;
+ } // another video track is enabled, disable it
+
+
+ list[i].selected = false;
+ }
+};
+/**
+ * The current list of {@link VideoTrack} for a video.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#videotracklist}
+ * @extends TrackList
+ */
+
+
+var VideoTrackList = /*#__PURE__*/function (_TrackList) {
+ _inheritsLoose(VideoTrackList, _TrackList);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {VideoTrack[]} [tracks=[]]
+ * A list of `VideoTrack` to instantiate the list with.
+ */
+ function VideoTrackList(tracks) {
+ var _this;
+
+ if (tracks === void 0) {
+ tracks = [];
+ }
+
+ // make sure only 1 track is enabled
+ // sorted from last index to first index
+ for (var i = tracks.length - 1; i >= 0; i--) {
+ if (tracks[i].selected) {
+ disableOthers(tracks, tracks[i]);
+ break;
+ }
+ }
+
+ _this = _TrackList.call(this, tracks) || this;
+ _this.changing_ = false;
+ /**
+ * @member {number} VideoTrackList#selectedIndex
+     * The current index of the selected {@link VideoTrack}.
+ */
+
+ Object.defineProperty(_assertThisInitialized(_this), 'selectedIndex', {
+ get: function get() {
+ for (var _i = 0; _i < this.length; _i++) {
+ if (this[_i].selected) {
+ return _i;
+ }
+ }
+
+ return -1;
+ },
+ set: function set() {}
+ });
+ return _this;
+ }
+ /**
+ * Add a {@link VideoTrack} to the `VideoTrackList`.
+ *
+ * @param {VideoTrack} track
+ * The VideoTrack to add to the list
+ *
+ * @fires TrackList#addtrack
+ */
+
+
+ var _proto = VideoTrackList.prototype;
+
+ _proto.addTrack = function addTrack(track) {
+ var _this2 = this;
+
+ if (track.selected) {
+ disableOthers(this, track);
+ }
+
+ _TrackList.prototype.addTrack.call(this, track); // native tracks don't have this
+
+
+ if (!track.addEventListener) {
+ return;
+ }
+
+ track.selectedChange_ = function () {
+ if (_this2.changing_) {
+ return;
+ }
+
+ _this2.changing_ = true;
+ disableOthers(_this2, track);
+ _this2.changing_ = false;
+
+ _this2.trigger('change');
+ };
+ /**
+ * @listens VideoTrack#selectedchange
+ * @fires TrackList#change
+ */
+
+
+ track.addEventListener('selectedchange', track.selectedChange_);
+ };
+
+ _proto.removeTrack = function removeTrack(rtrack) {
+ _TrackList.prototype.removeTrack.call(this, rtrack);
+
+ if (rtrack.removeEventListener && rtrack.selectedChange_) {
+ rtrack.removeEventListener('selectedchange', rtrack.selectedChange_);
+ rtrack.selectedChange_ = null;
+ }
+ };
+
+ return VideoTrackList;
+}(TrackList);
+
+/**
+ * The current list of {@link TextTrack} for a media file.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttracklist}
+ * @extends TrackList
+ */
+
+var TextTrackList = /*#__PURE__*/function (_TrackList) {
+ _inheritsLoose(TextTrackList, _TrackList);
+
+ function TextTrackList() {
+ return _TrackList.apply(this, arguments) || this;
+ }
+
+ var _proto = TextTrackList.prototype;
+
+ /**
+ * Add a {@link TextTrack} to the `TextTrackList`
+ *
+ * @param {TextTrack} track
+ * The text track to add to the list.
+ *
+ * @fires TrackList#addtrack
+ */
+ _proto.addTrack = function addTrack(track) {
+ var _this = this;
+
+ _TrackList.prototype.addTrack.call(this, track);
+
+ if (!this.queueChange_) {
+ this.queueChange_ = function () {
+ return _this.queueTrigger('change');
+ };
+ }
+
+    if (!this.triggerSelectedlanguagechange_) {
+ this.triggerSelectedlanguagechange_ = function () {
+ return _this.trigger('selectedlanguagechange');
+ };
+ }
+ /**
+ * @listens TextTrack#modechange
+ * @fires TrackList#change
+ */
+
+
+ track.addEventListener('modechange', this.queueChange_);
+ var nonLanguageTextTrackKind = ['metadata', 'chapters'];
+
+ if (nonLanguageTextTrackKind.indexOf(track.kind) === -1) {
+ track.addEventListener('modechange', this.triggerSelectedlanguagechange_);
+ }
+ };
+
+ _proto.removeTrack = function removeTrack(rtrack) {
+ _TrackList.prototype.removeTrack.call(this, rtrack); // manually remove the event handlers we added
+
+
+ if (rtrack.removeEventListener) {
+ if (this.queueChange_) {
+ rtrack.removeEventListener('modechange', this.queueChange_);
+ }
+
+      if (this.triggerSelectedlanguagechange_) {
+ rtrack.removeEventListener('modechange', this.triggerSelectedlanguagechange_);
+ }
+ }
+ };
+
+ return TextTrackList;
+}(TrackList);
+
+/**
+ * @file html-track-element-list.js
+ */
+
+/**
+ * The current list of {@link HtmlTrackElement}s.
+ */
+var HtmlTrackElementList = /*#__PURE__*/function () {
+ /**
+ * Create an instance of this class.
+ *
+ * @param {HtmlTrackElement[]} [tracks=[]]
+ * A list of `HtmlTrackElement` to instantiate the list with.
+ */
+ function HtmlTrackElementList(trackElements) {
+ if (trackElements === void 0) {
+ trackElements = [];
+ }
+
+ this.trackElements_ = [];
+ /**
+ * @memberof HtmlTrackElementList
+ * @member {number} length
+     * The current number of `HtmlTrackElement`s in this list.
+ * @instance
+ */
+
+ Object.defineProperty(this, 'length', {
+ get: function get() {
+ return this.trackElements_.length;
+ }
+ });
+
+ for (var i = 0, length = trackElements.length; i < length; i++) {
+ this.addTrackElement_(trackElements[i]);
+ }
+ }
+ /**
+ * Add an {@link HtmlTrackElement} to the `HtmlTrackElementList`
+ *
+ * @param {HtmlTrackElement} trackElement
+ * The track element to add to the list.
+ *
+ * @private
+ */
+
+
+ var _proto = HtmlTrackElementList.prototype;
+
+ _proto.addTrackElement_ = function addTrackElement_(trackElement) {
+ var index = this.trackElements_.length;
+
+ if (!('' + index in this)) {
+ Object.defineProperty(this, index, {
+ get: function get() {
+ return this.trackElements_[index];
+ }
+ });
+ } // Do not add duplicate elements
+
+
+ if (this.trackElements_.indexOf(trackElement) === -1) {
+ this.trackElements_.push(trackElement);
+ }
+ }
+ /**
+ * Get an {@link HtmlTrackElement} from the `HtmlTrackElementList` given an
+ * {@link TextTrack}.
+ *
+ * @param {TextTrack} track
+ * The track associated with a track element.
+ *
+ * @return {HtmlTrackElement|undefined}
+ * The track element that was found or undefined.
+ *
+ * @private
+ */
+ ;
+
+ _proto.getTrackElementByTrack_ = function getTrackElementByTrack_(track) {
+ var trackElement_;
+
+ for (var i = 0, length = this.trackElements_.length; i < length; i++) {
+ if (track === this.trackElements_[i].track) {
+ trackElement_ = this.trackElements_[i];
+ break;
+ }
+ }
+
+ return trackElement_;
+ }
+ /**
+ * Remove a {@link HtmlTrackElement} from the `HtmlTrackElementList`
+ *
+ * @param {HtmlTrackElement} trackElement
+ * The track element to remove from the list.
+ *
+ * @private
+ */
+ ;
+
+ _proto.removeTrackElement_ = function removeTrackElement_(trackElement) {
+ for (var i = 0, length = this.trackElements_.length; i < length; i++) {
+ if (trackElement === this.trackElements_[i]) {
+ if (this.trackElements_[i].track && typeof this.trackElements_[i].track.off === 'function') {
+ this.trackElements_[i].track.off();
+ }
+
+ if (typeof this.trackElements_[i].off === 'function') {
+ this.trackElements_[i].off();
+ }
+
+ this.trackElements_.splice(i, 1);
+ break;
+ }
+ }
+ };
+
+ return HtmlTrackElementList;
+}();
+
+/**
+ * @file text-track-cue-list.js
+ */
+
+/**
+ * @typedef {Object} TextTrackCueList~TextTrackCue
+ *
+ * @property {string} id
+ * The unique id for this text track cue
+ *
+ * @property {number} startTime
+ * The start time for this text track cue
+ *
+ * @property {number} endTime
+ * The end time for this text track cue
+ *
+ * @property {boolean} pauseOnExit
+ * Pause when the end time is reached if true.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttrackcue}
+ */
+
+/**
+ * A List of TextTrackCues.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttrackcuelist}
+ */
+var TextTrackCueList = /*#__PURE__*/function () {
+ /**
+   * Create an instance of this class.
+ *
+ * @param {Array} cues
+ * A list of cues to be initialized with
+ */
+ function TextTrackCueList(cues) {
+ TextTrackCueList.prototype.setCues_.call(this, cues);
+ /**
+ * @memberof TextTrackCueList
+ * @member {number} length
+ * The current number of `TextTrackCue`s in the TextTrackCueList.
+ * @instance
+ */
+
+ Object.defineProperty(this, 'length', {
+ get: function get() {
+ return this.length_;
+ }
+ });
+ }
+ /**
+ * A setter for cues in this list. Creates getters
+   * and an index for the cues.
+ *
+ * @param {Array} cues
+ * An array of cues to set
+ *
+ * @private
+ */
+
+
+ var _proto = TextTrackCueList.prototype;
+
+ _proto.setCues_ = function setCues_(cues) {
+ var oldLength = this.length || 0;
+ var i = 0;
+ var l = cues.length;
+ this.cues_ = cues;
+ this.length_ = cues.length;
+
+ var defineProp = function defineProp(index) {
+ if (!('' + index in this)) {
+ Object.defineProperty(this, '' + index, {
+ get: function get() {
+ return this.cues_[index];
+ }
+ });
+ }
+ };
+
+ if (oldLength < l) {
+ i = oldLength;
+
+ for (; i < l; i++) {
+ defineProp.call(this, i);
+ }
+ }
+ }
+ /**
+ * Get a `TextTrackCue` that is currently in the `TextTrackCueList` by id.
+ *
+ * @param {string} id
+ * The id of the cue that should be searched for.
+ *
+ * @return {TextTrackCueList~TextTrackCue|null}
+ * A single cue or null if none was found.
+ */
+ ;
+
+ _proto.getCueById = function getCueById(id) {
+ var result = null;
+
+ for (var i = 0, l = this.length; i < l; i++) {
+ var cue = this[i];
+
+ if (cue.id === id) {
+ result = cue;
+ break;
+ }
+ }
+
+ return result;
+ };
+
+ return TextTrackCueList;
+}();
+
+/**
+ * @file track-kinds.js
+ */
+
+/**
+ * All possible `VideoTrackKind`s
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-videotrack-kind
+ * @typedef VideoTrack~Kind
+ * @enum
+ */
+var VideoTrackKind = {
+ alternative: 'alternative',
+ captions: 'captions',
+ main: 'main',
+ sign: 'sign',
+ subtitles: 'subtitles',
+ commentary: 'commentary'
+};
+/**
+ * All possible `AudioTrackKind`s
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-audiotrack-kind
+ * @typedef AudioTrack~Kind
+ * @enum
+ */
+
+var AudioTrackKind = {
+ 'alternative': 'alternative',
+ 'descriptions': 'descriptions',
+ 'main': 'main',
+ 'main-desc': 'main-desc',
+ 'translation': 'translation',
+ 'commentary': 'commentary'
+};
+/**
+ * All possible `TextTrackKind`s
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-texttrack-kind
+ * @typedef TextTrack~Kind
+ * @enum
+ */
+
+var TextTrackKind = {
+ subtitles: 'subtitles',
+ captions: 'captions',
+ descriptions: 'descriptions',
+ chapters: 'chapters',
+ metadata: 'metadata'
+};
+/**
+ * All possible `TextTrackMode`s
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#texttrackmode
+ * @typedef TextTrack~Mode
+ * @enum
+ */
+
+var TextTrackMode = {
+ disabled: 'disabled',
+ hidden: 'hidden',
+ showing: 'showing'
+};
+
+/**
+ * A Track class that contains all of the common functionality for {@link AudioTrack},
+ * {@link VideoTrack}, and {@link TextTrack}.
+ *
+ * > Note: This class should not be used directly
+ *
+ * @see {@link https://html.spec.whatwg.org/multipage/embedded-content.html}
+ * @extends EventTarget
+ * @abstract
+ */
+
+var Track = /*#__PURE__*/function (_EventTarget) {
+ _inheritsLoose(Track, _EventTarget);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Object} [options={}]
+ * Object of option names and values
+ *
+ * @param {string} [options.kind='']
+ * A valid kind for the track type you are creating.
+ *
+ * @param {string} [options.id='vjs_track_' + Guid.newGUID()]
+ * A unique id for this AudioTrack.
+ *
+ * @param {string} [options.label='']
+ * The menu label for this track.
+ *
+ * @param {string} [options.language='']
+ * A valid two character language code.
+ *
+ * @abstract
+ */
+ function Track(options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _EventTarget.call(this) || this;
+ var trackProps = {
+ id: options.id || 'vjs_track_' + newGUID(),
+ kind: options.kind || '',
+ language: options.language || ''
+ };
+ var label = options.label || '';
+ /**
+ * @memberof Track
+ * @member {string} id
+ * The id of this track. Cannot be changed after creation.
+ * @instance
+ *
+ * @readonly
+ */
+
+ /**
+ * @memberof Track
+ * @member {string} kind
+ * The kind of track that this is. Cannot be changed after creation.
+ * @instance
+ *
+ * @readonly
+ */
+
+ /**
+ * @memberof Track
+ * @member {string} language
+ * The two letter language code for this track. Cannot be changed after
+ * creation.
+ * @instance
+ *
+ * @readonly
+ */
+
+ var _loop = function _loop(key) {
+ Object.defineProperty(_assertThisInitialized(_this), key, {
+ get: function get() {
+ return trackProps[key];
+ },
+ set: function set() {}
+ });
+ };
+
+ for (var key in trackProps) {
+ _loop(key);
+ }
+ /**
+ * @memberof Track
+ * @member {string} label
+ * The label of this track. Cannot be changed after creation.
+ * @instance
+ *
+ * @fires Track#labelchange
+ */
+
+
+ Object.defineProperty(_assertThisInitialized(_this), 'label', {
+ get: function get() {
+ return label;
+ },
+ set: function set(newLabel) {
+ if (newLabel !== label) {
+ label = newLabel;
+ /**
+ * An event that fires when label changes on this track.
+ *
+ * > Note: This is not part of the spec!
+ *
+ * @event Track#labelchange
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('labelchange');
+ }
+ }
+ });
+ return _this;
+ }
+
+ return Track;
+}(EventTarget$2);
+
+/**
+ * @file url.js
+ * @module url
+ */
+/**
+ * @typedef {Object} url:URLObject
+ *
+ * @property {string} protocol
+ * The protocol of the url that was parsed.
+ *
+ * @property {string} hostname
+ * The hostname of the url that was parsed.
+ *
+ * @property {string} port
+ * The port of the url that was parsed.
+ *
+ * @property {string} pathname
+ * The pathname of the url that was parsed.
+ *
+ * @property {string} search
+ * The search query of the url that was parsed.
+ *
+ * @property {string} hash
+ * The hash of the url that was parsed.
+ *
+ * @property {string} host
+ * The host of the url that was parsed.
+ */
+
+/**
+ * Resolve and parse the elements of a URL.
+ *
+ * @function
+ * @param {String} url
+ * The url to parse
+ *
+ * @return {url:URLObject}
+ * An object of url details
+ */
+
+var parseUrl = function parseUrl(url) {
+  // This entire method can be replaced with URL once we are able to drop IE11
+ var props = ['protocol', 'hostname', 'port', 'pathname', 'search', 'hash', 'host']; // add the url to an anchor and let the browser parse the URL
+
+ var a = document.createElement('a');
+ a.href = url; // Copy the specific URL properties to a new object
+ // This is also needed for IE because the anchor loses its
+ // properties when it's removed from the dom
+
+ var details = {};
+
+ for (var i = 0; i < props.length; i++) {
+ details[props[i]] = a[props[i]];
+ } // IE adds the port to the host property unlike everyone else. If
+ // a port identifier is added for standard ports, strip it.
+
+
+ if (details.protocol === 'http:') {
+ details.host = details.host.replace(/:80$/, '');
+ }
+
+ if (details.protocol === 'https:') {
+ details.host = details.host.replace(/:443$/, '');
+ }
+
+ if (!details.protocol) {
+ details.protocol = window$1.location.protocol;
+ }
+ /* istanbul ignore if */
+
+
+ if (!details.host) {
+ details.host = window$1.location.host;
+ }
+
+ return details;
+};
+/**
+ * Get absolute version of relative URL. Used to tell Flash the correct URL.
+ *
+ * @function
+ * @param {string} url
+ * URL to make absolute
+ *
+ * @return {string}
+ * Absolute URL
+ *
+ * @see http://stackoverflow.com/questions/470832/getting-an-absolute-url-from-a-relative-one-ie6-issue
+ */
+
+var getAbsoluteURL = function getAbsoluteURL(url) {
+ // Check if absolute URL
+ if (!url.match(/^https?:\/\//)) {
+ // Convert to absolute URL. Flash hosted off-site needs an absolute URL.
+ // add the url to an anchor and let the browser parse the URL
+ var a = document.createElement('a');
+ a.href = url;
+ url = a.href;
+ }
+
+ return url;
+};
+/**
+ * Returns the extension of the passed file name. It will return an empty string
+ * if passed an invalid path.
+ *
+ * @function
+ * @param {string} path
+ * The fileName path like '/path/to/file.mp4'
+ *
+ * @return {string}
+ * The extension in lower case or an empty string if no
+ * extension could be found.
+ */
+
+var getFileExtension = function getFileExtension(path) {
+ if (typeof path === 'string') {
+ var splitPathRe = /^(\/?)([\s\S]*?)((?:\.{1,2}|[^\/]+?)(\.([^\.\/\?]+)))(?:[\/]*|[\?].*)$/;
+ var pathParts = splitPathRe.exec(path);
+
+ if (pathParts) {
+ return pathParts.pop().toLowerCase();
+ }
+ }
+
+ return '';
+};
+/**
+ * Returns whether the url passed is a cross domain request or not.
+ *
+ * @function
+ * @param {string} url
+ * The url to check.
+ *
+ * @param {Object} [winLoc]
+ * the domain to check the url against, defaults to window.location
+ *
+ * @param {string} [winLoc.protocol]
+ * The window location protocol defaults to window.location.protocol
+ *
+ * @param {string} [winLoc.host]
+ * The window location host defaults to window.location.host
+ *
+ * @return {boolean}
+ * Whether it is a cross domain request or not.
+ */
+
+var isCrossOrigin = function isCrossOrigin(url, winLoc) {
+ if (winLoc === void 0) {
+ winLoc = window$1.location;
+ }
+
+ var urlInfo = parseUrl(url); // IE8 protocol relative urls will return ':' for protocol
+
+ var srcProtocol = urlInfo.protocol === ':' ? winLoc.protocol : urlInfo.protocol; // Check if url is for another domain/origin
+ // IE8 doesn't know location.origin, so we won't rely on it here
+
+ var crossOrigin = srcProtocol + urlInfo.host !== winLoc.protocol + winLoc.host;
+ return crossOrigin;
+};
+
+var Url = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ parseUrl: parseUrl,
+ getAbsoluteURL: getAbsoluteURL,
+ getFileExtension: getFileExtension,
+ isCrossOrigin: isCrossOrigin
+});
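+
+// Illustrative usage sketch (sample values only):
+//   getFileExtension('/path/to/video.MP4');         // 'mp4'
+//   parseUrl('/media/clip.mp4').pathname;           // '/media/clip.mp4'
+//   isCrossOrigin('https://cdn.example.com/a.vtt'); // true unless the page itself is served from cdn.example.com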
+
+/**
+ * Takes a webvtt file contents and parses it into cues
+ *
+ * @param {string} srcContent
+ * webVTT file contents
+ *
+ * @param {TextTrack} track
+ * TextTrack to add cues to. Cues come from the srcContent.
+ *
+ * @private
+ */
+
+var parseCues = function parseCues(srcContent, track) {
+ var parser = new window$1.WebVTT.Parser(window$1, window$1.vttjs, window$1.WebVTT.StringDecoder());
+ var errors = [];
+
+ parser.oncue = function (cue) {
+ track.addCue(cue);
+ };
+
+ parser.onparsingerror = function (error) {
+ errors.push(error);
+ };
+
+ parser.onflush = function () {
+ track.trigger({
+ type: 'loadeddata',
+ target: track
+ });
+ };
+
+ parser.parse(srcContent);
+
+ if (errors.length > 0) {
+ if (window$1.console && window$1.console.groupCollapsed) {
+ window$1.console.groupCollapsed("Text Track parsing errors for " + track.src);
+ }
+
+ errors.forEach(function (error) {
+ return log$1.error(error);
+ });
+
+ if (window$1.console && window$1.console.groupEnd) {
+ window$1.console.groupEnd();
+ }
+ }
+
+ parser.flush();
+};
+/**
+ * Load a `TextTrack` from a specified url.
+ *
+ * @param {string} src
+ * Url to load track from.
+ *
+ * @param {TextTrack} track
+ * Track to add cues to. Comes from the content at the end of `url`.
+ *
+ * @private
+ */
+
+
+var loadTrack = function loadTrack(src, track) {
+ var opts = {
+ uri: src
+ };
+ var crossOrigin = isCrossOrigin(src);
+
+ if (crossOrigin) {
+ opts.cors = crossOrigin;
+ }
+
+ var withCredentials = track.tech_.crossOrigin() === 'use-credentials';
+
+ if (withCredentials) {
+ opts.withCredentials = withCredentials;
+ }
+
+ XHR(opts, bind(this, function (err, response, responseBody) {
+ if (err) {
+ return log$1.error(err, response);
+ }
+
+ track.loaded_ = true; // Make sure that vttjs has loaded, otherwise, wait till it finished loading
+ // NOTE: this is only used for the alt/video.novtt.js build
+
+ if (typeof window$1.WebVTT !== 'function') {
+ if (track.tech_) {
+ // to prevent use before define eslint error, we define loadHandler
+ // as a let here
+ track.tech_.any(['vttjsloaded', 'vttjserror'], function (event) {
+ if (event.type === 'vttjserror') {
+ log$1.error("vttjs failed to load, stopping trying to process " + track.src);
+ return;
+ }
+
+ return parseCues(responseBody, track);
+ });
+ }
+ } else {
+ parseCues(responseBody, track);
+ }
+ }));
+};
+/**
+ * A representation of a single `TextTrack`.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttrack}
+ * @extends Track
+ */
+
+
+var TextTrack = /*#__PURE__*/function (_Track) {
+ _inheritsLoose(TextTrack, _Track);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Object} options={}
+ * Object of option names and values
+ *
+ * @param {Tech} options.tech
+ * A reference to the tech that owns this TextTrack.
+ *
+ * @param {TextTrack~Kind} [options.kind='subtitles']
+ * A valid text track kind.
+ *
+ * @param {TextTrack~Mode} [options.mode='disabled']
+ * A valid text track mode.
+ *
+ * @param {string} [options.id='vjs_track_' + Guid.newGUID()]
+ * A unique id for this TextTrack.
+ *
+ * @param {string} [options.label='']
+ * The menu label for this track.
+ *
+ * @param {string} [options.language='']
+ * A valid two character language code.
+ *
+ * @param {string} [options.srclang='']
+ * A valid two character language code. An alternative, but deprioritized
+ * version of `options.language`
+ *
+ * @param {string} [options.src]
+ * A url to TextTrack cues.
+ *
+ * @param {boolean} [options.default]
+ * If this track should default to on or off.
+ */
+ function TextTrack(options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ if (!options.tech) {
+ throw new Error('A tech was not provided.');
+ }
+
+ var settings = mergeOptions$3(options, {
+ kind: TextTrackKind[options.kind] || 'subtitles',
+ language: options.language || options.srclang || ''
+ });
+ var mode = TextTrackMode[settings.mode] || 'disabled';
+ var default_ = settings["default"];
+
+ if (settings.kind === 'metadata' || settings.kind === 'chapters') {
+ mode = 'hidden';
+ }
+
+ _this = _Track.call(this, settings) || this;
+ _this.tech_ = settings.tech;
+ _this.cues_ = [];
+ _this.activeCues_ = [];
+ _this.preload_ = _this.tech_.preloadTextTracks !== false;
+ var cues = new TextTrackCueList(_this.cues_);
+ var activeCues = new TextTrackCueList(_this.activeCues_);
+ var changed = false;
+ _this.timeupdateHandler = bind(_assertThisInitialized(_this), function () {
+ if (this.tech_.isDisposed()) {
+ return;
+ }
+
+ if (!this.tech_.isReady_) {
+ this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);
+ return;
+ } // Accessing this.activeCues for the side-effects of updating itself
+ // due to its nature as a getter function. Do not remove or cues will
+ // stop updating!
+ // Use the setter to prevent deletion from uglify (pure_getters rule)
+
+
+ this.activeCues = this.activeCues;
+
+ if (changed) {
+ this.trigger('cuechange');
+ changed = false;
+ }
+
+ this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);
+ });
+
+ var disposeHandler = function disposeHandler() {
+ _this.stopTracking();
+ };
+
+ _this.tech_.one('dispose', disposeHandler);
+
+ if (mode !== 'disabled') {
+ _this.startTracking();
+ }
+
+ Object.defineProperties(_assertThisInitialized(_this), {
+ /**
+ * @memberof TextTrack
+ * @member {boolean} default
+ * If this track was set to be on or off by default. Cannot be changed after
+ * creation.
+ * @instance
+ *
+ * @readonly
+ */
+ "default": {
+ get: function get() {
+ return default_;
+ },
+ set: function set() {}
+ },
+
+ /**
+ * @memberof TextTrack
+ * @member {string} mode
+ * Set the mode of this TextTrack to a valid {@link TextTrack~Mode}. Will
+ * not be set if setting to an invalid mode.
+ * @instance
+ *
+ * @fires TextTrack#modechange
+ */
+ mode: {
+ get: function get() {
+ return mode;
+ },
+ set: function set(newMode) {
+ if (!TextTrackMode[newMode]) {
+ return;
+ }
+
+ if (mode === newMode) {
+ return;
+ }
+
+ mode = newMode;
+
+ if (!this.preload_ && mode !== 'disabled' && this.cues.length === 0) {
+ // On-demand load.
+ loadTrack(this.src, this);
+ }
+
+ this.stopTracking();
+
+ if (mode !== 'disabled') {
+ this.startTracking();
+ }
+ /**
+ * An event that fires when mode changes on this track. This allows
+ * the TextTrackList that holds this track to act accordingly.
+ *
+ * > Note: This is not part of the spec!
+ *
+ * @event TextTrack#modechange
+ * @type {EventTarget~Event}
+ */
+
+
+ this.trigger('modechange');
+ }
+ },
+
+ /**
+ * @memberof TextTrack
+ * @member {TextTrackCueList} cues
+ * The text track cue list for this TextTrack.
+ * @instance
+ */
+ cues: {
+ get: function get() {
+ if (!this.loaded_) {
+ return null;
+ }
+
+ return cues;
+ },
+ set: function set() {}
+ },
+
+ /**
+ * @memberof TextTrack
+ * @member {TextTrackCueList} activeCues
+       * The list of text track cues that are currently active for this TextTrack.
+ * @instance
+ */
+ activeCues: {
+ get: function get() {
+ if (!this.loaded_) {
+ return null;
+ } // nothing to do
+
+
+ if (this.cues.length === 0) {
+ return activeCues;
+ }
+
+ var ct = this.tech_.currentTime();
+ var active = [];
+
+ for (var i = 0, l = this.cues.length; i < l; i++) {
+ var cue = this.cues[i];
+
+ if (cue.startTime <= ct && cue.endTime >= ct) {
+ active.push(cue);
+ } else if (cue.startTime === cue.endTime && cue.startTime <= ct && cue.startTime + 0.5 >= ct) {
+ active.push(cue);
+ }
+ }
+
+ changed = false;
+
+ if (active.length !== this.activeCues_.length) {
+ changed = true;
+ } else {
+ for (var _i = 0; _i < active.length; _i++) {
+ if (this.activeCues_.indexOf(active[_i]) === -1) {
+ changed = true;
+ }
+ }
+ }
+
+ this.activeCues_ = active;
+ activeCues.setCues_(this.activeCues_);
+ return activeCues;
+ },
+ // /!\ Keep this setter empty (see the timeupdate handler above)
+ set: function set() {}
+ }
+ });
+
+ if (settings.src) {
+ _this.src = settings.src;
+
+ if (!_this.preload_) {
+ // Tracks will load on-demand.
+ // Act like we're loaded for other purposes.
+ _this.loaded_ = true;
+ }
+
+ if (_this.preload_ || settings.kind !== 'subtitles' && settings.kind !== 'captions') {
+ loadTrack(_this.src, _assertThisInitialized(_this));
+ }
+ } else {
+ _this.loaded_ = true;
+ }
+
+ return _this;
+ }
+
+ var _proto = TextTrack.prototype;
+
+ _proto.startTracking = function startTracking() {
+ this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);
+ };
+
+ _proto.stopTracking = function stopTracking() {
+ if (this.rvf_) {
+ this.tech_.cancelVideoFrameCallback(this.rvf_);
+ this.rvf_ = undefined;
+ }
+ }
+ /**
+ * Add a cue to the internal list of cues.
+ *
+ * @param {TextTrack~Cue} cue
+ * The cue to add to our internal list
+ */
+ ;
+
+ _proto.addCue = function addCue(originalCue) {
+ var cue = originalCue;
+
+ if (window$1.vttjs && !(originalCue instanceof window$1.vttjs.VTTCue)) {
+ cue = new window$1.vttjs.VTTCue(originalCue.startTime, originalCue.endTime, originalCue.text);
+
+ for (var prop in originalCue) {
+ if (!(prop in cue)) {
+ cue[prop] = originalCue[prop];
+ }
+ } // make sure that `id` is copied over
+
+
+ cue.id = originalCue.id;
+ cue.originalCue_ = originalCue;
+ }
+
+ var tracks = this.tech_.textTracks();
+
+ for (var i = 0; i < tracks.length; i++) {
+ if (tracks[i] !== this) {
+ tracks[i].removeCue(cue);
+ }
+ }
+
+ this.cues_.push(cue);
+ this.cues.setCues_(this.cues_);
+ }
+ /**
+ * Remove a cue from our internal list
+ *
+ * @param {TextTrack~Cue} removeCue
+ * The cue to remove from our internal list
+ */
+ ;
+
+ _proto.removeCue = function removeCue(_removeCue) {
+ var i = this.cues_.length;
+
+ while (i--) {
+ var cue = this.cues_[i];
+
+ if (cue === _removeCue || cue.originalCue_ && cue.originalCue_ === _removeCue) {
+ this.cues_.splice(i, 1);
+ this.cues.setCues_(this.cues_);
+ break;
+ }
+ }
+ };
+
+ return TextTrack;
+}(Track);
+/**
+ * cuechange - One or more cues in the track have become active or stopped being active.
+ */
+
+
+TextTrack.prototype.allowedEvents_ = {
+ cuechange: 'cuechange'
+};
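+
+// Illustrative usage sketch (assumes a Tech instance named `tech`):
+//   var captionTrack = new TextTrack({ tech: tech, kind: 'captions', label: 'English', language: 'en' });
+//   captionTrack.addCue({ id: '1', startTime: 0, endTime: 2, text: 'Hello' });
+//   captionTrack.mode = 'showing'; // fires 'modechange'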
+
+/**
+ * A representation of a single `AudioTrack`. If it is part of an {@link AudioTrackList}
+ * only one `AudioTrack` in the list will be enabled at a time.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#audiotrack}
+ * @extends Track
+ */
+
+var AudioTrack = /*#__PURE__*/function (_Track) {
+ _inheritsLoose(AudioTrack, _Track);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Object} [options={}]
+ * Object of option names and values
+ *
+ * @param {AudioTrack~Kind} [options.kind='']
+ * A valid audio track kind
+ *
+ * @param {string} [options.id='vjs_track_' + Guid.newGUID()]
+ * A unique id for this AudioTrack.
+ *
+ * @param {string} [options.label='']
+ * The menu label for this track.
+ *
+ * @param {string} [options.language='']
+ * A valid two character language code.
+ *
+ * @param {boolean} [options.enabled]
+ * If this track is the one that is currently playing. If this track is part of
+ * an {@link AudioTrackList}, only one {@link AudioTrack} will be enabled.
+ */
+ function AudioTrack(options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ var settings = mergeOptions$3(options, {
+ kind: AudioTrackKind[options.kind] || ''
+ });
+ _this = _Track.call(this, settings) || this;
+ var enabled = false;
+ /**
+ * @memberof AudioTrack
+ * @member {boolean} enabled
+ * If this `AudioTrack` is enabled or not. When setting this will
+ * fire {@link AudioTrack#enabledchange} if the state of enabled is changed.
+ * @instance
+ *
+     * @fires AudioTrack#enabledchange
+ */
+
+ Object.defineProperty(_assertThisInitialized(_this), 'enabled', {
+ get: function get() {
+ return enabled;
+ },
+ set: function set(newEnabled) {
+ // an invalid or unchanged value
+ if (typeof newEnabled !== 'boolean' || newEnabled === enabled) {
+ return;
+ }
+
+ enabled = newEnabled;
+ /**
+ * An event that fires when enabled changes on this track. This allows
+ * the AudioTrackList that holds this track to act accordingly.
+ *
+ * > Note: This is not part of the spec! Native tracks will do
+ * this internally without an event.
+ *
+ * @event AudioTrack#enabledchange
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('enabledchange');
+ }
+    }); // if the user sets this track to enabled then
+    // set enabled to that true value otherwise
+    // we keep it false
+
+ if (settings.enabled) {
+ _this.enabled = settings.enabled;
+ }
+
+ _this.loaded_ = true;
+ return _this;
+ }
+
+ return AudioTrack;
+}(Track);
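+
+/*
+ * A minimal sketch of creating and attaching an emulated audio track, assuming
+ * the class is exposed on the public namespace as `videojs.AudioTrack`:
+ *
+ *   var commentary = new videojs.AudioTrack({
+ *     kind: 'commentary',
+ *     label: 'Director commentary',
+ *     language: 'en',
+ *     enabled: false
+ *   });
+ *   player.audioTracks().addTrack(commentary);
+ */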
+
+/**
+ * A representation of a single `VideoTrack`.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#videotrack}
+ * @extends Track
+ */
+
+var VideoTrack = /*#__PURE__*/function (_Track) {
+ _inheritsLoose(VideoTrack, _Track);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Object} [options={}]
+ * Object of option names and values
+ *
+ * @param {string} [options.kind='']
+ * A valid {@link VideoTrack~Kind}
+ *
+ * @param {string} [options.id='vjs_track_' + Guid.newGUID()]
+ * A unique id for this AudioTrack.
+ *
+ * @param {string} [options.label='']
+ * The menu label for this track.
+ *
+ * @param {string} [options.language='']
+ * A valid two character language code.
+ *
+ * @param {boolean} [options.selected]
+ * If this track is the one that is currently playing.
+ */
+ function VideoTrack(options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ var settings = mergeOptions$3(options, {
+ kind: VideoTrackKind[options.kind] || ''
+ });
+ _this = _Track.call(this, settings) || this;
+ var selected = false;
+ /**
+ * @memberof VideoTrack
+ * @member {boolean} selected
+ * If this `VideoTrack` is selected or not. When setting this will
+ * fire {@link VideoTrack#selectedchange} if the state of selected changed.
+ * @instance
+ *
+ * @fires VideoTrack#selectedchange
+ */
+
+ Object.defineProperty(_assertThisInitialized(_this), 'selected', {
+ get: function get() {
+ return selected;
+ },
+ set: function set(newSelected) {
+ // an invalid or unchanged value
+ if (typeof newSelected !== 'boolean' || newSelected === selected) {
+ return;
+ }
+
+ selected = newSelected;
+ /**
+ * An event that fires when selected changes on this track. This allows
+ * the VideoTrackList that holds this track to act accordingly.
+ *
+ * > Note: This is not part of the spec! Native tracks will do
+ * this internally without an event.
+ *
+ * @event VideoTrack#selectedchange
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('selectedchange');
+ }
+ }); // if the user sets this track to selected then
+ // set selected to that true value otherwise
+ // we keep it false
+
+ if (settings.selected) {
+ _this.selected = settings.selected;
+ }
+
+ return _this;
+ }
+
+ return VideoTrack;
+}(Track);
+
+/**
+ * @memberof HTMLTrackElement
+ * @typedef {HTMLTrackElement~ReadyState}
+ * @enum {number}
+ */
+
+var NONE = 0;
+var LOADING = 1;
+var LOADED = 2;
+var ERROR = 3;
+/**
+ * A single track represented in the DOM.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#htmltrackelement}
+ * @extends EventTarget
+ */
+
+var HTMLTrackElement = /*#__PURE__*/function (_EventTarget) {
+ _inheritsLoose(HTMLTrackElement, _EventTarget);
+
+ /**
+ * Create an instance of this class.
+ *
+   * @param {Object} [options={}]
+ * Object of option names and values
+ *
+ * @param {Tech} options.tech
+ * A reference to the tech that owns this HTMLTrackElement.
+ *
+ * @param {TextTrack~Kind} [options.kind='subtitles']
+ * A valid text track kind.
+ *
+ * @param {TextTrack~Mode} [options.mode='disabled']
+ * A valid text track mode.
+ *
+ * @param {string} [options.id='vjs_track_' + Guid.newGUID()]
+ * A unique id for this TextTrack.
+ *
+ * @param {string} [options.label='']
+ * The menu label for this track.
+ *
+ * @param {string} [options.language='']
+ * A valid two character language code.
+ *
+ * @param {string} [options.srclang='']
+ * A valid two character language code. An alternative, but deprioritized
+ * version of `options.language`
+ *
+ * @param {string} [options.src]
+ * A url to TextTrack cues.
+ *
+ * @param {boolean} [options.default]
+ * If this track should default to on or off.
+ */
+ function HTMLTrackElement(options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _EventTarget.call(this) || this;
+ var readyState;
+ var track = new TextTrack(options);
+ _this.kind = track.kind;
+ _this.src = track.src;
+ _this.srclang = track.language;
+ _this.label = track.label;
+ _this["default"] = track["default"];
+ Object.defineProperties(_assertThisInitialized(_this), {
+ /**
+ * @memberof HTMLTrackElement
+ * @member {HTMLTrackElement~ReadyState} readyState
+ * The current ready state of the track element.
+ * @instance
+ */
+ readyState: {
+ get: function get() {
+ return readyState;
+ }
+ },
+
+ /**
+ * @memberof HTMLTrackElement
+ * @member {TextTrack} track
+ * The underlying TextTrack object.
+ * @instance
+ *
+ */
+ track: {
+ get: function get() {
+ return track;
+ }
+ }
+ });
+ readyState = NONE;
+ /**
+ * @listens TextTrack#loadeddata
+ * @fires HTMLTrackElement#load
+ */
+
+ track.addEventListener('loadeddata', function () {
+ readyState = LOADED;
+
+ _this.trigger({
+ type: 'load',
+ target: _assertThisInitialized(_this)
+ });
+ });
+ return _this;
+ }
+
+ return HTMLTrackElement;
+}(EventTarget$2);
+
+HTMLTrackElement.prototype.allowedEvents_ = {
+ load: 'load'
+};
+HTMLTrackElement.NONE = NONE;
+HTMLTrackElement.LOADING = LOADING;
+HTMLTrackElement.LOADED = LOADED;
+HTMLTrackElement.ERROR = ERROR;
+
+/*
+ * This file contains all track properties that are used in
+ * player.js, tech.js, html5.js and possibly other techs in the future.
+ */
+
+var NORMAL = {
+ audio: {
+ ListClass: AudioTrackList,
+ TrackClass: AudioTrack,
+ capitalName: 'Audio'
+ },
+ video: {
+ ListClass: VideoTrackList,
+ TrackClass: VideoTrack,
+ capitalName: 'Video'
+ },
+ text: {
+ ListClass: TextTrackList,
+ TrackClass: TextTrack,
+ capitalName: 'Text'
+ }
+};
+Object.keys(NORMAL).forEach(function (type) {
+ NORMAL[type].getterName = type + "Tracks";
+ NORMAL[type].privateName = type + "Tracks_";
+});
+var REMOTE = {
+ remoteText: {
+ ListClass: TextTrackList,
+ TrackClass: TextTrack,
+ capitalName: 'RemoteText',
+ getterName: 'remoteTextTracks',
+ privateName: 'remoteTextTracks_'
+ },
+ remoteTextEl: {
+ ListClass: HtmlTrackElementList,
+ TrackClass: HTMLTrackElement,
+ capitalName: 'RemoteTextTrackEls',
+ getterName: 'remoteTextTrackEls',
+ privateName: 'remoteTextTrackEls_'
+ }
+};
+
+var ALL = _extends({}, NORMAL, REMOTE);
+
+REMOTE.names = Object.keys(REMOTE);
+NORMAL.names = Object.keys(NORMAL);
+ALL.names = [].concat(REMOTE.names).concat(NORMAL.names);
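+
+/*
+ * For reference, the definitions above resolve to getter/private names such as:
+ *
+ *   NORMAL.audio -> { getterName: 'audioTracks', privateName: 'audioTracks_' }
+ *   NORMAL.text  -> { getterName: 'textTracks',  privateName: 'textTracks_' }
+ *   ALL.names    -> ['remoteText', 'remoteTextEl', 'audio', 'video', 'text']
+ */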
+
+/**
+ * An Object containing a structure like: `{src: 'url', type: 'mimetype'}` or string
+ * that just contains the src url alone.
+ * * `var SourceObject = {src: 'http://ex.com/video.mp4', type: 'video/mp4'};`
+ * * `var SourceString = 'http://example.com/some-video.mp4';`
+ *
+ * @typedef {Object|string} Tech~SourceObject
+ *
+ * @property {string} src
+ * The url to the source
+ *
+ * @property {string} type
+ * The mime type of the source
+ */
+
+/**
+ * A function used by {@link Tech} to create a new {@link TextTrack}.
+ *
+ * @private
+ *
+ * @param {Tech} self
+ * An instance of the Tech class.
+ *
+ * @param {string} kind
+ * `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata)
+ *
+ * @param {string} [label]
+ * Label to identify the text track
+ *
+ * @param {string} [language]
+ * Two letter language abbreviation
+ *
+ * @param {Object} [options={}]
+ * An object with additional text track options
+ *
+ * @return {TextTrack}
+ * The text track that was created.
+ */
+
+function createTrackHelper(self, kind, label, language, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ var tracks = self.textTracks();
+ options.kind = kind;
+
+ if (label) {
+ options.label = label;
+ }
+
+ if (language) {
+ options.language = language;
+ }
+
+ options.tech = self;
+ var track = new ALL.text.TrackClass(options);
+ tracks.addTrack(track);
+ return track;
+}
+/**
+ * This is the base class for media playback technology controllers, such as
+ * {@link HTML5}
+ *
+ * @extends Component
+ */
+
+
+var Tech = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(Tech, _Component);
+
+ /**
+ * Create an instance of this Tech.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} ready
+ * Callback function to call when the `HTML5` Tech is ready.
+ */
+ function Tech(options, ready) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ if (ready === void 0) {
+ ready = function ready() {};
+ }
+
+ // we don't want the tech to report user activity automatically.
+ // This is done manually in addControlsListeners
+ options.reportTouchActivity = false;
+ _this = _Component.call(this, null, options, ready) || this;
+
+ _this.onDurationChange_ = function (e) {
+ return _this.onDurationChange(e);
+ };
+
+ _this.trackProgress_ = function (e) {
+ return _this.trackProgress(e);
+ };
+
+ _this.trackCurrentTime_ = function (e) {
+ return _this.trackCurrentTime(e);
+ };
+
+ _this.stopTrackingCurrentTime_ = function (e) {
+ return _this.stopTrackingCurrentTime(e);
+ };
+
+ _this.disposeSourceHandler_ = function (e) {
+ return _this.disposeSourceHandler(e);
+ };
+
+ _this.queuedHanders_ = new Set(); // keep track of whether the current source has played at all to
+ // implement a very limited played()
+
+ _this.hasStarted_ = false;
+
+ _this.on('playing', function () {
+ this.hasStarted_ = true;
+ });
+
+ _this.on('loadstart', function () {
+ this.hasStarted_ = false;
+ });
+
+ ALL.names.forEach(function (name) {
+ var props = ALL[name];
+
+ if (options && options[props.getterName]) {
+ _this[props.privateName] = options[props.getterName];
+ }
+ }); // Manually track progress in cases where the browser/tech doesn't report it.
+
+ if (!_this.featuresProgressEvents) {
+ _this.manualProgressOn();
+ } // Manually track timeupdates in cases where the browser/tech doesn't report it.
+
+
+ if (!_this.featuresTimeupdateEvents) {
+ _this.manualTimeUpdatesOn();
+ }
+
+ ['Text', 'Audio', 'Video'].forEach(function (track) {
+ if (options["native" + track + "Tracks"] === false) {
+ _this["featuresNative" + track + "Tracks"] = false;
+ }
+ });
+
+ if (options.nativeCaptions === false || options.nativeTextTracks === false) {
+ _this.featuresNativeTextTracks = false;
+ } else if (options.nativeCaptions === true || options.nativeTextTracks === true) {
+ _this.featuresNativeTextTracks = true;
+ }
+
+ if (!_this.featuresNativeTextTracks) {
+ _this.emulateTextTracks();
+ }
+
+ _this.preloadTextTracks = options.preloadTextTracks !== false;
+ _this.autoRemoteTextTracks_ = new ALL.text.ListClass();
+
+ _this.initTrackListeners(); // Turn on component tap events only if not using native controls
+
+
+ if (!options.nativeControlsForTouch) {
+ _this.emitTapEvents();
+ }
+
+ if (_this.constructor) {
+ _this.name_ = _this.constructor.name || 'Unknown Tech';
+ }
+
+ return _this;
+ }
+ /**
+ * A special function to trigger source set in a way that will allow player
+ * to re-trigger if the player or tech are not ready yet.
+ *
+ * @fires Tech#sourceset
+ * @param {string} src The source string at the time of the source changing.
+ */
+
+
+ var _proto = Tech.prototype;
+
+ _proto.triggerSourceset = function triggerSourceset(src) {
+ var _this2 = this;
+
+ if (!this.isReady_) {
+ // on initial ready we have to trigger source set
+ // 1ms after ready so that player can watch for it.
+ this.one('ready', function () {
+ return _this2.setTimeout(function () {
+ return _this2.triggerSourceset(src);
+ }, 1);
+ });
+ }
+ /**
+ * Fired when the source is set on the tech causing the media element
+ * to reload.
+ *
+ * @see {@link Player#event:sourceset}
+ * @event Tech#sourceset
+ * @type {EventTarget~Event}
+ */
+
+
+ this.trigger({
+ src: src,
+ type: 'sourceset'
+ });
+ }
+ /* Fallbacks for unsupported event types
+ ================================================================================ */
+
+ /**
+ * Polyfill the `progress` event for browsers that don't support it natively.
+ *
+ * @see {@link Tech#trackProgress}
+ */
+ ;
+
+ _proto.manualProgressOn = function manualProgressOn() {
+ this.on('durationchange', this.onDurationChange_);
+ this.manualProgress = true; // Trigger progress watching when a source begins loading
+
+ this.one('ready', this.trackProgress_);
+ }
+ /**
+ * Turn off the polyfill for `progress` events that was created in
+ * {@link Tech#manualProgressOn}
+ */
+ ;
+
+ _proto.manualProgressOff = function manualProgressOff() {
+ this.manualProgress = false;
+ this.stopTrackingProgress();
+ this.off('durationchange', this.onDurationChange_);
+ }
+ /**
+ * This is used to trigger a `progress` event when the buffered percent changes. It
+ * sets an interval function that will be called every 500 milliseconds to check if the
+ * buffer end percent has changed.
+ *
+ * > This function is called by {@link Tech#manualProgressOn}
+ *
+ * @param {EventTarget~Event} event
+ * The `ready` event that caused this to run.
+ *
+ * @listens Tech#ready
+ * @fires Tech#progress
+ */
+ ;
+
+ _proto.trackProgress = function trackProgress(event) {
+ this.stopTrackingProgress();
+ this.progressInterval = this.setInterval(bind(this, function () {
+ // Don't trigger unless buffered amount is greater than last time
+ var numBufferedPercent = this.bufferedPercent();
+
+ if (this.bufferedPercent_ !== numBufferedPercent) {
+ /**
+ * See {@link Player#progress}
+ *
+ * @event Tech#progress
+ * @type {EventTarget~Event}
+ */
+ this.trigger('progress');
+ }
+
+ this.bufferedPercent_ = numBufferedPercent;
+
+ if (numBufferedPercent === 1) {
+ this.stopTrackingProgress();
+ }
+ }), 500);
+ }
+ /**
+ * Update our internal duration on a `durationchange` event by calling
+ * {@link Tech#duration}.
+ *
+ * @param {EventTarget~Event} event
+ * The `durationchange` event that caused this to run.
+ *
+ * @listens Tech#durationchange
+ */
+ ;
+
+ _proto.onDurationChange = function onDurationChange(event) {
+ this.duration_ = this.duration();
+ }
+ /**
+ * Get and create a `TimeRange` object for buffering.
+ *
+ * @return {TimeRange}
+ * The time range object that was created.
+ */
+ ;
+
+ _proto.buffered = function buffered() {
+ return createTimeRanges(0, 0);
+ }
+ /**
+ * Get the percentage of the current video that is currently buffered.
+ *
+ * @return {number}
+ * A number from 0 to 1 that represents the decimal percentage of the
+ * video that is buffered.
+ *
+ */
+ ;
+
+ _proto.bufferedPercent = function bufferedPercent$1() {
+ return bufferedPercent(this.buffered(), this.duration_);
+ }
+ /**
+   * Stop manually tracking progress events by clearing the interval that was set in
+   * {@link Tech#trackProgress}.
+ */
+ ;
+
+ _proto.stopTrackingProgress = function stopTrackingProgress() {
+ this.clearInterval(this.progressInterval);
+ }
+ /**
+ * Polyfill the `timeupdate` event for browsers that don't support it.
+ *
+ * @see {@link Tech#trackCurrentTime}
+ */
+ ;
+
+ _proto.manualTimeUpdatesOn = function manualTimeUpdatesOn() {
+ this.manualTimeUpdates = true;
+ this.on('play', this.trackCurrentTime_);
+ this.on('pause', this.stopTrackingCurrentTime_);
+ }
+ /**
+ * Turn off the polyfill for `timeupdate` events that was created in
+ * {@link Tech#manualTimeUpdatesOn}
+ */
+ ;
+
+ _proto.manualTimeUpdatesOff = function manualTimeUpdatesOff() {
+ this.manualTimeUpdates = false;
+ this.stopTrackingCurrentTime();
+ this.off('play', this.trackCurrentTime_);
+ this.off('pause', this.stopTrackingCurrentTime_);
+ }
+ /**
+ * Sets up an interval function to track current time and trigger `timeupdate` every
+ * 250 milliseconds.
+ *
+ * @listens Tech#play
+ * @triggers Tech#timeupdate
+ */
+ ;
+
+ _proto.trackCurrentTime = function trackCurrentTime() {
+ if (this.currentTimeInterval) {
+ this.stopTrackingCurrentTime();
+ }
+
+ this.currentTimeInterval = this.setInterval(function () {
+ /**
+       * Triggered at an interval of 250ms to indicate that time is passing in the video.
+ *
+ * @event Tech#timeupdate
+ * @type {EventTarget~Event}
+ */
+ this.trigger({
+ type: 'timeupdate',
+ target: this,
+ manuallyTriggered: true
+ }); // 42 = 24 fps // 250 is what Webkit uses // FF uses 15
+ }, 250);
+ }
+ /**
+ * Stop the interval function created in {@link Tech#trackCurrentTime} so that the
+ * `timeupdate` event is no longer triggered.
+ *
+ * @listens {Tech#pause}
+ */
+ ;
+
+ _proto.stopTrackingCurrentTime = function stopTrackingCurrentTime() {
+ this.clearInterval(this.currentTimeInterval); // #1002 - if the video ends right before the next timeupdate would happen,
+ // the progress bar won't make it all the way to the end
+
+ this.trigger({
+ type: 'timeupdate',
+ target: this,
+ manuallyTriggered: true
+ });
+ }
+ /**
+ * Turn off all event polyfills, clear the `Tech`s {@link AudioTrackList},
+ * {@link VideoTrackList}, and {@link TextTrackList}, and dispose of this Tech.
+ *
+ * @fires Component#dispose
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ // clear out all tracks because we can't reuse them between techs
+ this.clearTracks(NORMAL.names); // Turn off any manual progress or timeupdate tracking
+
+ if (this.manualProgress) {
+ this.manualProgressOff();
+ }
+
+ if (this.manualTimeUpdates) {
+ this.manualTimeUpdatesOff();
+ }
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Clear out a single `TrackList` or an array of `TrackLists` given their names.
+ *
+ * > Note: Techs without source handlers should call this between sources for `video`
+ * & `audio` tracks. You don't want to use them between tracks!
+ *
+ * @param {string[]|string} types
+ * TrackList names to clear, valid names are `video`, `audio`, and
+ * `text`.
+ */
+ ;
+
+ _proto.clearTracks = function clearTracks(types) {
+ var _this3 = this;
+
+ types = [].concat(types); // clear out all tracks because we can't reuse them between techs
+
+ types.forEach(function (type) {
+ var list = _this3[type + "Tracks"]() || [];
+ var i = list.length;
+
+ while (i--) {
+ var track = list[i];
+
+ if (type === 'text') {
+ _this3.removeRemoteTextTrack(track);
+ }
+
+ list.removeTrack(track);
+ }
+ });
+ }
+ /**
+ * Remove any TextTracks added via addRemoteTextTrack that are
+ * flagged for automatic garbage collection
+ */
+ ;
+
+ _proto.cleanupAutoTextTracks = function cleanupAutoTextTracks() {
+ var list = this.autoRemoteTextTracks_ || [];
+ var i = list.length;
+
+ while (i--) {
+ var track = list[i];
+ this.removeRemoteTextTrack(track);
+ }
+ }
+ /**
+   * Reset the tech, which removes all sources and resets the internal readyState.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.reset = function reset() {}
+ /**
+ * Get the value of `crossOrigin` from the tech.
+ *
+ * @abstract
+ *
+ * @see {Html5#crossOrigin}
+ */
+ ;
+
+ _proto.crossOrigin = function crossOrigin() {}
+ /**
+ * Set the value of `crossOrigin` on the tech.
+ *
+ * @abstract
+ *
+ * @param {string} crossOrigin the crossOrigin value
+ * @see {Html5#setCrossOrigin}
+ */
+ ;
+
+ _proto.setCrossOrigin = function setCrossOrigin() {}
+ /**
+ * Get or set an error on the Tech.
+ *
+ * @param {MediaError} [err]
+ * Error to set on the Tech
+ *
+ * @return {MediaError|null}
+ * The current error object on the tech, or null if there isn't one.
+ */
+ ;
+
+ _proto.error = function error(err) {
+ if (err !== undefined) {
+ this.error_ = new MediaError(err);
+ this.trigger('error');
+ }
+
+ return this.error_;
+ }
+ /**
+ * Returns the `TimeRange`s that have been played through for the current source.
+ *
+ * > NOTE: This implementation is incomplete. It does not track the played `TimeRange`.
+ * It only checks whether the source has played at all or not.
+ *
+ * @return {TimeRange}
+ * - A single time range if this video has played
+ * - An empty set of ranges if not.
+ */
+ ;
+
+ _proto.played = function played() {
+ if (this.hasStarted_) {
+ return createTimeRanges(0, 0);
+ }
+
+ return createTimeRanges();
+ }
+ /**
+ * Start playback
+ *
+ * @abstract
+ *
+ * @see {Html5#play}
+ */
+ ;
+
+ _proto.play = function play() {}
+ /**
+ * Set whether we are scrubbing or not
+ *
+ * @abstract
+ *
+ * @see {Html5#setScrubbing}
+ */
+ ;
+
+ _proto.setScrubbing = function setScrubbing() {}
+ /**
+ * Get whether we are scrubbing or not
+ *
+ * @abstract
+ *
+ * @see {Html5#scrubbing}
+ */
+ ;
+
+ _proto.scrubbing = function scrubbing() {}
+ /**
+ * Causes a manual time update to occur if {@link Tech#manualTimeUpdatesOn} was
+ * previously called.
+ *
+ * @fires Tech#timeupdate
+ */
+ ;
+
+ _proto.setCurrentTime = function setCurrentTime() {
+ // improve the accuracy of manual timeupdates
+ if (this.manualTimeUpdates) {
+ /**
+ * A manual `timeupdate` event.
+ *
+ * @event Tech#timeupdate
+ * @type {EventTarget~Event}
+ */
+ this.trigger({
+ type: 'timeupdate',
+ target: this,
+ manuallyTriggered: true
+ });
+ }
+ }
+ /**
+   * Turn on listeners for {@link VideoTrackList}, {@link AudioTrackList}, and
+ * {@link TextTrackList} events.
+ *
+ * This adds {@link EventTarget~EventListeners} for `addtrack`, and `removetrack`.
+ *
+ * @fires Tech#audiotrackchange
+ * @fires Tech#videotrackchange
+ * @fires Tech#texttrackchange
+ */
+ ;
+
+ _proto.initTrackListeners = function initTrackListeners() {
+ var _this4 = this;
+
+ /**
+ * Triggered when tracks are added or removed on the Tech {@link AudioTrackList}
+ *
+ * @event Tech#audiotrackchange
+ * @type {EventTarget~Event}
+ */
+
+ /**
+ * Triggered when tracks are added or removed on the Tech {@link VideoTrackList}
+ *
+ * @event Tech#videotrackchange
+ * @type {EventTarget~Event}
+ */
+
+ /**
+ * Triggered when tracks are added or removed on the Tech {@link TextTrackList}
+ *
+ * @event Tech#texttrackchange
+ * @type {EventTarget~Event}
+ */
+ NORMAL.names.forEach(function (name) {
+ var props = NORMAL[name];
+
+ var trackListChanges = function trackListChanges() {
+ _this4.trigger(name + "trackchange");
+ };
+
+ var tracks = _this4[props.getterName]();
+
+ tracks.addEventListener('removetrack', trackListChanges);
+ tracks.addEventListener('addtrack', trackListChanges);
+
+ _this4.on('dispose', function () {
+ tracks.removeEventListener('removetrack', trackListChanges);
+ tracks.removeEventListener('addtrack', trackListChanges);
+ });
+ });
+ }
+ /**
+ * Emulate TextTracks using vtt.js if necessary
+ *
+ * @fires Tech#vttjsloaded
+ * @fires Tech#vttjserror
+ */
+ ;
+
+ _proto.addWebVttScript_ = function addWebVttScript_() {
+ var _this5 = this;
+
+ if (window$1.WebVTT) {
+ return;
+    } // Initially, Tech.el_ is a child of a dummy div; wait until the Component system
+    // signals that the Tech is ready, at which point Tech.el_ is part of the DOM,
+    // before inserting the WebVTT script
+
+
+ if (document.body.contains(this.el())) {
+ // load via require if available and vtt.js script location was not passed in
+ // as an option. novtt builds will turn the above require call into an empty object
+ // which will cause this if check to always fail.
+ if (!this.options_['vtt.js'] && isPlain(vtt) && Object.keys(vtt).length > 0) {
+ this.trigger('vttjsloaded');
+ return;
+      } // load vtt.js via the script location option or the CDN if no location was
+      // passed in
+
+
+ var script = document.createElement('script');
+ script.src = this.options_['vtt.js'] || 'https://vjs.zencdn.net/vttjs/0.14.1/vtt.min.js';
+
+ script.onload = function () {
+ /**
+ * Fired when vtt.js is loaded.
+ *
+ * @event Tech#vttjsloaded
+ * @type {EventTarget~Event}
+ */
+ _this5.trigger('vttjsloaded');
+ };
+
+ script.onerror = function () {
+ /**
+ * Fired when vtt.js was not loaded due to an error
+ *
+         * @event Tech#vttjserror
+ * @type {EventTarget~Event}
+ */
+ _this5.trigger('vttjserror');
+ };
+
+ this.on('dispose', function () {
+ script.onload = null;
+ script.onerror = null;
+      }); // vtt.js may still be loading; set window.WebVTT to true before the inject so that
+      // we don't overwrite the injected window.WebVTT if it loads right away
+
+ window$1.WebVTT = true;
+ this.el().parentNode.appendChild(script);
+ } else {
+ this.ready(this.addWebVttScript_);
+ }
+ }
+ /**
+ * Emulate texttracks
+ *
+ */
+ ;
+
+ _proto.emulateTextTracks = function emulateTextTracks() {
+ var _this6 = this;
+
+ var tracks = this.textTracks();
+ var remoteTracks = this.remoteTextTracks();
+
+ var handleAddTrack = function handleAddTrack(e) {
+ return tracks.addTrack(e.track);
+ };
+
+ var handleRemoveTrack = function handleRemoveTrack(e) {
+ return tracks.removeTrack(e.track);
+ };
+
+ remoteTracks.on('addtrack', handleAddTrack);
+ remoteTracks.on('removetrack', handleRemoveTrack);
+ this.addWebVttScript_();
+
+ var updateDisplay = function updateDisplay() {
+ return _this6.trigger('texttrackchange');
+ };
+
+ var textTracksChanges = function textTracksChanges() {
+ updateDisplay();
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i];
+ track.removeEventListener('cuechange', updateDisplay);
+
+ if (track.mode === 'showing') {
+ track.addEventListener('cuechange', updateDisplay);
+ }
+ }
+ };
+
+ textTracksChanges();
+ tracks.addEventListener('change', textTracksChanges);
+ tracks.addEventListener('addtrack', textTracksChanges);
+ tracks.addEventListener('removetrack', textTracksChanges);
+ this.on('dispose', function () {
+ remoteTracks.off('addtrack', handleAddTrack);
+ remoteTracks.off('removetrack', handleRemoveTrack);
+ tracks.removeEventListener('change', textTracksChanges);
+ tracks.removeEventListener('addtrack', textTracksChanges);
+ tracks.removeEventListener('removetrack', textTracksChanges);
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i];
+ track.removeEventListener('cuechange', updateDisplay);
+ }
+ });
+ }
+ /**
+ * Create and returns a remote {@link TextTrack} object.
+ *
+ * @param {string} kind
+ * `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata)
+ *
+ * @param {string} [label]
+ * Label to identify the text track
+ *
+ * @param {string} [language]
+ * Two letter language abbreviation
+ *
+ * @return {TextTrack}
+ * The TextTrack that gets created.
+ */
+ ;
+
+ _proto.addTextTrack = function addTextTrack(kind, label, language) {
+ if (!kind) {
+ throw new Error('TextTrack kind is required but was not provided');
+ }
+
+ return createTrackHelper(this, kind, label, language);
+ }
+ /**
+ * Create an emulated TextTrack for use by addRemoteTextTrack
+ *
+ * This is intended to be overridden by classes that inherit from
+ * Tech in order to create native or custom TextTracks.
+ *
+ * @param {Object} options
+ * The object should contain the options to initialize the TextTrack with.
+ *
+ * @param {string} [options.kind]
+ * `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata).
+ *
+   * @param {string} [options.label]
+ * Label to identify the text track
+ *
+ * @param {string} [options.language]
+ * Two letter language abbreviation.
+ *
+ * @return {HTMLTrackElement}
+ * The track element that gets created.
+ */
+ ;
+
+ _proto.createRemoteTextTrack = function createRemoteTextTrack(options) {
+ var track = mergeOptions$3(options, {
+ tech: this
+ });
+ return new REMOTE.remoteTextEl.TrackClass(track);
+ }
+ /**
+ * Creates a remote text track object and returns an html track element.
+ *
+ * > Note: This can be an emulated {@link HTMLTrackElement} or a native one.
+ *
+ * @param {Object} options
+ * See {@link Tech#createRemoteTextTrack} for more detailed properties.
+ *
+ * @param {boolean} [manualCleanup=true]
+ * - When false: the TextTrack will be automatically removed from the video
+ * element whenever the source changes
+   *        - When true: the TextTrack will have to be cleaned up manually
+ *
+ * @return {HTMLTrackElement}
+ * An Html Track Element.
+ *
+ * @deprecated The default functionality for this function will be equivalent
+ * to "manualCleanup=false" in the future. The manualCleanup parameter will
+ * also be removed.
+ */
+ ;
+
+ _proto.addRemoteTextTrack = function addRemoteTextTrack(options, manualCleanup) {
+ var _this7 = this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ var htmlTrackElement = this.createRemoteTextTrack(options);
+
+ if (manualCleanup !== true && manualCleanup !== false) {
+ // deprecation warning
+      log$1.warn('Calling addRemoteTextTrack without explicitly setting the "manualCleanup" parameter to `true` is deprecated and will default to `false` in a future version of video.js');
+ manualCleanup = true;
+ } // store HTMLTrackElement and TextTrack to remote list
+
+
+ this.remoteTextTrackEls().addTrackElement_(htmlTrackElement);
+ this.remoteTextTracks().addTrack(htmlTrackElement.track);
+
+ if (manualCleanup !== true) {
+ // create the TextTrackList if it doesn't exist
+ this.ready(function () {
+ return _this7.autoRemoteTextTracks_.addTrack(htmlTrackElement.track);
+ });
+ }
+
+ return htmlTrackElement;
+ }
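+  /*
+   * A minimal usage sketch via the player, which proxies to this method; passing
+   * `manualCleanup` explicitly avoids the deprecation warning above (the src path
+   * is illustrative):
+   *
+   *   var trackEl = player.addRemoteTextTrack({
+   *     kind: 'subtitles',
+   *     src: 'subtitles.en.vtt',
+   *     srclang: 'en',
+   *     label: 'English'
+   *   }, false);
+   *   trackEl.addEventListener('load', function () {
+   *     trackEl.track.mode = 'showing';
+   *   });
+   */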
+ /**
+ * Remove a remote text track from the remote `TextTrackList`.
+ *
+ * @param {TextTrack} track
+ * `TextTrack` to remove from the `TextTrackList`
+ */
+ ;
+
+ _proto.removeRemoteTextTrack = function removeRemoteTextTrack(track) {
+ var trackElement = this.remoteTextTrackEls().getTrackElementByTrack_(track); // remove HTMLTrackElement and TextTrack from remote list
+
+ this.remoteTextTrackEls().removeTrackElement_(trackElement);
+ this.remoteTextTracks().removeTrack(track);
+ this.autoRemoteTextTracks_.removeTrack(track);
+ }
+ /**
+ * Gets available media playback quality metrics as specified by the W3C's Media
+ * Playback Quality API.
+ *
+ * @see [Spec]{@link https://wicg.github.io/media-playback-quality}
+ *
+ * @return {Object}
+ * An object with supported media playback quality metrics
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.getVideoPlaybackQuality = function getVideoPlaybackQuality() {
+ return {};
+ }
+ /**
+ * Attempt to create a floating video window always on top of other windows
+ * so that users may continue consuming media while they interact with other
+ * content sites, or applications on their device.
+ *
+ * @see [Spec]{@link https://wicg.github.io/picture-in-picture}
+ *
+ * @return {Promise|undefined}
+ * A promise with a Picture-in-Picture window if the browser supports
+ * Promises (or one was passed in as an option). It returns undefined
+ * otherwise.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.requestPictureInPicture = function requestPictureInPicture() {
+ var PromiseClass = this.options_.Promise || window$1.Promise;
+
+ if (PromiseClass) {
+ return PromiseClass.reject();
+ }
+ }
+ /**
+ * A method to check for the value of the 'disablePictureInPicture' property.
+ * Defaults to true, as it should be considered disabled if the tech does not support pip
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.disablePictureInPicture = function disablePictureInPicture() {
+ return true;
+ }
+ /**
+ * A method to set or unset the 'disablePictureInPicture' property.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.setDisablePictureInPicture = function setDisablePictureInPicture() {}
+ /**
+ * A fallback implementation of requestVideoFrameCallback using requestAnimationFrame
+ *
+ * @param {function} cb
+ * @return {number} request id
+ */
+ ;
+
+ _proto.requestVideoFrameCallback = function requestVideoFrameCallback(cb) {
+ var _this8 = this;
+
+ var id = newGUID();
+
+ if (this.paused()) {
+ this.queuedHanders_.add(id);
+ this.one('playing', function () {
+ if (_this8.queuedHanders_.has(id)) {
+ _this8.queuedHanders_["delete"](id);
+
+ cb();
+ }
+ });
+ } else {
+ this.requestNamedAnimationFrame(id, cb);
+ }
+
+ return id;
+ }
+ /**
+ * A fallback implementation of cancelVideoFrameCallback
+ *
+ * @param {number} id id of callback to be cancelled
+ */
+ ;
+
+ _proto.cancelVideoFrameCallback = function cancelVideoFrameCallback(id) {
+ if (this.queuedHanders_.has(id)) {
+ this.queuedHanders_["delete"](id);
+ } else {
+ this.cancelNamedAnimationFrame(id);
+ }
+ }
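+  /*
+   * A usage sketch for the fallback above, assuming access to the current tech
+   * (for example via `player.tech(true)`):
+   *
+   *   var id = tech.requestVideoFrameCallback(function () {
+   *     console.log('frame callback at', tech.currentTime());
+   *   });
+   *   // later, to stop: tech.cancelVideoFrameCallback(id);
+   */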
+ /**
+ * A method to set a poster from a `Tech`.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.setPoster = function setPoster() {}
+ /**
+ * A method to check for the presence of the 'playsinline' attribute.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.playsinline = function playsinline() {}
+ /**
+ * A method to set or unset the 'playsinline' attribute.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.setPlaysinline = function setPlaysinline() {}
+ /**
+ * Attempt to force override of native audio tracks.
+ *
+ * @param {boolean} override - If set to true native audio will be overridden,
+ * otherwise native audio will potentially be used.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.overrideNativeAudioTracks = function overrideNativeAudioTracks() {}
+ /**
+ * Attempt to force override of native video tracks.
+ *
+ * @param {boolean} override - If set to true native video will be overridden,
+ * otherwise native video will potentially be used.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.overrideNativeVideoTracks = function overrideNativeVideoTracks() {}
+ /*
+ * Check if the tech can support the given mime-type.
+ *
+ * The base tech does not support any type, but source handlers might
+ * overwrite this.
+ *
+ * @param {string} type
+ * The mimetype to check for support
+ *
+ * @return {string}
+ * 'probably', 'maybe', or empty string
+ *
+ * @see [Spec]{@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/canPlayType}
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.canPlayType = function canPlayType() {
+ return '';
+ }
+ /**
+ * Check if the type is supported by this tech.
+ *
+ * The base tech does not support any type, but source handlers might
+ * overwrite this.
+ *
+ * @param {string} type
+ * The media type to check
+ * @return {string} Returns the native video element's response
+ */
+ ;
+
+ Tech.canPlayType = function canPlayType() {
+ return '';
+ }
+ /**
+ * Check if the tech can support the given source
+ *
+ * @param {Object} srcObj
+ * The source object
+ * @param {Object} options
+ * The options passed to the tech
+ * @return {string} 'probably', 'maybe', or '' (empty string)
+ */
+ ;
+
+ Tech.canPlaySource = function canPlaySource(srcObj, options) {
+ return Tech.canPlayType(srcObj.type);
+ }
+ /*
+ * Return whether the argument is a Tech or not.
+   * Can be passed either a Class like `Html5` or an instance like `player.tech_`
+ *
+ * @param {Object} component
+ * The item to check
+ *
+ * @return {boolean}
+ * Whether it is a tech or not
+ * - True if it is a tech
+ * - False if it is not
+ */
+ ;
+
+ Tech.isTech = function isTech(component) {
+ return component.prototype instanceof Tech || component instanceof Tech || component === Tech;
+ }
+ /**
+ * Registers a `Tech` into a shared list for videojs.
+ *
+ * @param {string} name
+ * Name of the `Tech` to register.
+ *
+ * @param {Object} tech
+ * The `Tech` class to register.
+ */
+ ;
+
+ Tech.registerTech = function registerTech(name, tech) {
+ if (!Tech.techs_) {
+ Tech.techs_ = {};
+ }
+
+ if (!Tech.isTech(tech)) {
+ throw new Error("Tech " + name + " must be a Tech");
+ }
+
+ if (!Tech.canPlayType) {
+ throw new Error('Techs must have a static canPlayType method on them');
+ }
+
+ if (!Tech.canPlaySource) {
+ throw new Error('Techs must have a static canPlaySource method on them');
+ }
+
+ name = toTitleCase$1(name);
+ Tech.techs_[name] = tech;
+ Tech.techs_[toLowerCase(name)] = tech;
+
+ if (name !== 'Tech') {
+ // camel case the techName for use in techOrder
+ Tech.defaultTechOrder_.push(name);
+ }
+
+ return tech;
+ }
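+  /*
+   * An illustrative registration through the public API; `MyTech` and its
+   * overrides are hypothetical:
+   *
+   *   var Html5 = videojs.getTech('Html5');
+   *   var MyTech = videojs.extend(Html5, {});
+   *   MyTech.isSupported = function () { return true; };
+   *   videojs.registerTech('MyTech', MyTech);
+   *   // players can then opt in with: techOrder: ['MyTech', 'html5']
+   */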
+ /**
+ * Get a `Tech` from the shared list by name.
+ *
+ * @param {string} name
+ * `camelCase` or `TitleCase` name of the Tech to get
+ *
+ * @return {Tech|undefined}
+ * The `Tech` or undefined if there was no tech with the name requested.
+ */
+ ;
+
+ Tech.getTech = function getTech(name) {
+ if (!name) {
+ return;
+ }
+
+ if (Tech.techs_ && Tech.techs_[name]) {
+ return Tech.techs_[name];
+ }
+
+ name = toTitleCase$1(name);
+
+ if (window$1 && window$1.videojs && window$1.videojs[name]) {
+ log$1.warn("The " + name + " tech was added to the videojs object when it should be registered using videojs.registerTech(name, tech)");
+ return window$1.videojs[name];
+ }
+ };
+
+ return Tech;
+}(Component$1);
+/**
+ * Get the {@link VideoTrackList}
+ *
+ * @returns {VideoTrackList}
+ * @method Tech.prototype.videoTracks
+ */
+
+/**
+ * Get the {@link AudioTrackList}
+ *
+ * @returns {AudioTrackList}
+ * @method Tech.prototype.audioTracks
+ */
+
+/**
+ * Get the {@link TextTrackList}
+ *
+ * @returns {TextTrackList}
+ * @method Tech.prototype.textTracks
+ */
+
+/**
+ * Get the remote element {@link TextTrackList}
+ *
+ * @returns {TextTrackList}
+ * @method Tech.prototype.remoteTextTracks
+ */
+
+/**
+ * Get the remote element {@link HtmlTrackElementList}
+ *
+ * @returns {HtmlTrackElementList}
+ * @method Tech.prototype.remoteTextTrackEls
+ */
+
+
+ALL.names.forEach(function (name) {
+ var props = ALL[name];
+
+ Tech.prototype[props.getterName] = function () {
+ this[props.privateName] = this[props.privateName] || new props.ListClass();
+ return this[props.privateName];
+ };
+});
+/**
+ * List of associated text tracks
+ *
+ * @type {TextTrackList}
+ * @private
+ * @property Tech#textTracks_
+ */
+
+/**
+ * List of associated audio tracks.
+ *
+ * @type {AudioTrackList}
+ * @private
+ * @property Tech#audioTracks_
+ */
+
+/**
+ * List of associated video tracks.
+ *
+ * @type {VideoTrackList}
+ * @private
+ * @property Tech#videoTracks_
+ */
+
+/**
+ * Boolean indicating whether the `Tech` supports volume control.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresVolumeControl = true;
+/**
+ * Boolean indicating whether the `Tech` supports muting volume.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresMuteControl = true;
+/**
+ * Boolean indicating whether the `Tech` supports fullscreen resize control.
+ * Resizing plugins using request fullscreen reloads the plugin
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresFullscreenResize = false;
+/**
+ * Boolean indicating whether the `Tech` supports changing the speed at which the video
+ * plays. Examples:
+ * - Set player to play 2x (twice) as fast
+ * - Set player to play 0.5x (half) as fast
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresPlaybackRate = false;
+/**
+ * Boolean indicating whether the `Tech` supports the `progress` event. This is currently
+ * not triggered by video-js-swf. This will be used to determine if
+ * {@link Tech#manualProgressOn} should be called.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresProgressEvents = false;
+/**
+ * Boolean indicating whether the `Tech` supports the `sourceset` event.
+ *
+ * A tech should set this to `true` and then use {@link Tech#triggerSourceset}
+ * to trigger a {@link Tech#event:sourceset} at the earliest time after getting
+ * a new source.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresSourceset = false;
+/**
+ * Boolean indicating whether the `Tech` supports the `timeupdate` event. This is currently
+ * not triggered by video-js-swf. This will be used to determine if
+ * {@link Tech#manualTimeUpdatesOn} should be called.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresTimeupdateEvents = false;
+/**
+ * Boolean indicating whether the `Tech` supports the native `TextTrack`s.
+ * This will help us integrate with native `TextTrack`s if the browser supports them.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresNativeTextTracks = false;
+/**
+ * Boolean indicating whether the `Tech` supports `requestVideoFrameCallback`.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Tech.prototype.featuresVideoFrameCallback = false;
+/**
+ * A functional mixin for techs that want to use the Source Handler pattern.
+ * Source handlers are scripts for handling specific formats.
+ * The source handler pattern is used for adaptive formats (HLS, DASH) that
+ * manually load video data and feed it into a Source Buffer (Media Source Extensions)
+ * Example: `Tech.withSourceHandlers.call(MyTech);`
+ *
+ * @param {Tech} _Tech
+ * The tech to add source handler functions to.
+ *
+ * @mixes Tech~SourceHandlerAdditions
+ */
+
+Tech.withSourceHandlers = function (_Tech) {
+ /**
+ * Register a source handler
+ *
+ * @param {Function} handler
+ * The source handler class
+ *
+ * @param {number} [index]
+ * Register it at the following index
+ */
+ _Tech.registerSourceHandler = function (handler, index) {
+ var handlers = _Tech.sourceHandlers;
+
+ if (!handlers) {
+ handlers = _Tech.sourceHandlers = [];
+ }
+
+ if (index === undefined) {
+ // add to the end of the list
+ index = handlers.length;
+ }
+
+ handlers.splice(index, 0, handler);
+ };
+ /**
+ * Check if the tech can support the given type. Also checks the
+ * Techs sourceHandlers.
+ *
+ * @param {string} type
+ * The mimetype to check.
+ *
+ * @return {string}
+ * 'probably', 'maybe', or '' (empty string)
+ */
+
+
+ _Tech.canPlayType = function (type) {
+ var handlers = _Tech.sourceHandlers || [];
+ var can;
+
+ for (var i = 0; i < handlers.length; i++) {
+ can = handlers[i].canPlayType(type);
+
+ if (can) {
+ return can;
+ }
+ }
+
+ return '';
+ };
+ /**
+ * Returns the first source handler that supports the source.
+ *
+ * TODO: Answer question: should 'probably' be prioritized over 'maybe'
+ *
+ * @param {Tech~SourceObject} source
+ * The source object
+ *
+ * @param {Object} options
+ * The options passed to the tech
+ *
+ * @return {SourceHandler|null}
+ * The first source handler that supports the source or null if
+ * no SourceHandler supports the source
+ */
+
+
+ _Tech.selectSourceHandler = function (source, options) {
+ var handlers = _Tech.sourceHandlers || [];
+ var can;
+
+ for (var i = 0; i < handlers.length; i++) {
+ can = handlers[i].canHandleSource(source, options);
+
+ if (can) {
+ return handlers[i];
+ }
+ }
+
+ return null;
+ };
+ /**
+ * Check if the tech can support the given source.
+ *
+ * @param {Tech~SourceObject} srcObj
+ * The source object
+ *
+ * @param {Object} options
+ * The options passed to the tech
+ *
+ * @return {string}
+ * 'probably', 'maybe', or '' (empty string)
+ */
+
+
+ _Tech.canPlaySource = function (srcObj, options) {
+ var sh = _Tech.selectSourceHandler(srcObj, options);
+
+ if (sh) {
+ return sh.canHandleSource(srcObj, options);
+ }
+
+ return '';
+ };
+ /**
+ * When using a source handler, prefer its implementation of
+ * any function normally provided by the tech.
+ */
+
+
+ var deferrable = ['seekable', 'seeking', 'duration'];
+ /**
+ * A wrapper around {@link Tech#seekable} that will call a `SourceHandler`s seekable
+   * function if it exists, with a fallback to the Tech's seekable function.
+ *
+ * @method _Tech.seekable
+ */
+
+ /**
+ * A wrapper around {@link Tech#duration} that will call a `SourceHandler`s duration
+   * function if it exists, otherwise it will fall back to the tech's duration function.
+ *
+ * @method _Tech.duration
+ */
+
+ deferrable.forEach(function (fnName) {
+ var originalFn = this[fnName];
+
+ if (typeof originalFn !== 'function') {
+ return;
+ }
+
+ this[fnName] = function () {
+ if (this.sourceHandler_ && this.sourceHandler_[fnName]) {
+ return this.sourceHandler_[fnName].apply(this.sourceHandler_, arguments);
+ }
+
+ return originalFn.apply(this, arguments);
+ };
+ }, _Tech.prototype);
+ /**
+ * Create a function for setting the source using a source object
+ * and source handlers.
+ * Should never be called unless a source handler was found.
+ *
+ * @param {Tech~SourceObject} source
+ * A source object with src and type keys
+ */
+
+ _Tech.prototype.setSource = function (source) {
+ var sh = _Tech.selectSourceHandler(source, this.options_);
+
+ if (!sh) {
+      // Fall back to a native source handler when unsupported sources are
+ // deliberately set
+ if (_Tech.nativeSourceHandler) {
+ sh = _Tech.nativeSourceHandler;
+ } else {
+ log$1.error('No source handler found for the current source.');
+ }
+ } // Dispose any existing source handler
+
+
+ this.disposeSourceHandler();
+ this.off('dispose', this.disposeSourceHandler_);
+
+ if (sh !== _Tech.nativeSourceHandler) {
+ this.currentSource_ = source;
+ }
+
+ this.sourceHandler_ = sh.handleSource(source, this, this.options_);
+ this.one('dispose', this.disposeSourceHandler_);
+ };
+ /**
+ * Clean up any existing SourceHandlers and listeners when the Tech is disposed.
+ *
+ * @listens Tech#dispose
+ */
+
+
+ _Tech.prototype.disposeSourceHandler = function () {
+    // if we have a source and get another one
+    // then we are loading something new,
+    // so clear all of our current tracks
+ if (this.currentSource_) {
+ this.clearTracks(['audio', 'video']);
+ this.currentSource_ = null;
+ } // always clean up auto-text tracks
+
+
+ this.cleanupAutoTextTracks();
+
+ if (this.sourceHandler_) {
+ if (this.sourceHandler_.dispose) {
+ this.sourceHandler_.dispose();
+ }
+
+ this.sourceHandler_ = null;
+ }
+ };
+}; // The base Tech class needs to be registered as a Component. It is the only
+// Tech that can be registered as a Component.
+
+
+Component$1.registerComponent('Tech', Tech);
+Tech.registerTech('Tech', Tech);
+/**
+ * A list of techs that should be added to techOrder on Players
+ *
+ * @private
+ */
+
+Tech.defaultTechOrder_ = [];
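+
+/*
+ * A minimal sketch of the source handler pattern described above; `MyTech` and
+ * the MIME type are hypothetical, and the Tech base class is assumed to be
+ * reachable via videojs.getTech('Tech'):
+ *
+ *   Tech.withSourceHandlers(MyTech);
+ *   MyTech.registerSourceHandler({
+ *     canPlayType: function (type) {
+ *       return type === 'application/x-my-format' ? 'probably' : '';
+ *     },
+ *     canHandleSource: function (srcObj, options) {
+ *       return srcObj.type === 'application/x-my-format' ? 'probably' : '';
+ *     },
+ *     handleSource: function (srcObj, tech, options) {
+ *       // start fetching and feeding data here
+ *       return { dispose: function () {} };
+ *     }
+ *   }, 0);
+ */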
+
+/**
+ * @file middleware.js
+ * @module middleware
+ */
+var middlewares = {};
+var middlewareInstances = {};
+var TERMINATOR = {};
+/**
+ * A middleware object is a plain JavaScript object that has methods that
+ * match the {@link Tech} methods found in the lists of allowed
+ * {@link module:middleware.allowedGetters|getters},
+ * {@link module:middleware.allowedSetters|setters}, and
+ * {@link module:middleware.allowedMediators|mediators}.
+ *
+ * @typedef {Object} MiddlewareObject
+ */
+
+/**
+ * A middleware factory function that should return a
+ * {@link module:middleware~MiddlewareObject|MiddlewareObject}.
+ *
+ * This factory will be called for each player when needed, with the player
+ * passed in as an argument.
+ *
+ * @callback MiddlewareFactory
+ * @param {Player} player
+ * A Video.js player.
+ */
+
+/**
+ * Define a middleware that the player should use by way of a factory function
+ * that returns a middleware object.
+ *
+ * @param {string} type
+ * The MIME type to match or `"*"` for all MIME types.
+ *
+ * @param {MiddlewareFactory} middleware
+ * A middleware factory function that will be executed for
+ * matching types.
+ */
+
+function use(type, middleware) {
+ middlewares[type] = middlewares[type] || [];
+ middlewares[type].push(middleware);
+}
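+/*
+ * A minimal sketch of registering a middleware through the public API, assuming
+ * this function is exposed as `videojs.use`; the factory here only implements
+ * setSource and simply forwards the source:
+ *
+ *   videojs.use('video/mp4', function (player) {
+ *     return {
+ *       setSource: function (srcObj, next) {
+ *         next(null, srcObj);
+ *       }
+ *     };
+ *   });
+ */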
+/**
+ * Asynchronously sets a source using middleware by recursing through any
+ * matching middlewares and calling `setSource` on each, passing along the
+ * previous returned value each time.
+ *
+ * @param {Player} player
+ * A {@link Player} instance.
+ *
+ * @param {Tech~SourceObject} src
+ * A source object.
+ *
+ * @param {Function} next
+ * The next middleware to run.
+ */
+
+function setSource(player, src, next) {
+ player.setTimeout(function () {
+ return setSourceHelper(src, middlewares[src.type], next, player);
+ }, 1);
+}
+/**
+ * When the tech is set, passes the tech to each middleware's `setTech` method.
+ *
+ * @param {Object[]} middleware
+ * An array of middleware instances.
+ *
+ * @param {Tech} tech
+ * A Video.js tech.
+ */
+
+function setTech(middleware, tech) {
+ middleware.forEach(function (mw) {
+ return mw.setTech && mw.setTech(tech);
+ });
+}
+/**
+ * Calls a getter on the tech first, through each middleware
+ * from right to left to the player.
+ *
+ * @param {Object[]} middleware
+ * An array of middleware instances.
+ *
+ * @param {Tech} tech
+ * The current tech.
+ *
+ * @param {string} method
+ * A method name.
+ *
+ * @return {Mixed}
+ * The final value from the tech after middleware has intercepted it.
+ */
+
+function get(middleware, tech, method) {
+ return middleware.reduceRight(middlewareIterator(method), tech[method]());
+}
+/**
+ * Takes the argument given to the player and calls the setter method on each
+ * middleware from left to right to the tech.
+ *
+ * @param {Object[]} middleware
+ * An array of middleware instances.
+ *
+ * @param {Tech} tech
+ * The current tech.
+ *
+ * @param {string} method
+ * A method name.
+ *
+ * @param {Mixed} arg
+ * The value to set on the tech.
+ *
+ * @return {Mixed}
+ * The return value of the `method` of the `tech`.
+ */
+
+function set(middleware, tech, method, arg) {
+ return tech[method](middleware.reduce(middlewareIterator(method), arg));
+}
+/**
+ * Takes the argument given to the player and calls the `call` version of the
+ * method on each middleware from left to right.
+ *
+ * Then, call the passed in method on the tech and return the result unchanged
+ * back to the player, through middleware, this time from right to left.
+ *
+ * @param {Object[]} middleware
+ * An array of middleware instances.
+ *
+ * @param {Tech} tech
+ * The current tech.
+ *
+ * @param {string} method
+ * A method name.
+ *
+ * @param {Mixed} arg
+ * The value to set on the tech.
+ *
+ * @return {Mixed}
+ * The return value of the `method` of the `tech`, regardless of the
+ * return values of middlewares.
+ */
+
+function mediate(middleware, tech, method, arg) {
+ if (arg === void 0) {
+ arg = null;
+ }
+
+ var callMethod = 'call' + toTitleCase$1(method);
+ var middlewareValue = middleware.reduce(middlewareIterator(callMethod), arg);
+ var terminated = middlewareValue === TERMINATOR; // deprecated. The `null` return value should instead return TERMINATOR to
+  // prevent confusion if a tech's method actually returns null.
+
+ var returnValue = terminated ? null : tech[method](middlewareValue);
+ executeRight(middleware, method, returnValue, terminated);
+ return returnValue;
+}
+/**
+ * Enumeration of allowed getters where the keys are method names.
+ *
+ * @type {Object}
+ */
+
+var allowedGetters = {
+ buffered: 1,
+ currentTime: 1,
+ duration: 1,
+ muted: 1,
+ played: 1,
+ paused: 1,
+ seekable: 1,
+ volume: 1,
+ ended: 1
+};
+/**
+ * Enumeration of allowed setters where the keys are method names.
+ *
+ * @type {Object}
+ */
+
+var allowedSetters = {
+ setCurrentTime: 1,
+ setMuted: 1,
+ setVolume: 1
+};
+/**
+ * Enumeration of allowed mediators where the keys are method names.
+ *
+ * @type {Object}
+ */
+
+var allowedMediators = {
+ play: 1,
+ pause: 1
+};
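+
+/*
+ * A sketch of a middleware that uses an allowed getter/setter pair, shifting
+ * currentTime by a fixed offset (the offset value is illustrative):
+ *
+ *   videojs.use('*', function (player) {
+ *     var offset = 10;
+ *     return {
+ *       currentTime: function (ct) { return ct - offset; },
+ *       setCurrentTime: function (time) { return time + offset; }
+ *     };
+ *   });
+ */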
+
+function middlewareIterator(method) {
+ return function (value, mw) {
+ // if the previous middleware terminated, pass along the termination
+ if (value === TERMINATOR) {
+ return TERMINATOR;
+ }
+
+ if (mw[method]) {
+ return mw[method](value);
+ }
+
+ return value;
+ };
+}
+
+function executeRight(mws, method, value, terminated) {
+ for (var i = mws.length - 1; i >= 0; i--) {
+ var mw = mws[i];
+
+ if (mw[method]) {
+ mw[method](terminated, value);
+ }
+ }
+}
+/**
+ * Clear the middleware cache for a player.
+ *
+ * @param {Player} player
+ * A {@link Player} instance.
+ */
+
+
+function clearCacheForPlayer(player) {
+ middlewareInstances[player.id()] = null;
+}
+/**
+ * {
+ * [playerId]: [[mwFactory, mwInstance], ...]
+ * }
+ *
+ * @private
+ */
+
+function getOrCreateFactory(player, mwFactory) {
+ var mws = middlewareInstances[player.id()];
+ var mw = null;
+
+ if (mws === undefined || mws === null) {
+ mw = mwFactory(player);
+ middlewareInstances[player.id()] = [[mwFactory, mw]];
+ return mw;
+ }
+
+ for (var i = 0; i < mws.length; i++) {
+ var _mws$i = mws[i],
+ mwf = _mws$i[0],
+ mwi = _mws$i[1];
+
+ if (mwf !== mwFactory) {
+ continue;
+ }
+
+ mw = mwi;
+ }
+
+ if (mw === null) {
+ mw = mwFactory(player);
+ mws.push([mwFactory, mw]);
+ }
+
+ return mw;
+}
+
+function setSourceHelper(src, middleware, next, player, acc, lastRun) {
+ if (src === void 0) {
+ src = {};
+ }
+
+ if (middleware === void 0) {
+ middleware = [];
+ }
+
+ if (acc === void 0) {
+ acc = [];
+ }
+
+ if (lastRun === void 0) {
+ lastRun = false;
+ }
+
+ var _middleware = middleware,
+ mwFactory = _middleware[0],
+ mwrest = _middleware.slice(1); // if mwFactory is a string, then we're at a fork in the road
+
+
+ if (typeof mwFactory === 'string') {
+ setSourceHelper(src, middlewares[mwFactory], next, player, acc, lastRun); // if we have an mwFactory, call it with the player to get the mw,
+ // then call the mw's setSource method
+ } else if (mwFactory) {
+ var mw = getOrCreateFactory(player, mwFactory); // if setSource isn't present, implicitly select this middleware
+
+ if (!mw.setSource) {
+ acc.push(mw);
+ return setSourceHelper(src, mwrest, next, player, acc, lastRun);
+ }
+
+ mw.setSource(assign({}, src), function (err, _src) {
+ // something happened, try the next middleware on the current level
+ // make sure to use the old src
+ if (err) {
+ return setSourceHelper(src, mwrest, next, player, acc, lastRun);
+ } // we've succeeded, now we need to go deeper
+
+
+ acc.push(mw); // if it's the same type, continue down the current chain
+ // otherwise, we want to go down the new chain
+
+ setSourceHelper(_src, src.type === _src.type ? mwrest : middlewares[_src.type], next, player, acc, lastRun);
+ });
+ } else if (mwrest.length) {
+ setSourceHelper(src, mwrest, next, player, acc, lastRun);
+ } else if (lastRun) {
+ next(src, acc);
+ } else {
+ setSourceHelper(src, middlewares['*'], next, player, acc, true);
+ }
+}
+
+/**
+ * Mimetypes
+ *
+ * @see https://www.iana.org/assignments/media-types/media-types.xhtml
+ * @typedef Mimetypes~Kind
+ * @enum
+ */
+
+var MimetypesKind = {
+ opus: 'video/ogg',
+ ogv: 'video/ogg',
+ mp4: 'video/mp4',
+ mov: 'video/mp4',
+ m4v: 'video/mp4',
+ mkv: 'video/x-matroska',
+ m4a: 'audio/mp4',
+ mp3: 'audio/mpeg',
+ aac: 'audio/aac',
+ caf: 'audio/x-caf',
+ flac: 'audio/flac',
+ oga: 'audio/ogg',
+ wav: 'audio/wav',
+ m3u8: 'application/x-mpegURL',
+ mpd: 'application/dash+xml',
+ jpg: 'image/jpeg',
+ jpeg: 'image/jpeg',
+ gif: 'image/gif',
+ png: 'image/png',
+ svg: 'image/svg+xml',
+ webp: 'image/webp'
+};
+/**
+ * Get the mimetype of a given src url if possible
+ *
+ * @param {string} src
+ * The url to the src
+ *
+ * @return {string}
+ *         The mimetype if it was known, or an empty string otherwise
+ */
+
+var getMimetype = function getMimetype(src) {
+ if (src === void 0) {
+ src = '';
+ }
+
+ var ext = getFileExtension(src);
+ var mimetype = MimetypesKind[ext.toLowerCase()];
+ return mimetype || '';
+};
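+/*
+ * Illustrative lookups against the table above (extension parsing is handled by
+ * getFileExtension elsewhere in this file):
+ *
+ *   getMimetype('//example.com/video.mp4'); // 'video/mp4'
+ *   getMimetype('manifest.m3u8');           // 'application/x-mpegURL'
+ *   getMimetype('no-extension');            // ''
+ */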
+/**
+ * Find the mime type of a given source string if possible. Uses the player
+ * source cache.
+ *
+ * @param {Player} player
+ * The player object
+ *
+ * @param {string} src
+ * The source string
+ *
+ * @return {string}
+ * The type that was found
+ */
+
+var findMimetype = function findMimetype(player, src) {
+ if (!src) {
+ return '';
+ } // 1. check for the type in the `source` cache
+
+
+ if (player.cache_.source.src === src && player.cache_.source.type) {
+ return player.cache_.source.type;
+ } // 2. see if we have this source in our `currentSources` cache
+
+
+ var matchingSources = player.cache_.sources.filter(function (s) {
+ return s.src === src;
+ });
+
+ if (matchingSources.length) {
+ return matchingSources[0].type;
+ } // 3. look for the src url in source elements and use the type there
+
+
+ var sources = player.$$('source');
+
+ for (var i = 0; i < sources.length; i++) {
+ var s = sources[i];
+
+ if (s.type && s.src && s.src === src) {
+ return s.type;
+ }
+ } // 4. finally fallback to our list of mime types based on src url extension
+
+
+ return getMimetype(src);
+};
+
+/**
+ * @module filter-source
+ */
+/**
+ * Filter out single bad source objects or multiple source objects in an
+ * array. Also flattens nested source object arrays into a 1 dimensional
+ * array of source objects.
+ *
+ * @param {Tech~SourceObject|Tech~SourceObject[]} src
+ * The src object to filter
+ *
+ * @return {Tech~SourceObject[]}
+ *         An array of source objects containing only valid sources
+ *
+ * @private
+ */
+
+var filterSource = function filterSource(src) {
+ // traverse array
+ if (Array.isArray(src)) {
+ var newsrc = [];
+ src.forEach(function (srcobj) {
+ srcobj = filterSource(srcobj);
+
+ if (Array.isArray(srcobj)) {
+ newsrc = newsrc.concat(srcobj);
+ } else if (isObject(srcobj)) {
+ newsrc.push(srcobj);
+ }
+ });
+ src = newsrc;
+ } else if (typeof src === 'string' && src.trim()) {
+ // convert string into object
+ src = [fixSource({
+ src: src
+ })];
+ } else if (isObject(src) && typeof src.src === 'string' && src.src && src.src.trim()) {
+ // src is already valid
+ src = [fixSource(src)];
+ } else {
+ // invalid source, turn it into an empty array
+ src = [];
+ }
+
+ return src;
+};
+/**
+ * Checks src mimetype, adding it when possible
+ *
+ * @param {Tech~SourceObject} src
+ * The src object to check
+ * @return {Tech~SourceObject}
+ * src Object with known type
+ */
+
+
+function fixSource(src) {
+ if (!src.type) {
+ var mimetype = getMimetype(src.src);
+
+ if (mimetype) {
+ src.type = mimetype;
+ }
+ }
+
+ return src;
+}
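+/**
+ * A small sketch of how `filterSource` and `fixSource` normalize input (values are
+ * hypothetical): strings become single-element arrays, nested arrays are flattened,
+ * invalid entries are dropped, and the type is filled in from the extension when known.
+ *
+ * @example
+ * filterSource('movie.mp4');
+ * // => [{ src: 'movie.mp4', type: 'video/mp4' }]
+ *
+ * filterSource(['intro.mp4', null, { src: 'main.m3u8' }]);
+ * // => [{ src: 'intro.mp4', type: 'video/mp4' },
+ * //     { src: 'main.m3u8', type: 'application/x-mpegURL' }]
+ */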
+
+/**
+ * The `MediaLoader` is the `Component` that decides which playback technology to load
+ * when a player is initialized.
+ *
+ * @extends Component
+ */
+
+var MediaLoader = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(MediaLoader, _Component);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should attach to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * The function that is run when this component is ready.
+ */
+ function MediaLoader(player, options, ready) {
+ var _this;
+
+ // MediaLoader has no element
+ var options_ = mergeOptions$3({
+ createEl: false
+ }, options);
+ _this = _Component.call(this, player, options_, ready) || this; // If there are no sources when the player is initialized,
+ // load the first supported playback technology.
+
+ if (!options.playerOptions.sources || options.playerOptions.sources.length === 0) {
+ for (var i = 0, j = options.playerOptions.techOrder; i < j.length; i++) {
+ var techName = toTitleCase$1(j[i]);
+ var tech = Tech.getTech(techName); // Support old behavior of techs being registered as components.
+ // Remove once that deprecated behavior is removed.
+
+ if (!techName) {
+ tech = Component$1.getComponent(techName);
+ } // Check if the browser supports this technology
+
+
+ if (tech && tech.isSupported()) {
+ player.loadTech_(techName);
+ break;
+ }
+ }
+ } else {
+ // Loop through playback technologies (e.g. HTML5) and check for support.
+ // Then load the best source.
+ // A few assumptions here:
+ // All playback technologies respect preload false.
+ player.src(options.playerOptions.sources);
+ }
+
+ return _this;
+ }
+
+ return MediaLoader;
+}(Component$1);
+
+Component$1.registerComponent('MediaLoader', MediaLoader);
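+
+/**
+ * Sketch of the behavior above from the embedding side (player id and file paths are
+ * hypothetical): with no sources, the first supported entry in `techOrder` is loaded;
+ * with sources, they are handed straight to `player.src()`.
+ *
+ * @example
+ * videojs('example-player', {
+ *   techOrder: ['html5'],
+ *   sources: [{ src: '/media/clip.mp4', type: 'video/mp4' }]
+ * });
+ */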
+
+/**
+ * Component which is clickable or keyboard actionable, but is not a
+ * native HTML button.
+ *
+ * @extends Component
+ */
+
+var ClickableComponent = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(ClickableComponent, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of component options.
+ *
+ * @param {function} [options.clickHandler]
+ * The function to call when the button is clicked / activated
+ *
+ * @param {string} [options.controlText]
+ * The text to set on the button
+ *
+ * @param {string} [options.className]
+ * A class or space separated list of classes to add to the component
+ *
+ */
+ function ClickableComponent(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+
+ if (_this.options_.controlText) {
+ _this.controlText(_this.options_.controlText);
+ }
+
+ _this.handleMouseOver_ = function (e) {
+ return _this.handleMouseOver(e);
+ };
+
+ _this.handleMouseOut_ = function (e) {
+ return _this.handleMouseOut(e);
+ };
+
+ _this.handleClick_ = function (e) {
+ return _this.handleClick(e);
+ };
+
+ _this.handleKeyDown_ = function (e) {
+ return _this.handleKeyDown(e);
+ };
+
+ _this.emitTapEvents();
+
+ _this.enable();
+
+ return _this;
+ }
+ /**
+ * Create the `ClickableComponent`s DOM element.
+ *
+ * @param {string} [tag=div]
+ * The element's node type.
+ *
+ * @param {Object} [props={}]
+ * An object of properties that should be set on the element.
+ *
+ * @param {Object} [attributes={}]
+ * An object of attributes that should be set on the element.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+
+
+ var _proto = ClickableComponent.prototype;
+
+ _proto.createEl = function createEl$1(tag, props, attributes) {
+ if (tag === void 0) {
+ tag = 'div';
+ }
+
+ if (props === void 0) {
+ props = {};
+ }
+
+ if (attributes === void 0) {
+ attributes = {};
+ }
+
+ props = assign({
+ className: this.buildCSSClass(),
+ tabIndex: 0
+ }, props);
+
+ if (tag === 'button') {
+ log$1.error("Creating a ClickableComponent with an HTML element of " + tag + " is not supported; use a Button instead.");
+ } // Add ARIA attributes for clickable element which is not a native HTML button
+
+
+ attributes = assign({
+ role: 'button'
+ }, attributes);
+ this.tabIndex_ = props.tabIndex;
+ var el = createEl(tag, props, attributes);
+ el.appendChild(createEl('span', {
+ className: 'vjs-icon-placeholder'
+ }, {
+ 'aria-hidden': true
+ }));
+ this.createControlTextEl(el);
+ return el;
+ };
+
+ _proto.dispose = function dispose() {
+ // remove controlTextEl_ on dispose
+ this.controlTextEl_ = null;
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Create a control text element on this `ClickableComponent`
+ *
+ * @param {Element} [el]
+ * Parent element for the control text.
+ *
+ * @return {Element}
+ * The control text element that gets created.
+ */
+ ;
+
+ _proto.createControlTextEl = function createControlTextEl(el) {
+ this.controlTextEl_ = createEl('span', {
+ className: 'vjs-control-text'
+ }, {
+ // let the screen reader user know that the text of the element may change
+ 'aria-live': 'polite'
+ });
+
+ if (el) {
+ el.appendChild(this.controlTextEl_);
+ }
+
+ this.controlText(this.controlText_, el);
+ return this.controlTextEl_;
+ }
+ /**
+ * Get or set the localized text to use for the controls on the `ClickableComponent`.
+ *
+ * @param {string} [text]
+ * Control text for element.
+ *
+ * @param {Element} [el=this.el()]
+ * Element to set the title on.
+ *
+ * @return {string}
+ * - The control text when getting
+ */
+ ;
+
+ _proto.controlText = function controlText(text, el) {
+ if (el === void 0) {
+ el = this.el();
+ }
+
+ if (text === undefined) {
+ return this.controlText_ || 'Need Text';
+ }
+
+ var localizedText = this.localize(text);
+ this.controlText_ = text;
+ textContent(this.controlTextEl_, localizedText);
+
+ if (!this.nonIconControl && !this.player_.options_.noUITitleAttributes) {
+ // Set title attribute if only an icon is shown
+ el.setAttribute('title', localizedText);
+ }
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ ;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-control vjs-button " + _Component.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Enable this `ClickableComponent`
+ */
+ ;
+
+ _proto.enable = function enable() {
+ if (!this.enabled_) {
+ this.enabled_ = true;
+ this.removeClass('vjs-disabled');
+ this.el_.setAttribute('aria-disabled', 'false');
+
+ if (typeof this.tabIndex_ !== 'undefined') {
+ this.el_.setAttribute('tabIndex', this.tabIndex_);
+ }
+
+ this.on(['tap', 'click'], this.handleClick_);
+ this.on('keydown', this.handleKeyDown_);
+ }
+ }
+ /**
+ * Disable this `ClickableComponent`
+ */
+ ;
+
+ _proto.disable = function disable() {
+ this.enabled_ = false;
+ this.addClass('vjs-disabled');
+ this.el_.setAttribute('aria-disabled', 'true');
+
+ if (typeof this.tabIndex_ !== 'undefined') {
+ this.el_.removeAttribute('tabIndex');
+ }
+
+ this.off('mouseover', this.handleMouseOver_);
+ this.off('mouseout', this.handleMouseOut_);
+ this.off(['tap', 'click'], this.handleClick_);
+ this.off('keydown', this.handleKeyDown_);
+ }
+ /**
+ * Handles a language change for the `ClickableComponent` by re-localizing its control text.
+ *
+ *
+ */
+ ;
+
+ _proto.handleLanguagechange = function handleLanguagechange() {
+ this.controlText(this.controlText_);
+ }
+ /**
+ * Event handler that is called when a `ClickableComponent` receives a
+ * `click` or `tap` event.
+ *
+ * @param {EventTarget~Event} event
+ * The `tap` or `click` event that caused this function to be called.
+ *
+ * @listens tap
+ * @listens click
+ * @abstract
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ if (this.options_.clickHandler) {
+ this.options_.clickHandler.call(this, arguments);
+ }
+ }
+ /**
+ * Event handler that is called when a `ClickableComponent` receives a
+ * `keydown` event.
+ *
+ * By default, if the key is Space or Enter, it will trigger a `click` event.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Support Space or Enter key operation to fire a click event. Also,
+ // prevent the event from propagating through the DOM and triggering
+ // Player hotkeys.
+ if (keycode.isEventKey(event, 'Space') || keycode.isEventKey(event, 'Enter')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.trigger('click');
+ } else {
+ // Pass keypress handling up for unsupported keys
+ _Component.prototype.handleKeyDown.call(this, event);
+ }
+ };
+
+ return ClickableComponent;
+}(Component$1);
+
+Component$1.registerComponent('ClickableComponent', ClickableComponent);
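+
+/**
+ * A minimal usage sketch (control name, class and handler are hypothetical): the
+ * documented `clickHandler`, `controlText` and `className` options are enough to
+ * build a simple custom control and attach it to the control bar.
+ *
+ * @example
+ * var control = new ClickableComponent(player, {
+ *   className: 'vjs-example-control',
+ *   controlText: 'Example',
+ *   clickHandler: function () {
+ *     videojs.log('example control activated');
+ *   }
+ * });
+ * player.getChild('controlBar').addChild(control);
+ */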
+
+/**
+ * A `ClickableComponent` that handles showing the poster image for the player.
+ *
+ * @extends ClickableComponent
+ */
+
+var PosterImage = /*#__PURE__*/function (_ClickableComponent) {
+ _inheritsLoose(PosterImage, _ClickableComponent);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should attach to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function PosterImage(player, options) {
+ var _this;
+
+ _this = _ClickableComponent.call(this, player, options) || this;
+
+ _this.update();
+
+ _this.update_ = function (e) {
+ return _this.update(e);
+ };
+
+ player.on('posterchange', _this.update_);
+ return _this;
+ }
+ /**
+ * Clean up and dispose of the `PosterImage`.
+ */
+
+
+ var _proto = PosterImage.prototype;
+
+ _proto.dispose = function dispose() {
+ this.player().off('posterchange', this.update_);
+
+ _ClickableComponent.prototype.dispose.call(this);
+ }
+ /**
+ * Create the `PosterImage`s DOM element.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+ ;
+
+ _proto.createEl = function createEl$1() {
+ var el = createEl('div', {
+ className: 'vjs-poster',
+ // Don't want poster to be tabbable.
+ tabIndex: -1
+ });
+ return el;
+ }
+ /**
+ * An {@link EventTarget~EventListener} for {@link Player#posterchange} events.
+ *
+ * @listens Player#posterchange
+ *
+ * @param {EventTarget~Event} [event]
+ * The `Player#posterchange` event that triggered this function.
+ */
+ ;
+
+ _proto.update = function update(event) {
+ var url = this.player().poster();
+ this.setSrc(url); // If there's no poster source we should display:none on this component
+ // so it's not still clickable or right-clickable
+
+ if (url) {
+ this.show();
+ } else {
+ this.hide();
+ }
+ }
+ /**
+ * Set the source of the `PosterImage` depending on the display method.
+ *
+ * @param {string} url
+ * The URL to the source for the `PosterImage`.
+ */
+ ;
+
+ _proto.setSrc = function setSrc(url) {
+ var backgroundImage = ''; // Any falsy value should stay as an empty string, otherwise
+ // this will throw an extra error
+
+ if (url) {
+ backgroundImage = "url(\"" + url + "\")";
+ }
+
+ this.el_.style.backgroundImage = backgroundImage;
+ }
+ /**
+ * An {@link EventTarget~EventListener} for clicks on the `PosterImage`. See
+ * {@link ClickableComponent#handleClick} for instances where this will be triggered.
+ *
+ * @listens tap
+ * @listens click
+ * @listens keydown
+ *
+ * @param {EventTarget~Event} event
+ * The `click`, `tap` or `keydown` event that caused this function to be called.
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ // We don't want a click to trigger playback when controls are disabled
+ if (!this.player_.controls()) {
+ return;
+ }
+
+ var sourceIsEncrypted = this.player_.usingPlugin('eme') && this.player_.eme.sessions && this.player_.eme.sessions.length > 0;
+
+ if (this.player_.tech(true) && // We've observed a bug in IE and Edge when playing back DRM content where
+ // calling .focus() on the video element causes the video to go black,
+ // so we avoid it in that specific case
+ !((IE_VERSION || IS_EDGE) && sourceIsEncrypted)) {
+ this.player_.tech(true).focus();
+ }
+
+ if (this.player_.paused()) {
+ silencePromise(this.player_.play());
+ } else {
+ this.player_.pause();
+ }
+ };
+
+ return PosterImage;
+}(ClickableComponent);
+
+Component$1.registerComponent('PosterImage', PosterImage);
+
+var darkGray = '#222';
+var lightGray = '#ccc';
+var fontMap = {
+ monospace: 'monospace',
+ sansSerif: 'sans-serif',
+ serif: 'serif',
+ monospaceSansSerif: '"Andale Mono", "Lucida Console", monospace',
+ monospaceSerif: '"Courier New", monospace',
+ proportionalSansSerif: 'sans-serif',
+ proportionalSerif: 'serif',
+ casual: '"Comic Sans MS", Impact, fantasy',
+ script: '"Monotype Corsiva", cursive',
+ smallcaps: '"Andale Mono", "Lucida Console", monospace, sans-serif'
+};
+/**
+ * Construct an rgba color from a given hex color code.
+ *
+ * @param {number} color
+ * Hex number for color, like #f0e or #f604e2.
+ *
+ * @param {number} opacity
+ * Value for opacity, 0.0 - 1.0.
+ *
+ * @return {string}
+ * The rgba color that was created, like 'rgba(255, 0, 0, 0.3)'.
+ */
+
+function constructColor(color, opacity) {
+ var hex;
+
+ if (color.length === 4) {
+ // color looks like "#f0e"
+ hex = color[1] + color[1] + color[2] + color[2] + color[3] + color[3];
+ } else if (color.length === 7) {
+ // color looks like "#f604e2"
+ hex = color.slice(1);
+ } else {
+ throw new Error('Invalid color code provided, ' + color + '; must be formatted as e.g. #f0e or #f604e2.');
+ }
+
+ return 'rgba(' + parseInt(hex.slice(0, 2), 16) + ',' + parseInt(hex.slice(2, 4), 16) + ',' + parseInt(hex.slice(4, 6), 16) + ',' + opacity + ')';
+}
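+/**
+ * Worked values for the conversion above (inputs chosen for illustration):
+ *
+ * @example
+ * constructColor('#f0e', 0.5);     // 'rgba(255,0,238,0.5)'
+ * constructColor('#f604e2', 0.75); // 'rgba(246,4,226,0.75)'
+ */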
+/**
+ * Try to update the style of a DOM element. Some style changes will throw an error,
+ * particularly in IE8. Those should be noops.
+ *
+ * @param {Element} el
+ * The DOM element to be styled.
+ *
+ * @param {string} style
+ * The CSS property on the element that should be styled.
+ *
+ * @param {string} rule
+ * The style rule that should be applied to the property.
+ *
+ * @private
+ */
+
+function tryUpdateStyle(el, style, rule) {
+ try {
+ el.style[style] = rule;
+ } catch (e) {
+ // Satisfies linter.
+ return;
+ }
+}
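+/**
+ * Illustrative call (the cue element is hypothetical): a style assignment that throws
+ * is silently swallowed rather than breaking caption rendering.
+ *
+ * @example
+ * tryUpdateStyle(cueEl, 'backgroundColor', constructColor('#000', 0.5));
+ */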
+/**
+ * The component for displaying text track cues.
+ *
+ * @extends Component
+ */
+
+
+var TextTrackDisplay = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(TextTrackDisplay, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * The function to call when `TextTrackDisplay` is ready.
+ */
+ function TextTrackDisplay(player, options, ready) {
+ var _this;
+
+ _this = _Component.call(this, player, options, ready) || this;
+
+ var updateDisplayHandler = function updateDisplayHandler(e) {
+ return _this.updateDisplay(e);
+ };
+
+ player.on('loadstart', function (e) {
+ return _this.toggleDisplay(e);
+ });
+ player.on('texttrackchange', updateDisplayHandler);
+ player.on('loadedmetadata', function (e) {
+ return _this.preselectTrack(e);
+ }); // This used to be called during player init, but was causing an error
+ // if a track should show by default and the display hadn't loaded yet.
+ // Should probably be moved to an external track loader when we support
+ // tracks that don't need a display.
+
+ player.ready(bind(_assertThisInitialized(_this), function () {
+ if (player.tech_ && player.tech_.featuresNativeTextTracks) {
+ this.hide();
+ return;
+ }
+
+ player.on('fullscreenchange', updateDisplayHandler);
+ player.on('playerresize', updateDisplayHandler);
+ window$1.addEventListener('orientationchange', updateDisplayHandler);
+ player.on('dispose', function () {
+ return window$1.removeEventListener('orientationchange', updateDisplayHandler);
+ });
+ var tracks = this.options_.playerOptions.tracks || [];
+
+ for (var i = 0; i < tracks.length; i++) {
+ this.player_.addRemoteTextTrack(tracks[i], true);
+ }
+
+ this.preselectTrack();
+ }));
+ return _this;
+ }
+ /**
+ * Preselect a track following this precedence:
+ * - matches the previously selected {@link TextTrack}'s language and kind
+ * - matches the previously selected {@link TextTrack}'s language only
+ * - is the first default captions track
+ * - is the first default descriptions track
+ *
+ * @listens Player#loadstart
+ */
+
+
+ var _proto = TextTrackDisplay.prototype;
+
+ _proto.preselectTrack = function preselectTrack() {
+ var modes = {
+ captions: 1,
+ subtitles: 1
+ };
+ var trackList = this.player_.textTracks();
+ var userPref = this.player_.cache_.selectedLanguage;
+ var firstDesc;
+ var firstCaptions;
+ var preferredTrack;
+
+ for (var i = 0; i < trackList.length; i++) {
+ var track = trackList[i];
+
+ if (userPref && userPref.enabled && userPref.language && userPref.language === track.language && track.kind in modes) {
+ // Always choose the track that matches both language and kind
+ if (track.kind === userPref.kind) {
+ preferredTrack = track; // or choose the first track that matches language
+ } else if (!preferredTrack) {
+ preferredTrack = track;
+ } // clear everything if offTextTrackMenuItem was clicked
+
+ } else if (userPref && !userPref.enabled) {
+ preferredTrack = null;
+ firstDesc = null;
+ firstCaptions = null;
+ } else if (track["default"]) {
+ if (track.kind === 'descriptions' && !firstDesc) {
+ firstDesc = track;
+ } else if (track.kind in modes && !firstCaptions) {
+ firstCaptions = track;
+ }
+ }
+ } // The preferredTrack matches the user preference and takes
+ // precedence over all the other tracks.
+ // So, display the preferredTrack before the first default track
+ // and the subtitles/captions track before the descriptions track
+
+
+ if (preferredTrack) {
+ preferredTrack.mode = 'showing';
+ } else if (firstCaptions) {
+ firstCaptions.mode = 'showing';
+ } else if (firstDesc) {
+ firstDesc.mode = 'showing';
+ }
+ }
+ /**
+ * Turn the display of {@link TextTrack}s from the current state into the other state.
+ * There are only two states:
+ * - 'shown'
+ * - 'hidden'
+ *
+ * @listens Player#loadstart
+ */
+ ;
+
+ _proto.toggleDisplay = function toggleDisplay() {
+ if (this.player_.tech_ && this.player_.tech_.featuresNativeTextTracks) {
+ this.hide();
+ } else {
+ this.show();
+ }
+ }
+ /**
+ * Create the {@link Component}'s DOM element.
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-text-track-display'
+ }, {
+ 'translate': 'yes',
+ 'aria-live': 'off',
+ 'aria-atomic': 'true'
+ });
+ }
+ /**
+ * Clear all displayed {@link TextTrack}s.
+ */
+ ;
+
+ _proto.clearDisplay = function clearDisplay() {
+ if (typeof window$1.WebVTT === 'function') {
+ window$1.WebVTT.processCues(window$1, [], this.el_);
+ }
+ }
+ /**
+ * Update the displayed TextTrack when either a {@link Player#texttrackchange} or
+ * a {@link Player#fullscreenchange} is fired.
+ *
+ * @listens Player#texttrackchange
+ * @listens Player#fullscreenchange
+ */
+ ;
+
+ _proto.updateDisplay = function updateDisplay() {
+ var tracks = this.player_.textTracks();
+ var allowMultipleShowingTracks = this.options_.allowMultipleShowingTracks;
+ this.clearDisplay();
+
+ if (allowMultipleShowingTracks) {
+ var showingTracks = [];
+
+ for (var _i = 0; _i < tracks.length; ++_i) {
+ var track = tracks[_i];
+
+ if (track.mode !== 'showing') {
+ continue;
+ }
+
+ showingTracks.push(track);
+ }
+
+ this.updateForTrack(showingTracks);
+ return;
+ } // Track display prioritization model: if multiple tracks are 'showing',
+ // display the first 'subtitles' or 'captions' track which is 'showing',
+ // otherwise display the first 'descriptions' track which is 'showing'
+
+
+ var descriptionsTrack = null;
+ var captionsSubtitlesTrack = null;
+ var i = tracks.length;
+
+ while (i--) {
+ var _track = tracks[i];
+
+ if (_track.mode === 'showing') {
+ if (_track.kind === 'descriptions') {
+ descriptionsTrack = _track;
+ } else {
+ captionsSubtitlesTrack = _track;
+ }
+ }
+ }
+
+ if (captionsSubtitlesTrack) {
+ if (this.getAttribute('aria-live') !== 'off') {
+ this.setAttribute('aria-live', 'off');
+ }
+
+ this.updateForTrack(captionsSubtitlesTrack);
+ } else if (descriptionsTrack) {
+ if (this.getAttribute('aria-live') !== 'assertive') {
+ this.setAttribute('aria-live', 'assertive');
+ }
+
+ this.updateForTrack(descriptionsTrack);
+ }
+ }
+ /**
+ * Style {@link TextTrack} activeCues according to {@link TextTrackSettings}.
+ *
+ * @param {TextTrack} track
+ * Text track object containing active cues to style.
+ */
+ ;
+
+ _proto.updateDisplayState = function updateDisplayState(track) {
+ var overrides = this.player_.textTrackSettings.getValues();
+ var cues = track.activeCues;
+ var i = cues.length;
+
+ while (i--) {
+ var cue = cues[i];
+
+ if (!cue) {
+ continue;
+ }
+
+ var cueDiv = cue.displayState;
+
+ if (overrides.color) {
+ cueDiv.firstChild.style.color = overrides.color;
+ }
+
+ if (overrides.textOpacity) {
+ tryUpdateStyle(cueDiv.firstChild, 'color', constructColor(overrides.color || '#fff', overrides.textOpacity));
+ }
+
+ if (overrides.backgroundColor) {
+ cueDiv.firstChild.style.backgroundColor = overrides.backgroundColor;
+ }
+
+ if (overrides.backgroundOpacity) {
+ tryUpdateStyle(cueDiv.firstChild, 'backgroundColor', constructColor(overrides.backgroundColor || '#000', overrides.backgroundOpacity));
+ }
+
+ if (overrides.windowColor) {
+ if (overrides.windowOpacity) {
+ tryUpdateStyle(cueDiv, 'backgroundColor', constructColor(overrides.windowColor, overrides.windowOpacity));
+ } else {
+ cueDiv.style.backgroundColor = overrides.windowColor;
+ }
+ }
+
+ if (overrides.edgeStyle) {
+ if (overrides.edgeStyle === 'dropshadow') {
+ cueDiv.firstChild.style.textShadow = "2px 2px 3px " + darkGray + ", 2px 2px 4px " + darkGray + ", 2px 2px 5px " + darkGray;
+ } else if (overrides.edgeStyle === 'raised') {
+ cueDiv.firstChild.style.textShadow = "1px 1px " + darkGray + ", 2px 2px " + darkGray + ", 3px 3px " + darkGray;
+ } else if (overrides.edgeStyle === 'depressed') {
+ cueDiv.firstChild.style.textShadow = "1px 1px " + lightGray + ", 0 1px " + lightGray + ", -1px -1px " + darkGray + ", 0 -1px " + darkGray;
+ } else if (overrides.edgeStyle === 'uniform') {
+ cueDiv.firstChild.style.textShadow = "0 0 4px " + darkGray + ", 0 0 4px " + darkGray + ", 0 0 4px " + darkGray + ", 0 0 4px " + darkGray;
+ }
+ }
+
+ if (overrides.fontPercent && overrides.fontPercent !== 1) {
+ var fontSize = window$1.parseFloat(cueDiv.style.fontSize);
+ cueDiv.style.fontSize = fontSize * overrides.fontPercent + 'px';
+ cueDiv.style.height = 'auto';
+ cueDiv.style.top = 'auto';
+ }
+
+ if (overrides.fontFamily && overrides.fontFamily !== 'default') {
+ if (overrides.fontFamily === 'small-caps') {
+ cueDiv.firstChild.style.fontVariant = 'small-caps';
+ } else {
+ cueDiv.firstChild.style.fontFamily = fontMap[overrides.fontFamily];
+ }
+ }
+ }
+ }
+ /**
+ * Update the display for a {@link TextTrack} or an array of {@link TextTrack}s.
+ *
+ * @param {TextTrack|TextTrack[]} tracks
+ * Text track object or array of text track objects whose active cues should be displayed.
+ */
+ ;
+
+ _proto.updateForTrack = function updateForTrack(tracks) {
+ if (!Array.isArray(tracks)) {
+ tracks = [tracks];
+ }
+
+ if (typeof window$1.WebVTT !== 'function' || tracks.every(function (track) {
+ return !track.activeCues;
+ })) {
+ return;
+ }
+
+ var cues = []; // push all active track cues
+
+ for (var i = 0; i < tracks.length; ++i) {
+ var track = tracks[i];
+
+ for (var j = 0; j < track.activeCues.length; ++j) {
+ cues.push(track.activeCues[j]);
+ }
+ } // removes all cues before it processes new ones
+
+
+ window$1.WebVTT.processCues(window$1, cues, this.el_); // add unique class to each language text track & add settings styling if necessary
+
+ for (var _i2 = 0; _i2 < tracks.length; ++_i2) {
+ var _track2 = tracks[_i2];
+
+ for (var _j = 0; _j < _track2.activeCues.length; ++_j) {
+ var cueEl = _track2.activeCues[_j].displayState;
+ addClass(cueEl, 'vjs-text-track-cue');
+ addClass(cueEl, 'vjs-text-track-cue-' + (_track2.language ? _track2.language : _i2));
+
+ if (_track2.language) {
+ setAttribute(cueEl, 'lang', _track2.language);
+ }
+ }
+
+ if (this.player_.textTrackSettings) {
+ this.updateDisplayState(_track2);
+ }
+ }
+ };
+
+ return TextTrackDisplay;
+}(Component$1);
+
+Component$1.registerComponent('TextTrackDisplay', TextTrackDisplay);
+
+/**
+ * A loading spinner for use during waiting/loading events.
+ *
+ * @extends Component
+ */
+
+var LoadingSpinner = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(LoadingSpinner, _Component);
+
+ function LoadingSpinner() {
+ return _Component.apply(this, arguments) || this;
+ }
+
+ var _proto = LoadingSpinner.prototype;
+
+ /**
+ * Create the `LoadingSpinner`s DOM element.
+ *
+ * @return {Element}
+ * The dom element that gets created.
+ */
+ _proto.createEl = function createEl$1() {
+ var isAudio = this.player_.isAudio();
+ var playerType = this.localize(isAudio ? 'Audio Player' : 'Video Player');
+ var controlText = createEl('span', {
+ className: 'vjs-control-text',
+ textContent: this.localize('{1} is loading.', [playerType])
+ });
+
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-loading-spinner',
+ dir: 'ltr'
+ });
+
+ el.appendChild(controlText);
+ return el;
+ };
+
+ return LoadingSpinner;
+}(Component$1);
+
+Component$1.registerComponent('LoadingSpinner', LoadingSpinner);
+
+/**
+ * Base class for all buttons.
+ *
+ * @extends ClickableComponent
+ */
+
+var Button = /*#__PURE__*/function (_ClickableComponent) {
+ _inheritsLoose(Button, _ClickableComponent);
+
+ function Button() {
+ return _ClickableComponent.apply(this, arguments) || this;
+ }
+
+ var _proto = Button.prototype;
+
+ /**
+ * Create the `Button`s DOM element.
+ *
+ * @param {string} [tag="button"]
+ * The element's node type. This argument is IGNORED: no matter what
+ * is passed, it will always create a `button` element.
+ *
+ * @param {Object} [props={}]
+ * An object of properties that should be set on the element.
+ *
+ * @param {Object} [attributes={}]
+ * An object of attributes that should be set on the element.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+ _proto.createEl = function createEl$1(tag, props, attributes) {
+ if (props === void 0) {
+ props = {};
+ }
+
+ if (attributes === void 0) {
+ attributes = {};
+ }
+
+ tag = 'button';
+ props = assign({
+ className: this.buildCSSClass()
+ }, props); // Add attributes for button element
+
+ attributes = assign({
+ // Necessary since the default button type is "submit"
+ type: 'button'
+ }, attributes);
+
+ var el = createEl(tag, props, attributes);
+
+ el.appendChild(createEl('span', {
+ className: 'vjs-icon-placeholder'
+ }, {
+ 'aria-hidden': true
+ }));
+ this.createControlTextEl(el);
+ return el;
+ }
+ /**
+ * Add a child `Component` inside of this `Button`.
+ *
+ * @param {string|Component} child
+ * The name or instance of a child to add.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of options that will get passed to children of
+ * the child.
+ *
+ * @return {Component}
+ * The `Component` that gets added as a child. When using a string the
+ * `Component` will get created by this process.
+ *
+ * @deprecated since version 5
+ */
+ ;
+
+ _proto.addChild = function addChild(child, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ var className = this.constructor.name;
+ log$1.warn("Adding an actionable (user controllable) child to a Button (" + className + ") is not supported; use a ClickableComponent instead."); // Avoid the error message generated by ClickableComponent's addChild method
+
+ return Component$1.prototype.addChild.call(this, child, options);
+ }
+ /**
+ * Enable the `Button` element so that it can be activated or clicked. Use this with
+ * {@link Button#disable}.
+ */
+ ;
+
+ _proto.enable = function enable() {
+ _ClickableComponent.prototype.enable.call(this);
+
+ this.el_.removeAttribute('disabled');
+ }
+ /**
+ * Disable the `Button` element so that it cannot be activated or clicked. Use this with
+ * {@link Button#enable}.
+ */
+ ;
+
+ _proto.disable = function disable() {
+ _ClickableComponent.prototype.disable.call(this);
+
+ this.el_.setAttribute('disabled', 'disabled');
+ }
+ /**
+ * This gets called when a `Button` has focus and `keydown` is triggered via a key
+ * press.
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to get called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Ignore Space or Enter key operation, which is handled by the browser for
+ // a button - though not for its super class, ClickableComponent. Also,
+ // prevent the event from propagating through the DOM and triggering Player
+ // hotkeys. We do not preventDefault here because we _want_ the browser to
+ // handle it.
+ if (keycode.isEventKey(event, 'Space') || keycode.isEventKey(event, 'Enter')) {
+ event.stopPropagation();
+ return;
+ } // Pass keypress handling up for unsupported keys
+
+
+ _ClickableComponent.prototype.handleKeyDown.call(this, event);
+ };
+
+ return Button;
+}(ClickableComponent);
+
+Component$1.registerComponent('Button', Button);
+
+/**
+ * The initial play button that shows before the video has played. The hiding of the
+ * `BigPlayButton` gets done via CSS and `Player` states.
+ *
+ * @extends Button
+ */
+
+var BigPlayButton = /*#__PURE__*/function (_Button) {
+ _inheritsLoose(BigPlayButton, _Button);
+
+ function BigPlayButton(player, options) {
+ var _this;
+
+ _this = _Button.call(this, player, options) || this;
+ _this.mouseused_ = false;
+
+ _this.on('mousedown', function (e) {
+ return _this.handleMouseDown(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object. Always returns 'vjs-big-play-button'.
+ */
+
+
+ var _proto = BigPlayButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return 'vjs-big-play-button';
+ }
+ /**
+ * This gets called when a `BigPlayButton` is "clicked". See {@link ClickableComponent}
+ * for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ var playPromise = this.player_.play(); // exit early if clicked via the mouse
+
+ if (this.mouseused_ && event.clientX && event.clientY) {
+ var sourceIsEncrypted = this.player_.usingPlugin('eme') && this.player_.eme.sessions && this.player_.eme.sessions.length > 0;
+ silencePromise(playPromise);
+
+ if (this.player_.tech(true) && // We've observed a bug in IE and Edge when playing back DRM content where
+ // calling .focus() on the video element causes the video to go black,
+ // so we avoid it in that specific case
+ !((IE_VERSION || IS_EDGE) && sourceIsEncrypted)) {
+ this.player_.tech(true).focus();
+ }
+
+ return;
+ }
+
+ var cb = this.player_.getChild('controlBar');
+ var playToggle = cb && cb.getChild('playToggle');
+
+ if (!playToggle) {
+ this.player_.tech(true).focus();
+ return;
+ }
+
+ var playFocus = function playFocus() {
+ return playToggle.focus();
+ };
+
+ if (isPromise(playPromise)) {
+ playPromise.then(playFocus, function () {});
+ } else {
+ this.setTimeout(playFocus, 1);
+ }
+ };
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ this.mouseused_ = false;
+
+ _Button.prototype.handleKeyDown.call(this, event);
+ };
+
+ _proto.handleMouseDown = function handleMouseDown(event) {
+ this.mouseused_ = true;
+ };
+
+ return BigPlayButton;
+}(Button);
+/**
+ * The text that should display over the `BigPlayButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+BigPlayButton.prototype.controlText_ = 'Play Video';
+Component$1.registerComponent('BigPlayButton', BigPlayButton);
+
+/**
+ * The `CloseButton` is a `{@link Button}` that fires a `close` event when
+ * it gets clicked.
+ *
+ * @extends Button
+ */
+
+var CloseButton = /*#__PURE__*/function (_Button) {
+ _inheritsLoose(CloseButton, _Button);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function CloseButton(player, options) {
+ var _this;
+
+ _this = _Button.call(this, player, options) || this;
+
+ _this.controlText(options && options.controlText || _this.localize('Close'));
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = CloseButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-close-button " + _Button.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * This gets called when a `CloseButton` gets clicked. See
+ * {@link ClickableComponent#handleClick} for more information on when
+ * this will be triggered
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ * @fires CloseButton#close
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ /**
+ * Triggered when a `CloseButton` is clicked.
+ *
+ * @event CloseButton#close
+ * @type {EventTarget~Event}
+ *
+ * @property {boolean} [bubbles=false]
+ * set to false so that the close event does not
+ * bubble up to parents if there is no listener
+ */
+ this.trigger({
+ type: 'close',
+ bubbles: false
+ });
+ }
+ /**
+ * Event handler that is called when a `CloseButton` receives a
+ * `keydown` event.
+ *
+ * By default, if the key is Esc, it will trigger a `click` event.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Esc button will trigger `click` event
+ if (keycode.isEventKey(event, 'Esc')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.trigger('click');
+ } else {
+ // Pass keypress handling up for unsupported keys
+ _Button.prototype.handleKeyDown.call(this, event);
+ }
+ };
+
+ return CloseButton;
+}(Button);
+
+Component$1.registerComponent('CloseButton', CloseButton);
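+
+/**
+ * A small sketch of consuming the `close` event (the surrounding dialog wiring is
+ * hypothetical): because the event does not bubble, the listener is attached to the
+ * button itself.
+ *
+ * @example
+ * var closeButton = new CloseButton(player, { controlText: 'Dismiss' });
+ * closeButton.on('close', function () {
+ *   // hide whatever container owns this button
+ * });
+ */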
+
+/**
+ * Button to toggle between play and pause.
+ *
+ * @extends Button
+ */
+
+var PlayToggle = /*#__PURE__*/function (_Button) {
+ _inheritsLoose(PlayToggle, _Button);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ */
+ function PlayToggle(player, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _Button.call(this, player, options) || this; // show or hide replay icon
+
+ options.replay = options.replay === undefined || options.replay;
+
+ _this.on(player, 'play', function (e) {
+ return _this.handlePlay(e);
+ });
+
+ _this.on(player, 'pause', function (e) {
+ return _this.handlePause(e);
+ });
+
+ if (options.replay) {
+ _this.on(player, 'ended', function (e) {
+ return _this.handleEnded(e);
+ });
+ }
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = PlayToggle.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-play-control " + _Button.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * This gets called when a `PlayToggle` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ if (this.player_.paused()) {
+ silencePromise(this.player_.play());
+ } else {
+ this.player_.pause();
+ }
+ }
+ /**
+ * This gets called once after the video has ended and the user seeks so that
+ * we can change the replay button back to a play button.
+ *
+ * @param {EventTarget~Event} [event]
+ * The event that caused this function to run.
+ *
+ * @listens Player#seeked
+ */
+ ;
+
+ _proto.handleSeeked = function handleSeeked(event) {
+ this.removeClass('vjs-ended');
+
+ if (this.player_.paused()) {
+ this.handlePause(event);
+ } else {
+ this.handlePlay(event);
+ }
+ }
+ /**
+ * Add the vjs-playing class to the element so it can change appearance.
+ *
+ * @param {EventTarget~Event} [event]
+ * The event that caused this function to run.
+ *
+ * @listens Player#play
+ */
+ ;
+
+ _proto.handlePlay = function handlePlay(event) {
+ this.removeClass('vjs-ended');
+ this.removeClass('vjs-paused');
+ this.addClass('vjs-playing'); // change the button text to "Pause"
+
+ this.controlText('Pause');
+ }
+ /**
+ * Add the vjs-paused class to the element so it can change appearance.
+ *
+ * @param {EventTarget~Event} [event]
+ * The event that caused this function to run.
+ *
+ * @listens Player#pause
+ */
+ ;
+
+ _proto.handlePause = function handlePause(event) {
+ this.removeClass('vjs-playing');
+ this.addClass('vjs-paused'); // change the button text to "Play"
+
+ this.controlText('Play');
+ }
+ /**
+ * Add the vjs-ended class to the element so it can change appearance
+ *
+ * @param {EventTarget~Event} [event]
+ * The event that caused this function to run.
+ *
+ * @listens Player#ended
+ */
+ ;
+
+ _proto.handleEnded = function handleEnded(event) {
+ var _this2 = this;
+
+ this.removeClass('vjs-playing');
+ this.addClass('vjs-ended'); // change the button text to "Replay"
+
+ this.controlText('Replay'); // on the next seek remove the replay button
+
+ this.one(this.player_, 'seeked', function (e) {
+ return _this2.handleSeeked(e);
+ });
+ };
+
+ return PlayToggle;
+}(Button);
+/**
+ * The text that should display over the `PlayToggle`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+PlayToggle.prototype.controlText_ = 'Play';
+Component$1.registerComponent('PlayToggle', PlayToggle);
+
+/**
+ * @file format-time.js
+ * @module format-time
+ */
+
+/**
+ * Format seconds as a time string, H:MM:SS or M:SS. Supplying a guide (in
+ * seconds) will force a number of leading zeros to cover the length of the
+ * guide.
+ *
+ * @private
+ * @param {number} seconds
+ * Number of seconds to be turned into a string
+ *
+ * @param {number} guide
+ * Number (in seconds) to model the string after
+ *
+ * @return {string}
+ * Time formatted as H:MM:SS or M:SS
+ */
+var defaultImplementation = function defaultImplementation(seconds, guide) {
+ seconds = seconds < 0 ? 0 : seconds;
+ var s = Math.floor(seconds % 60);
+ var m = Math.floor(seconds / 60 % 60);
+ var h = Math.floor(seconds / 3600);
+ var gm = Math.floor(guide / 60 % 60);
+ var gh = Math.floor(guide / 3600); // handle invalid times
+
+ if (isNaN(seconds) || seconds === Infinity) {
+ // '-' is false for all relational operators (e.g. <, >=) so this setting
+ // will add the minimum number of fields specified by the guide
+ h = m = s = '-';
+ } // Check if we need to show hours
+
+
+ h = h > 0 || gh > 0 ? h + ':' : ''; // If hours are showing, we may need to add a leading zero.
+ // Always show at least one digit of minutes.
+
+ m = ((h || gm >= 10) && m < 10 ? '0' + m : m) + ':'; // Check if leading zero is needed for seconds
+
+ s = s < 10 ? '0' + s : s;
+ return h + m + s;
+}; // Internal pointer to the current implementation.
+
+
+var implementation = defaultImplementation;
+/**
+ * Replaces the default formatTime implementation with a custom implementation.
+ *
+ * @param {Function} customImplementation
+ * A function which will be used in place of the default formatTime
+ * implementation. Will receive the current time in seconds and the
+ * guide (in seconds) as arguments.
+ */
+
+function setFormatTime(customImplementation) {
+ implementation = customImplementation;
+}
+/**
+ * Resets formatTime to the default implementation.
+ */
+
+function resetFormatTime() {
+ implementation = defaultImplementation;
+}
+/**
+ * Delegates to either the default time formatting function or a custom
+ * function supplied via `setFormatTime`.
+ *
+ * Formats seconds as a time string (H:MM:SS or M:SS). Supplying a
+ * guide (in seconds) will force a number of leading zeros to cover the
+ * length of the guide.
+ *
+ * @static
+ * @example formatTime(125, 600) === "02:05"
+ * @param {number} seconds
+ * Number of seconds to be turned into a string
+ *
+ * @param {number} guide
+ * Number (in seconds) to model the string after
+ *
+ * @return {string}
+ * Time formatted as H:MM:SS or M:SS
+ */
+
+function formatTime(seconds, guide) {
+ if (guide === void 0) {
+ guide = seconds;
+ }
+
+ return implementation(seconds, guide);
+}
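+/**
+ * Sketch of the formatting helpers above (values chosen for illustration):
+ *
+ * @example
+ * formatTime(125, 600); // '02:05' (a guide of ten minutes or more forces two-digit minutes)
+ * formatTime(3725);     // '1:02:05'
+ *
+ * setFormatTime(function (seconds, guide) { return Math.round(seconds) + 's'; });
+ * formatTime(90);       // '90s'
+ * resetFormatTime();    // restore the default H:MM:SS / M:SS output
+ */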
+
+/**
+ * Displays time information about the video
+ *
+ * @extends Component
+ */
+
+var TimeDisplay = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(TimeDisplay, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function TimeDisplay(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+
+ _this.on(player, ['timeupdate', 'ended'], function (e) {
+ return _this.updateContent(e);
+ });
+
+ _this.updateTextNode_();
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = TimeDisplay.prototype;
+
+ _proto.createEl = function createEl$1() {
+ var className = this.buildCSSClass();
+
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: className + " vjs-time-control vjs-control"
+ });
+
+ var span = createEl('span', {
+ className: 'vjs-control-text',
+ textContent: this.localize(this.labelText_) + "\xA0"
+ }, {
+ role: 'presentation'
+ });
+ el.appendChild(span);
+ this.contentEl_ = createEl('span', {
+ className: className + "-display"
+ }, {
+ // tell screen readers not to automatically read the time as it changes
+ 'aria-live': 'off',
+ // span elements have no implicit role, but some screen readers (notably VoiceOver)
+ // treat them as a break between items in the DOM when using arrow keys
+ // (or left-to-right swipes on iOS) to read contents of a page. Using
+ // role='presentation' causes VoiceOver to NOT treat this span as a break.
+ 'role': 'presentation'
+ });
+ el.appendChild(this.contentEl_);
+ return el;
+ };
+
+ _proto.dispose = function dispose() {
+ this.contentEl_ = null;
+ this.textNode_ = null;
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Updates the time display text node with a new time
+ *
+ * @param {number} [time=0] the time to update to
+ *
+ * @private
+ */
+ ;
+
+ _proto.updateTextNode_ = function updateTextNode_(time) {
+ var _this2 = this;
+
+ if (time === void 0) {
+ time = 0;
+ }
+
+ time = formatTime(time);
+
+ if (this.formattedTime_ === time) {
+ return;
+ }
+
+ this.formattedTime_ = time;
+ this.requestNamedAnimationFrame('TimeDisplay#updateTextNode_', function () {
+ if (!_this2.contentEl_) {
+ return;
+ }
+
+ var oldNode = _this2.textNode_;
+
+ if (oldNode && _this2.contentEl_.firstChild !== oldNode) {
+ oldNode = null;
+ log$1.warn('TimeDisplay#updateTextnode_: Prevented replacement of text node element since it was no longer a child of this node. Appending a new node instead.');
+ }
+
+ _this2.textNode_ = document.createTextNode(_this2.formattedTime_);
+
+ if (!_this2.textNode_) {
+ return;
+ }
+
+ if (oldNode) {
+ _this2.contentEl_.replaceChild(_this2.textNode_, oldNode);
+ } else {
+ _this2.contentEl_.appendChild(_this2.textNode_);
+ }
+ });
+ }
+ /**
+ * To be filled out in the child class, should update the displayed time
+ * in accordance with the fact that the current time has changed.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `timeupdate` event that caused this to run.
+ *
+ * @listens Player#timeupdate
+ */
+ ;
+
+ _proto.updateContent = function updateContent(event) {};
+
+ return TimeDisplay;
+}(Component$1);
+/**
+ * The text that is added to the `TimeDisplay` for screen reader users.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+TimeDisplay.prototype.labelText_ = 'Time';
+/**
+ * The text that should display over the `TimeDisplay`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ *
+ * @deprecated in v7; controlText_ is not used in non-active display Components
+ */
+
+TimeDisplay.prototype.controlText_ = 'Time';
+Component$1.registerComponent('TimeDisplay', TimeDisplay);
+
+/**
+ * Displays the current time
+ *
+ * @extends Component
+ */
+
+var CurrentTimeDisplay = /*#__PURE__*/function (_TimeDisplay) {
+ _inheritsLoose(CurrentTimeDisplay, _TimeDisplay);
+
+ function CurrentTimeDisplay() {
+ return _TimeDisplay.apply(this, arguments) || this;
+ }
+
+ var _proto = CurrentTimeDisplay.prototype;
+
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ _proto.buildCSSClass = function buildCSSClass() {
+ return 'vjs-current-time';
+ }
+ /**
+ * Update current time display
+ *
+ * @param {EventTarget~Event} [event]
+ * The `timeupdate` event that caused this function to run.
+ *
+ * @listens Player#timeupdate
+ */
+ ;
+
+ _proto.updateContent = function updateContent(event) {
+ // Allows for smooth scrubbing, when player can't keep up.
+ var time;
+
+ if (this.player_.ended()) {
+ time = this.player_.duration();
+ } else {
+ time = this.player_.scrubbing() ? this.player_.getCache().currentTime : this.player_.currentTime();
+ }
+
+ this.updateTextNode_(time);
+ };
+
+ return CurrentTimeDisplay;
+}(TimeDisplay);
+/**
+ * The text that is added to the `CurrentTimeDisplay` for screen reader users.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+CurrentTimeDisplay.prototype.labelText_ = 'Current Time';
+/**
+ * The text that should display over the `CurrentTimeDisplay`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ *
+ * @deprecated in v7; controlText_ is not used in non-active display Components
+ */
+
+CurrentTimeDisplay.prototype.controlText_ = 'Current Time';
+Component$1.registerComponent('CurrentTimeDisplay', CurrentTimeDisplay);
+
+/**
+ * Displays the duration
+ *
+ * @extends Component
+ */
+
+var DurationDisplay = /*#__PURE__*/function (_TimeDisplay) {
+ _inheritsLoose(DurationDisplay, _TimeDisplay);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function DurationDisplay(player, options) {
+ var _this;
+
+ _this = _TimeDisplay.call(this, player, options) || this;
+
+ var updateContent = function updateContent(e) {
+ return _this.updateContent(e);
+ }; // we do not want to/need to throttle duration changes,
+ // as they should always display the changed duration as
+ // it has changed
+
+
+ _this.on(player, 'durationchange', updateContent); // Listen to loadstart because the player duration is reset when a new media element is loaded,
+ // but the durationchange on the user agent will not fire.
+ // @see [Spec]{@link https://www.w3.org/TR/2011/WD-html5-20110113/video.html#media-element-load-algorithm}
+
+
+ _this.on(player, 'loadstart', updateContent); // Also listen for timeupdate (in the parent) and loadedmetadata because removing those
+ // listeners could have broken dependent applications/libraries. These
+ // can likely be removed for 7.0.
+
+
+ _this.on(player, 'loadedmetadata', updateContent);
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = DurationDisplay.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return 'vjs-duration';
+ }
+ /**
+ * Update duration time display.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `durationchange`, `timeupdate`, or `loadedmetadata` event that caused
+ * this function to be called.
+ *
+ * @listens Player#durationchange
+ * @listens Player#timeupdate
+ * @listens Player#loadedmetadata
+ */
+ ;
+
+ _proto.updateContent = function updateContent(event) {
+ var duration = this.player_.duration();
+ this.updateTextNode_(duration);
+ };
+
+ return DurationDisplay;
+}(TimeDisplay);
+/**
+ * The text that is added to the `DurationDisplay` for screen reader users.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+DurationDisplay.prototype.labelText_ = 'Duration';
+/**
+ * The text that should display over the `DurationDisplay`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ *
+ * @deprecated in v7; controlText_ is not used in non-active display Components
+ */
+
+DurationDisplay.prototype.controlText_ = 'Duration';
+Component$1.registerComponent('DurationDisplay', DurationDisplay);
+
+/**
+ * The separator between the current time and duration.
+ * Can be hidden if it's not needed in the design.
+ *
+ * @extends Component
+ */
+
+var TimeDivider = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(TimeDivider, _Component);
+
+ function TimeDivider() {
+ return _Component.apply(this, arguments) || this;
+ }
+
+ var _proto = TimeDivider.prototype;
+
+ /**
+ * Create the component's DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ _proto.createEl = function createEl() {
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-time-control vjs-time-divider'
+ }, {
+ // this element and its contents can be hidden from assistive techs since
+ // it is made extraneous by the announcement of the control text
+ // for the current time and duration displays
+ 'aria-hidden': true
+ });
+
+ var div = _Component.prototype.createEl.call(this, 'div');
+
+ var span = _Component.prototype.createEl.call(this, 'span', {
+ textContent: '/'
+ });
+
+ div.appendChild(span);
+ el.appendChild(div);
+ return el;
+ };
+
+ return TimeDivider;
+}(Component$1);
+
+Component$1.registerComponent('TimeDivider', TimeDivider);
+
+/**
+ * Displays the time left in the video
+ *
+ * @extends Component
+ */
+
+var RemainingTimeDisplay = /*#__PURE__*/function (_TimeDisplay) {
+ _inheritsLoose(RemainingTimeDisplay, _TimeDisplay);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function RemainingTimeDisplay(player, options) {
+ var _this;
+
+ _this = _TimeDisplay.call(this, player, options) || this;
+
+ _this.on(player, 'durationchange', function (e) {
+ return _this.updateContent(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = RemainingTimeDisplay.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return 'vjs-remaining-time';
+ }
+ /**
+ * Create the `Component`'s DOM element with the "minus" character prepended to the time
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ ;
+
+ _proto.createEl = function createEl$1() {
+ var el = _TimeDisplay.prototype.createEl.call(this);
+
+ if (this.options_.displayNegative !== false) {
+ el.insertBefore(createEl('span', {}, {
+ 'aria-hidden': true
+ }, '-'), this.contentEl_);
+ }
+
+ return el;
+ }
+ /**
+ * Update remaining time display.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `timeupdate` or `durationchange` event that caused this to run.
+ *
+ * @listens Player#timeupdate
+ * @listens Player#durationchange
+ */
+ ;
+
+ _proto.updateContent = function updateContent(event) {
+ if (typeof this.player_.duration() !== 'number') {
+ return;
+ }
+
+ var time; // @deprecated We should only use remainingTimeDisplay
+ // as of video.js 7
+
+ if (this.player_.ended()) {
+ time = 0;
+ } else if (this.player_.remainingTimeDisplay) {
+ time = this.player_.remainingTimeDisplay();
+ } else {
+ time = this.player_.remainingTime();
+ }
+
+ this.updateTextNode_(time);
+ };
+
+ return RemainingTimeDisplay;
+}(TimeDisplay);
+/**
+ * The text that is added to the `RemainingTimeDisplay` for screen reader users.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+RemainingTimeDisplay.prototype.labelText_ = 'Remaining Time';
+/**
+ * The text that should display over the `RemainingTimeDisplay`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ *
+ * @deprecated in v7; controlText_ is not used in non-active display Components
+ */
+
+RemainingTimeDisplay.prototype.controlText_ = 'Remaining Time';
+Component$1.registerComponent('RemainingTimeDisplay', RemainingTimeDisplay);
+
+/**
+ * Displays the live indicator when duration is Infinity.
+ *
+ * @extends Component
+ */
+
+var LiveDisplay = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(LiveDisplay, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function LiveDisplay(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+
+ _this.updateShowing();
+
+ _this.on(_this.player(), 'durationchange', function (e) {
+ return _this.updateShowing(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = LiveDisplay.prototype;
+
+ _proto.createEl = function createEl$1() {
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-live-control vjs-control'
+ });
+
+ this.contentEl_ = createEl('div', {
+ className: 'vjs-live-display'
+ }, {
+ 'aria-live': 'off'
+ });
+ this.contentEl_.appendChild(createEl('span', {
+ className: 'vjs-control-text',
+ textContent: this.localize('Stream Type') + "\xA0"
+ }));
+ this.contentEl_.appendChild(document.createTextNode(this.localize('LIVE')));
+ el.appendChild(this.contentEl_);
+ return el;
+ };
+
+ _proto.dispose = function dispose() {
+ this.contentEl_ = null;
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Check the duration to see if the LiveDisplay should be showing or not. Then show/hide
+ * it accordingly
+ *
+ * @param {EventTarget~Event} [event]
+ * The {@link Player#durationchange} event that caused this function to run.
+ *
+ * @listens Player#durationchange
+ */
+ ;
+
+ _proto.updateShowing = function updateShowing(event) {
+ if (this.player().duration() === Infinity) {
+ this.show();
+ } else {
+ this.hide();
+ }
+ };
+
+ return LiveDisplay;
+}(Component$1);
+
+Component$1.registerComponent('LiveDisplay', LiveDisplay);
+
+/**
+ * A button that seeks to the live edge of a live stream and indicates whether playback is at the live edge.
+ *
+ * @extends Component
+ */
+
+var SeekToLive = /*#__PURE__*/function (_Button) {
+ _inheritsLoose(SeekToLive, _Button);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function SeekToLive(player, options) {
+ var _this;
+
+ _this = _Button.call(this, player, options) || this;
+
+ _this.updateLiveEdgeStatus();
+
+ if (_this.player_.liveTracker) {
+ _this.updateLiveEdgeStatusHandler_ = function (e) {
+ return _this.updateLiveEdgeStatus(e);
+ };
+
+ _this.on(_this.player_.liveTracker, 'liveedgechange', _this.updateLiveEdgeStatusHandler_);
+ }
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = SeekToLive.prototype;
+
+ _proto.createEl = function createEl$1() {
+ var el = _Button.prototype.createEl.call(this, 'button', {
+ className: 'vjs-seek-to-live-control vjs-control'
+ });
+
+ this.textEl_ = createEl('span', {
+ className: 'vjs-seek-to-live-text',
+ textContent: this.localize('LIVE')
+ }, {
+ 'aria-hidden': 'true'
+ });
+ el.appendChild(this.textEl_);
+ return el;
+ }
+ /**
+ * Update the state of this button if we are at the live edge
+ * or not
+ */
+ ;
+
+ _proto.updateLiveEdgeStatus = function updateLiveEdgeStatus() {
+ // default to live edge
+ if (!this.player_.liveTracker || this.player_.liveTracker.atLiveEdge()) {
+ this.setAttribute('aria-disabled', true);
+ this.addClass('vjs-at-live-edge');
+ this.controlText('Seek to live, currently playing live');
+ } else {
+ this.setAttribute('aria-disabled', false);
+ this.removeClass('vjs-at-live-edge');
+ this.controlText('Seek to live, currently behind live');
+ }
+ }
+ /**
+   * On click, bring us as near to the live point as possible.
+   * This requires that we wait for the next `live-seekable-change`
+   * event, which will fire once every segment-length seconds.
+ */
+ ;
+
+ _proto.handleClick = function handleClick() {
+ this.player_.liveTracker.seekToLiveEdge();
+ }
+ /**
+ * Dispose of the element and stop tracking
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ if (this.player_.liveTracker) {
+ this.off(this.player_.liveTracker, 'liveedgechange', this.updateLiveEdgeStatusHandler_);
+ }
+
+ this.textEl_ = null;
+
+ _Button.prototype.dispose.call(this);
+ };
+
+ return SeekToLive;
+}(Button);
+
+SeekToLive.prototype.controlText_ = 'Seek to live, currently playing live';
+Component$1.registerComponent('SeekToLive', SeekToLive);
+
+/**
+ * Keep a number between a min and a max value
+ *
+ * @param {number} number
+ * The number to clamp
+ *
+ * @param {number} min
+ * The minimum value
+ * @param {number} max
+ * The maximum value
+ *
+ * @return {number}
+ * the clamped number
+ */
+var clamp = function clamp(number, min, max) {
+ number = Number(number);
+ return Math.min(max, Math.max(min, isNaN(number) ? min : number));
+};
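+
+// A few illustrative calls (values are made-up examples):
+//
+//   clamp(150, 0, 100);   // => 100 (capped at max)
+//   clamp(-5, 0, 100);    // => 0   (raised to min)
+//   clamp('abc', 0, 100); // => 0   (non-numeric input falls back to min)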
+
+/**
+ * The base functionality for a slider. Can be vertical or horizontal.
+ * For instance the volume bar or the seek bar on a video is a slider.
+ *
+ * @extends Component
+ */
+
+var Slider = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(Slider, _Component);
+
+ /**
+ * Create an instance of this class
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function Slider(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+
+ _this.handleMouseDown_ = function (e) {
+ return _this.handleMouseDown(e);
+ };
+
+ _this.handleMouseUp_ = function (e) {
+ return _this.handleMouseUp(e);
+ };
+
+ _this.handleKeyDown_ = function (e) {
+ return _this.handleKeyDown(e);
+ };
+
+ _this.handleClick_ = function (e) {
+ return _this.handleClick(e);
+ };
+
+ _this.handleMouseMove_ = function (e) {
+ return _this.handleMouseMove(e);
+ };
+
+ _this.update_ = function (e) {
+ return _this.update(e);
+    }; // Set property names to bar to match what the child Slider class is looking for
+
+
+ _this.bar = _this.getChild(_this.options_.barName); // Set a horizontal or vertical class on the slider depending on the slider type
+
+ _this.vertical(!!_this.options_.vertical);
+
+ _this.enable();
+
+ return _this;
+ }
+ /**
+   * Are controls currently enabled for this slider or not.
+ *
+ * @return {boolean}
+ * true if controls are enabled, false otherwise
+ */
+
+
+ var _proto = Slider.prototype;
+
+ _proto.enabled = function enabled() {
+ return this.enabled_;
+ }
+ /**
+ * Enable controls for this slider if they are disabled
+ */
+ ;
+
+ _proto.enable = function enable() {
+ if (this.enabled()) {
+ return;
+ }
+
+ this.on('mousedown', this.handleMouseDown_);
+ this.on('touchstart', this.handleMouseDown_);
+ this.on('keydown', this.handleKeyDown_);
+ this.on('click', this.handleClick_); // TODO: deprecated, controlsvisible does not seem to be fired
+
+ this.on(this.player_, 'controlsvisible', this.update);
+
+ if (this.playerEvent) {
+ this.on(this.player_, this.playerEvent, this.update);
+ }
+
+ this.removeClass('disabled');
+ this.setAttribute('tabindex', 0);
+ this.enabled_ = true;
+ }
+ /**
+ * Disable controls for this slider if they are enabled
+ */
+ ;
+
+ _proto.disable = function disable() {
+ if (!this.enabled()) {
+ return;
+ }
+
+ var doc = this.bar.el_.ownerDocument;
+ this.off('mousedown', this.handleMouseDown_);
+ this.off('touchstart', this.handleMouseDown_);
+ this.off('keydown', this.handleKeyDown_);
+ this.off('click', this.handleClick_);
+ this.off(this.player_, 'controlsvisible', this.update_);
+ this.off(doc, 'mousemove', this.handleMouseMove_);
+ this.off(doc, 'mouseup', this.handleMouseUp_);
+ this.off(doc, 'touchmove', this.handleMouseMove_);
+ this.off(doc, 'touchend', this.handleMouseUp_);
+ this.removeAttribute('tabindex');
+ this.addClass('disabled');
+
+ if (this.playerEvent) {
+ this.off(this.player_, this.playerEvent, this.update);
+ }
+
+ this.enabled_ = false;
+ }
+ /**
+ * Create the `Slider`s DOM element.
+ *
+ * @param {string} type
+ * Type of element to create.
+ *
+ * @param {Object} [props={}]
+ * List of properties in Object form.
+ *
+ * @param {Object} [attributes={}]
+ * list of attributes in Object form.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+ ;
+
+ _proto.createEl = function createEl(type, props, attributes) {
+ if (props === void 0) {
+ props = {};
+ }
+
+ if (attributes === void 0) {
+ attributes = {};
+ }
+
+ // Add the slider element class to all sub classes
+ props.className = props.className + ' vjs-slider';
+ props = assign({
+ tabIndex: 0
+ }, props);
+ attributes = assign({
+ 'role': 'slider',
+ 'aria-valuenow': 0,
+ 'aria-valuemin': 0,
+ 'aria-valuemax': 100,
+ 'tabIndex': 0
+ }, attributes);
+ return _Component.prototype.createEl.call(this, type, props, attributes);
+ }
+ /**
+ * Handle `mousedown` or `touchstart` events on the `Slider`.
+ *
+ * @param {EventTarget~Event} event
+ * `mousedown` or `touchstart` event that triggered this function
+ *
+ * @listens mousedown
+ * @listens touchstart
+ * @fires Slider#slideractive
+ */
+ ;
+
+ _proto.handleMouseDown = function handleMouseDown(event) {
+ var doc = this.bar.el_.ownerDocument;
+
+ if (event.type === 'mousedown') {
+ event.preventDefault();
+ } // Do not call preventDefault() on touchstart in Chrome
+ // to avoid console warnings. Use a 'touch-action: none' style
+    // instead to prevent unintended scrolling.
+ // https://developers.google.com/web/updates/2017/01/scrolling-intervention
+
+
+ if (event.type === 'touchstart' && !IS_CHROME) {
+ event.preventDefault();
+ }
+
+ blockTextSelection();
+ this.addClass('vjs-sliding');
+ /**
+ * Triggered when the slider is in an active state
+ *
+ * @event Slider#slideractive
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('slideractive');
+ this.on(doc, 'mousemove', this.handleMouseMove_);
+ this.on(doc, 'mouseup', this.handleMouseUp_);
+ this.on(doc, 'touchmove', this.handleMouseMove_);
+ this.on(doc, 'touchend', this.handleMouseUp_);
+ this.handleMouseMove(event, true);
+ }
+ /**
+ * Handle the `mousemove`, `touchmove`, and `mousedown` events on this `Slider`.
+   * The `mousemove` and `touchmove` events will only trigger this function during
+ * `mousedown` and `touchstart`. This is due to {@link Slider#handleMouseDown} and
+ * {@link Slider#handleMouseUp}.
+ *
+ * @param {EventTarget~Event} event
+ * `mousedown`, `mousemove`, `touchstart`, or `touchmove` event that triggered
+ * this function
+   * @param {boolean} mouseDown A flag that should be set to true if `handleMouseMove` is called directly. It allows us to skip things that should not happen when coming from mouse down, but that should happen on the regular mouse move handler. Defaults to false.
+ *
+ * @listens mousemove
+ * @listens touchmove
+ */
+ ;
+
+ _proto.handleMouseMove = function handleMouseMove(event) {}
+ /**
+ * Handle `mouseup` or `touchend` events on the `Slider`.
+ *
+ * @param {EventTarget~Event} event
+ * `mouseup` or `touchend` event that triggered this function.
+ *
+ * @listens touchend
+ * @listens mouseup
+ * @fires Slider#sliderinactive
+ */
+ ;
+
+ _proto.handleMouseUp = function handleMouseUp() {
+ var doc = this.bar.el_.ownerDocument;
+ unblockTextSelection();
+ this.removeClass('vjs-sliding');
+ /**
+ * Triggered when the slider is no longer in an active state.
+ *
+ * @event Slider#sliderinactive
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('sliderinactive');
+ this.off(doc, 'mousemove', this.handleMouseMove_);
+ this.off(doc, 'mouseup', this.handleMouseUp_);
+ this.off(doc, 'touchmove', this.handleMouseMove_);
+ this.off(doc, 'touchend', this.handleMouseUp_);
+ this.update();
+ }
+ /**
+ * Update the progress bar of the `Slider`.
+ *
+ * @return {number}
+ * The percentage of progress the progress bar represents as a
+ * number from 0 to 1.
+ */
+ ;
+
+ _proto.update = function update() {
+ var _this2 = this;
+
+    // In VolumeBar init we have a setTimeout for update that pushes an update
+    // to the end of the execution stack. If the player is destroyed before then,
+    // update will cause an error
+ // If there's no bar...
+ if (!this.el_ || !this.bar) {
+ return;
+ } // clamp progress between 0 and 1
+ // and only round to four decimal places, as we round to two below
+
+
+ var progress = this.getProgress();
+
+ if (progress === this.progress_) {
+ return progress;
+ }
+
+ this.progress_ = progress;
+ this.requestNamedAnimationFrame('Slider#update', function () {
+ // Set the new bar width or height
+ var sizeKey = _this2.vertical() ? 'height' : 'width'; // Convert to a percentage for css value
+
+ _this2.bar.el().style[sizeKey] = (progress * 100).toFixed(2) + '%';
+ });
+ return progress;
+ }
+ /**
+ * Get the percentage of the bar that should be filled
+ * but clamped and rounded.
+ *
+ * @return {number}
+ * percentage filled that the slider is
+ */
+ ;
+
+ _proto.getProgress = function getProgress() {
+ return Number(clamp(this.getPercent(), 0, 1).toFixed(4));
+ }
+ /**
+ * Calculate distance for slider
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to run.
+ *
+   * @return {number}
+   *         The current position of the Slider.
+   *         - position.y for vertical `Slider`s
+   *         - position.x for horizontal `Slider`s
+ */
+ ;
+
+ _proto.calculateDistance = function calculateDistance(event) {
+ var position = getPointerPosition(this.el_, event);
+
+ if (this.vertical()) {
+ return position.y;
+ }
+
+ return position.x;
+ }
+ /**
+   * Handle a `keydown` event on the `Slider`. Watches for left, right, up, and down
+ * arrow keys. This function will only be called when the slider has focus. See
+ * {@link Slider#handleFocus} and {@link Slider#handleBlur}.
+ *
+ * @param {EventTarget~Event} event
+ * the `keydown` event that caused this function to run.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Left and Down Arrows
+ if (keycode.isEventKey(event, 'Left') || keycode.isEventKey(event, 'Down')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.stepBack(); // Up and Right Arrows
+ } else if (keycode.isEventKey(event, 'Right') || keycode.isEventKey(event, 'Up')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.stepForward();
+ } else {
+ // Pass keydown handling up for unsupported keys
+ _Component.prototype.handleKeyDown.call(this, event);
+ }
+ }
+ /**
+ * Listener for click events on slider, used to prevent clicks
+ * from bubbling up to parent elements like button menus.
+ *
+ * @param {Object} event
+ * Event that caused this object to run
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ event.stopPropagation();
+ event.preventDefault();
+ }
+ /**
+   * Get/set if slider is horizontal or vertical
+   *
+   * @param {boolean} [bool]
+   *        - true if slider is vertical,
+   *        - false if horizontal
+   *
+   * @return {boolean}
+   *         - true if slider is vertical, when getting
+   *         - false if the slider is horizontal, when getting
+ */
+ ;
+
+ _proto.vertical = function vertical(bool) {
+ if (bool === undefined) {
+ return this.vertical_ || false;
+ }
+
+ this.vertical_ = !!bool;
+
+ if (this.vertical_) {
+ this.addClass('vjs-slider-vertical');
+ } else {
+ this.addClass('vjs-slider-horizontal');
+ }
+ };
+
+ return Slider;
+}(Component$1);
+
+Component$1.registerComponent('Slider', Slider);
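+
+// A simplified sketch of how a subclass plugs into Slider, modeled on the
+// SeekBar component defined further down. Names prefixed with "My" are
+// illustrative only, and `barName` must refer to a registered child component:
+//
+//   var SliderBase = videojs.getComponent('Slider');
+//
+//   class MyBar extends SliderBase {
+//     getPercent() {           // 0-1 value used by update() to size the bar
+//       return this.player_.currentTime() / this.player_.duration();
+//     }
+//     handleMouseMove(event) { // the base implementation is a no-op
+//       this.player_.currentTime(this.calculateDistance(event) * this.player_.duration());
+//     }
+//     stepForward() { this.player_.currentTime(this.player_.currentTime() + 5); }
+//     stepBack() { this.player_.currentTime(this.player_.currentTime() - 5); }
+//   }
+//
+//   MyBar.prototype.options_ = { children: ['myLevelBar'], barName: 'myLevelBar' };
+//   videojs.registerComponent('MyBar', MyBar);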
+
+var percentify = function percentify(time, end) {
+ return clamp(time / end * 100, 0, 100).toFixed(2) + '%';
+};
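+
+// For example (illustrative values): percentify(30, 120) returns '25.00%', and
+// percentify(150, 120) is clamped to '100.00%'.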
+/**
+ * Shows loading progress
+ *
+ * @extends Component
+ */
+
+
+var LoadProgressBar = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(LoadProgressBar, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function LoadProgressBar(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.partEls_ = [];
+
+ _this.on(player, 'progress', function (e) {
+ return _this.update(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = LoadProgressBar.prototype;
+
+ _proto.createEl = function createEl$1() {
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-load-progress'
+ });
+
+ var wrapper = createEl('span', {
+ className: 'vjs-control-text'
+ });
+ var loadedText = createEl('span', {
+ textContent: this.localize('Loaded')
+ });
+ var separator = document.createTextNode(': ');
+ this.percentageEl_ = createEl('span', {
+ className: 'vjs-control-text-loaded-percentage',
+ textContent: '0%'
+ });
+ el.appendChild(wrapper);
+ wrapper.appendChild(loadedText);
+ wrapper.appendChild(separator);
+ wrapper.appendChild(this.percentageEl_);
+ return el;
+ };
+
+ _proto.dispose = function dispose() {
+ this.partEls_ = null;
+ this.percentageEl_ = null;
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Update progress bar
+ *
+ * @param {EventTarget~Event} [event]
+ * The `progress` event that caused this function to run.
+ *
+ * @listens Player#progress
+ */
+ ;
+
+ _proto.update = function update(event) {
+ var _this2 = this;
+
+ this.requestNamedAnimationFrame('LoadProgressBar#update', function () {
+ var liveTracker = _this2.player_.liveTracker;
+
+ var buffered = _this2.player_.buffered();
+
+ var duration = liveTracker && liveTracker.isLive() ? liveTracker.seekableEnd() : _this2.player_.duration();
+
+ var bufferedEnd = _this2.player_.bufferedEnd();
+
+ var children = _this2.partEls_;
+ var percent = percentify(bufferedEnd, duration);
+
+ if (_this2.percent_ !== percent) {
+ // update the width of the progress bar
+ _this2.el_.style.width = percent; // update the control-text
+
+ textContent(_this2.percentageEl_, percent);
+ _this2.percent_ = percent;
+ } // add child elements to represent the individual buffered time ranges
+
+
+ for (var i = 0; i < buffered.length; i++) {
+ var start = buffered.start(i);
+ var end = buffered.end(i);
+ var part = children[i];
+
+ if (!part) {
+ part = _this2.el_.appendChild(createEl());
+ children[i] = part;
+ } // only update if changed
+
+
+ if (part.dataset.start === start && part.dataset.end === end) {
+ continue;
+ }
+
+ part.dataset.start = start;
+ part.dataset.end = end; // set the percent based on the width of the progress bar (bufferedEnd)
+
+ part.style.left = percentify(start, bufferedEnd);
+ part.style.width = percentify(end - start, bufferedEnd);
+ } // remove unused buffered range elements
+
+
+ for (var _i = children.length; _i > buffered.length; _i--) {
+ _this2.el_.removeChild(children[_i - 1]);
+ }
+
+ children.length = buffered.length;
+ });
+ };
+
+ return LoadProgressBar;
+}(Component$1);
+
+Component$1.registerComponent('LoadProgressBar', LoadProgressBar);
+
+/**
+ * Time tooltips display a time above the progress bar.
+ *
+ * @extends Component
+ */
+
+var TimeTooltip = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(TimeTooltip, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The {@link Player} that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function TimeTooltip(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.update = throttle(bind(_assertThisInitialized(_this), _this.update), UPDATE_REFRESH_INTERVAL);
+ return _this;
+ }
+ /**
+ * Create the time tooltip DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = TimeTooltip.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-time-tooltip'
+ }, {
+ 'aria-hidden': 'true'
+ });
+ }
+ /**
+ * Updates the position of the time tooltip relative to the `SeekBar`.
+ *
+ * @param {Object} seekBarRect
+ * The `ClientRect` for the {@link SeekBar} element.
+ *
+ * @param {number} seekBarPoint
+ * A number from 0 to 1, representing a horizontal reference point
+ * from the left edge of the {@link SeekBar}
+ */
+ ;
+
+ _proto.update = function update(seekBarRect, seekBarPoint, content) {
+ var tooltipRect = findPosition(this.el_);
+ var playerRect = getBoundingClientRect(this.player_.el());
+ var seekBarPointPx = seekBarRect.width * seekBarPoint; // do nothing if either rect isn't available
+ // for example, if the player isn't in the DOM for testing
+
+ if (!playerRect || !tooltipRect) {
+ return;
+ } // This is the space left of the `seekBarPoint` available within the bounds
+ // of the player. We calculate any gap between the left edge of the player
+ // and the left edge of the `SeekBar` and add the number of pixels in the
+ // `SeekBar` before hitting the `seekBarPoint`
+
+
+ var spaceLeftOfPoint = seekBarRect.left - playerRect.left + seekBarPointPx; // This is the space right of the `seekBarPoint` available within the bounds
+ // of the player. We calculate the number of pixels from the `seekBarPoint`
+ // to the right edge of the `SeekBar` and add to that any gap between the
+ // right edge of the `SeekBar` and the player.
+
+ var spaceRightOfPoint = seekBarRect.width - seekBarPointPx + (playerRect.right - seekBarRect.right); // This is the number of pixels by which the tooltip will need to be pulled
+ // further to the right to center it over the `seekBarPoint`.
+
+ var pullTooltipBy = tooltipRect.width / 2; // Adjust the `pullTooltipBy` distance to the left or right depending on
+ // the results of the space calculations above.
+
+ if (spaceLeftOfPoint < pullTooltipBy) {
+ pullTooltipBy += pullTooltipBy - spaceLeftOfPoint;
+ } else if (spaceRightOfPoint < pullTooltipBy) {
+ pullTooltipBy = spaceRightOfPoint;
+ } // Due to the imprecision of decimal/ratio based calculations and varying
+ // rounding behaviors, there are cases where the spacing adjustment is off
+ // by a pixel or two. This adds insurance to these calculations.
+
+
+ if (pullTooltipBy < 0) {
+ pullTooltipBy = 0;
+ } else if (pullTooltipBy > tooltipRect.width) {
+ pullTooltipBy = tooltipRect.width;
+ } // prevent small width fluctuations within 0.4px from
+ // changing the value below.
+ // This really helps for live to prevent the play
+ // progress time tooltip from jittering
+
+
+ pullTooltipBy = Math.round(pullTooltipBy);
+ this.el_.style.right = "-" + pullTooltipBy + "px";
+ this.write(content);
+ }
+ /**
+ * Write the time to the tooltip DOM element.
+ *
+ * @param {string} content
+ * The formatted time for the tooltip.
+ */
+ ;
+
+ _proto.write = function write(content) {
+ textContent(this.el_, content);
+ }
+ /**
+ * Updates the position of the time tooltip relative to the `SeekBar`.
+ *
+ * @param {Object} seekBarRect
+ * The `ClientRect` for the {@link SeekBar} element.
+ *
+ * @param {number} seekBarPoint
+ * A number from 0 to 1, representing a horizontal reference point
+ * from the left edge of the {@link SeekBar}
+ *
+ * @param {number} time
+ * The time to update the tooltip to, not used during live playback
+ *
+ * @param {Function} cb
+ * A function that will be called during the request animation frame
+ * for tooltips that need to do additional animations from the default
+ */
+ ;
+
+ _proto.updateTime = function updateTime(seekBarRect, seekBarPoint, time, cb) {
+ var _this2 = this;
+
+ this.requestNamedAnimationFrame('TimeTooltip#updateTime', function () {
+ var content;
+
+ var duration = _this2.player_.duration();
+
+ if (_this2.player_.liveTracker && _this2.player_.liveTracker.isLive()) {
+ var liveWindow = _this2.player_.liveTracker.liveWindow();
+
+ var secondsBehind = liveWindow - seekBarPoint * liveWindow;
+ content = (secondsBehind < 1 ? '' : '-') + formatTime(secondsBehind, liveWindow);
+ } else {
+ content = formatTime(time, duration);
+ }
+
+ _this2.update(seekBarRect, seekBarPoint, content);
+
+ if (cb) {
+ cb();
+ }
+ });
+ };
+
+ return TimeTooltip;
+}(Component$1);
+
+Component$1.registerComponent('TimeTooltip', TimeTooltip);
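+
+// A worked example of the centering math in update() above (illustrative
+// numbers): with seekBarRect.width = 600, seekBarPoint = 0.5 and a 60px-wide
+// tooltip, seekBarPointPx = 300 and pullTooltipBy = 30, so the element gets
+// style.right = '-30px' and sits centered over the point. Near either edge of
+// the player, pullTooltipBy is adjusted so the tooltip stays inside the player.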
+
+/**
+ * Used by {@link SeekBar} to display media playback progress as part of the
+ * {@link ProgressControl}.
+ *
+ * @extends Component
+ */
+
+var PlayProgressBar = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(PlayProgressBar, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The {@link Player} that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function PlayProgressBar(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.update = throttle(bind(_assertThisInitialized(_this), _this.update), UPDATE_REFRESH_INTERVAL);
+ return _this;
+ }
+ /**
+   * Create the DOM element for this class.
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = PlayProgressBar.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-play-progress vjs-slider-bar'
+ }, {
+ 'aria-hidden': 'true'
+ });
+ }
+ /**
+ * Enqueues updates to its own DOM as well as the DOM of its
+ * {@link TimeTooltip} child.
+ *
+ * @param {Object} seekBarRect
+ * The `ClientRect` for the {@link SeekBar} element.
+ *
+ * @param {number} seekBarPoint
+ * A number from 0 to 1, representing a horizontal reference point
+ * from the left edge of the {@link SeekBar}
+ */
+ ;
+
+ _proto.update = function update(seekBarRect, seekBarPoint) {
+ var timeTooltip = this.getChild('timeTooltip');
+
+ if (!timeTooltip) {
+ return;
+ }
+
+ var time = this.player_.scrubbing() ? this.player_.getCache().currentTime : this.player_.currentTime();
+ timeTooltip.updateTime(seekBarRect, seekBarPoint, time);
+ };
+
+ return PlayProgressBar;
+}(Component$1);
+/**
+ * Default options for {@link PlayProgressBar}.
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+PlayProgressBar.prototype.options_ = {
+ children: []
+}; // Time tooltips should not be added to a player on mobile devices
+
+if (!IS_IOS && !IS_ANDROID) {
+ PlayProgressBar.prototype.options_.children.push('timeTooltip');
+}
+
+Component$1.registerComponent('PlayProgressBar', PlayProgressBar);
+
+/**
+ * The {@link MouseTimeDisplay} component tracks mouse movement over the
+ * {@link ProgressControl}. It displays an indicator and a {@link TimeTooltip}
+ * indicating the time which is represented by a given point in the
+ * {@link ProgressControl}.
+ *
+ * @extends Component
+ */
+
+var MouseTimeDisplay = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(MouseTimeDisplay, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The {@link Player} that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function MouseTimeDisplay(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.update = throttle(bind(_assertThisInitialized(_this), _this.update), UPDATE_REFRESH_INTERVAL);
+ return _this;
+ }
+ /**
+ * Create the DOM element for this class.
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = MouseTimeDisplay.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-mouse-display'
+ });
+ }
+ /**
+ * Enqueues updates to its own DOM as well as the DOM of its
+ * {@link TimeTooltip} child.
+ *
+ * @param {Object} seekBarRect
+ * The `ClientRect` for the {@link SeekBar} element.
+ *
+ * @param {number} seekBarPoint
+ * A number from 0 to 1, representing a horizontal reference point
+ * from the left edge of the {@link SeekBar}
+ */
+ ;
+
+ _proto.update = function update(seekBarRect, seekBarPoint) {
+ var _this2 = this;
+
+ var time = seekBarPoint * this.player_.duration();
+ this.getChild('timeTooltip').updateTime(seekBarRect, seekBarPoint, time, function () {
+ _this2.el_.style.left = seekBarRect.width * seekBarPoint + "px";
+ });
+ };
+
+ return MouseTimeDisplay;
+}(Component$1);
+/**
+ * Default options for `MouseTimeDisplay`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+MouseTimeDisplay.prototype.options_ = {
+ children: ['timeTooltip']
+};
+Component$1.registerComponent('MouseTimeDisplay', MouseTimeDisplay);
+
+var STEP_SECONDS = 5; // The multiplier of STEP_SECONDS that PgUp/PgDown move the timeline.
+
+var PAGE_KEY_MULTIPLIER = 12;
+/**
+ * Seek bar and container for the progress bars. Uses {@link PlayProgressBar}
+ * as its `bar`.
+ *
+ * @extends Slider
+ */
+
+var SeekBar = /*#__PURE__*/function (_Slider) {
+ _inheritsLoose(SeekBar, _Slider);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function SeekBar(player, options) {
+ var _this;
+
+ _this = _Slider.call(this, player, options) || this;
+
+ _this.setEventHandlers_();
+
+ return _this;
+ }
+ /**
+ * Sets the event handlers
+ *
+ * @private
+ */
+
+
+ var _proto = SeekBar.prototype;
+
+ _proto.setEventHandlers_ = function setEventHandlers_() {
+ var _this2 = this;
+
+ this.update_ = bind(this, this.update);
+ this.update = throttle(this.update_, UPDATE_REFRESH_INTERVAL);
+ this.on(this.player_, ['ended', 'durationchange', 'timeupdate'], this.update);
+
+ if (this.player_.liveTracker) {
+ this.on(this.player_.liveTracker, 'liveedgechange', this.update);
+ } // when playing, let's ensure we smoothly update the play progress bar
+ // via an interval
+
+
+ this.updateInterval = null;
+
+ this.enableIntervalHandler_ = function (e) {
+ return _this2.enableInterval_(e);
+ };
+
+ this.disableIntervalHandler_ = function (e) {
+ return _this2.disableInterval_(e);
+ };
+
+ this.on(this.player_, ['playing'], this.enableIntervalHandler_);
+    this.on(this.player_, ['ended', 'pause', 'waiting'], this.disableIntervalHandler_); // we don't need to update the play progress if the document is hidden;
+    // doing so also causes the CPU to spike and eventually crash the page on IE11.
+
+ if ('hidden' in document && 'visibilityState' in document) {
+ this.on(document, 'visibilitychange', this.toggleVisibility_);
+ }
+ };
+
+ _proto.toggleVisibility_ = function toggleVisibility_(e) {
+ if (document.visibilityState === 'hidden') {
+ this.cancelNamedAnimationFrame('SeekBar#update');
+ this.cancelNamedAnimationFrame('Slider#update');
+ this.disableInterval_(e);
+ } else {
+ if (!this.player_.ended() && !this.player_.paused()) {
+ this.enableInterval_();
+      } // we just switched back to the page and someone may be looking, so update ASAP
+
+
+ this.update();
+ }
+ };
+
+ _proto.enableInterval_ = function enableInterval_() {
+ if (this.updateInterval) {
+ return;
+ }
+
+ this.updateInterval = this.setInterval(this.update, UPDATE_REFRESH_INTERVAL);
+ };
+
+ _proto.disableInterval_ = function disableInterval_(e) {
+ if (this.player_.liveTracker && this.player_.liveTracker.isLive() && e && e.type !== 'ended') {
+ return;
+ }
+
+ if (!this.updateInterval) {
+ return;
+ }
+
+ this.clearInterval(this.updateInterval);
+ this.updateInterval = null;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ return _Slider.prototype.createEl.call(this, 'div', {
+ className: 'vjs-progress-holder'
+ }, {
+ 'aria-label': this.localize('Progress Bar')
+ });
+ }
+ /**
+ * This function updates the play progress bar and accessibility
+ * attributes to whatever is passed in.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `timeupdate` or `ended` event that caused this to run.
+ *
+ * @listens Player#timeupdate
+ *
+ * @return {number}
+ * The current percent at a number from 0-1
+ */
+ ;
+
+ _proto.update = function update(event) {
+ var _this3 = this;
+
+ // ignore updates while the tab is hidden
+ if (document.visibilityState === 'hidden') {
+ return;
+ }
+
+ var percent = _Slider.prototype.update.call(this);
+
+ this.requestNamedAnimationFrame('SeekBar#update', function () {
+ var currentTime = _this3.player_.ended() ? _this3.player_.duration() : _this3.getCurrentTime_();
+ var liveTracker = _this3.player_.liveTracker;
+
+ var duration = _this3.player_.duration();
+
+ if (liveTracker && liveTracker.isLive()) {
+ duration = _this3.player_.liveTracker.liveCurrentTime();
+ }
+
+ if (_this3.percent_ !== percent) {
+ // machine readable value of progress bar (percentage complete)
+ _this3.el_.setAttribute('aria-valuenow', (percent * 100).toFixed(2));
+
+ _this3.percent_ = percent;
+ }
+
+ if (_this3.currentTime_ !== currentTime || _this3.duration_ !== duration) {
+ // human readable value of progress bar (time complete)
+ _this3.el_.setAttribute('aria-valuetext', _this3.localize('progress bar timing: currentTime={1} duration={2}', [formatTime(currentTime, duration), formatTime(duration, duration)], '{1} of {2}'));
+
+ _this3.currentTime_ = currentTime;
+ _this3.duration_ = duration;
+ } // update the progress bar time tooltip with the current time
+
+
+ if (_this3.bar) {
+ _this3.bar.update(getBoundingClientRect(_this3.el()), _this3.getProgress());
+ }
+ });
+ return percent;
+ }
+ /**
+   * Prevent liveThreshold from making seeks appear as though they
+   * are not happening, from a user's perspective.
+ *
+ * @param {number} ct
+ * current time to seek to
+ */
+ ;
+
+ _proto.userSeek_ = function userSeek_(ct) {
+ if (this.player_.liveTracker && this.player_.liveTracker.isLive()) {
+ this.player_.liveTracker.nextSeekedFromUser();
+ }
+
+ this.player_.currentTime(ct);
+ }
+ /**
+ * Get the value of current time but allows for smooth scrubbing,
+ * when player can't keep up.
+ *
+ * @return {number}
+ * The current time value to display
+ *
+ * @private
+ */
+ ;
+
+ _proto.getCurrentTime_ = function getCurrentTime_() {
+ return this.player_.scrubbing() ? this.player_.getCache().currentTime : this.player_.currentTime();
+ }
+ /**
+ * Get the percentage of media played so far.
+ *
+ * @return {number}
+ * The percentage of media played so far (0 to 1).
+ */
+ ;
+
+ _proto.getPercent = function getPercent() {
+ var currentTime = this.getCurrentTime_();
+ var percent;
+ var liveTracker = this.player_.liveTracker;
+
+ if (liveTracker && liveTracker.isLive()) {
+ percent = (currentTime - liveTracker.seekableStart()) / liveTracker.liveWindow(); // prevent the percent from changing at the live edge
+
+ if (liveTracker.atLiveEdge()) {
+ percent = 1;
+ }
+ } else {
+ percent = currentTime / this.player_.duration();
+ }
+
+ return percent;
+ }
+ /**
+ * Handle mouse down on seek bar
+ *
+ * @param {EventTarget~Event} event
+ * The `mousedown` event that caused this to run.
+ *
+ * @listens mousedown
+ */
+ ;
+
+ _proto.handleMouseDown = function handleMouseDown(event) {
+ if (!isSingleLeftClick(event)) {
+ return;
+ } // Stop event propagation to prevent double fire in progress-control.js
+
+
+ event.stopPropagation();
+ this.videoWasPlaying = !this.player_.paused();
+ this.player_.pause();
+
+ _Slider.prototype.handleMouseDown.call(this, event);
+ }
+ /**
+ * Handle mouse move on seek bar
+ *
+ * @param {EventTarget~Event} event
+ * The `mousemove` event that caused this to run.
+   * @param {boolean} mouseDown A flag that should be set to true if `handleMouseMove` is called directly. It allows us to skip things that should not happen when coming from mouse down, but that should happen on the regular mouse move handler. Defaults to false.
+ *
+ * @listens mousemove
+ */
+ ;
+
+ _proto.handleMouseMove = function handleMouseMove(event, mouseDown) {
+ if (mouseDown === void 0) {
+ mouseDown = false;
+ }
+
+ if (!isSingleLeftClick(event)) {
+ return;
+ }
+
+ if (!mouseDown && !this.player_.scrubbing()) {
+ this.player_.scrubbing(true);
+ }
+
+ var newTime;
+ var distance = this.calculateDistance(event);
+ var liveTracker = this.player_.liveTracker;
+
+ if (!liveTracker || !liveTracker.isLive()) {
+ newTime = distance * this.player_.duration(); // Don't let video end while scrubbing.
+
+ if (newTime === this.player_.duration()) {
+ newTime = newTime - 0.1;
+ }
+ } else {
+ if (distance >= 0.99) {
+ liveTracker.seekToLiveEdge();
+ return;
+ }
+
+ var seekableStart = liveTracker.seekableStart();
+ var seekableEnd = liveTracker.liveCurrentTime();
+ newTime = seekableStart + distance * liveTracker.liveWindow(); // Don't let video end while scrubbing.
+
+ if (newTime >= seekableEnd) {
+ newTime = seekableEnd;
+ } // Compensate for precision differences so that currentTime is not less
+ // than seekable start
+
+
+ if (newTime <= seekableStart) {
+ newTime = seekableStart + 0.1;
+ } // On android seekableEnd can be Infinity sometimes,
+ // this will cause newTime to be Infinity, which is
+ // not a valid currentTime.
+
+
+ if (newTime === Infinity) {
+ return;
+ }
+ } // Set new time (tell player to seek to new time)
+
+
+ this.userSeek_(newTime);
+ };
+
+ _proto.enable = function enable() {
+ _Slider.prototype.enable.call(this);
+
+ var mouseTimeDisplay = this.getChild('mouseTimeDisplay');
+
+ if (!mouseTimeDisplay) {
+ return;
+ }
+
+ mouseTimeDisplay.show();
+ };
+
+ _proto.disable = function disable() {
+ _Slider.prototype.disable.call(this);
+
+ var mouseTimeDisplay = this.getChild('mouseTimeDisplay');
+
+ if (!mouseTimeDisplay) {
+ return;
+ }
+
+ mouseTimeDisplay.hide();
+ }
+ /**
+ * Handle mouse up on seek bar
+ *
+ * @param {EventTarget~Event} event
+ * The `mouseup` event that caused this to run.
+ *
+ * @listens mouseup
+ */
+ ;
+
+ _proto.handleMouseUp = function handleMouseUp(event) {
+ _Slider.prototype.handleMouseUp.call(this, event); // Stop event propagation to prevent double fire in progress-control.js
+
+
+ if (event) {
+ event.stopPropagation();
+ }
+
+ this.player_.scrubbing(false);
+ /**
+ * Trigger timeupdate because we're done seeking and the time has changed.
+     * This is particularly useful if the player is paused, to update the time displays.
+ *
+ * @event Tech#timeupdate
+ * @type {EventTarget~Event}
+ */
+
+ this.player_.trigger({
+ type: 'timeupdate',
+ target: this,
+ manuallyTriggered: true
+ });
+
+ if (this.videoWasPlaying) {
+ silencePromise(this.player_.play());
+ } else {
+ // We're done seeking and the time has changed.
+ // If the player is paused, make sure we display the correct time on the seek bar.
+ this.update_();
+ }
+ }
+ /**
+   * Move forward more quickly (fast forward) for keyboard-only users
+ */
+ ;
+
+ _proto.stepForward = function stepForward() {
+ this.userSeek_(this.player_.currentTime() + STEP_SECONDS);
+ }
+ /**
+   * Move backward more quickly (rewind) for keyboard-only users
+ */
+ ;
+
+ _proto.stepBack = function stepBack() {
+ this.userSeek_(this.player_.currentTime() - STEP_SECONDS);
+ }
+ /**
+ * Toggles the playback state of the player
+ * This gets called when enter or space is used on the seekbar
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called
+ *
+ */
+ ;
+
+ _proto.handleAction = function handleAction(event) {
+ if (this.player_.paused()) {
+ this.player_.play();
+ } else {
+ this.player_.pause();
+ }
+ }
+ /**
+   * Called when this SeekBar has focus and a key gets pressed down.
+   * Supports the following keys:
+   *
+   *   Space or Enter key fires a click event
+   *   Home key moves to start of the timeline
+   *   End key moves to end of the timeline
+   *   Digit "0" through "9" keys move to 0%, 10% ... 80%, 90% of the timeline
+   *   PageDown key moves back a larger step than ArrowDown
+   *   PageUp key moves forward a larger step than ArrowUp
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ var liveTracker = this.player_.liveTracker;
+
+ if (keycode.isEventKey(event, 'Space') || keycode.isEventKey(event, 'Enter')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.handleAction(event);
+ } else if (keycode.isEventKey(event, 'Home')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.userSeek_(0);
+ } else if (keycode.isEventKey(event, 'End')) {
+ event.preventDefault();
+ event.stopPropagation();
+
+ if (liveTracker && liveTracker.isLive()) {
+ this.userSeek_(liveTracker.liveCurrentTime());
+ } else {
+ this.userSeek_(this.player_.duration());
+ }
+ } else if (/^[0-9]$/.test(keycode(event))) {
+ event.preventDefault();
+ event.stopPropagation();
+ var gotoFraction = (keycode.codes[keycode(event)] - keycode.codes['0']) * 10.0 / 100.0;
+
+ if (liveTracker && liveTracker.isLive()) {
+ this.userSeek_(liveTracker.seekableStart() + liveTracker.liveWindow() * gotoFraction);
+ } else {
+ this.userSeek_(this.player_.duration() * gotoFraction);
+ }
+ } else if (keycode.isEventKey(event, 'PgDn')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.userSeek_(this.player_.currentTime() - STEP_SECONDS * PAGE_KEY_MULTIPLIER);
+ } else if (keycode.isEventKey(event, 'PgUp')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.userSeek_(this.player_.currentTime() + STEP_SECONDS * PAGE_KEY_MULTIPLIER);
+ } else {
+ // Pass keydown handling up for unsupported keys
+ _Slider.prototype.handleKeyDown.call(this, event);
+ }
+ };
+
+ _proto.dispose = function dispose() {
+ this.disableInterval_();
+ this.off(this.player_, ['ended', 'durationchange', 'timeupdate'], this.update);
+
+ if (this.player_.liveTracker) {
+ this.off(this.player_.liveTracker, 'liveedgechange', this.update);
+ }
+
+ this.off(this.player_, ['playing'], this.enableIntervalHandler_);
+    this.off(this.player_, ['ended', 'pause', 'waiting'], this.disableIntervalHandler_); // we don't need to update the play progress if the document is hidden;
+    // doing so also causes the CPU to spike and eventually crash the page on IE11.
+
+ if ('hidden' in document && 'visibilityState' in document) {
+ this.off(document, 'visibilitychange', this.toggleVisibility_);
+ }
+
+ _Slider.prototype.dispose.call(this);
+ };
+
+ return SeekBar;
+}(Slider);
+/**
+ * Default options for the `SeekBar`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+SeekBar.prototype.options_ = {
+ children: ['loadProgressBar', 'playProgressBar'],
+ barName: 'playProgressBar'
+}; // MouseTimeDisplay tooltips should not be added to a player on mobile devices
+
+if (!IS_IOS && !IS_ANDROID) {
+ SeekBar.prototype.options_.children.splice(1, 0, 'mouseTimeDisplay');
+}
+
+Component$1.registerComponent('SeekBar', SeekBar);
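+
+// A minimal sketch of reaching this component from an existing player that
+// uses the default control bar ('my-video' is a placeholder element id):
+//
+//   var player = videojs('my-video');
+//   var seekBar = player.getChild('controlBar')
+//     .getChild('progressControl')
+//     .getChild('seekBar');
+//   seekBar.stepForward(); // seek ahead by STEP_SECONDS (5 seconds)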
+
+/**
+ * The Progress Control component contains the seek bar, load progress,
+ * and play progress.
+ *
+ * @extends Component
+ */
+
+var ProgressControl = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(ProgressControl, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function ProgressControl(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.handleMouseMove = throttle(bind(_assertThisInitialized(_this), _this.handleMouseMove), UPDATE_REFRESH_INTERVAL);
+ _this.throttledHandleMouseSeek = throttle(bind(_assertThisInitialized(_this), _this.handleMouseSeek), UPDATE_REFRESH_INTERVAL);
+
+ _this.handleMouseUpHandler_ = function (e) {
+ return _this.handleMouseUp(e);
+ };
+
+ _this.handleMouseDownHandler_ = function (e) {
+ return _this.handleMouseDown(e);
+ };
+
+ _this.enable();
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = ProgressControl.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-progress-control vjs-control'
+ });
+ }
+ /**
+ * When the mouse moves over the `ProgressControl`, the pointer position
+ * gets passed down to the `MouseTimeDisplay` component.
+ *
+ * @param {EventTarget~Event} event
+ * The `mousemove` event that caused this function to run.
+ *
+   * @listens mousemove
+ */
+ ;
+
+ _proto.handleMouseMove = function handleMouseMove(event) {
+ var seekBar = this.getChild('seekBar');
+
+ if (!seekBar) {
+ return;
+ }
+
+ var playProgressBar = seekBar.getChild('playProgressBar');
+ var mouseTimeDisplay = seekBar.getChild('mouseTimeDisplay');
+
+ if (!playProgressBar && !mouseTimeDisplay) {
+ return;
+ }
+
+ var seekBarEl = seekBar.el();
+ var seekBarRect = findPosition(seekBarEl);
+ var seekBarPoint = getPointerPosition(seekBarEl, event).x; // The default skin has a gap on either side of the `SeekBar`. This means
+ // that it's possible to trigger this behavior outside the boundaries of
+ // the `SeekBar`. This ensures we stay within it at all times.
+
+ seekBarPoint = clamp(seekBarPoint, 0, 1);
+
+ if (mouseTimeDisplay) {
+ mouseTimeDisplay.update(seekBarRect, seekBarPoint);
+ }
+
+ if (playProgressBar) {
+ playProgressBar.update(seekBarRect, seekBar.getProgress());
+ }
+ }
+ /**
+ * A throttled version of the {@link ProgressControl#handleMouseSeek} listener.
+ *
+ * @method ProgressControl#throttledHandleMouseSeek
+ * @param {EventTarget~Event} event
+ * The `mousemove` event that caused this function to run.
+ *
+   * @listens mousemove
+   * @listens touchmove
+ */
+
+ /**
+ * Handle `mousemove` or `touchmove` events on the `ProgressControl`.
+ *
+   * @param {EventTarget~Event} event
+   *        `mousemove` or `touchmove` event that triggered this function
+ *
+ * @listens mousemove
+ * @listens touchmove
+ */
+ ;
+
+ _proto.handleMouseSeek = function handleMouseSeek(event) {
+ var seekBar = this.getChild('seekBar');
+
+ if (seekBar) {
+ seekBar.handleMouseMove(event);
+ }
+ }
+ /**
+   * Are controls currently enabled for this progress control.
+ *
+ * @return {boolean}
+ * true if controls are enabled, false otherwise
+ */
+ ;
+
+ _proto.enabled = function enabled() {
+ return this.enabled_;
+ }
+ /**
+ * Disable all controls on the progress control and its children
+ */
+ ;
+
+ _proto.disable = function disable() {
+ this.children().forEach(function (child) {
+ return child.disable && child.disable();
+ });
+
+ if (!this.enabled()) {
+ return;
+ }
+
+ this.off(['mousedown', 'touchstart'], this.handleMouseDownHandler_);
+ this.off(this.el_, 'mousemove', this.handleMouseMove);
+ this.removeListenersAddedOnMousedownAndTouchstart();
+ this.addClass('disabled');
+ this.enabled_ = false; // Restore normal playback state if controls are disabled while scrubbing
+
+ if (this.player_.scrubbing()) {
+ var seekBar = this.getChild('seekBar');
+ this.player_.scrubbing(false);
+
+ if (seekBar.videoWasPlaying) {
+ silencePromise(this.player_.play());
+ }
+ }
+ }
+ /**
+ * Enable all controls on the progress control and its children
+ */
+ ;
+
+ _proto.enable = function enable() {
+ this.children().forEach(function (child) {
+ return child.enable && child.enable();
+ });
+
+ if (this.enabled()) {
+ return;
+ }
+
+ this.on(['mousedown', 'touchstart'], this.handleMouseDownHandler_);
+ this.on(this.el_, 'mousemove', this.handleMouseMove);
+ this.removeClass('disabled');
+ this.enabled_ = true;
+ }
+ /**
+   * Clean up listeners after the user finishes interacting with the progress controls
+ */
+ ;
+
+ _proto.removeListenersAddedOnMousedownAndTouchstart = function removeListenersAddedOnMousedownAndTouchstart() {
+ var doc = this.el_.ownerDocument;
+ this.off(doc, 'mousemove', this.throttledHandleMouseSeek);
+ this.off(doc, 'touchmove', this.throttledHandleMouseSeek);
+ this.off(doc, 'mouseup', this.handleMouseUpHandler_);
+ this.off(doc, 'touchend', this.handleMouseUpHandler_);
+ }
+ /**
+ * Handle `mousedown` or `touchstart` events on the `ProgressControl`.
+ *
+ * @param {EventTarget~Event} event
+ * `mousedown` or `touchstart` event that triggered this function
+ *
+ * @listens mousedown
+ * @listens touchstart
+ */
+ ;
+
+ _proto.handleMouseDown = function handleMouseDown(event) {
+ var doc = this.el_.ownerDocument;
+ var seekBar = this.getChild('seekBar');
+
+ if (seekBar) {
+ seekBar.handleMouseDown(event);
+ }
+
+ this.on(doc, 'mousemove', this.throttledHandleMouseSeek);
+ this.on(doc, 'touchmove', this.throttledHandleMouseSeek);
+ this.on(doc, 'mouseup', this.handleMouseUpHandler_);
+ this.on(doc, 'touchend', this.handleMouseUpHandler_);
+ }
+ /**
+ * Handle `mouseup` or `touchend` events on the `ProgressControl`.
+ *
+ * @param {EventTarget~Event} event
+ * `mouseup` or `touchend` event that triggered this function.
+ *
+ * @listens touchend
+ * @listens mouseup
+ */
+ ;
+
+ _proto.handleMouseUp = function handleMouseUp(event) {
+ var seekBar = this.getChild('seekBar');
+
+ if (seekBar) {
+ seekBar.handleMouseUp(event);
+ }
+
+ this.removeListenersAddedOnMousedownAndTouchstart();
+ };
+
+ return ProgressControl;
+}(Component$1);
+/**
+ * Default options for `ProgressControl`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+ProgressControl.prototype.options_ = {
+ children: ['seekBar']
+};
+Component$1.registerComponent('ProgressControl', ProgressControl);
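+
+// A small usage sketch (assuming `player` is an existing Player with the
+// default control bar): disabling the progress control also disables its
+// SeekBar child, so user seeking through the bar is ignored until re-enabled:
+//
+//   var progressControl = player.getChild('controlBar').getChild('progressControl');
+//   progressControl.disable();
+//   progressControl.enable();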
+
+/**
+ * Toggle Picture-in-Picture mode
+ *
+ * @extends Button
+ */
+
+var PictureInPictureToggle = /*#__PURE__*/function (_Button) {
+ _inheritsLoose(PictureInPictureToggle, _Button);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @listens Player#enterpictureinpicture
+ * @listens Player#leavepictureinpicture
+ */
+ function PictureInPictureToggle(player, options) {
+ var _this;
+
+ _this = _Button.call(this, player, options) || this;
+
+ _this.on(player, ['enterpictureinpicture', 'leavepictureinpicture'], function (e) {
+ return _this.handlePictureInPictureChange(e);
+ });
+
+ _this.on(player, ['disablepictureinpicturechanged', 'loadedmetadata'], function (e) {
+ return _this.handlePictureInPictureEnabledChange(e);
+ });
+
+ _this.on(player, ['loadedmetadata', 'audioonlymodechange', 'audiopostermodechange'], function () {
+ // This audio detection will not detect HLS or DASH audio-only streams because there was no reliable way to detect them at the time
+ var isSourceAudio = player.currentType().substring(0, 5) === 'audio';
+
+ if (isSourceAudio || player.audioPosterMode() || player.audioOnlyMode()) {
+ if (player.isInPictureInPicture()) {
+ player.exitPictureInPicture();
+ }
+
+ _this.hide();
+ } else {
+ _this.show();
+ }
+ }); // TODO: Deactivate button on player emptied event.
+
+
+ _this.disable();
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = PictureInPictureToggle.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-picture-in-picture-control " + _Button.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Enables or disables button based on document.pictureInPictureEnabled property value
+ * or on value returned by player.disablePictureInPicture() method.
+ */
+ ;
+
+ _proto.handlePictureInPictureEnabledChange = function handlePictureInPictureEnabledChange() {
+ if (document.pictureInPictureEnabled && this.player_.disablePictureInPicture() === false) {
+ this.enable();
+ } else {
+ this.disable();
+ }
+ }
+ /**
+   * Handles enterpictureinpicture and leavepictureinpicture on the player and changes the control text accordingly.
+ *
+ * @param {EventTarget~Event} [event]
+ * The {@link Player#enterpictureinpicture} or {@link Player#leavepictureinpicture} event that caused this function to be
+ * called.
+ *
+ * @listens Player#enterpictureinpicture
+ * @listens Player#leavepictureinpicture
+ */
+ ;
+
+ _proto.handlePictureInPictureChange = function handlePictureInPictureChange(event) {
+ if (this.player_.isInPictureInPicture()) {
+ this.controlText('Exit Picture-in-Picture');
+ } else {
+ this.controlText('Picture-in-Picture');
+ }
+
+ this.handlePictureInPictureEnabledChange();
+ }
+ /**
+   * This gets called when a `PictureInPictureToggle` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ if (!this.player_.isInPictureInPicture()) {
+ this.player_.requestPictureInPicture();
+ } else {
+ this.player_.exitPictureInPicture();
+ }
+ };
+
+ return PictureInPictureToggle;
+}(Button);
+/**
+ * The text that should display over the `PictureInPictureToggle`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+PictureInPictureToggle.prototype.controlText_ = 'Picture-in-Picture';
+Component$1.registerComponent('PictureInPictureToggle', PictureInPictureToggle);
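+
+// The toggle above delegates to public Player methods, which can also be called
+// directly (sketch; assumes `player` is an existing Player and the browser
+// exposes document.pictureInPictureEnabled):
+//
+//   if (!player.isInPictureInPicture()) {
+//     player.requestPictureInPicture();
+//   } else {
+//     player.exitPictureInPicture();
+//   }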
+
+/**
+ * Toggle fullscreen video
+ *
+ * @extends Button
+ */
+
+var FullscreenToggle = /*#__PURE__*/function (_Button) {
+ _inheritsLoose(FullscreenToggle, _Button);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function FullscreenToggle(player, options) {
+ var _this;
+
+ _this = _Button.call(this, player, options) || this;
+
+ _this.on(player, 'fullscreenchange', function (e) {
+ return _this.handleFullscreenChange(e);
+ });
+
+ if (document[player.fsApi_.fullscreenEnabled] === false) {
+ _this.disable();
+ }
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = FullscreenToggle.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-fullscreen-control " + _Button.prototype.buildCSSClass.call(this);
+ }
+ /**
+   * Handles fullscreenchange on the player and changes the control text accordingly.
+ *
+ * @param {EventTarget~Event} [event]
+ * The {@link Player#fullscreenchange} event that caused this function to be
+ * called.
+ *
+ * @listens Player#fullscreenchange
+ */
+ ;
+
+ _proto.handleFullscreenChange = function handleFullscreenChange(event) {
+ if (this.player_.isFullscreen()) {
+ this.controlText('Non-Fullscreen');
+ } else {
+ this.controlText('Fullscreen');
+ }
+ }
+ /**
+   * This gets called when a `FullscreenToggle` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ if (!this.player_.isFullscreen()) {
+ this.player_.requestFullscreen();
+ } else {
+ this.player_.exitFullscreen();
+ }
+ };
+
+ return FullscreenToggle;
+}(Button);
+/**
+ * The text that should display over the `FullscreenToggle`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+FullscreenToggle.prototype.controlText_ = 'Fullscreen';
+Component$1.registerComponent('FullscreenToggle', FullscreenToggle);
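+
+// As with the Picture-in-Picture toggle, the click handler above maps onto
+// public Player methods (sketch; `player` is an existing Player instance):
+//
+//   if (!player.isFullscreen()) {
+//     player.requestFullscreen();
+//   } else {
+//     player.exitFullscreen();
+//   }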
+
+/**
+ * Check if volume control is supported and, if it isn't, hide the
+ * `Component` that was passed, using the `vjs-hidden` class.
+ *
+ * @param {Component} self
+ * The component that should be hidden if volume is unsupported
+ *
+ * @param {Player} player
+ * A reference to the player
+ *
+ * @private
+ */
+var checkVolumeSupport = function checkVolumeSupport(self, player) {
+ // hide volume controls when they're not supported by the current tech
+ if (player.tech_ && !player.tech_.featuresVolumeControl) {
+ self.addClass('vjs-hidden');
+ }
+
+ self.on(player, 'loadstart', function () {
+ if (!player.tech_.featuresVolumeControl) {
+ self.addClass('vjs-hidden');
+ } else {
+ self.removeClass('vjs-hidden');
+ }
+ });
+};
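+
+// Volume-related controls are expected to call this helper from their
+// constructors, roughly like the sketch below (`VolumeControl`, defined further
+// down in this file, follows this pattern):
+//
+//   checkVolumeSupport(this, player); // toggles 'vjs-hidden' as the tech's volume support changes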
+
+/**
+ * Shows volume level
+ *
+ * @extends Component
+ */
+
+var VolumeLevel = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(VolumeLevel, _Component);
+
+ function VolumeLevel() {
+ return _Component.apply(this, arguments) || this;
+ }
+
+ var _proto = VolumeLevel.prototype;
+
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ _proto.createEl = function createEl() {
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-volume-level'
+ });
+
+ el.appendChild(_Component.prototype.createEl.call(this, 'span', {
+ className: 'vjs-control-text'
+ }));
+ return el;
+ };
+
+ return VolumeLevel;
+}(Component$1);
+
+Component$1.registerComponent('VolumeLevel', VolumeLevel);
+
+/**
+ * Volume level tooltips display the volume level above or beside the volume bar.
+ *
+ * @extends Component
+ */
+
+var VolumeLevelTooltip = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(VolumeLevelTooltip, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The {@link Player} that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function VolumeLevelTooltip(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.update = throttle(bind(_assertThisInitialized(_this), _this.update), UPDATE_REFRESH_INTERVAL);
+ return _this;
+ }
+ /**
+ * Create the volume tooltip DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = VolumeLevelTooltip.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-volume-tooltip'
+ }, {
+ 'aria-hidden': 'true'
+ });
+ }
+ /**
+ * Updates the position of the tooltip relative to the `VolumeBar` and
+ * its content text.
+ *
+ * @param {Object} rangeBarRect
+ * The `ClientRect` for the {@link VolumeBar} element.
+ *
+ * @param {number} rangeBarPoint
+ * A number from 0 to 1, representing a horizontal/vertical reference point
+ * from the left edge of the {@link VolumeBar}
+ *
+   * @param {boolean} vertical
+   *        Refers to the Volume control position
+   *        in the control bar {@link VolumeControl}
+ *
+ */
+ ;
+
+ _proto.update = function update(rangeBarRect, rangeBarPoint, vertical, content) {
+ if (!vertical) {
+ var tooltipRect = getBoundingClientRect(this.el_);
+ var playerRect = getBoundingClientRect(this.player_.el());
+ var volumeBarPointPx = rangeBarRect.width * rangeBarPoint;
+
+ if (!playerRect || !tooltipRect) {
+ return;
+ }
+
+ var spaceLeftOfPoint = rangeBarRect.left - playerRect.left + volumeBarPointPx;
+ var spaceRightOfPoint = rangeBarRect.width - volumeBarPointPx + (playerRect.right - rangeBarRect.right);
+ var pullTooltipBy = tooltipRect.width / 2;
+
+ if (spaceLeftOfPoint < pullTooltipBy) {
+ pullTooltipBy += pullTooltipBy - spaceLeftOfPoint;
+ } else if (spaceRightOfPoint < pullTooltipBy) {
+ pullTooltipBy = spaceRightOfPoint;
+ }
+
+ if (pullTooltipBy < 0) {
+ pullTooltipBy = 0;
+ } else if (pullTooltipBy > tooltipRect.width) {
+ pullTooltipBy = tooltipRect.width;
+ }
+
+ this.el_.style.right = "-" + pullTooltipBy + "px";
+ }
+
+ this.write(content + "%");
+ }
+ /**
+ * Write the volume to the tooltip DOM element.
+ *
+ * @param {string} content
+ * The formatted volume for the tooltip.
+ */
+ ;
+
+ _proto.write = function write(content) {
+ textContent(this.el_, content);
+ }
+ /**
+ * Updates the position of the volume tooltip relative to the `VolumeBar`.
+ *
+ * @param {Object} rangeBarRect
+ * The `ClientRect` for the {@link VolumeBar} element.
+ *
+ * @param {number} rangeBarPoint
+ * A number from 0 to 1, representing a horizontal/vertical reference point
+ * from the left edge of the {@link VolumeBar}
+ *
+ * @param {boolean} vertical
+ * Refers to the Volume control position
+ * in the control bar {@link VolumeControl}
+ *
+ * @param {number} volume
+ * The volume level to update the tooltip to
+ *
+ * @param {Function} cb
+ * A function that will be called during the request animation frame
+ * for tooltips that need to do additional animations from the default
+ */
+ ;
+
+ _proto.updateVolume = function updateVolume(rangeBarRect, rangeBarPoint, vertical, volume, cb) {
+ var _this2 = this;
+
+ this.requestNamedAnimationFrame('VolumeLevelTooltip#updateVolume', function () {
+ _this2.update(rangeBarRect, rangeBarPoint, vertical, volume.toFixed(0));
+
+ if (cb) {
+ cb();
+ }
+ });
+ };
+
+ return VolumeLevelTooltip;
+}(Component$1);
+
+Component$1.registerComponent('VolumeLevelTooltip', VolumeLevelTooltip);
+
+/**
+ * The {@link MouseVolumeLevelDisplay} component tracks mouse movement over the
+ * {@link VolumeControl}. It displays an indicator and a {@link VolumeLevelTooltip}
+ * indicating the volume level which is represented by a given point in the
+ * {@link VolumeBar}.
+ *
+ * @extends Component
+ */
+
+var MouseVolumeLevelDisplay = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(MouseVolumeLevelDisplay, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The {@link Player} that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function MouseVolumeLevelDisplay(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.update = throttle(bind(_assertThisInitialized(_this), _this.update), UPDATE_REFRESH_INTERVAL);
+ return _this;
+ }
+ /**
+ * Create the DOM element for this class.
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = MouseVolumeLevelDisplay.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-mouse-display'
+ });
+ }
+ /**
+ * Enqueues updates to its own DOM as well as the DOM of its
+ * {@link VolumeLevelTooltip} child.
+ *
+ * @param {Object} rangeBarRect
+ * The `ClientRect` for the {@link VolumeBar} element.
+ *
+ * @param {number} rangeBarPoint
+ * A number from 0 to 1, representing a horizontal/vertical reference point
+ * from the left edge of the {@link VolumeBar}
+ *
+ * @param {boolean} vertical
+ * Refers to the Volume control position
+ * in the control bar {@link VolumeControl}
+ *
+ */
+ ;
+
+ _proto.update = function update(rangeBarRect, rangeBarPoint, vertical) {
+ var _this2 = this;
+
+ var volume = 100 * rangeBarPoint;
+ this.getChild('volumeLevelTooltip').updateVolume(rangeBarRect, rangeBarPoint, vertical, volume, function () {
+ if (vertical) {
+ _this2.el_.style.bottom = rangeBarRect.height * rangeBarPoint + "px";
+ } else {
+ _this2.el_.style.left = rangeBarRect.width * rangeBarPoint + "px";
+ }
+ });
+ };
+
+ return MouseVolumeLevelDisplay;
+}(Component$1);
+/**
+ * Default options for `MouseVolumeLevelDisplay`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+MouseVolumeLevelDisplay.prototype.options_ = {
+ children: ['volumeLevelTooltip']
+};
+Component$1.registerComponent('MouseVolumeLevelDisplay', MouseVolumeLevelDisplay);
+
+/**
+ * The bar that contains the volume level and can be clicked on to adjust the level
+ *
+ * @extends Slider
+ */
+
+var VolumeBar = /*#__PURE__*/function (_Slider) {
+ _inheritsLoose(VolumeBar, _Slider);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function VolumeBar(player, options) {
+ var _this;
+
+ _this = _Slider.call(this, player, options) || this;
+
+ _this.on('slideractive', function (e) {
+ return _this.updateLastVolume_(e);
+ });
+
+ _this.on(player, 'volumechange', function (e) {
+ return _this.updateARIAAttributes(e);
+ });
+
+ player.ready(function () {
+ return _this.updateARIAAttributes();
+ });
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = VolumeBar.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Slider.prototype.createEl.call(this, 'div', {
+ className: 'vjs-volume-bar vjs-slider-bar'
+ }, {
+ 'aria-label': this.localize('Volume Level'),
+ 'aria-live': 'polite'
+ });
+ }
+ /**
+ * Handle mouse down on volume bar
+ *
+ * @param {EventTarget~Event} event
+ * The `mousedown` event that caused this to run.
+ *
+ * @listens mousedown
+ */
+ ;
+
+ _proto.handleMouseDown = function handleMouseDown(event) {
+ if (!isSingleLeftClick(event)) {
+ return;
+ }
+
+ _Slider.prototype.handleMouseDown.call(this, event);
+ }
+ /**
+ * Handle movement events on the {@link VolumeMenuButton}.
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to run.
+ *
+ * @listens mousemove
+ */
+ ;
+
+ _proto.handleMouseMove = function handleMouseMove(event) {
+ var mouseVolumeLevelDisplay = this.getChild('mouseVolumeLevelDisplay');
+
+ if (mouseVolumeLevelDisplay) {
+ var volumeBarEl = this.el();
+ var volumeBarRect = getBoundingClientRect(volumeBarEl);
+ var vertical = this.vertical();
+ var volumeBarPoint = getPointerPosition(volumeBarEl, event);
+ volumeBarPoint = vertical ? volumeBarPoint.y : volumeBarPoint.x; // The default skin has a gap on either side of the `VolumeBar`. This means
+ // that it's possible to trigger this behavior outside the boundaries of
+ // the `VolumeBar`. This ensures we stay within it at all times.
+
+ volumeBarPoint = clamp(volumeBarPoint, 0, 1);
+ mouseVolumeLevelDisplay.update(volumeBarRect, volumeBarPoint, vertical);
+ }
+
+ if (!isSingleLeftClick(event)) {
+ return;
+ }
+
+ this.checkMuted();
+ this.player_.volume(this.calculateDistance(event));
+ }
+ /**
+ * If the player is muted unmute it.
+ */
+ ;
+
+ _proto.checkMuted = function checkMuted() {
+ if (this.player_.muted()) {
+ this.player_.muted(false);
+ }
+ }
+ /**
+ * Get percent of volume level
+ *
+ * @return {number}
+ * Volume level percent as a decimal number.
+ */
+ ;
+
+ _proto.getPercent = function getPercent() {
+ if (this.player_.muted()) {
+ return 0;
+ }
+
+ return this.player_.volume();
+ }
+ /**
+ * Increase volume level for keyboard users
+ */
+ ;
+
+ _proto.stepForward = function stepForward() {
+ this.checkMuted();
+ this.player_.volume(this.player_.volume() + 0.1);
+ }
+ /**
+ * Decrease volume level for keyboard users
+ */
+ ;
+
+ _proto.stepBack = function stepBack() {
+ this.checkMuted();
+ this.player_.volume(this.player_.volume() - 0.1);
+ }
+ /**
+ * Update ARIA accessibility attributes
+ *
+ * @param {EventTarget~Event} [event]
+ * The `volumechange` event that caused this function to run.
+ *
+ * @listens Player#volumechange
+ */
+ ;
+
+ _proto.updateARIAAttributes = function updateARIAAttributes(event) {
+ var ariaValue = this.player_.muted() ? 0 : this.volumeAsPercentage_();
+ this.el_.setAttribute('aria-valuenow', ariaValue);
+ this.el_.setAttribute('aria-valuetext', ariaValue + '%');
+ }
+ /**
+ * Returns the current value of the player volume as a percentage
+ *
+ * @private
+ */
+ ;
+
+ _proto.volumeAsPercentage_ = function volumeAsPercentage_() {
+ return Math.round(this.player_.volume() * 100);
+ }
+ /**
+ * When user starts dragging the VolumeBar, store the volume and listen for
+ * the end of the drag. When the drag ends, if the volume was set to zero,
+ * set lastVolume to the stored volume.
+ *
+ * @listens slideractive
+ * @private
+ */
+ ;
+
+ _proto.updateLastVolume_ = function updateLastVolume_() {
+ var _this2 = this;
+
+ var volumeBeforeDrag = this.player_.volume();
+ this.one('sliderinactive', function () {
+ if (_this2.player_.volume() === 0) {
+ _this2.player_.lastVolume_(volumeBeforeDrag);
+ }
+ });
+ };
+
+ return VolumeBar;
+}(Slider);
+/**
+ * Default options for the `VolumeBar`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+VolumeBar.prototype.options_ = {
+ children: ['volumeLevel'],
+ barName: 'volumeLevel'
+}; // MouseVolumeLevelDisplay tooltip should not be added to a player on mobile devices
+
+if (!IS_IOS && !IS_ANDROID) {
+ VolumeBar.prototype.options_.children.splice(0, 0, 'mouseVolumeLevelDisplay');
+}
+/**
+ * Call the update event for this Slider when this event happens on the player.
+ *
+ * @type {string}
+ */
+
+
+VolumeBar.prototype.playerEvent = 'volumechange';
+Component$1.registerComponent('VolumeBar', VolumeBar);
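+
+// Illustrative sketch of the behaviour implemented above (the element id
+// 'my-player' is an assumed placeholder, not something defined in this file):
+//
+//   var player = videojs('my-player');
+//   player.volume(0.5);  // the VolumeBar fills to 50%
+//   player.muted(true);  // getPercent() above now reports 0
+//   // With the VolumeBar focused, arrow keys call stepForward()/stepBack(),
+//   // which unmute via checkMuted() and nudge player.volume() by +/- 0.1.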
+
+/**
+ * The component for controlling the volume level
+ *
+ * @extends Component
+ */
+
+var VolumeControl = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(VolumeControl, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ */
+ function VolumeControl(player, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ options.vertical = options.vertical || false; // Pass the vertical option down to the VolumeBar if
+ // the VolumeBar is turned on.
+
+ if (typeof options.volumeBar === 'undefined' || isPlain(options.volumeBar)) {
+ options.volumeBar = options.volumeBar || {};
+ options.volumeBar.vertical = options.vertical;
+ }
+
+ _this = _Component.call(this, player, options) || this; // hide this control if volume support is missing
+
+ checkVolumeSupport(_assertThisInitialized(_this), player);
+ _this.throttledHandleMouseMove = throttle(bind(_assertThisInitialized(_this), _this.handleMouseMove), UPDATE_REFRESH_INTERVAL);
+
+ _this.handleMouseUpHandler_ = function (e) {
+ return _this.handleMouseUp(e);
+ };
+
+ _this.on('mousedown', function (e) {
+ return _this.handleMouseDown(e);
+ });
+
+ _this.on('touchstart', function (e) {
+ return _this.handleMouseDown(e);
+ });
+
+ _this.on('mousemove', function (e) {
+ return _this.handleMouseMove(e);
+ }); // while the slider is active (the mouse has been pressed down and
+ // is dragging) or in focus we do not want to hide the VolumeBar
+
+
+ _this.on(_this.volumeBar, ['focus', 'slideractive'], function () {
+ _this.volumeBar.addClass('vjs-slider-active');
+
+ _this.addClass('vjs-slider-active');
+
+ _this.trigger('slideractive');
+ });
+
+ _this.on(_this.volumeBar, ['blur', 'sliderinactive'], function () {
+ _this.volumeBar.removeClass('vjs-slider-active');
+
+ _this.removeClass('vjs-slider-active');
+
+ _this.trigger('sliderinactive');
+ });
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = VolumeControl.prototype;
+
+ _proto.createEl = function createEl() {
+ var orientationClass = 'vjs-volume-horizontal';
+
+ if (this.options_.vertical) {
+ orientationClass = 'vjs-volume-vertical';
+ }
+
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: "vjs-volume-control vjs-control " + orientationClass
+ });
+ }
+ /**
+ * Handle `mousedown` or `touchstart` events on the `VolumeControl`.
+ *
+ * @param {EventTarget~Event} event
+ * `mousedown` or `touchstart` event that triggered this function
+ *
+ * @listens mousedown
+ * @listens touchstart
+ */
+ ;
+
+ _proto.handleMouseDown = function handleMouseDown(event) {
+ var doc = this.el_.ownerDocument;
+ this.on(doc, 'mousemove', this.throttledHandleMouseMove);
+ this.on(doc, 'touchmove', this.throttledHandleMouseMove);
+ this.on(doc, 'mouseup', this.handleMouseUpHandler_);
+ this.on(doc, 'touchend', this.handleMouseUpHandler_);
+ }
+ /**
+ * Handle `mouseup` or `touchend` events on the `VolumeControl`.
+ *
+ * @param {EventTarget~Event} event
+ * `mouseup` or `touchend` event that triggered this function.
+ *
+ * @listens touchend
+ * @listens mouseup
+ */
+ ;
+
+ _proto.handleMouseUp = function handleMouseUp(event) {
+ var doc = this.el_.ownerDocument;
+ this.off(doc, 'mousemove', this.throttledHandleMouseMove);
+ this.off(doc, 'touchmove', this.throttledHandleMouseMove);
+ this.off(doc, 'mouseup', this.handleMouseUpHandler_);
+ this.off(doc, 'touchend', this.handleMouseUpHandler_);
+ }
+ /**
+ * Handle `mousemove` or `touchmove` events on the `VolumeControl`.
+ *
+ * @param {EventTarget~Event} event
+ * `mousemove` or `touchmove` event that triggered this function
+ *
+ * @listens mousemove
+ * @listens touchmove
+ */
+ ;
+
+ _proto.handleMouseMove = function handleMouseMove(event) {
+ this.volumeBar.handleMouseMove(event);
+ };
+
+ return VolumeControl;
+}(Component$1);
+/**
+ * Default options for the `VolumeControl`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+VolumeControl.prototype.options_ = {
+ children: ['volumeBar']
+};
+Component$1.registerComponent('VolumeControl', VolumeControl);
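+
+// Illustrative sketch (the player id and option values are assumed
+// placeholders): like any control bar child, the volume controls can be
+// switched off through player options, which mirrors what checkVolumeSupport()
+// does automatically when the tech lacks volume support:
+//
+//   videojs('my-player', {
+//     controlBar: { volumePanel: false } // omit the volume controls entirely
+//   });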
+
+/**
+ * Check if muting volume is supported and if it isn't hide the mute toggle
+ * button.
+ *
+ * @param {Component} self
+ * A reference to the mute toggle button
+ *
+ * @param {Player} player
+ * A reference to the player
+ *
+ * @private
+ */
+var checkMuteSupport = function checkMuteSupport(self, player) {
+ // hide mute toggle button if it's not supported by the current tech
+ if (player.tech_ && !player.tech_.featuresMuteControl) {
+ self.addClass('vjs-hidden');
+ }
+
+ self.on(player, 'loadstart', function () {
+ if (!player.tech_.featuresMuteControl) {
+ self.addClass('vjs-hidden');
+ } else {
+ self.removeClass('vjs-hidden');
+ }
+ });
+};
+
+/**
+ * A button component for muting the audio.
+ *
+ * @extends Button
+ */
+
+var MuteToggle = /*#__PURE__*/function (_Button) {
+ _inheritsLoose(MuteToggle, _Button);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function MuteToggle(player, options) {
+ var _this;
+
+ _this = _Button.call(this, player, options) || this; // hide this control if volume support is missing
+
+ checkMuteSupport(_assertThisInitialized(_this), player);
+
+ _this.on(player, ['loadstart', 'volumechange'], function (e) {
+ return _this.update(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = MuteToggle.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-mute-control " + _Button.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * This gets called when a `MuteToggle` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ var vol = this.player_.volume();
+ var lastVolume = this.player_.lastVolume_();
+
+ if (vol === 0) {
+ var volumeToSet = lastVolume < 0.1 ? 0.1 : lastVolume;
+ this.player_.volume(volumeToSet);
+ this.player_.muted(false);
+ } else {
+ this.player_.muted(!this.player_.muted());
+ }
+ }
+ /**
+ * Update the `MuteToggle` button based on the state of `volume` and `muted`
+ * on the player.
+ *
+ * @param {EventTarget~Event} [event]
+ * The {@link Player#loadstart} event if this function was called
+ * through an event.
+ *
+ * @listens Player#loadstart
+ * @listens Player#volumechange
+ */
+ ;
+
+ _proto.update = function update(event) {
+ this.updateIcon_();
+ this.updateControlText_();
+ }
+ /**
+ * Update the appearance of the `MuteToggle` icon.
+ *
+ * Possible states (given `level` variable below):
+ * - 0: crossed out
+ * - 1: zero bars of volume
+ * - 2: one bar of volume
+ * - 3: two bars of volume
+ *
+ * @private
+ */
+ ;
+
+ _proto.updateIcon_ = function updateIcon_() {
+ var vol = this.player_.volume();
+ var level = 3; // in iOS when a player is loaded with muted attribute
+ // and volume is changed with a native mute button
+ // we want to make sure muted state is updated
+
+ if (IS_IOS && this.player_.tech_ && this.player_.tech_.el_) {
+ this.player_.muted(this.player_.tech_.el_.muted);
+ }
+
+ if (vol === 0 || this.player_.muted()) {
+ level = 0;
+ } else if (vol < 0.33) {
+ level = 1;
+ } else if (vol < 0.67) {
+ level = 2;
+ } // TODO improve muted icon classes
+
+
+ for (var i = 0; i < 4; i++) {
+ removeClass(this.el_, "vjs-vol-" + i);
+ }
+
+ addClass(this.el_, "vjs-vol-" + level);
+ }
+ /**
+ * If `muted` has changed on the player, update the control text
+ * (`title` attribute on `vjs-mute-control` element and content of
+ * `vjs-control-text` element).
+ *
+ * @private
+ */
+ ;
+
+ _proto.updateControlText_ = function updateControlText_() {
+ var soundOff = this.player_.muted() || this.player_.volume() === 0;
+ var text = soundOff ? 'Unmute' : 'Mute';
+
+ if (this.controlText() !== text) {
+ this.controlText(text);
+ }
+ };
+
+ return MuteToggle;
+}(Button);
+/**
+ * The text that should display over the `MuteToggle`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+MuteToggle.prototype.controlText_ = 'Mute';
+Component$1.registerComponent('MuteToggle', MuteToggle);
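+
+// Illustrative sketch of the toggle logic above (assumes an existing `player`
+// instance created with videojs()):
+//
+//   player.volume(0);  // e.g. the slider was dragged to zero
+//   // Clicking the MuteToggle now restores the last non-zero volume via
+//   // player.lastVolume_() (with a 0.1 floor) and calls player.muted(false);
+//   // clicking it again simply flips player.muted().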
+
+/**
+ * A Component to contain the MuteToggle and VolumeControl so that
+ * they can work together.
+ *
+ * @extends Component
+ */
+
+var VolumePanel = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(VolumePanel, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ */
+ function VolumePanel(player, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ if (typeof options.inline === 'undefined') {
+ options.inline = true;
+ } // pass the inline option down to the VolumeControl as vertical if
+ // the VolumeControl is on.
+
+
+ if (typeof options.volumeControl === 'undefined' || isPlain(options.volumeControl)) {
+ options.volumeControl = options.volumeControl || {};
+ options.volumeControl.vertical = !options.inline;
+ }
+
+ _this = _Component.call(this, player, options) || this; // this handler is used by mouse handler methods below
+
+ _this.handleKeyPressHandler_ = function (e) {
+ return _this.handleKeyPress(e);
+ };
+
+ _this.on(player, ['loadstart'], function (e) {
+ return _this.volumePanelState_(e);
+ });
+
+ _this.on(_this.muteToggle, 'keyup', function (e) {
+ return _this.handleKeyPress(e);
+ });
+
+ _this.on(_this.volumeControl, 'keyup', function (e) {
+ return _this.handleVolumeControlKeyUp(e);
+ });
+
+ _this.on('keydown', function (e) {
+ return _this.handleKeyPress(e);
+ });
+
+ _this.on('mouseover', function (e) {
+ return _this.handleMouseOver(e);
+ });
+
+ _this.on('mouseout', function (e) {
+ return _this.handleMouseOut(e);
+ }); // while the slider is active (the mouse has been pressed down and
+ // is dragging) we do not want to hide the VolumeBar
+
+
+ _this.on(_this.volumeControl, ['slideractive'], _this.sliderActive_);
+
+ _this.on(_this.volumeControl, ['sliderinactive'], _this.sliderInactive_);
+
+ return _this;
+ }
+ /**
+ * Add vjs-slider-active class to the VolumePanel
+ *
+ * @listens VolumeControl#slideractive
+ * @private
+ */
+
+
+ var _proto = VolumePanel.prototype;
+
+ _proto.sliderActive_ = function sliderActive_() {
+ this.addClass('vjs-slider-active');
+ }
+ /**
+ * Removes the vjs-slider-active class from the VolumePanel
+ *
+ * @listens VolumeControl#sliderinactive
+ * @private
+ */
+ ;
+
+ _proto.sliderInactive_ = function sliderInactive_() {
+ this.removeClass('vjs-slider-active');
+ }
+ /**
+ * Adds vjs-hidden or vjs-mute-toggle-only to the VolumePanel
+ * depending on MuteToggle and VolumeControl state
+ *
+ * @listens Player#loadstart
+ * @private
+ */
+ ;
+
+ _proto.volumePanelState_ = function volumePanelState_() {
+ // hide volume panel if neither the volume control nor the mute toggle
+ // is displayed
+ if (this.volumeControl.hasClass('vjs-hidden') && this.muteToggle.hasClass('vjs-hidden')) {
+ this.addClass('vjs-hidden');
+ } // if only mute toggle is visible we don't want
+ // volume panel expanding when hovered or active
+
+
+ if (this.volumeControl.hasClass('vjs-hidden') && !this.muteToggle.hasClass('vjs-hidden')) {
+ this.addClass('vjs-mute-toggle-only');
+ }
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ var orientationClass = 'vjs-volume-panel-horizontal';
+
+ if (!this.options_.inline) {
+ orientationClass = 'vjs-volume-panel-vertical';
+ }
+
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: "vjs-volume-panel vjs-control " + orientationClass
+ });
+ }
+ /**
+ * Dispose of the `volume-panel` and all child components.
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ this.handleMouseOut();
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Handles `keyup` events on the `VolumeControl`, looking for ESC, which closes
+ * the volume panel and sets focus on `MuteToggle`.
+ *
+ * @param {EventTarget~Event} event
+ * The `keyup` event that caused this function to be called.
+ *
+ * @listens keyup
+ */
+ ;
+
+ _proto.handleVolumeControlKeyUp = function handleVolumeControlKeyUp(event) {
+ if (keycode.isEventKey(event, 'Esc')) {
+ this.muteToggle.focus();
+ }
+ }
+ /**
+ * This gets called when a `VolumePanel` gains hover via a `mouseover` event.
+ * It adds the `vjs-hover` class and starts listening for `keyup` events on the
+ * document so that ESC can close the panel.
+ *
+ * @param {EventTarget~Event} event
+ * The `mouseover` event that caused this function to be called.
+ *
+ * @listens mouseover
+ */
+ ;
+
+ _proto.handleMouseOver = function handleMouseOver(event) {
+ this.addClass('vjs-hover');
+ on(document, 'keyup', this.handleKeyPressHandler_);
+ }
+ /**
+ * This gets called when a `VolumePanel` loses hover via a `mouseout` event.
+ * It removes the `vjs-hover` class and stops listening for `keyup` events on
+ * the document.
+ *
+ * @param {EventTarget~Event} event
+ * The `mouseout` event that caused this function to be called.
+ *
+ * @listens mouseout
+ */
+ ;
+
+ _proto.handleMouseOut = function handleMouseOut(event) {
+ this.removeClass('vjs-hover');
+ off(document, 'keyup', this.handleKeyPressHandler_);
+ }
+ /**
+ * Handles `keyup` event on the document or `keydown` event on the `VolumePanel`,
+ * looking for ESC, which hides the `VolumeControl`.
+ *
+ * @param {EventTarget~Event} event
+ * The keypress that triggered this event.
+ *
+ * @listens keydown | keyup
+ */
+ ;
+
+ _proto.handleKeyPress = function handleKeyPress(event) {
+ if (keycode.isEventKey(event, 'Esc')) {
+ this.handleMouseOut();
+ }
+ };
+
+ return VolumePanel;
+}(Component$1);
+/**
+ * Default options for the `VolumeControl`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+VolumePanel.prototype.options_ = {
+ children: ['muteToggle', 'volumeControl']
+};
+Component$1.registerComponent('VolumePanel', VolumePanel);
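+
+// Illustrative sketch (the player id is an assumed placeholder): the `inline`
+// option read in the constructor above controls the panel orientation and is
+// forwarded to the VolumeControl as `vertical`:
+//
+//   videojs('my-player', {
+//     controlBar: {
+//       volumePanel: { inline: false } // vertical, pop-out volume bar
+//     }
+//   });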
+
+/**
+ * The Menu component is used to build popup menus, including subtitle and
+ * captions selection menus.
+ *
+ * @extends Component
+ */
+
+var Menu = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(Menu, _Component);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Player} player
+ * the player that this component should attach to
+ *
+ * @param {Object} [options]
+ * Object of option names and values
+ *
+ */
+ function Menu(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+
+ if (options) {
+ _this.menuButton_ = options.menuButton;
+ }
+
+ _this.focusedChild_ = -1;
+
+ _this.on('keydown', function (e) {
+ return _this.handleKeyDown(e);
+ }); // All the menu item instances share the same blur handler provided by the menu container.
+
+
+ _this.boundHandleBlur_ = function (e) {
+ return _this.handleBlur(e);
+ };
+
+ _this.boundHandleTapClick_ = function (e) {
+ return _this.handleTapClick(e);
+ };
+
+ return _this;
+ }
+ /**
+ * Add event listeners to the {@link MenuItem}.
+ *
+ * @param {Object} component
+ * The instance of the `MenuItem` to add listeners to.
+ *
+ */
+
+
+ var _proto = Menu.prototype;
+
+ _proto.addEventListenerForItem = function addEventListenerForItem(component) {
+ if (!(component instanceof Component$1)) {
+ return;
+ }
+
+ this.on(component, 'blur', this.boundHandleBlur_);
+ this.on(component, ['tap', 'click'], this.boundHandleTapClick_);
+ }
+ /**
+ * Remove event listeners from the {@link MenuItem}.
+ *
+ * @param {Object} component
+ * The instance of the `MenuItem` to remove listeners.
+ *
+ */
+ ;
+
+ _proto.removeEventListenerForItem = function removeEventListenerForItem(component) {
+ if (!(component instanceof Component$1)) {
+ return;
+ }
+
+ this.off(component, 'blur', this.boundHandleBlur_);
+ this.off(component, ['tap', 'click'], this.boundHandleTapClick_);
+ }
+ /**
+ * Remove a child component from this menu. This is also called indirectly when
+ * a component is moved to another menu via `addItem`: the original menu
+ * instance removes it by calling `removeChild`, which detaches this menu's
+ * item listeners before delegating to the parent class.
+ *
+ * @param {Object} component
+ * The instance of the `MenuItem`
+ */
+ ;
+
+ _proto.removeChild = function removeChild(component) {
+ if (typeof component === 'string') {
+ component = this.getChild(component);
+ }
+
+ this.removeEventListenerForItem(component);
+
+ _Component.prototype.removeChild.call(this, component);
+ }
+ /**
+ * Add a {@link MenuItem} to the menu.
+ *
+ * @param {Object|string} component
+ * The name or instance of the `MenuItem` to add.
+ *
+ */
+ ;
+
+ _proto.addItem = function addItem(component) {
+ var childComponent = this.addChild(component);
+
+ if (childComponent) {
+ this.addEventListenerForItem(childComponent);
+ }
+ }
+ /**
+ * Create the `Menu`s DOM element.
+ *
+ * @return {Element}
+ * the element that was created
+ */
+ ;
+
+ _proto.createEl = function createEl$1() {
+ var contentElType = this.options_.contentElType || 'ul';
+ this.contentEl_ = createEl(contentElType, {
+ className: 'vjs-menu-content'
+ });
+ this.contentEl_.setAttribute('role', 'menu');
+
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ append: this.contentEl_,
+ className: 'vjs-menu'
+ });
+
+ el.appendChild(this.contentEl_); // Prevent clicks from bubbling up. Needed for Menu Buttons,
+ // where a click on the parent is significant
+
+ on(el, 'click', function (event) {
+ event.preventDefault();
+ event.stopImmediatePropagation();
+ });
+ return el;
+ };
+
+ _proto.dispose = function dispose() {
+ this.contentEl_ = null;
+ this.boundHandleBlur_ = null;
+ this.boundHandleTapClick_ = null;
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Called when a `MenuItem` loses focus.
+ *
+ * @param {EventTarget~Event} event
+ * The `blur` event that caused this function to be called.
+ *
+ * @listens blur
+ */
+ ;
+
+ _proto.handleBlur = function handleBlur(event) {
+ var relatedTarget = event.relatedTarget || document.activeElement; // Close menu popup when a user clicks outside the menu
+
+ if (!this.children().some(function (element) {
+ return element.el() === relatedTarget;
+ })) {
+ var btn = this.menuButton_;
+
+ if (btn && btn.buttonPressed_ && relatedTarget !== btn.el().firstChild) {
+ btn.unpressButton();
+ }
+ }
+ }
+ /**
+ * Called when a `MenuItem` gets clicked or tapped.
+ *
+ * @param {EventTarget~Event} event
+ * The `click` or `tap` event that caused this function to be called.
+ *
+ * @listens click,tap
+ */
+ ;
+
+ _proto.handleTapClick = function handleTapClick(event) {
+ // Unpress the associated MenuButton, and move focus back to it
+ if (this.menuButton_) {
+ this.menuButton_.unpressButton();
+ var childComponents = this.children();
+
+ if (!Array.isArray(childComponents)) {
+ return;
+ }
+
+ var foundComponent = childComponents.filter(function (component) {
+ return component.el() === event.target;
+ })[0];
+
+ if (!foundComponent) {
+ return;
+ } // don't focus menu button if item is a caption settings item
+ // because focus will move elsewhere
+
+
+ if (foundComponent.name() !== 'CaptionSettingsMenuItem') {
+ this.menuButton_.focus();
+ }
+ }
+ }
+ /**
+ * Handle a `keydown` event on this menu. This listener is added in the constructor.
+ *
+ * @param {EventTarget~Event} event
+ * A `keydown` event that happened on the menu.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Left and Down Arrows
+ if (keycode.isEventKey(event, 'Left') || keycode.isEventKey(event, 'Down')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.stepForward(); // Up and Right Arrows
+ } else if (keycode.isEventKey(event, 'Right') || keycode.isEventKey(event, 'Up')) {
+ event.preventDefault();
+ event.stopPropagation();
+ this.stepBack();
+ }
+ }
+ /**
+ * Move to next (lower) menu item for keyboard users.
+ */
+ ;
+
+ _proto.stepForward = function stepForward() {
+ var stepChild = 0;
+
+ if (this.focusedChild_ !== undefined) {
+ stepChild = this.focusedChild_ + 1;
+ }
+
+ this.focus(stepChild);
+ }
+ /**
+ * Move to previous (higher) menu item for keyboard users.
+ */
+ ;
+
+ _proto.stepBack = function stepBack() {
+ var stepChild = 0;
+
+ if (this.focusedChild_ !== undefined) {
+ stepChild = this.focusedChild_ - 1;
+ }
+
+ this.focus(stepChild);
+ }
+ /**
+ * Set focus on a {@link MenuItem} in the `Menu`.
+ *
+ * @param {Object|string} [item=0]
+ * Index of child item set focus on.
+ */
+ ;
+
+ _proto.focus = function focus(item) {
+ if (item === void 0) {
+ item = 0;
+ }
+
+ var children = this.children().slice();
+ var haveTitle = children.length && children[0].hasClass('vjs-menu-title');
+
+ if (haveTitle) {
+ children.shift();
+ }
+
+ if (children.length > 0) {
+ if (item < 0) {
+ item = 0;
+ } else if (item >= children.length) {
+ item = children.length - 1;
+ }
+
+ this.focusedChild_ = item;
+ children[item].el_.focus();
+ }
+ };
+
+ return Menu;
+}(Component$1);
+
+Component$1.registerComponent('Menu', Menu);
+
+/**
+ * A `MenuButton` class for any popup {@link Menu}.
+ *
+ * @extends Component
+ */
+
+var MenuButton = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(MenuButton, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ */
+ function MenuButton(player, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _Component.call(this, player, options) || this;
+ _this.menuButton_ = new Button(player, options);
+
+ _this.menuButton_.controlText(_this.controlText_);
+
+ _this.menuButton_.el_.setAttribute('aria-haspopup', 'true'); // Add buildCSSClass values to the button, not the wrapper
+
+
+ var buttonClass = Button.prototype.buildCSSClass();
+ _this.menuButton_.el_.className = _this.buildCSSClass() + ' ' + buttonClass;
+
+ _this.menuButton_.removeClass('vjs-control');
+
+ _this.addChild(_this.menuButton_);
+
+ _this.update();
+
+ _this.enabled_ = true;
+
+ var handleClick = function handleClick(e) {
+ return _this.handleClick(e);
+ };
+
+ _this.handleMenuKeyUp_ = function (e) {
+ return _this.handleMenuKeyUp(e);
+ };
+
+ _this.on(_this.menuButton_, 'tap', handleClick);
+
+ _this.on(_this.menuButton_, 'click', handleClick);
+
+ _this.on(_this.menuButton_, 'keydown', function (e) {
+ return _this.handleKeyDown(e);
+ });
+
+ _this.on(_this.menuButton_, 'mouseenter', function () {
+ _this.addClass('vjs-hover');
+
+ _this.menu.show();
+
+ on(document, 'keyup', _this.handleMenuKeyUp_);
+ });
+
+ _this.on('mouseleave', function (e) {
+ return _this.handleMouseLeave(e);
+ });
+
+ _this.on('keydown', function (e) {
+ return _this.handleSubmenuKeyDown(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Update the menu based on the current state of its items.
+ */
+
+
+ var _proto = MenuButton.prototype;
+
+ _proto.update = function update() {
+ var menu = this.createMenu();
+
+ if (this.menu) {
+ this.menu.dispose();
+ this.removeChild(this.menu);
+ }
+
+ this.menu = menu;
+ this.addChild(menu);
+ /**
+ * Track the state of the menu button
+ *
+ * @type {Boolean}
+ * @private
+ */
+
+ this.buttonPressed_ = false;
+ this.menuButton_.el_.setAttribute('aria-expanded', 'false');
+
+ if (this.items && this.items.length <= this.hideThreshold_) {
+ this.hide();
+ this.menu.contentEl_.removeAttribute('role');
+ } else {
+ this.show();
+ this.menu.contentEl_.setAttribute('role', 'menu');
+ }
+ }
+ /**
+ * Create the menu and add all items to it.
+ *
+ * @return {Menu}
+ * The constructed menu
+ */
+ ;
+
+ _proto.createMenu = function createMenu() {
+ var menu = new Menu(this.player_, {
+ menuButton: this
+ });
+ /**
+ * Hide the menu if the number of items is less than or equal to this threshold. This defaults
+ * to 0 and whenever we add items which can be hidden to the menu we'll increment it. We list
+ * it here because every time we run `createMenu` we need to reset the value.
+ *
+ * @protected
+ * @type {Number}
+ */
+
+ this.hideThreshold_ = 0; // Add a title list item to the top
+
+ if (this.options_.title) {
+ var titleEl = createEl('li', {
+ className: 'vjs-menu-title',
+ textContent: toTitleCase$1(this.options_.title),
+ tabIndex: -1
+ });
+ var titleComponent = new Component$1(this.player_, {
+ el: titleEl
+ });
+ menu.addItem(titleComponent);
+ }
+
+ this.items = this.createItems();
+
+ if (this.items) {
+ // Add menu items to the menu
+ for (var i = 0; i < this.items.length; i++) {
+ menu.addItem(this.items[i]);
+ }
+ }
+
+ return menu;
+ }
+ /**
+ * Create the list of menu items. Specific to each subclass.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.createItems = function createItems() {}
+ /**
+ * Create the `MenuButtons`s DOM element.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: this.buildWrapperCSSClass()
+ }, {});
+ }
+ /**
+ * Allow sub components to stack CSS class names for the wrapper element
+ *
+ * @return {string}
+ * The constructed wrapper DOM `className`
+ */
+ ;
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ var menuButtonClass = 'vjs-menu-button'; // If the inline option is passed, we want to use different styles altogether.
+
+ if (this.options_.inline === true) {
+ menuButtonClass += '-inline';
+ } else {
+ menuButtonClass += '-popup';
+ } // TODO: Fix the CSS so that this isn't necessary
+
+
+ var buttonClass = Button.prototype.buildCSSClass();
+ return "vjs-menu-button " + menuButtonClass + " " + buttonClass + " " + _Component.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ ;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ var menuButtonClass = 'vjs-menu-button'; // If the inline option is passed, we want to use different styles altogether.
+
+ if (this.options_.inline === true) {
+ menuButtonClass += '-inline';
+ } else {
+ menuButtonClass += '-popup';
+ }
+
+ return "vjs-menu-button " + menuButtonClass + " " + _Component.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Get or set the localized control text that will be used for accessibility.
+ *
+ * > NOTE: This will come from the internal `menuButton_` element.
+ *
+ * @param {string} [text]
+ * Control text for element.
+ *
+ * @param {Element} [el=this.menuButton_.el()]
+ * Element to set the title on.
+ *
+ * @return {string}
+ * - The control text when getting
+ */
+ ;
+
+ _proto.controlText = function controlText(text, el) {
+ if (el === void 0) {
+ el = this.menuButton_.el();
+ }
+
+ return this.menuButton_.controlText(text, el);
+ }
+ /**
+ * Dispose of the `menu-button` and all child components.
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ this.handleMouseLeave();
+
+ _Component.prototype.dispose.call(this);
+ }
+ /**
+ * Handle a click on a `MenuButton`.
+ * See {@link ClickableComponent#handleClick} for instances where this is called.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ if (this.buttonPressed_) {
+ this.unpressButton();
+ } else {
+ this.pressButton();
+ }
+ }
+ /**
+ * Handle `mouseleave` for `MenuButton`.
+ *
+ * @param {EventTarget~Event} event
+ * The `mouseleave` event that caused this function to be called.
+ *
+ * @listens mouseleave
+ */
+ ;
+
+ _proto.handleMouseLeave = function handleMouseLeave(event) {
+ this.removeClass('vjs-hover');
+ off(document, 'keyup', this.handleMenuKeyUp_);
+ }
+ /**
+ * Set the focus to the actual button, not to this element
+ */
+ ;
+
+ _proto.focus = function focus() {
+ this.menuButton_.focus();
+ }
+ /**
+ * Remove the focus from the actual button, not this element
+ */
+ ;
+
+ _proto.blur = function blur() {
+ this.menuButton_.blur();
+ }
+ /**
+ * Handle tab, escape, down arrow, and up arrow keys for `MenuButton`. See
+ * {@link ClickableComponent#handleKeyDown} for instances where this is called.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ // Escape or Tab unpress the 'button'
+ if (keycode.isEventKey(event, 'Esc') || keycode.isEventKey(event, 'Tab')) {
+ if (this.buttonPressed_) {
+ this.unpressButton();
+ } // Don't preventDefault for Tab key - we still want to lose focus
+
+
+ if (!keycode.isEventKey(event, 'Tab')) {
+ event.preventDefault(); // Set focus back to the menu button's button
+
+ this.menuButton_.focus();
+ } // Up Arrow or Down Arrow also 'press' the button to open the menu
+
+ } else if (keycode.isEventKey(event, 'Up') || keycode.isEventKey(event, 'Down')) {
+ if (!this.buttonPressed_) {
+ event.preventDefault();
+ this.pressButton();
+ }
+ }
+ }
+ /**
+ * Handle a `keyup` event on a `MenuButton`. The listener for this is added in
+ * the constructor.
+ *
+ * @param {EventTarget~Event} event
+ * Key press event
+ *
+ * @listens keyup
+ */
+ ;
+
+ _proto.handleMenuKeyUp = function handleMenuKeyUp(event) {
+ // Escape hides popup menu
+ if (keycode.isEventKey(event, 'Esc') || keycode.isEventKey(event, 'Tab')) {
+ this.removeClass('vjs-hover');
+ }
+ }
+ /**
+ * This method now delegates to `handleSubmenuKeyDown`, so existing callers of
+ * `handleSubmenuKeyPress` keep working.
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to be called.
+ */
+ ;
+
+ _proto.handleSubmenuKeyPress = function handleSubmenuKeyPress(event) {
+ this.handleSubmenuKeyDown(event);
+ }
+ /**
+ * Handle a `keydown` event on a sub-menu. The listener for this is added in
+ * the constructor.
+ *
+ * @param {EventTarget~Event} event
+ * Key press event
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleSubmenuKeyDown = function handleSubmenuKeyDown(event) {
+ // Escape or Tab unpress the 'button'
+ if (keycode.isEventKey(event, 'Esc') || keycode.isEventKey(event, 'Tab')) {
+ if (this.buttonPressed_) {
+ this.unpressButton();
+ } // Don't preventDefault for Tab key - we still want to lose focus
+
+
+ if (!keycode.isEventKey(event, 'Tab')) {
+ event.preventDefault(); // Set focus back to the menu button's button
+
+ this.menuButton_.focus();
+ }
+ }
+ }
+ /**
+ * Put the current `MenuButton` into a pressed state.
+ */
+ ;
+
+ _proto.pressButton = function pressButton() {
+ if (this.enabled_) {
+ this.buttonPressed_ = true;
+ this.menu.show();
+ this.menu.lockShowing();
+ this.menuButton_.el_.setAttribute('aria-expanded', 'true'); // set the focus into the submenu, except on iOS where it is resulting in
+ // undesired scrolling behavior when the player is in an iframe
+
+ if (IS_IOS && isInFrame()) {
+ // Return early so that the menu isn't focused
+ return;
+ }
+
+ this.menu.focus();
+ }
+ }
+ /**
+ * Take the current `MenuButton` out of a pressed state.
+ */
+ ;
+
+ _proto.unpressButton = function unpressButton() {
+ if (this.enabled_) {
+ this.buttonPressed_ = false;
+ this.menu.unlockShowing();
+ this.menu.hide();
+ this.menuButton_.el_.setAttribute('aria-expanded', 'false');
+ }
+ }
+ /**
+ * Disable the `MenuButton`. Don't allow it to be clicked.
+ */
+ ;
+
+ _proto.disable = function disable() {
+ this.unpressButton();
+ this.enabled_ = false;
+ this.addClass('vjs-disabled');
+ this.menuButton_.disable();
+ }
+ /**
+ * Enable the `MenuButton`. Allow it to be clicked.
+ */
+ ;
+
+ _proto.enable = function enable() {
+ this.enabled_ = true;
+ this.removeClass('vjs-disabled');
+ this.menuButton_.enable();
+ };
+
+ return MenuButton;
+}(Component$1);
+
+Component$1.registerComponent('MenuButton', MenuButton);
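+
+// Illustrative sketch of a concrete subclass (the "MyRateButton" name and the
+// rate values are assumed placeholders; videojs.getComponent, videojs.extend
+// and videojs.registerComponent are assumed to be exposed as in other
+// video.js 7 builds):
+//
+//   var MenuButton = videojs.getComponent('MenuButton');
+//   var MenuItem = videojs.getComponent('MenuItem');
+//   var MyRateButton = videojs.extend(MenuButton, {
+//     createItems: function () {
+//       var player = this.player();
+//       return [1, 1.5, 2].map(function (rate) {
+//         return new MenuItem(player, { label: rate + 'x', selectable: true });
+//       });
+//     }
+//   });
+//   videojs.registerComponent('MyRateButton', MyRateButton);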
+
+/**
+ * The base class for buttons that toggle specific track types (e.g. subtitles).
+ *
+ * @extends MenuButton
+ */
+
+var TrackButton = /*#__PURE__*/function (_MenuButton) {
+ _inheritsLoose(TrackButton, _MenuButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function TrackButton(player, options) {
+ var _this;
+
+ var tracks = options.tracks;
+ _this = _MenuButton.call(this, player, options) || this;
+
+ if (_this.items.length <= 1) {
+ _this.hide();
+ }
+
+ if (!tracks) {
+ return _assertThisInitialized(_this);
+ }
+
+ var updateHandler = bind(_assertThisInitialized(_this), _this.update);
+ tracks.addEventListener('removetrack', updateHandler);
+ tracks.addEventListener('addtrack', updateHandler);
+ tracks.addEventListener('labelchange', updateHandler);
+
+ _this.player_.on('ready', updateHandler);
+
+ _this.player_.on('dispose', function () {
+ tracks.removeEventListener('removetrack', updateHandler);
+ tracks.removeEventListener('addtrack', updateHandler);
+ tracks.removeEventListener('labelchange', updateHandler);
+ });
+
+ return _this;
+ }
+
+ return TrackButton;
+}(MenuButton);
+
+Component$1.registerComponent('TrackButton', TrackButton);
+
+/**
+ * @file menu-keys.js
+ */
+
+/**
+ * All keys used for operation of a menu (`MenuButton`, `Menu`, and `MenuItem`)
+ * Note that 'Enter' and 'Space' are not included here (otherwise they would
+ * prevent the `MenuButton` and `MenuItem` from being keyboard-clickable)
+ * @typedef MenuKeys
+ * @array
+ */
+var MenuKeys = ['Tab', 'Esc', 'Up', 'Down', 'Right', 'Left'];
+
+/**
+ * The component for a menu item. `<li>`
+ *
+ * @extends ClickableComponent
+ */
+
+var MenuItem = /*#__PURE__*/function (_ClickableComponent) {
+ _inheritsLoose(MenuItem, _ClickableComponent);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ *
+ */
+ function MenuItem(player, options) {
+ var _this;
+
+ _this = _ClickableComponent.call(this, player, options) || this;
+ _this.selectable = options.selectable;
+ _this.isSelected_ = options.selected || false;
+ _this.multiSelectable = options.multiSelectable;
+
+ _this.selected(_this.isSelected_);
+
+ if (_this.selectable) {
+ if (_this.multiSelectable) {
+ _this.el_.setAttribute('role', 'menuitemcheckbox');
+ } else {
+ _this.el_.setAttribute('role', 'menuitemradio');
+ }
+ } else {
+ _this.el_.setAttribute('role', 'menuitem');
+ }
+
+ return _this;
+ }
+ /**
+ * Create the `MenuItem`'s DOM element
+ *
+ * @param {string} [type=li]
+ * Element's node type, not actually used, always set to `li`.
+ *
+ * @param {Object} [props={}]
+ * An object of properties that should be set on the element
+ *
+ * @param {Object} [attrs={}]
+ * An object of attributes that should be set on the element
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+
+
+ var _proto = MenuItem.prototype;
+
+ _proto.createEl = function createEl$1(type, props, attrs) {
+ // The control is textual, not just an icon
+ this.nonIconControl = true;
+
+ var el = _ClickableComponent.prototype.createEl.call(this, 'li', assign({
+ className: 'vjs-menu-item',
+ tabIndex: -1
+ }, props), attrs); // swap icon with menu item text.
+
+
+ el.replaceChild(createEl('span', {
+ className: 'vjs-menu-item-text',
+ textContent: this.localize(this.options_.label)
+ }), el.querySelector('.vjs-icon-placeholder'));
+ return el;
+ }
+ /**
+ * Ignore keys which are used by the menu, but pass any other ones up. See
+ * {@link ClickableComponent#handleKeyDown} for instances where this is called.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ if (!MenuKeys.some(function (key) {
+ return keycode.isEventKey(event, key);
+ })) {
+ // Pass keydown handling up for unused keys
+ _ClickableComponent.prototype.handleKeyDown.call(this, event);
+ }
+ }
+ /**
+ * Any click on a `MenuItem` puts it into the selected state.
+ * See {@link ClickableComponent#handleClick} for instances where this is called.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ this.selected(true);
+ }
+ /**
+ * Set the state for this menu item as selected or not.
+ *
+ * @param {boolean} selected
+ * if the menu item is selected or not
+ */
+ ;
+
+ _proto.selected = function selected(_selected) {
+ if (this.selectable) {
+ if (_selected) {
+ this.addClass('vjs-selected');
+ this.el_.setAttribute('aria-checked', 'true'); // aria-checked isn't fully supported by browsers/screen readers,
+ // so indicate selected state to screen reader in the control text.
+
+ this.controlText(', selected');
+ this.isSelected_ = true;
+ } else {
+ this.removeClass('vjs-selected');
+ this.el_.setAttribute('aria-checked', 'false'); // Indicate un-selected state to screen reader
+
+ this.controlText('');
+ this.isSelected_ = false;
+ }
+ }
+ };
+
+ return MenuItem;
+}(ClickableComponent);
+
+Component$1.registerComponent('MenuItem', MenuItem);
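+
+// Illustrative sketch (assumes an existing `player` instance): the options read
+// in the constructor above decide the ARIA role of the rendered list item:
+//
+//   var MenuItem = videojs.getComponent('MenuItem');
+//   var item = new MenuItem(player, {
+//     label: 'English',
+//     selectable: true,      // -> role="menuitemradio"
+//     multiSelectable: false
+//   });
+//   item.selected(true);     // adds vjs-selected and aria-checked="true"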
+
+/**
+ * The specific menu item type for selecting a language within a text track kind
+ *
+ * @extends MenuItem
+ */
+
+var TextTrackMenuItem = /*#__PURE__*/function (_MenuItem) {
+ _inheritsLoose(TextTrackMenuItem, _MenuItem);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function TextTrackMenuItem(player, options) {
+ var _this;
+
+ var track = options.track;
+ var tracks = player.textTracks(); // Modify options for parent MenuItem class's init.
+
+ options.label = track.label || track.language || 'Unknown';
+ options.selected = track.mode === 'showing';
+ _this = _MenuItem.call(this, player, options) || this;
+ _this.track = track; // Determine the relevant kind(s) of tracks for this component and filter
+ // out empty kinds.
+
+ _this.kinds = (options.kinds || [options.kind || _this.track.kind]).filter(Boolean);
+
+ var changeHandler = function changeHandler() {
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ _this.handleTracksChange.apply(_assertThisInitialized(_this), args);
+ };
+
+ var selectedLanguageChangeHandler = function selectedLanguageChangeHandler() {
+ for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
+ args[_key2] = arguments[_key2];
+ }
+
+ _this.handleSelectedLanguageChange.apply(_assertThisInitialized(_this), args);
+ };
+
+ player.on(['loadstart', 'texttrackchange'], changeHandler);
+ tracks.addEventListener('change', changeHandler);
+ tracks.addEventListener('selectedlanguagechange', selectedLanguageChangeHandler);
+
+ _this.on('dispose', function () {
+ player.off(['loadstart', 'texttrackchange'], changeHandler);
+ tracks.removeEventListener('change', changeHandler);
+ tracks.removeEventListener('selectedlanguagechange', selectedLanguageChangeHandler);
+ }); // iOS7 doesn't dispatch change events to TextTrackLists when an
+ // associated track's mode changes. Without something like
+ // Object.observe() (also not present on iOS7), it's not
+ // possible to detect changes to the mode attribute and polyfill
+ // the change event. As a poor substitute, we manually dispatch
+ // change events whenever the controls modify the mode.
+
+
+ if (tracks.onchange === undefined) {
+ var event;
+
+ _this.on(['tap', 'click'], function () {
+ if (typeof window$1.Event !== 'object') {
+ // Android 2.3 throws an Illegal Constructor error for window.Event
+ try {
+ event = new window$1.Event('change');
+ } catch (err) {// continue regardless of error
+ }
+ }
+
+ if (!event) {
+ event = document.createEvent('Event');
+ event.initEvent('change', true, true);
+ }
+
+ tracks.dispatchEvent(event);
+ });
+ } // set the default state based on current tracks
+
+
+ _this.handleTracksChange();
+
+ return _this;
+ }
+ /**
+ * This gets called when a `TextTrackMenuItem` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+
+
+ var _proto = TextTrackMenuItem.prototype;
+
+ _proto.handleClick = function handleClick(event) {
+ var referenceTrack = this.track;
+ var tracks = this.player_.textTracks();
+
+ _MenuItem.prototype.handleClick.call(this, event);
+
+ if (!tracks) {
+ return;
+ }
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i]; // If the track from the text tracks list is not of the right kind,
+ // skip it. We do not want to affect tracks of incompatible kind(s).
+
+ if (this.kinds.indexOf(track.kind) === -1) {
+ continue;
+ } // If this text track is the component's track and it is not showing,
+ // set it to showing.
+
+
+ if (track === referenceTrack) {
+ if (track.mode !== 'showing') {
+ track.mode = 'showing';
+ } // If this text track is not the component's track and it is not
+ // disabled, set it to disabled.
+
+ } else if (track.mode !== 'disabled') {
+ track.mode = 'disabled';
+ }
+ }
+ }
+ /**
+ * Handle text track list change
+ *
+ * @param {EventTarget~Event} event
+ * The `change` event that caused this function to be called.
+ *
+ * @listens TextTrackList#change
+ */
+ ;
+
+ _proto.handleTracksChange = function handleTracksChange(event) {
+ var shouldBeSelected = this.track.mode === 'showing'; // Prevent redundant selected() calls because they may cause
+ // screen readers to read the appended control text unnecessarily
+
+ if (shouldBeSelected !== this.isSelected_) {
+ this.selected(shouldBeSelected);
+ }
+ };
+
+ _proto.handleSelectedLanguageChange = function handleSelectedLanguageChange(event) {
+ if (this.track.mode === 'showing') {
+ var selectedLanguage = this.player_.cache_.selectedLanguage; // Don't replace the kind of track across the same language
+
+ if (selectedLanguage && selectedLanguage.enabled && selectedLanguage.language === this.track.language && selectedLanguage.kind !== this.track.kind) {
+ return;
+ }
+
+ this.player_.cache_.selectedLanguage = {
+ enabled: true,
+ language: this.track.language,
+ kind: this.track.kind
+ };
+ }
+ };
+
+ _proto.dispose = function dispose() {
+ // remove reference to track object on dispose
+ this.track = null;
+
+ _MenuItem.prototype.dispose.call(this);
+ };
+
+ return TextTrackMenuItem;
+}(MenuItem);
+
+Component$1.registerComponent('TextTrackMenuItem', TextTrackMenuItem);
+
+/**
+ * A special menu item for turning off a specific type of text track
+ *
+ * @extends TextTrackMenuItem
+ */
+
+var OffTextTrackMenuItem = /*#__PURE__*/function (_TextTrackMenuItem) {
+ _inheritsLoose(OffTextTrackMenuItem, _TextTrackMenuItem);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function OffTextTrackMenuItem(player, options) {
+ // Create pseudo track info
+ // Requires options['kind']
+ options.track = {
+ player: player,
+ // it is no longer necessary to store `kind` or `kinds` on the track itself
+ // since they are now stored in the `kinds` property of all instances of
+ // TextTrackMenuItem, but this will remain for backwards compatibility
+ kind: options.kind,
+ kinds: options.kinds,
+ "default": false,
+ mode: 'disabled'
+ };
+
+ if (!options.kinds) {
+ options.kinds = [options.kind];
+ }
+
+ if (options.label) {
+ options.track.label = options.label;
+ } else {
+ options.track.label = options.kinds.join(' and ') + ' off';
+ } // MenuItem is selectable
+
+
+ options.selectable = true; // MenuItem is NOT multiSelectable (i.e. only one can be marked "selected" at a time)
+
+ options.multiSelectable = false;
+ return _TextTrackMenuItem.call(this, player, options) || this;
+ }
+ /**
+ * Handle text track change
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to run
+ */
+
+
+ var _proto = OffTextTrackMenuItem.prototype;
+
+ _proto.handleTracksChange = function handleTracksChange(event) {
+ var tracks = this.player().textTracks();
+ var shouldBeSelected = true;
+
+ for (var i = 0, l = tracks.length; i < l; i++) {
+ var track = tracks[i];
+
+ if (this.options_.kinds.indexOf(track.kind) > -1 && track.mode === 'showing') {
+ shouldBeSelected = false;
+ break;
+ }
+ } // Prevent redundant selected() calls because they may cause
+ // screen readers to read the appended control text unnecessarily
+
+
+ if (shouldBeSelected !== this.isSelected_) {
+ this.selected(shouldBeSelected);
+ }
+ };
+
+ _proto.handleSelectedLanguageChange = function handleSelectedLanguageChange(event) {
+ var tracks = this.player().textTracks();
+ var allHidden = true;
+
+ for (var i = 0, l = tracks.length; i < l; i++) {
+ var track = tracks[i];
+
+ if (['captions', 'descriptions', 'subtitles'].indexOf(track.kind) > -1 && track.mode === 'showing') {
+ allHidden = false;
+ break;
+ }
+ }
+
+ if (allHidden) {
+ this.player_.cache_.selectedLanguage = {
+ enabled: false
+ };
+ }
+ };
+
+ return OffTextTrackMenuItem;
+}(TextTrackMenuItem);
+
+Component$1.registerComponent('OffTextTrackMenuItem', OffTextTrackMenuItem);
+
+/**
+ * The base class for buttons that toggle specific text track types (e.g. subtitles)
+ *
+ * @extends MenuButton
+ */
+
+var TextTrackButton = /*#__PURE__*/function (_TrackButton) {
+ _inheritsLoose(TextTrackButton, _TrackButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ */
+ function TextTrackButton(player, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ options.tracks = player.textTracks();
+ return _TrackButton.call(this, player, options) || this;
+ }
+ /**
+ * Create a menu item for each text track
+ *
+ * @param {TextTrackMenuItem[]} [items=[]]
+ * Existing array of items to use during creation
+ *
+ * @return {TextTrackMenuItem[]}
+ * Array of menu items that were created
+ */
+
+
+ var _proto = TextTrackButton.prototype;
+
+ _proto.createItems = function createItems(items, TrackMenuItem) {
+ if (items === void 0) {
+ items = [];
+ }
+
+ if (TrackMenuItem === void 0) {
+ TrackMenuItem = TextTrackMenuItem;
+ }
+
+ // Label is an override for the [track] off label
+ // Used to localise captions/subtitles
+ var label;
+
+ if (this.label_) {
+ label = this.label_ + " off";
+ } // Add an OFF menu item to turn all tracks off
+
+
+ items.push(new OffTextTrackMenuItem(this.player_, {
+ kinds: this.kinds_,
+ kind: this.kind_,
+ label: label
+ }));
+ this.hideThreshold_ += 1;
+ var tracks = this.player_.textTracks();
+
+ if (!Array.isArray(this.kinds_)) {
+ this.kinds_ = [this.kind_];
+ }
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i]; // only add tracks that are of an appropriate kind and have a label
+
+ if (this.kinds_.indexOf(track.kind) > -1) {
+ var item = new TrackMenuItem(this.player_, {
+ track: track,
+ kinds: this.kinds_,
+ kind: this.kind_,
+ // MenuItem is selectable
+ selectable: true,
+ // MenuItem is NOT multiSelectable (i.e. only one can be marked "selected" at a time)
+ multiSelectable: false
+ });
+ item.addClass("vjs-" + track.kind + "-menu-item");
+ items.push(item);
+ }
+ }
+
+ return items;
+ };
+
+ return TextTrackButton;
+}(TrackButton);
+
+Component$1.registerComponent('TextTrackButton', TextTrackButton);
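+
+// Usage sketch (illustrative; assumes a player created elsewhere with
+// videojs('my-video') and a placeholder caption file URL). The menus built by
+// TextTrackButton subclasses are populated from the player's text track list,
+// so a track added at runtime appears as a new menu item:
+//
+//   var player = videojs('my-video');
+//   player.addRemoteTextTrack({
+//     kind: 'subtitles',
+//     srclang: 'en',
+//     label: 'English',
+//     src: '/static/example/subtitles.en.vtt'
+//   }, false);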
+
+/**
+ * The chapter track menu item
+ *
+ * @extends MenuItem
+ */
+
+var ChaptersTrackMenuItem = /*#__PURE__*/function (_MenuItem) {
+ _inheritsLoose(ChaptersTrackMenuItem, _MenuItem);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function ChaptersTrackMenuItem(player, options) {
+ var _this;
+
+ var track = options.track;
+ var cue = options.cue;
+ var currentTime = player.currentTime(); // Modify options for parent MenuItem class's init.
+
+ options.selectable = true;
+ options.multiSelectable = false;
+ options.label = cue.text;
+ options.selected = cue.startTime <= currentTime && currentTime < cue.endTime;
+ _this = _MenuItem.call(this, player, options) || this;
+ _this.track = track;
+ _this.cue = cue;
+ return _this;
+ }
+ /**
+   * This gets called when a `ChaptersTrackMenuItem` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+
+
+ var _proto = ChaptersTrackMenuItem.prototype;
+
+ _proto.handleClick = function handleClick(event) {
+ _MenuItem.prototype.handleClick.call(this);
+
+ this.player_.currentTime(this.cue.startTime);
+ };
+
+ return ChaptersTrackMenuItem;
+}(MenuItem);
+
+Component$1.registerComponent('ChaptersTrackMenuItem', ChaptersTrackMenuItem);
+
+/**
+ * The button component for toggling and selecting chapters
+ * Chapters act much differently than other text tracks
+ * Cues are navigation vs. other tracks of alternative languages
+ *
+ * @extends TextTrackButton
+ */
+
+var ChaptersButton = /*#__PURE__*/function (_TextTrackButton) {
+ _inheritsLoose(ChaptersButton, _TextTrackButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * The function to call when this function is ready.
+ */
+ function ChaptersButton(player, options, ready) {
+ var _this;
+
+ _this = _TextTrackButton.call(this, player, options, ready) || this;
+
+ _this.selectCurrentItem_ = function () {
+ _this.items.forEach(function (item) {
+ item.selected(_this.track_.activeCues[0] === item.cue);
+ });
+ };
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = ChaptersButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-chapters-button " + _TextTrackButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-chapters-button " + _TextTrackButton.prototype.buildWrapperCSSClass.call(this);
+ }
+ /**
+ * Update the menu based on the current state of its items.
+ *
+ * @param {EventTarget~Event} [event]
+ * An event that triggered this function to run.
+ *
+ * @listens TextTrackList#addtrack
+ * @listens TextTrackList#removetrack
+ * @listens TextTrackList#change
+ */
+ ;
+
+ _proto.update = function update(event) {
+ if (event && event.track && event.track.kind !== 'chapters') {
+ return;
+ }
+
+ var track = this.findChaptersTrack();
+
+ if (track !== this.track_) {
+ this.setTrack(track);
+
+ _TextTrackButton.prototype.update.call(this);
+ } else if (!this.items || track && track.cues && track.cues.length !== this.items.length) {
+ // Update the menu initially or if the number of cues has changed since set
+ _TextTrackButton.prototype.update.call(this);
+ }
+ }
+ /**
+ * Set the currently selected track for the chapters button.
+ *
+ * @param {TextTrack} track
+ * The new track to select. Nothing will change if this is the currently selected
+ * track.
+ */
+ ;
+
+ _proto.setTrack = function setTrack(track) {
+ if (this.track_ === track) {
+ return;
+ }
+
+ if (!this.updateHandler_) {
+ this.updateHandler_ = this.update.bind(this);
+ } // here this.track_ refers to the old track instance
+
+
+ if (this.track_) {
+ var remoteTextTrackEl = this.player_.remoteTextTrackEls().getTrackElementByTrack_(this.track_);
+
+ if (remoteTextTrackEl) {
+ remoteTextTrackEl.removeEventListener('load', this.updateHandler_);
+ }
+
+ this.track_.removeEventListener('cuechange', this.selectCurrentItem_);
+ this.track_ = null;
+ }
+
+ this.track_ = track; // here this.track_ refers to the new track instance
+
+ if (this.track_) {
+ this.track_.mode = 'hidden';
+
+ var _remoteTextTrackEl = this.player_.remoteTextTrackEls().getTrackElementByTrack_(this.track_);
+
+ if (_remoteTextTrackEl) {
+ _remoteTextTrackEl.addEventListener('load', this.updateHandler_);
+ }
+
+ this.track_.addEventListener('cuechange', this.selectCurrentItem_);
+ }
+ }
+ /**
+ * Find the track object that is currently in use by this ChaptersButton
+ *
+ * @return {TextTrack|undefined}
+ * The current track or undefined if none was found.
+ */
+ ;
+
+ _proto.findChaptersTrack = function findChaptersTrack() {
+ var tracks = this.player_.textTracks() || [];
+
+ for (var i = tracks.length - 1; i >= 0; i--) {
+ // We will always choose the last track as our chaptersTrack
+ var track = tracks[i];
+
+ if (track.kind === this.kind_) {
+ return track;
+ }
+ }
+ }
+ /**
+ * Get the caption for the ChaptersButton based on the track label. This will also
+ * use the current tracks localized kind as a fallback if a label does not exist.
+ *
+ * @return {string}
+ * The tracks current label or the localized track kind.
+ */
+ ;
+
+ _proto.getMenuCaption = function getMenuCaption() {
+ if (this.track_ && this.track_.label) {
+ return this.track_.label;
+ }
+
+ return this.localize(toTitleCase$1(this.kind_));
+ }
+ /**
+ * Create menu from chapter track
+ *
+ * @return {Menu}
+ * New menu for the chapter buttons
+ */
+ ;
+
+ _proto.createMenu = function createMenu() {
+ this.options_.title = this.getMenuCaption();
+ return _TextTrackButton.prototype.createMenu.call(this);
+ }
+ /**
+ * Create a menu item for each text track
+ *
+ * @return {TextTrackMenuItem[]}
+ * Array of menu items
+ */
+ ;
+
+ _proto.createItems = function createItems() {
+ var items = [];
+
+ if (!this.track_) {
+ return items;
+ }
+
+ var cues = this.track_.cues;
+
+ if (!cues) {
+ return items;
+ }
+
+ for (var i = 0, l = cues.length; i < l; i++) {
+ var cue = cues[i];
+ var mi = new ChaptersTrackMenuItem(this.player_, {
+ track: this.track_,
+ cue: cue
+ });
+ items.push(mi);
+ }
+
+ return items;
+ };
+
+ return ChaptersButton;
+}(TextTrackButton);
+/**
+ * `kind` of TextTrack to look for to associate it with this menu.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+ChaptersButton.prototype.kind_ = 'chapters';
+/**
+ * The text that should display over the `ChaptersButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+ChaptersButton.prototype.controlText_ = 'Chapters';
+Component$1.registerComponent('ChaptersButton', ChaptersButton);
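+
+// Usage sketch (illustrative; assumes a player created elsewhere with
+// videojs('my-video') and a placeholder 'chapters.vtt' file). ChaptersButton
+// looks for a text track of kind 'chapters' and turns each of its cues into a
+// ChaptersTrackMenuItem:
+//
+//   var player = videojs('my-video');
+//   player.addRemoteTextTrack({
+//     kind: 'chapters',
+//     srclang: 'en',
+//     src: '/static/example/chapters.vtt'
+//   }, false);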
+
+/**
+ * The button component for toggling and selecting descriptions
+ *
+ * @extends TextTrackButton
+ */
+
+var DescriptionsButton = /*#__PURE__*/function (_TextTrackButton) {
+ _inheritsLoose(DescriptionsButton, _TextTrackButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * The function to call when this component is ready.
+ */
+ function DescriptionsButton(player, options, ready) {
+ var _this;
+
+ _this = _TextTrackButton.call(this, player, options, ready) || this;
+ var tracks = player.textTracks();
+ var changeHandler = bind(_assertThisInitialized(_this), _this.handleTracksChange);
+ tracks.addEventListener('change', changeHandler);
+
+ _this.on('dispose', function () {
+ tracks.removeEventListener('change', changeHandler);
+ });
+
+ return _this;
+ }
+ /**
+ * Handle text track change
+ *
+ * @param {EventTarget~Event} event
+ * The event that caused this function to run
+ *
+ * @listens TextTrackList#change
+ */
+
+
+ var _proto = DescriptionsButton.prototype;
+
+ _proto.handleTracksChange = function handleTracksChange(event) {
+ var tracks = this.player().textTracks();
+ var disabled = false; // Check whether a track of a different kind is showing
+
+ for (var i = 0, l = tracks.length; i < l; i++) {
+ var track = tracks[i];
+
+ if (track.kind !== this.kind_ && track.mode === 'showing') {
+ disabled = true;
+ break;
+ }
+ } // If another track is showing, disable this menu button
+
+
+ if (disabled) {
+ this.disable();
+ } else {
+ this.enable();
+ }
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ ;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-descriptions-button " + _TextTrackButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-descriptions-button " + _TextTrackButton.prototype.buildWrapperCSSClass.call(this);
+ };
+
+ return DescriptionsButton;
+}(TextTrackButton);
+/**
+ * `kind` of TextTrack to look for to associate it with this menu.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+DescriptionsButton.prototype.kind_ = 'descriptions';
+/**
+ * The text that should display over the `DescriptionsButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+DescriptionsButton.prototype.controlText_ = 'Descriptions';
+Component$1.registerComponent('DescriptionsButton', DescriptionsButton);
+
+/**
+ * The button component for toggling and selecting subtitles
+ *
+ * @extends TextTrackButton
+ */
+
+var SubtitlesButton = /*#__PURE__*/function (_TextTrackButton) {
+ _inheritsLoose(SubtitlesButton, _TextTrackButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * The function to call when this component is ready.
+ */
+ function SubtitlesButton(player, options, ready) {
+ return _TextTrackButton.call(this, player, options, ready) || this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = SubtitlesButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-subtitles-button " + _TextTrackButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-subtitles-button " + _TextTrackButton.prototype.buildWrapperCSSClass.call(this);
+ };
+
+ return SubtitlesButton;
+}(TextTrackButton);
+/**
+ * `kind` of TextTrack to look for to associate it with this menu.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+SubtitlesButton.prototype.kind_ = 'subtitles';
+/**
+ * The text that should display over the `SubtitlesButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+SubtitlesButton.prototype.controlText_ = 'Subtitles';
+Component$1.registerComponent('SubtitlesButton', SubtitlesButton);
+
+/**
+ * The menu item for caption track settings menu
+ *
+ * @extends TextTrackMenuItem
+ */
+
+var CaptionSettingsMenuItem = /*#__PURE__*/function (_TextTrackMenuItem) {
+ _inheritsLoose(CaptionSettingsMenuItem, _TextTrackMenuItem);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function CaptionSettingsMenuItem(player, options) {
+ var _this;
+
+ options.track = {
+ player: player,
+ kind: options.kind,
+ label: options.kind + ' settings',
+ selectable: false,
+ "default": false,
+ mode: 'disabled'
+ }; // CaptionSettingsMenuItem has no concept of 'selected'
+
+ options.selectable = false;
+ options.name = 'CaptionSettingsMenuItem';
+ _this = _TextTrackMenuItem.call(this, player, options) || this;
+
+ _this.addClass('vjs-texttrack-settings');
+
+ _this.controlText(', opens ' + options.kind + ' settings dialog');
+
+ return _this;
+ }
+ /**
+   * This gets called when a `CaptionSettingsMenuItem` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+
+
+ var _proto = CaptionSettingsMenuItem.prototype;
+
+ _proto.handleClick = function handleClick(event) {
+ this.player().getChild('textTrackSettings').open();
+ };
+
+ return CaptionSettingsMenuItem;
+}(TextTrackMenuItem);
+
+Component$1.registerComponent('CaptionSettingsMenuItem', CaptionSettingsMenuItem);
+
+/**
+ * The button component for toggling and selecting captions
+ *
+ * @extends TextTrackButton
+ */
+
+var CaptionsButton = /*#__PURE__*/function (_TextTrackButton) {
+ _inheritsLoose(CaptionsButton, _TextTrackButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * The function to call when this component is ready.
+ */
+ function CaptionsButton(player, options, ready) {
+ return _TextTrackButton.call(this, player, options, ready) || this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = CaptionsButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-captions-button " + _TextTrackButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-captions-button " + _TextTrackButton.prototype.buildWrapperCSSClass.call(this);
+ }
+ /**
+ * Create caption menu items
+ *
+ * @return {CaptionSettingsMenuItem[]}
+ * The array of current menu items.
+ */
+ ;
+
+ _proto.createItems = function createItems() {
+ var items = [];
+
+ if (!(this.player().tech_ && this.player().tech_.featuresNativeTextTracks) && this.player().getChild('textTrackSettings')) {
+ items.push(new CaptionSettingsMenuItem(this.player_, {
+ kind: this.kind_
+ }));
+ this.hideThreshold_ += 1;
+ }
+
+ return _TextTrackButton.prototype.createItems.call(this, items);
+ };
+
+ return CaptionsButton;
+}(TextTrackButton);
+/**
+ * `kind` of TextTrack to look for to associate it with this menu.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+CaptionsButton.prototype.kind_ = 'captions';
+/**
+ * The text that should display over the `CaptionsButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+CaptionsButton.prototype.controlText_ = 'Captions';
+Component$1.registerComponent('CaptionsButton', CaptionsButton);
+
+/**
+ * SubsCapsMenuItem has an [cc] icon to distinguish captions from subtitles
+ * in the SubsCapsMenu.
+ *
+ * @extends TextTrackMenuItem
+ */
+
+var SubsCapsMenuItem = /*#__PURE__*/function (_TextTrackMenuItem) {
+ _inheritsLoose(SubsCapsMenuItem, _TextTrackMenuItem);
+
+ function SubsCapsMenuItem() {
+ return _TextTrackMenuItem.apply(this, arguments) || this;
+ }
+
+ var _proto = SubsCapsMenuItem.prototype;
+
+ _proto.createEl = function createEl$1(type, props, attrs) {
+ var el = _TextTrackMenuItem.prototype.createEl.call(this, type, props, attrs);
+
+ var parentSpan = el.querySelector('.vjs-menu-item-text');
+
+ if (this.options_.track.kind === 'captions') {
+ parentSpan.appendChild(createEl('span', {
+ className: 'vjs-icon-placeholder'
+ }, {
+ 'aria-hidden': true
+ }));
+ parentSpan.appendChild(createEl('span', {
+ className: 'vjs-control-text',
+ // space added as the text will visually flow with the
+ // label
+ textContent: " " + this.localize('Captions')
+ }));
+ }
+
+ return el;
+ };
+
+ return SubsCapsMenuItem;
+}(TextTrackMenuItem);
+
+Component$1.registerComponent('SubsCapsMenuItem', SubsCapsMenuItem);
+
+/**
+ * The button component for toggling and selecting captions and/or subtitles
+ *
+ * @extends TextTrackButton
+ */
+
+var SubsCapsButton = /*#__PURE__*/function (_TextTrackButton) {
+ _inheritsLoose(SubsCapsButton, _TextTrackButton);
+
+ function SubsCapsButton(player, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _TextTrackButton.call(this, player, options) || this; // Although North America uses "captions" in most cases for
+    // "captions and subtitles", other locales use "subtitles"
+
+ _this.label_ = 'subtitles';
+
+ if (['en', 'en-us', 'en-ca', 'fr-ca'].indexOf(_this.player_.language_) > -1) {
+ _this.label_ = 'captions';
+ }
+
+ _this.menuButton_.controlText(toTitleCase$1(_this.label_));
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = SubsCapsButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-subs-caps-button " + _TextTrackButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-subs-caps-button " + _TextTrackButton.prototype.buildWrapperCSSClass.call(this);
+ }
+ /**
+ * Create caption/subtitles menu items
+ *
+ * @return {CaptionSettingsMenuItem[]}
+ * The array of current menu items.
+ */
+ ;
+
+ _proto.createItems = function createItems() {
+ var items = [];
+
+ if (!(this.player().tech_ && this.player().tech_.featuresNativeTextTracks) && this.player().getChild('textTrackSettings')) {
+ items.push(new CaptionSettingsMenuItem(this.player_, {
+ kind: this.label_
+ }));
+ this.hideThreshold_ += 1;
+ }
+
+ items = _TextTrackButton.prototype.createItems.call(this, items, SubsCapsMenuItem);
+ return items;
+ };
+
+ return SubsCapsButton;
+}(TextTrackButton);
+/**
+ * `kind`s of TextTrack to look for to associate it with this menu.
+ *
+ * @type {array}
+ * @private
+ */
+
+
+SubsCapsButton.prototype.kinds_ = ['captions', 'subtitles'];
+/**
+ * The text that should display over the `SubsCapsButton`s controls.
+ *
+ *
+ * @type {string}
+ * @private
+ */
+
+SubsCapsButton.prototype.controlText_ = 'Subtitles';
+Component$1.registerComponent('SubsCapsButton', SubsCapsButton);
+
+/**
+ * An {@link AudioTrack} {@link MenuItem}
+ *
+ * @extends MenuItem
+ */
+
+var AudioTrackMenuItem = /*#__PURE__*/function (_MenuItem) {
+ _inheritsLoose(AudioTrackMenuItem, _MenuItem);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function AudioTrackMenuItem(player, options) {
+ var _this;
+
+ var track = options.track;
+ var tracks = player.audioTracks(); // Modify options for parent MenuItem class's init.
+
+ options.label = track.label || track.language || 'Unknown';
+ options.selected = track.enabled;
+ _this = _MenuItem.call(this, player, options) || this;
+ _this.track = track;
+
+ _this.addClass("vjs-" + track.kind + "-menu-item");
+
+ var changeHandler = function changeHandler() {
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ _this.handleTracksChange.apply(_assertThisInitialized(_this), args);
+ };
+
+ tracks.addEventListener('change', changeHandler);
+
+ _this.on('dispose', function () {
+ tracks.removeEventListener('change', changeHandler);
+ });
+
+ return _this;
+ }
+
+ var _proto = AudioTrackMenuItem.prototype;
+
+ _proto.createEl = function createEl$1(type, props, attrs) {
+ var el = _MenuItem.prototype.createEl.call(this, type, props, attrs);
+
+ var parentSpan = el.querySelector('.vjs-menu-item-text');
+
+ if (this.options_.track.kind === 'main-desc') {
+ parentSpan.appendChild(createEl('span', {
+ className: 'vjs-icon-placeholder'
+ }, {
+ 'aria-hidden': true
+ }));
+ parentSpan.appendChild(createEl('span', {
+ className: 'vjs-control-text',
+ textContent: ' ' + this.localize('Descriptions')
+ }));
+ }
+
+ return el;
+ }
+ /**
+   * This gets called when an `AudioTrackMenuItem` is "clicked". See {@link ClickableComponent}
+ * for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ _MenuItem.prototype.handleClick.call(this, event); // the audio track list will automatically toggle other tracks
+ // off for us.
+
+
+ this.track.enabled = true; // when native audio tracks are used, we want to make sure that other tracks are turned off
+
+ if (this.player_.tech_.featuresNativeAudioTracks) {
+ var tracks = this.player_.audioTracks();
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i]; // skip the current track since we enabled it above
+
+ if (track === this.track) {
+ continue;
+ }
+
+ track.enabled = track === this.track;
+ }
+ }
+ }
+ /**
+ * Handle any {@link AudioTrack} change.
+ *
+ * @param {EventTarget~Event} [event]
+ * The {@link AudioTrackList#change} event that caused this to run.
+ *
+ * @listens AudioTrackList#change
+ */
+ ;
+
+ _proto.handleTracksChange = function handleTracksChange(event) {
+ this.selected(this.track.enabled);
+ };
+
+ return AudioTrackMenuItem;
+}(MenuItem);
+
+Component$1.registerComponent('AudioTrackMenuItem', AudioTrackMenuItem);
+
+/**
+ * The base class for buttons that toggle specific {@link AudioTrack} types.
+ *
+ * @extends TrackButton
+ */
+
+var AudioTrackButton = /*#__PURE__*/function (_TrackButton) {
+ _inheritsLoose(AudioTrackButton, _TrackButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options={}]
+ * The key/value store of player options.
+ */
+ function AudioTrackButton(player, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ options.tracks = player.audioTracks();
+ return _TrackButton.call(this, player, options) || this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+
+
+ var _proto = AudioTrackButton.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-audio-button " + _TrackButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-audio-button " + _TrackButton.prototype.buildWrapperCSSClass.call(this);
+ }
+ /**
+ * Create a menu item for each audio track
+ *
+ * @param {AudioTrackMenuItem[]} [items=[]]
+ * An array of existing menu items to use.
+ *
+ * @return {AudioTrackMenuItem[]}
+ * An array of menu items
+ */
+ ;
+
+ _proto.createItems = function createItems(items) {
+ if (items === void 0) {
+ items = [];
+ }
+
+    // if there's only one audio track, there's no point in showing it
+ this.hideThreshold_ = 1;
+ var tracks = this.player_.audioTracks();
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i];
+ items.push(new AudioTrackMenuItem(this.player_, {
+ track: track,
+ // MenuItem is selectable
+ selectable: true,
+ // MenuItem is NOT multiSelectable (i.e. only one can be marked "selected" at a time)
+ multiSelectable: false
+ }));
+ }
+
+ return items;
+ };
+
+ return AudioTrackButton;
+}(TrackButton);
+/**
+ * The text that should display over the `AudioTrackButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+AudioTrackButton.prototype.controlText_ = 'Audio Track';
+Component$1.registerComponent('AudioTrackButton', AudioTrackButton);
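+
+// Usage sketch (illustrative; assumes an existing player whose source exposes
+// several audio tracks through player.audioTracks()). Enabling one track marks
+// the matching AudioTrackMenuItem as selected; the track list toggles the
+// others off:
+//
+//   var tracks = player.audioTracks();
+//   for (var i = 0; i < tracks.length; i++) {
+//     if (tracks[i].language === 'en') {
+//       tracks[i].enabled = true;
+//     }
+//   }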
+
+/**
+ * The specific menu item type for selecting a playback rate.
+ *
+ * @extends MenuItem
+ */
+
+var PlaybackRateMenuItem = /*#__PURE__*/function (_MenuItem) {
+ _inheritsLoose(PlaybackRateMenuItem, _MenuItem);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function PlaybackRateMenuItem(player, options) {
+ var _this;
+
+ var label = options.rate;
+ var rate = parseFloat(label, 10); // Modify options for parent MenuItem class's init.
+
+ options.label = label;
+ options.selected = rate === player.playbackRate();
+ options.selectable = true;
+ options.multiSelectable = false;
+ _this = _MenuItem.call(this, player, options) || this;
+ _this.label = label;
+ _this.rate = rate;
+
+ _this.on(player, 'ratechange', function (e) {
+ return _this.update(e);
+ });
+
+ return _this;
+ }
+ /**
+   * This gets called when a `PlaybackRateMenuItem` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+
+
+ var _proto = PlaybackRateMenuItem.prototype;
+
+ _proto.handleClick = function handleClick(event) {
+ _MenuItem.prototype.handleClick.call(this);
+
+ this.player().playbackRate(this.rate);
+ }
+ /**
+ * Update the PlaybackRateMenuItem when the playbackrate changes.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `ratechange` event that caused this function to run.
+ *
+ * @listens Player#ratechange
+ */
+ ;
+
+ _proto.update = function update(event) {
+ this.selected(this.player().playbackRate() === this.rate);
+ };
+
+ return PlaybackRateMenuItem;
+}(MenuItem);
+/**
+ * The element type used for the content of the `PlaybackRateMenuItem`.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+PlaybackRateMenuItem.prototype.contentElType = 'button';
+Component$1.registerComponent('PlaybackRateMenuItem', PlaybackRateMenuItem);
+
+/**
+ * The component for controlling the playback rate.
+ *
+ * @extends MenuButton
+ */
+
+var PlaybackRateMenuButton = /*#__PURE__*/function (_MenuButton) {
+ _inheritsLoose(PlaybackRateMenuButton, _MenuButton);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function PlaybackRateMenuButton(player, options) {
+ var _this;
+
+ _this = _MenuButton.call(this, player, options) || this;
+
+ _this.menuButton_.el_.setAttribute('aria-describedby', _this.labelElId_);
+
+ _this.updateVisibility();
+
+ _this.updateLabel();
+
+ _this.on(player, 'loadstart', function (e) {
+ return _this.updateVisibility(e);
+ });
+
+ _this.on(player, 'ratechange', function (e) {
+ return _this.updateLabel(e);
+ });
+
+ _this.on(player, 'playbackrateschange', function (e) {
+ return _this.handlePlaybackRateschange(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = PlaybackRateMenuButton.prototype;
+
+ _proto.createEl = function createEl$1() {
+ var el = _MenuButton.prototype.createEl.call(this);
+
+ this.labelElId_ = 'vjs-playback-rate-value-label-' + this.id_;
+ this.labelEl_ = createEl('div', {
+ className: 'vjs-playback-rate-value',
+ id: this.labelElId_,
+ textContent: '1x'
+ });
+ el.appendChild(this.labelEl_);
+ return el;
+ };
+
+ _proto.dispose = function dispose() {
+ this.labelEl_ = null;
+
+ _MenuButton.prototype.dispose.call(this);
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ ;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-playback-rate " + _MenuButton.prototype.buildCSSClass.call(this);
+ };
+
+ _proto.buildWrapperCSSClass = function buildWrapperCSSClass() {
+ return "vjs-playback-rate " + _MenuButton.prototype.buildWrapperCSSClass.call(this);
+ }
+ /**
+ * Create the list of menu items. Specific to each subclass.
+ *
+ */
+ ;
+
+ _proto.createItems = function createItems() {
+ var rates = this.playbackRates();
+ var items = [];
+
+ for (var i = rates.length - 1; i >= 0; i--) {
+ items.push(new PlaybackRateMenuItem(this.player(), {
+ rate: rates[i] + 'x'
+ }));
+ }
+
+ return items;
+ }
+ /**
+ * Updates ARIA accessibility attributes
+ */
+ ;
+
+ _proto.updateARIAAttributes = function updateARIAAttributes() {
+ // Current playback rate
+ this.el().setAttribute('aria-valuenow', this.player().playbackRate());
+ }
+ /**
+   * This gets called when a `PlaybackRateMenuButton` is "clicked". See
+ * {@link ClickableComponent} for more detailed information on what a click can be.
+ *
+ * @param {EventTarget~Event} [event]
+ * The `keydown`, `tap`, or `click` event that caused this function to be
+ * called.
+ *
+ * @listens tap
+ * @listens click
+ */
+ ;
+
+ _proto.handleClick = function handleClick(event) {
+ // select next rate option
+ var currentRate = this.player().playbackRate();
+ var rates = this.playbackRates();
+    var currentIndex = rates.indexOf(currentRate); // this gets the next rate and will select the first one if the last one is currently selected
+
+ var newIndex = (currentIndex + 1) % rates.length;
+ this.player().playbackRate(rates[newIndex]);
+ }
+ /**
+ * On playbackrateschange, update the menu to account for the new items.
+ *
+ * @listens Player#playbackrateschange
+ */
+ ;
+
+ _proto.handlePlaybackRateschange = function handlePlaybackRateschange(event) {
+ this.update();
+ }
+ /**
+ * Get possible playback rates
+ *
+ * @return {Array}
+ * All possible playback rates
+ */
+ ;
+
+ _proto.playbackRates = function playbackRates() {
+ var player = this.player();
+ return player.playbackRates && player.playbackRates() || [];
+ }
+ /**
+ * Get whether playback rates is supported by the tech
+ * and an array of playback rates exists
+ *
+ * @return {boolean}
+ * Whether changing playback rate is supported
+ */
+ ;
+
+ _proto.playbackRateSupported = function playbackRateSupported() {
+ return this.player().tech_ && this.player().tech_.featuresPlaybackRate && this.playbackRates() && this.playbackRates().length > 0;
+ }
+ /**
+   * Hide playback rate controls when there are no playback rate options to select
+ *
+ * @param {EventTarget~Event} [event]
+ * The event that caused this function to run.
+ *
+ * @listens Player#loadstart
+ */
+ ;
+
+ _proto.updateVisibility = function updateVisibility(event) {
+ if (this.playbackRateSupported()) {
+ this.removeClass('vjs-hidden');
+ } else {
+ this.addClass('vjs-hidden');
+ }
+ }
+ /**
+ * Update button label when rate changed
+ *
+ * @param {EventTarget~Event} [event]
+ * The event that caused this function to run.
+ *
+ * @listens Player#ratechange
+ */
+ ;
+
+ _proto.updateLabel = function updateLabel(event) {
+ if (this.playbackRateSupported()) {
+ this.labelEl_.textContent = this.player().playbackRate() + 'x';
+ }
+ };
+
+ return PlaybackRateMenuButton;
+}(MenuButton);
+/**
+ * The text that should display over the `PlaybackRateMenuButton`s controls. Added for localization.
+ *
+ * @type {string}
+ * @private
+ */
+
+
+PlaybackRateMenuButton.prototype.controlText_ = 'Playback Rate';
+Component$1.registerComponent('PlaybackRateMenuButton', PlaybackRateMenuButton);
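+
+// Configuration sketch (illustrative; assumes an element with id="my-video").
+// The menu above is only shown when the tech supports rate changes and the
+// player reports a non-empty playbackRates() list, which normally comes from
+// the playbackRates player option:
+//
+//   var player = videojs('my-video', {
+//     playbackRates: [0.5, 1, 1.5, 2]
+//   });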
+
+/**
+ * Just an empty spacer element that can be used as an append point for plugins, etc.
+ * Also can be used to create space between elements when necessary.
+ *
+ * @extends Component
+ */
+
+var Spacer = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(Spacer, _Component);
+
+ function Spacer() {
+ return _Component.apply(this, arguments) || this;
+ }
+
+ var _proto = Spacer.prototype;
+
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-spacer " + _Component.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ ;
+
+ _proto.createEl = function createEl(tag, props, attributes) {
+ if (tag === void 0) {
+ tag = 'div';
+ }
+
+ if (props === void 0) {
+ props = {};
+ }
+
+ if (attributes === void 0) {
+ attributes = {};
+ }
+
+ if (!props.className) {
+ props.className = this.buildCSSClass();
+ }
+
+ return _Component.prototype.createEl.call(this, tag, props, attributes);
+ };
+
+ return Spacer;
+}(Component$1);
+
+Component$1.registerComponent('Spacer', Spacer);
+
+/**
+ * Spacer specifically meant to be used as an insertion point for new plugins, etc.
+ *
+ * @extends Spacer
+ */
+
+var CustomControlSpacer = /*#__PURE__*/function (_Spacer) {
+ _inheritsLoose(CustomControlSpacer, _Spacer);
+
+ function CustomControlSpacer() {
+ return _Spacer.apply(this, arguments) || this;
+ }
+
+ var _proto = CustomControlSpacer.prototype;
+
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ */
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-custom-control-spacer " + _Spacer.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ return _Spacer.prototype.createEl.call(this, 'div', {
+ className: this.buildCSSClass(),
+ // No-flex/table-cell mode requires there be some content
+ // in the cell to fill the remaining space of the table.
+ textContent: "\xA0"
+ });
+ };
+
+ return CustomControlSpacer;
+}(Spacer);
+
+Component$1.registerComponent('CustomControlSpacer', CustomControlSpacer);
+
+/**
+ * Container of main controls.
+ *
+ * @extends Component
+ */
+
+var ControlBar = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(ControlBar, _Component);
+
+ function ControlBar() {
+ return _Component.apply(this, arguments) || this;
+ }
+
+ var _proto = ControlBar.prototype;
+
+ /**
+ * Create the `Component`'s DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-control-bar',
+ dir: 'ltr'
+ });
+ };
+
+ return ControlBar;
+}(Component$1);
+/**
+ * Default options for `ControlBar`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+ControlBar.prototype.options_ = {
+ children: ['playToggle', 'volumePanel', 'currentTimeDisplay', 'timeDivider', 'durationDisplay', 'progressControl', 'liveDisplay', 'seekToLive', 'remainingTimeDisplay', 'customControlSpacer', 'playbackRateMenuButton', 'chaptersButton', 'descriptionsButton', 'subsCapsButton', 'audioTrackButton', 'fullscreenToggle']
+};
+
+if ('exitPictureInPicture' in document) {
+ ControlBar.prototype.options_.children.splice(ControlBar.prototype.options_.children.length - 1, 0, 'pictureInPictureToggle');
+}
+
+Component$1.registerComponent('ControlBar', ControlBar);
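+
+// Configuration sketch (illustrative; assumes an element with id="my-video").
+// The children array above is the default control bar layout; it can be
+// trimmed or reordered per player through the controlBar option:
+//
+//   var player = videojs('my-video', {
+//     controlBar: {
+//       children: ['playToggle', 'progressControl', 'fullscreenToggle']
+//     }
+//   });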
+
+/**
+ * A display that indicates an error has occurred. This means that the video
+ * is unplayable.
+ *
+ * @extends ModalDialog
+ */
+
+var ErrorDisplay = /*#__PURE__*/function (_ModalDialog) {
+ _inheritsLoose(ErrorDisplay, _ModalDialog);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function ErrorDisplay(player, options) {
+ var _this;
+
+ _this = _ModalDialog.call(this, player, options) || this;
+
+ _this.on(player, 'error', function (e) {
+ return _this.open(e);
+ });
+
+ return _this;
+ }
+ /**
+ * Builds the default DOM `className`.
+ *
+ * @return {string}
+ * The DOM `className` for this object.
+ *
+ * @deprecated Since version 5.
+ */
+
+
+ var _proto = ErrorDisplay.prototype;
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return "vjs-error-display " + _ModalDialog.prototype.buildCSSClass.call(this);
+ }
+ /**
+ * Gets the localized error message based on the `Player`s error.
+ *
+ * @return {string}
+ * The `Player`s error message localized or an empty string.
+ */
+ ;
+
+ _proto.content = function content() {
+ var error = this.player().error();
+ return error ? this.localize(error.message) : '';
+ };
+
+ return ErrorDisplay;
+}(ModalDialog);
+/**
+ * The default options for an `ErrorDisplay`.
+ *
+ * @private
+ */
+
+
+ErrorDisplay.prototype.options_ = _extends({}, ModalDialog.prototype.options_, {
+ pauseOnOpen: false,
+ fillAlways: true,
+ temporary: false,
+ uncloseable: true
+});
+Component$1.registerComponent('ErrorDisplay', ErrorDisplay);
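+
+// Usage sketch (illustrative; assumes an existing player instance). The modal
+// opens automatically on the player's 'error' event, so application code
+// usually only needs to inspect player.error() for details:
+//
+//   player.on('error', function () {
+//     var err = player.error();
+//     console.log('playback error:', err && err.code, err && err.message);
+//   });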
+
+var LOCAL_STORAGE_KEY$1 = 'vjs-text-track-settings';
+var COLOR_BLACK = ['#000', 'Black'];
+var COLOR_BLUE = ['#00F', 'Blue'];
+var COLOR_CYAN = ['#0FF', 'Cyan'];
+var COLOR_GREEN = ['#0F0', 'Green'];
+var COLOR_MAGENTA = ['#F0F', 'Magenta'];
+var COLOR_RED = ['#F00', 'Red'];
+var COLOR_WHITE = ['#FFF', 'White'];
+var COLOR_YELLOW = ['#FF0', 'Yellow'];
+var OPACITY_OPAQUE = ['1', 'Opaque'];
+var OPACITY_SEMI = ['0.5', 'Semi-Transparent'];
+var OPACITY_TRANS = ['0', 'Transparent']; // Configuration for the various elements in the DOM of this component.
+//
+// Possible keys include:
+//
+// `default`:
+// The default option index. Only needs to be provided if not zero.
+// `parser`:
+// A function which is used to parse the value from the selected option in
+// a customized way.
+// `selector`:
+// The selector used to find the associated element.
+
+var selectConfigs = {
+ backgroundColor: {
+ selector: '.vjs-bg-color > select',
+ id: 'captions-background-color-%s',
+ label: 'Color',
+ options: [COLOR_BLACK, COLOR_WHITE, COLOR_RED, COLOR_GREEN, COLOR_BLUE, COLOR_YELLOW, COLOR_MAGENTA, COLOR_CYAN]
+ },
+ backgroundOpacity: {
+ selector: '.vjs-bg-opacity > select',
+ id: 'captions-background-opacity-%s',
+ label: 'Transparency',
+ options: [OPACITY_OPAQUE, OPACITY_SEMI, OPACITY_TRANS]
+ },
+ color: {
+ selector: '.vjs-fg-color > select',
+ id: 'captions-foreground-color-%s',
+ label: 'Color',
+ options: [COLOR_WHITE, COLOR_BLACK, COLOR_RED, COLOR_GREEN, COLOR_BLUE, COLOR_YELLOW, COLOR_MAGENTA, COLOR_CYAN]
+ },
+ edgeStyle: {
+ selector: '.vjs-edge-style > select',
+ id: '%s',
+ label: 'Text Edge Style',
+ options: [['none', 'None'], ['raised', 'Raised'], ['depressed', 'Depressed'], ['uniform', 'Uniform'], ['dropshadow', 'Dropshadow']]
+ },
+ fontFamily: {
+ selector: '.vjs-font-family > select',
+ id: 'captions-font-family-%s',
+ label: 'Font Family',
+ options: [['proportionalSansSerif', 'Proportional Sans-Serif'], ['monospaceSansSerif', 'Monospace Sans-Serif'], ['proportionalSerif', 'Proportional Serif'], ['monospaceSerif', 'Monospace Serif'], ['casual', 'Casual'], ['script', 'Script'], ['small-caps', 'Small Caps']]
+ },
+ fontPercent: {
+ selector: '.vjs-font-percent > select',
+ id: 'captions-font-size-%s',
+ label: 'Font Size',
+ options: [['0.50', '50%'], ['0.75', '75%'], ['1.00', '100%'], ['1.25', '125%'], ['1.50', '150%'], ['1.75', '175%'], ['2.00', '200%'], ['3.00', '300%'], ['4.00', '400%']],
+ "default": 2,
+ parser: function parser(v) {
+ return v === '1.00' ? null : Number(v);
+ }
+ },
+ textOpacity: {
+ selector: '.vjs-text-opacity > select',
+ id: 'captions-foreground-opacity-%s',
+ label: 'Transparency',
+ options: [OPACITY_OPAQUE, OPACITY_SEMI]
+ },
+ // Options for this object are defined below.
+ windowColor: {
+ selector: '.vjs-window-color > select',
+ id: 'captions-window-color-%s',
+ label: 'Color'
+ },
+ // Options for this object are defined below.
+ windowOpacity: {
+ selector: '.vjs-window-opacity > select',
+ id: 'captions-window-opacity-%s',
+ label: 'Transparency',
+ options: [OPACITY_TRANS, OPACITY_SEMI, OPACITY_OPAQUE]
+ }
+};
+selectConfigs.windowColor.options = selectConfigs.backgroundColor.options;
+/**
+ * Get the actual value of an option.
+ *
+ * @param {string} value
+ * The value to get
+ *
+ * @param {Function} [parser]
+ * Optional function to adjust the value.
+ *
+ * @return {Mixed}
+ * - Will be `undefined` if no value exists
+ * - Will be `undefined` if the given value is "none".
+ * - Will be the actual value otherwise.
+ *
+ * @private
+ */
+
+function parseOptionValue(value, parser) {
+ if (parser) {
+ value = parser(value);
+ }
+
+ if (value && value !== 'none') {
+ return value;
+ }
+}
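+
+// Worked example (illustrative) of how an optional parser interacts with
+// parseOptionValue: the fontPercent parser above maps the '1.00' default to
+// null, so it is treated like "no value" when the settings are read back:
+//
+//   parseOptionValue('1.00', selectConfigs.fontPercent.parser); // undefined
+//   parseOptionValue('1.25', selectConfigs.fontPercent.parser); // 1.25
+//   parseOptionValue('none');                                   // undefined
+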
+/**
+ * Gets the value of the selected <option> element within a <select> element.
+ *
+ * @param {Element} el
+ * the element to look in
+ *
+ * @param {Function} [parser]
+ * Optional function to adjust the value.
+ *
+ * @return {Mixed}
+ * - Will be `undefined` if no value exists
+ * - Will be `undefined` if the given value is "none".
+ * - Will be the actual value otherwise.
+ *
+ * @private
+ */
+
+
+function getSelectedOptionValue(el, parser) {
+ var value = el.options[el.options.selectedIndex].value;
+ return parseOptionValue(value, parser);
+}
+/**
+ * Sets the selected <option> element within a <select> element based on a
+ * given value.
+ *
+ * @param {Element} el
+ * The element to look in.
+ *
+ * @param {string} value
+ * the property to look on.
+ *
+ * @param {Function} [parser]
+ * Optional function to adjust the value before comparing.
+ *
+ * @private
+ */
+
+
+function setSelectedOption(el, value, parser) {
+ if (!value) {
+ return;
+ }
+
+ for (var i = 0; i < el.options.length; i++) {
+ if (parseOptionValue(el.options[i].value, parser) === value) {
+ el.selectedIndex = i;
+ break;
+ }
+ }
+}
+/**
+ * Manipulate Text Tracks settings.
+ *
+ * @extends ModalDialog
+ */
+
+
+var TextTrackSettings = /*#__PURE__*/function (_ModalDialog) {
+ _inheritsLoose(TextTrackSettings, _ModalDialog);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function TextTrackSettings(player, options) {
+ var _this;
+
+ options.temporary = false;
+ _this = _ModalDialog.call(this, player, options) || this;
+ _this.updateDisplay = _this.updateDisplay.bind(_assertThisInitialized(_this)); // fill the modal and pretend we have opened it
+
+ _this.fill();
+
+ _this.hasBeenOpened_ = _this.hasBeenFilled_ = true;
+ _this.endDialog = createEl('p', {
+ className: 'vjs-control-text',
+ textContent: _this.localize('End of dialog window.')
+ });
+
+ _this.el().appendChild(_this.endDialog);
+
+ _this.setDefaults(); // Grab `persistTextTrackSettings` from the player options if not passed in child options
+
+
+ if (options.persistTextTrackSettings === undefined) {
+ _this.options_.persistTextTrackSettings = _this.options_.playerOptions.persistTextTrackSettings;
+ }
+
+ _this.on(_this.$('.vjs-done-button'), 'click', function () {
+ _this.saveSettings();
+
+ _this.close();
+ });
+
+ _this.on(_this.$('.vjs-default-button'), 'click', function () {
+ _this.setDefaults();
+
+ _this.updateDisplay();
+ });
+
+ each(selectConfigs, function (config) {
+ _this.on(_this.$(config.selector), 'change', _this.updateDisplay);
+ });
+
+ if (_this.options_.persistTextTrackSettings) {
+ _this.restoreSettings();
+ }
+
+ return _this;
+ }
+
+ var _proto = TextTrackSettings.prototype;
+
+ _proto.dispose = function dispose() {
+ this.endDialog = null;
+
+ _ModalDialog.prototype.dispose.call(this);
+ }
+ /**
+   * Create a <select> element with configured options.
+ *
+ * @param {string} key
+ * Configuration key to use during creation.
+ *
+ * @return {string}
+ * An HTML string.
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElSelect_ = function createElSelect_(key, legendId, type) {
+ var _this2 = this;
+
+ if (legendId === void 0) {
+ legendId = '';
+ }
+
+ if (type === void 0) {
+ type = 'label';
+ }
+
+ var config = selectConfigs[key];
+ var id = config.id.replace('%s', this.id_);
+ var selectLabelledbyIds = [legendId, id].join(' ').trim();
+    return ["<" + type + " id=\"" + id + "\" class=\"" + (type === 'label' ? 'vjs-label' : '') + "\">", this.localize(config.label), "</" + type + ">", "<select aria-labelledby=\"" + selectLabelledbyIds + "\">"].concat(config.options.map(function (o) {
+      var optionId = id + '-' + o[1].replace(/\W+/g, '');
+      return ["<option id=\"" + optionId + "\" value=\"" + o[0] + "\" ", "aria-labelledby=\"" + selectLabelledbyIds + " " + optionId + "\">", _this2.localize(o[1]), '</option>'].join('');
+    })).concat('</select>').join('');
+ }
+ /**
+ * Create foreground color element for the component
+ *
+ * @return {string}
+ * An HTML string.
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElFgColor_ = function createElFgColor_() {
+ var legendId = "captions-text-legend-" + this.id_;
+    return ['<fieldset class="vjs-fg-color vjs-track-setting">', "<legend id=\"" + legendId + "\">", this.localize('Text'), '</legend>', this.createElSelect_('color', legendId), '<span class="vjs-text-opacity vjs-opacity">', this.createElSelect_('textOpacity', legendId), '</span>', '</fieldset>'].join('');
+ }
+ /**
+ * Create background color element for the component
+ *
+ * @return {string}
+ * An HTML string.
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElBgColor_ = function createElBgColor_() {
+ var legendId = "captions-background-" + this.id_;
+    return ['<fieldset class="vjs-bg-color vjs-track-setting">', "<legend id=\"" + legendId + "\">", this.localize('Background'), '</legend>', this.createElSelect_('backgroundColor', legendId), '<span class="vjs-bg-opacity vjs-opacity">', this.createElSelect_('backgroundOpacity', legendId), '</span>', '</fieldset>'].join('');
+ }
+ /**
+ * Create window color element for the component
+ *
+ * @return {string}
+ * An HTML string.
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElWinColor_ = function createElWinColor_() {
+ var legendId = "captions-window-" + this.id_;
+    return ['<fieldset class="vjs-window-color vjs-track-setting">', "<legend id=\"" + legendId + "\">", this.localize('Window'), '</legend>', this.createElSelect_('windowColor', legendId), '<span class="vjs-window-opacity vjs-opacity">', this.createElSelect_('windowOpacity', legendId), '</span>', '</fieldset>'].join('');
+ }
+ /**
+ * Create color elements for the component
+ *
+ * @return {Element}
+ * The element that was created
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElColors_ = function createElColors_() {
+ return createEl('div', {
+ className: 'vjs-track-settings-colors',
+ innerHTML: [this.createElFgColor_(), this.createElBgColor_(), this.createElWinColor_()].join('')
+ });
+ }
+ /**
+ * Create font elements for the component
+ *
+ * @return {Element}
+ * The element that was created.
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElFont_ = function createElFont_() {
+ return createEl('div', {
+ className: 'vjs-track-settings-font',
+      innerHTML: ['<fieldset class="vjs-font-percent vjs-track-setting">', this.createElSelect_('fontPercent', '', 'legend'), '</fieldset>', '<fieldset class="vjs-edge-style vjs-track-setting">', this.createElSelect_('edgeStyle', '', 'legend'), '</fieldset>', '<fieldset class="vjs-font-family vjs-track-setting">', this.createElSelect_('fontFamily', '', 'legend'), '</fieldset>'].join('')
+ });
+ }
+ /**
+ * Create controls for the component
+ *
+ * @return {Element}
+ * The element that was created.
+ *
+ * @private
+ */
+ ;
+
+ _proto.createElControls_ = function createElControls_() {
+ var defaultsDescription = this.localize('restore all settings to the default values');
+ return createEl('div', {
+ className: 'vjs-track-settings-controls',
+      innerHTML: ["<button type=\"button\" class=\"vjs-default-button\" title=\"" + defaultsDescription + "\">", this.localize('Reset'), "<span class=\"vjs-control-text\"> " + defaultsDescription + "</span>", '</button>', "<button type=\"button\" class=\"vjs-done-button\">" + this.localize('Done') + "</button>"].join('')
+ });
+ };
+
+ _proto.content = function content() {
+ return [this.createElColors_(), this.createElFont_(), this.createElControls_()];
+ };
+
+ _proto.label = function label() {
+ return this.localize('Caption Settings Dialog');
+ };
+
+ _proto.description = function description() {
+ return this.localize('Beginning of dialog window. Escape will cancel and close the window.');
+ };
+
+ _proto.buildCSSClass = function buildCSSClass() {
+ return _ModalDialog.prototype.buildCSSClass.call(this) + ' vjs-text-track-settings';
+ }
+ /**
+ * Gets an object of text track settings (or null).
+ *
+ * @return {Object}
+ * An object with config values parsed from the DOM or localStorage.
+ */
+ ;
+
+ _proto.getValues = function getValues() {
+ var _this3 = this;
+
+ return reduce(selectConfigs, function (accum, config, key) {
+ var value = getSelectedOptionValue(_this3.$(config.selector), config.parser);
+
+ if (value !== undefined) {
+ accum[key] = value;
+ }
+
+ return accum;
+ }, {});
+ }
+ /**
+ * Sets text track settings from an object of values.
+ *
+ * @param {Object} values
+ * An object with config values parsed from the DOM or localStorage.
+ */
+ ;
+
+ _proto.setValues = function setValues(values) {
+ var _this4 = this;
+
+ each(selectConfigs, function (config, key) {
+ setSelectedOption(_this4.$(config.selector), values[key], config.parser);
+ });
+ }
+ /**
+   * Sets all `<select>` elements to their default values.
+ */
+ ;
+
+ _proto.setDefaults = function setDefaults() {
+ var _this5 = this;
+
+ each(selectConfigs, function (config) {
+ var index = config.hasOwnProperty('default') ? config["default"] : 0;
+ _this5.$(config.selector).selectedIndex = index;
+ });
+ }
+ /**
+ * Restore texttrack settings from localStorage
+ */
+ ;
+
+ _proto.restoreSettings = function restoreSettings() {
+ var values;
+
+ try {
+ values = JSON.parse(window$1.localStorage.getItem(LOCAL_STORAGE_KEY$1));
+ } catch (err) {
+ log$1.warn(err);
+ }
+
+ if (values) {
+ this.setValues(values);
+ }
+ }
+ /**
+ * Save text track settings to localStorage
+ */
+ ;
+
+ _proto.saveSettings = function saveSettings() {
+ if (!this.options_.persistTextTrackSettings) {
+ return;
+ }
+
+ var values = this.getValues();
+
+ try {
+ if (Object.keys(values).length) {
+ window$1.localStorage.setItem(LOCAL_STORAGE_KEY$1, JSON.stringify(values));
+ } else {
+ window$1.localStorage.removeItem(LOCAL_STORAGE_KEY$1);
+ }
+ } catch (err) {
+ log$1.warn(err);
+ }
+ }
+ /**
+ * Update display of text track settings
+ */
+ ;
+
+ _proto.updateDisplay = function updateDisplay() {
+ var ttDisplay = this.player_.getChild('textTrackDisplay');
+
+ if (ttDisplay) {
+ ttDisplay.updateDisplay();
+ }
+ }
+ /**
+ * conditionally blur the element and refocus the captions button
+ *
+ * @private
+ */
+ ;
+
+ _proto.conditionalBlur_ = function conditionalBlur_() {
+ this.previouslyActiveEl_ = null;
+ var cb = this.player_.controlBar;
+ var subsCapsBtn = cb && cb.subsCapsButton;
+ var ccBtn = cb && cb.captionsButton;
+
+ if (subsCapsBtn) {
+ subsCapsBtn.focus();
+ } else if (ccBtn) {
+ ccBtn.focus();
+ }
+ };
+
+ return TextTrackSettings;
+}(ModalDialog);
+
+Component$1.registerComponent('TextTrackSettings', TextTrackSettings);
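+
+// Configuration sketch (illustrative; assumes an element with id="my-video").
+// The dialog only writes to localStorage (under 'vjs-text-track-settings')
+// when persistTextTrackSettings is enabled, which can be done per player:
+//
+//   var player = videojs('my-video', {
+//     persistTextTrackSettings: true
+//   });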
+
+/**
+ * A Resize Manager. It is in charge of triggering `playerresize` on the player in the right conditions.
+ *
+ * It'll either create an iframe and use a debounced resize handler on it or use the new {@link https://wicg.github.io/ResizeObserver/|ResizeObserver}.
+ *
+ * If the ResizeObserver is available natively, it will be used. A polyfill can be passed in as an option.
+ * If a `playerresize` event is not needed, the ResizeManager component can be removed from the player, see the example below.
+ * @example How to disable the resize manager
+ * const player = videojs('#vid', {
+ * resizeManager: false
+ * });
+ *
+ * @see {@link https://wicg.github.io/ResizeObserver/|ResizeObserver specification}
+ *
+ * @extends Component
+ */
+
+var ResizeManager = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(ResizeManager, _Component);
+
+ /**
+ * Create the ResizeManager.
+ *
+ * @param {Object} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of ResizeManager options.
+ *
+ * @param {Object} [options.ResizeObserver]
+ * A polyfill for ResizeObserver can be passed in here.
+ * If this is set to null it will ignore the native ResizeObserver and fall back to the iframe fallback.
+ */
+ function ResizeManager(player, options) {
+ var _this;
+
+ var RESIZE_OBSERVER_AVAILABLE = options.ResizeObserver || window$1.ResizeObserver; // if `null` was passed, we want to disable the ResizeObserver
+
+ if (options.ResizeObserver === null) {
+ RESIZE_OBSERVER_AVAILABLE = false;
+ } // Only create an element when ResizeObserver isn't available
+
+
+ var options_ = mergeOptions$3({
+ createEl: !RESIZE_OBSERVER_AVAILABLE,
+ reportTouchActivity: false
+ }, options);
+ _this = _Component.call(this, player, options_) || this;
+ _this.ResizeObserver = options.ResizeObserver || window$1.ResizeObserver;
+ _this.loadListener_ = null;
+ _this.resizeObserver_ = null;
+ _this.debouncedHandler_ = debounce(function () {
+ _this.resizeHandler();
+ }, 100, false, _assertThisInitialized(_this));
+
+ if (RESIZE_OBSERVER_AVAILABLE) {
+ _this.resizeObserver_ = new _this.ResizeObserver(_this.debouncedHandler_);
+
+ _this.resizeObserver_.observe(player.el());
+ } else {
+ _this.loadListener_ = function () {
+ if (!_this.el_ || !_this.el_.contentWindow) {
+ return;
+ }
+
+ var debouncedHandler_ = _this.debouncedHandler_;
+
+ var unloadListener_ = _this.unloadListener_ = function () {
+ off(this, 'resize', debouncedHandler_);
+ off(this, 'unload', unloadListener_);
+ unloadListener_ = null;
+ }; // safari and edge can unload the iframe before resizemanager dispose
+ // we have to dispose of event handlers correctly before that happens
+
+
+ on(_this.el_.contentWindow, 'unload', unloadListener_);
+ on(_this.el_.contentWindow, 'resize', debouncedHandler_);
+ };
+
+ _this.one('load', _this.loadListener_);
+ }
+
+ return _this;
+ }
+
+ var _proto = ResizeManager.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'iframe', {
+ className: 'vjs-resize-manager',
+ tabIndex: -1,
+ title: this.localize('No content')
+ }, {
+ 'aria-hidden': 'true'
+ });
+ }
+ /**
+ * Called when a resize is triggered on the iframe or a resize is observed via the ResizeObserver
+ *
+ * @fires Player#playerresize
+ */
+ ;
+
+ _proto.resizeHandler = function resizeHandler() {
+ /**
+ * Called when the player size has changed
+ *
+ * @event Player#playerresize
+ * @type {EventTarget~Event}
+ */
+ // make sure player is still around to trigger
+ // prevents this from causing an error after dispose
+ if (!this.player_ || !this.player_.trigger) {
+ return;
+ }
+
+ this.player_.trigger('playerresize');
+ };
+
+ _proto.dispose = function dispose() {
+ if (this.debouncedHandler_) {
+ this.debouncedHandler_.cancel();
+ }
+
+ if (this.resizeObserver_) {
+ if (this.player_.el()) {
+ this.resizeObserver_.unobserve(this.player_.el());
+ }
+
+ this.resizeObserver_.disconnect();
+ }
+
+ if (this.loadListener_) {
+ this.off('load', this.loadListener_);
+ }
+
+ if (this.el_ && this.el_.contentWindow && this.unloadListener_) {
+ this.unloadListener_.call(this.el_.contentWindow);
+ }
+
+ this.ResizeObserver = null;
+ this.resizeObserver = null;
+ this.debouncedHandler_ = null;
+ this.loadListener_ = null;
+
+ _Component.prototype.dispose.call(this);
+ };
+
+ return ResizeManager;
+}(Component$1);
+
+Component$1.registerComponent('ResizeManager', ResizeManager);
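+
+// Usage sketch (illustrative; assumes an existing player instance). The
+// manager's only job is to fire 'playerresize', so consumers typically just
+// listen for that event:
+//
+//   player.on('playerresize', function () {
+//     console.log(player.currentWidth() + 'x' + player.currentHeight());
+//   });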
+
+var defaults = {
+ trackingThreshold: 20,
+ liveTolerance: 15
+};
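+
+// Configuration sketch (illustrative; assumes a live source and an element
+// with id="my-live-video"). Both defaults above can be overridden per player
+// through the liveTracker option:
+//
+//   var player = videojs('my-live-video', {
+//     liveui: true,
+//     liveTracker: {
+//       trackingThreshold: 30,
+//       liveTolerance: 10
+//     }
+//   });
+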
+/*
+ track when we are at the live edge, and other helpers for live playback */
+
+/**
+ * A class for checking live current time and determining when the player
+ * is at or behind the live edge.
+ */
+
+var LiveTracker = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(LiveTracker, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {number} [options.trackingThreshold=20]
+ * Number of seconds of live window (seekableEnd - seekableStart) that
+ * media needs to have before the liveui will be shown.
+ *
+ * @param {number} [options.liveTolerance=15]
+ * Number of seconds behind live that we have to be
+ * before we will be considered non-live. Note that this will only
+ * be used when playing at the live edge. This allows large seekable end
+   * changes to not affect whether we are live or not.
+ */
+ function LiveTracker(player, options) {
+ var _this;
+
+ // LiveTracker does not need an element
+ var options_ = mergeOptions$3(defaults, options, {
+ createEl: false
+ });
+ _this = _Component.call(this, player, options_) || this;
+
+ _this.handleVisibilityChange_ = function (e) {
+ return _this.handleVisibilityChange(e);
+ };
+
+ _this.trackLiveHandler_ = function () {
+ return _this.trackLive_();
+ };
+
+ _this.handlePlay_ = function (e) {
+ return _this.handlePlay(e);
+ };
+
+ _this.handleFirstTimeupdate_ = function (e) {
+ return _this.handleFirstTimeupdate(e);
+ };
+
+ _this.handleSeeked_ = function (e) {
+ return _this.handleSeeked(e);
+ };
+
+ _this.seekToLiveEdge_ = function (e) {
+ return _this.seekToLiveEdge(e);
+ };
+
+ _this.reset_();
+
+ _this.on(_this.player_, 'durationchange', function (e) {
+ return _this.handleDurationchange(e);
+ }); // we should try to toggle tracking on canplay as native playback engines, like Safari
+ // may not have the proper values for things like seekableEnd until then
+
+
+ _this.on(_this.player_, 'canplay', function () {
+ return _this.toggleTracking();
+ }); // we don't need to track live playback if the document is hidden,
+ // also, tracking when the document is hidden can
+ // cause the CPU to spike and eventually crash the page on IE11.
+
+
+ if (IE_VERSION && 'hidden' in document && 'visibilityState' in document) {
+ _this.on(document, 'visibilitychange', _this.handleVisibilityChange_);
+ }
+
+ return _this;
+ }
+ /**
+ * toggle tracking based on document visibility
+ */
+
+
+ var _proto = LiveTracker.prototype;
+
+ _proto.handleVisibilityChange = function handleVisibilityChange() {
+ if (this.player_.duration() !== Infinity) {
+ return;
+ }
+
+ if (document.hidden) {
+ this.stopTracking();
+ } else {
+ this.startTracking();
+ }
+ }
+ /**
+ * all the functionality for tracking when seek end changes
+ * and for tracking how far past seek end we should be
+ */
+ ;
+
+ _proto.trackLive_ = function trackLive_() {
+ var seekable = this.player_.seekable(); // skip undefined seekable
+
+ if (!seekable || !seekable.length) {
+ return;
+ }
+
+ var newTime = Number(window$1.performance.now().toFixed(4));
+ var deltaTime = this.lastTime_ === -1 ? 0 : (newTime - this.lastTime_) / 1000;
+ this.lastTime_ = newTime;
+ this.pastSeekEnd_ = this.pastSeekEnd() + deltaTime;
+ var liveCurrentTime = this.liveCurrentTime();
+ var currentTime = this.player_.currentTime(); // we are behind live if any are true
+ // 1. the player is paused
+ // 2. the user seeked to a location 2 seconds away from live
+ // 3. the difference between live and current time is greater than
+ // liveTolerance, which defaults to 15s
+
+ var isBehind = this.player_.paused() || this.seekedBehindLive_ || Math.abs(liveCurrentTime - currentTime) > this.options_.liveTolerance; // we cannot be behind if
+ // 1. we have not seen a timeupdate yet
+ // 2. liveCurrentTime is Infinity, which happens on Android and Native Safari
+
+ if (!this.timeupdateSeen_ || liveCurrentTime === Infinity) {
+ isBehind = false;
+ }
+
+ if (isBehind !== this.behindLiveEdge_) {
+ this.behindLiveEdge_ = isBehind;
+ this.trigger('liveedgechange');
+ }
+ }
+ /**
+ * handle a durationchange event on the player
+ * and start/stop tracking accordingly.
+ */
+ ;
+
+ _proto.handleDurationchange = function handleDurationchange() {
+ this.toggleTracking();
+ }
+ /**
+ * start/stop tracking
+ */
+ ;
+
+ _proto.toggleTracking = function toggleTracking() {
+ if (this.player_.duration() === Infinity && this.liveWindow() >= this.options_.trackingThreshold) {
+ if (this.player_.options_.liveui) {
+ this.player_.addClass('vjs-liveui');
+ }
+
+ this.startTracking();
+ } else {
+ this.player_.removeClass('vjs-liveui');
+ this.stopTracking();
+ }
+ }
+ /**
+ * start tracking live playback
+ */
+ ;
+
+ _proto.startTracking = function startTracking() {
+ if (this.isTracking()) {
+ return;
+ } // If we haven't seen a timeupdate, we need to check whether playback
+ // began before this component started tracking. This can happen commonly
+ // when using autoplay.
+
+
+ if (!this.timeupdateSeen_) {
+ this.timeupdateSeen_ = this.player_.hasStarted();
+ }
+
+ this.trackingInterval_ = this.setInterval(this.trackLiveHandler_, UPDATE_REFRESH_INTERVAL);
+ this.trackLive_();
+ this.on(this.player_, ['play', 'pause'], this.trackLiveHandler_);
+
+ if (!this.timeupdateSeen_) {
+ this.one(this.player_, 'play', this.handlePlay_);
+ this.one(this.player_, 'timeupdate', this.handleFirstTimeupdate_);
+ } else {
+ this.on(this.player_, 'seeked', this.handleSeeked_);
+ }
+ }
+ /**
+ * handle the first timeupdate on the player if it wasn't already playing
+ * when live tracker started tracking.
+ */
+ ;
+
+ _proto.handleFirstTimeupdate = function handleFirstTimeupdate() {
+ this.timeupdateSeen_ = true;
+ this.on(this.player_, 'seeked', this.handleSeeked_);
+ }
+ /**
+ * Keep track of what time a seek starts, and listen for seeked
+ * to find where a seek ends.
+ */
+ ;
+
+ _proto.handleSeeked = function handleSeeked() {
+ var timeDiff = Math.abs(this.liveCurrentTime() - this.player_.currentTime());
+ this.seekedBehindLive_ = this.nextSeekedFromUser_ && timeDiff > 2;
+ this.nextSeekedFromUser_ = false;
+ this.trackLive_();
+ }
+ /**
+ * handle the first play on the player, and make sure that we seek
+ * right to the live edge.
+ */
+ ;
+
+ _proto.handlePlay = function handlePlay() {
+ this.one(this.player_, 'timeupdate', this.seekToLiveEdge_);
+ }
+ /**
+ * Stop tracking, and set all internal variables to
+ * their initial value.
+ */
+ ;
+
+ _proto.reset_ = function reset_() {
+ this.lastTime_ = -1;
+ this.pastSeekEnd_ = 0;
+ this.lastSeekEnd_ = -1;
+ this.behindLiveEdge_ = true;
+ this.timeupdateSeen_ = false;
+ this.seekedBehindLive_ = false;
+ this.nextSeekedFromUser_ = false;
+ this.clearInterval(this.trackingInterval_);
+ this.trackingInterval_ = null;
+ this.off(this.player_, ['play', 'pause'], this.trackLiveHandler_);
+ this.off(this.player_, 'seeked', this.handleSeeked_);
+ this.off(this.player_, 'play', this.handlePlay_);
+ this.off(this.player_, 'timeupdate', this.handleFirstTimeupdate_);
+ this.off(this.player_, 'timeupdate', this.seekToLiveEdge_);
+ }
+ /**
+ * The next seeked event is from the user, meaning that any seek
+ * more than 2s behind live will be considered genuinely behind live and
+ * liveTolerance will be ignored.
+ */
+ ;
+
+ _proto.nextSeekedFromUser = function nextSeekedFromUser() {
+ this.nextSeekedFromUser_ = true;
+ }
+ /**
+ * stop tracking live playback
+ */
+ ;
+
+ _proto.stopTracking = function stopTracking() {
+ if (!this.isTracking()) {
+ return;
+ }
+
+ this.reset_();
+ this.trigger('liveedgechange');
+ }
+ /**
+ * A helper to get the player seekable end
+ * so that we don't have to null check everywhere
+ *
+ * @return {number}
+ * The furthest seekable end or Infinity.
+ */
+ ;
+
+ _proto.seekableEnd = function seekableEnd() {
+ var seekable = this.player_.seekable();
+ var seekableEnds = [];
+ var i = seekable ? seekable.length : 0;
+
+ while (i--) {
+ seekableEnds.push(seekable.end(i));
+ } // grab the furthest seekable end after sorting, or if there are none
+ // default to Infinity
+
+
+ return seekableEnds.length ? seekableEnds.sort()[seekableEnds.length - 1] : Infinity;
+ }
+ /**
+ * A helper to get the player seekable start
+ * so that we don't have to null check everywhere
+ *
+ * @return {number}
+ * The earliest seekable start or 0.
+ */
+ ;
+
+ _proto.seekableStart = function seekableStart() {
+ var seekable = this.player_.seekable();
+ var seekableStarts = [];
+ var i = seekable ? seekable.length : 0;
+
+ while (i--) {
+ seekableStarts.push(seekable.start(i));
+ } // grab the first seekable start after sorting, or if there are none
+ // default to 0
+
+
+ return seekableStarts.length ? seekableStarts.sort()[0] : 0;
+ }
+ /**
+ * Get the live time window aka
+ * the amount of time between seekable start and
+ * live current time.
+ *
+ * @return {number}
+ * The amount of seconds that are seekable in
+ * the live video.
+ */
+ ;
+
+ _proto.liveWindow = function liveWindow() {
+ var liveCurrentTime = this.liveCurrentTime(); // if liveCurrentTime is Infinity then we don't have a liveWindow at all
+
+ if (liveCurrentTime === Infinity) {
+ return 0;
+ }
+
+ return liveCurrentTime - this.seekableStart();
+ }
+ /**
+ * Determines if the player is live, only checks if this component
+ * is tracking live playback or not
+ *
+ * @return {boolean}
+ * Whether liveTracker is tracking
+ */
+ ;
+
+ _proto.isLive = function isLive() {
+ return this.isTracking();
+ }
+ /**
+ * Determines if currentTime is at the live edge and won't fall behind
+ * on each seekableendchange
+ *
+ * @return {boolean}
+ * Whether playback is at the live edge
+ */
+ ;
+
+ _proto.atLiveEdge = function atLiveEdge() {
+ return !this.behindLiveEdge();
+ }
+ /**
+ * get what we expect the live current time to be
+ *
+ * @return {number}
+ * The expected live current time
+ */
+ ;
+
+ _proto.liveCurrentTime = function liveCurrentTime() {
+ return this.pastSeekEnd() + this.seekableEnd();
+ }
+ /**
+ * The number of seconds that have occurred after seekable end
+ * changed. This will be reset to 0 once seekable end changes.
+ *
+ * @return {number}
+ * Seconds past the current seekable end
+ */
+ ;
+
+ _proto.pastSeekEnd = function pastSeekEnd() {
+ var seekableEnd = this.seekableEnd();
+
+ if (this.lastSeekEnd_ !== -1 && seekableEnd !== this.lastSeekEnd_) {
+ this.pastSeekEnd_ = 0;
+ }
+
+ this.lastSeekEnd_ = seekableEnd;
+ return this.pastSeekEnd_;
+ }
+ /**
+ * Whether we are currently behind the live edge, i.e. currentTime will fall
+ * behind on a seekableendchange
+ *
+ * @return {boolean}
+ * If we are behind the live edge
+ */
+ ;
+
+ _proto.behindLiveEdge = function behindLiveEdge() {
+ return this.behindLiveEdge_;
+ }
+ /**
+ * Whether the live tracker is currently tracking or not.
+ */
+ ;
+
+ _proto.isTracking = function isTracking() {
+ return typeof this.trackingInterval_ === 'number';
+ }
+ /**
+ * Seek to the live edge if we are behind the live edge
+ */
+ ;
+
+ _proto.seekToLiveEdge = function seekToLiveEdge() {
+ this.seekedBehindLive_ = false;
+
+ if (this.atLiveEdge()) {
+ return;
+ }
+
+ this.nextSeekedFromUser_ = false;
+ this.player_.currentTime(this.liveCurrentTime());
+ }
+ /**
+ * Dispose of liveTracker
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ this.off(document, 'visibilitychange', this.handleVisibilityChange_);
+ this.stopTracking();
+
+ _Component.prototype.dispose.call(this);
+ };
+
+ return LiveTracker;
+}(Component$1);
+
+Component$1.registerComponent('LiveTracker', LiveTracker);
+
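+/**
+ * Usage sketch for the live tracking helpers above. Assumes a player that is
+ * playing a live source; the element id `live-video` and the auto-return
+ * behaviour are illustrative.
+ *
+ *   var player = videojs('live-video');
+ *   var liveTracker = player.liveTracker;
+ *
+ *   liveTracker.on('liveedgechange', function () {
+ *     if (liveTracker.isLive() && liveTracker.behindLiveEdge()) {
+ *       // jump back to the live edge whenever playback falls behind it
+ *       liveTracker.seekToLiveEdge();
+ *     }
+ *   });
+ */
+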
+/**
+ * This function is used to fire a sourceset when there is something
+ * similar to `mediaEl.load()` being called. It will try to find the source via
+ * the `src` attribute and then the `<source>` elements. It will then fire `sourceset`
+ * with the source that was found or empty string if we cannot know. If it cannot
+ * find a source then `sourceset` will not be fired.
+ *
+ * @param {Html5} tech
+ * The tech object that sourceset was setup on
+ *
+ * @return {boolean}
+ * returns false if the sourceset was not fired and true otherwise.
+ */
+
+var sourcesetLoad = function sourcesetLoad(tech) {
+ var el = tech.el(); // if `el.src` is set, that source will be loaded.
+
+ if (el.hasAttribute('src')) {
+ tech.triggerSourceset(el.src);
+ return true;
+ }
+ /**
+ * Since there isn't a src property on the media element, source elements will be used for
+ * implementing the source selection algorithm. This happens asynchronously and
+ * for most cases where there is more than one source we cannot tell what source will
+ * be loaded, without re-implementing the source selection algorithm. At this time we are not
+ * going to do that. There are three special cases that we do handle here though:
+ *
+ * 1. If there are no sources, do not fire `sourceset`.
+ * 2. If there is only one `<source>` with a `src` property/attribute that is our `src`
+ * 3. If there is more than one `<source>` but all of them have the same `src` url.
+ * That will be our src.
+ */
+
+
+ var sources = tech.$$('source');
+ var srcUrls = [];
+ var src = ''; // if there are no sources, do not fire sourceset
+
+ if (!sources.length) {
+ return false;
+ } // only count valid/non-duplicate source elements
+
+
+ for (var i = 0; i < sources.length; i++) {
+ var url = sources[i].src;
+
+ if (url && srcUrls.indexOf(url) === -1) {
+ srcUrls.push(url);
+ }
+ } // there were no valid sources
+
+
+ if (!srcUrls.length) {
+ return false;
+ } // there is only one valid source element url
+ // use that
+
+
+ if (srcUrls.length === 1) {
+ src = srcUrls[0];
+ }
+
+ tech.triggerSourceset(src);
+ return true;
+};
+/**
+ * our implementation of an `innerHTML` descriptor for browsers
+ * that do not have one.
+ */
+
+
+var innerHTMLDescriptorPolyfill = Object.defineProperty({}, 'innerHTML', {
+ get: function get() {
+ return this.cloneNode(true).innerHTML;
+ },
+ set: function set(v) {
+ // make a dummy node to use innerHTML on
+ var dummy = document.createElement(this.nodeName.toLowerCase()); // set innerHTML to the value provided
+
+ dummy.innerHTML = v; // make a document fragment to hold the nodes from dummy
+
+ var docFrag = document.createDocumentFragment(); // copy all of the nodes created by the innerHTML on dummy
+ // to the document fragment
+
+ while (dummy.childNodes.length) {
+ docFrag.appendChild(dummy.childNodes[0]);
+ } // remove content
+
+
+ this.innerText = ''; // now we add all of that html in one go by appending the
+ // document fragment. This is how innerHTML does it.
+
+ window$1.Element.prototype.appendChild.call(this, docFrag); // then return the result that innerHTML's setter would
+
+ return this.innerHTML;
+ }
+});
+/**
+ * Get a property descriptor given a list of priorities and the
+ * property to get.
+ */
+
+var getDescriptor = function getDescriptor(priority, prop) {
+ var descriptor = {};
+
+ for (var i = 0; i < priority.length; i++) {
+ descriptor = Object.getOwnPropertyDescriptor(priority[i], prop);
+
+ if (descriptor && descriptor.set && descriptor.get) {
+ break;
+ }
+ }
+
+ descriptor.enumerable = true;
+ descriptor.configurable = true;
+ return descriptor;
+};
+
+var getInnerHTMLDescriptor = function getInnerHTMLDescriptor(tech) {
+ return getDescriptor([tech.el(), window$1.HTMLMediaElement.prototype, window$1.Element.prototype, innerHTMLDescriptorPolyfill], 'innerHTML');
+};
+/**
+ * Patches browser internal functions so that we can tell synchronously
+ * if a `<source>` was appended to the media element. For some reason this
+ * causes a `sourceset` if the media element is ready and has no source.
+ * This happens when:
+ * - The page has just loaded and the media element does not have a source.
+ * - The media element was emptied of all sources, then `load()` was called.
+ *
+ * It does this by patching the following functions/properties when they are supported:
+ *
+ * - `append()` - can be used to add a `<source>` element to the media element
+ * - `appendChild()` - can be used to add a `<source>` element to the media element
+ * - `insertAdjacentHTML()` - can be used to add a `<source>` element to the media element
+ * - `innerHTML` - can be used to add a `<source>` element to the media element
+ *
+ * @param {Html5} tech
+ * The tech object that sourceset is being setup on.
+ */
+
+
+var firstSourceWatch = function firstSourceWatch(tech) {
+ var el = tech.el(); // make sure firstSourceWatch isn't set up twice.
+
+ if (el.resetSourceWatch_) {
+ return;
+ }
+
+ var old = {};
+ var innerDescriptor = getInnerHTMLDescriptor(tech);
+
+ var appendWrapper = function appendWrapper(appendFn) {
+ return function () {
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ var retval = appendFn.apply(el, args);
+ sourcesetLoad(tech);
+ return retval;
+ };
+ };
+
+ ['append', 'appendChild', 'insertAdjacentHTML'].forEach(function (k) {
+ if (!el[k]) {
+ return;
+ } // store the old function
+
+
+ old[k] = el[k]; // call the old function with a sourceset if a source
+ // was loaded
+
+ el[k] = appendWrapper(old[k]);
+ });
+ Object.defineProperty(el, 'innerHTML', mergeOptions$3(innerDescriptor, {
+ set: appendWrapper(innerDescriptor.set)
+ }));
+
+ el.resetSourceWatch_ = function () {
+ el.resetSourceWatch_ = null;
+ Object.keys(old).forEach(function (k) {
+ el[k] = old[k];
+ });
+ Object.defineProperty(el, 'innerHTML', innerDescriptor);
+ }; // on the first sourceset, we need to revert our changes
+
+
+ tech.one('sourceset', el.resetSourceWatch_);
+};
+/**
+ * our implementation of a `src` descriptor for browsers
+ * that do not have one.
+ */
+
+
+var srcDescriptorPolyfill = Object.defineProperty({}, 'src', {
+ get: function get() {
+ if (this.hasAttribute('src')) {
+ return getAbsoluteURL(window$1.Element.prototype.getAttribute.call(this, 'src'));
+ }
+
+ return '';
+ },
+ set: function set(v) {
+ window$1.Element.prototype.setAttribute.call(this, 'src', v);
+ return v;
+ }
+});
+
+var getSrcDescriptor = function getSrcDescriptor(tech) {
+ return getDescriptor([tech.el(), window$1.HTMLMediaElement.prototype, srcDescriptorPolyfill], 'src');
+};
+/**
+ * setup `sourceset` handling on the `Html5` tech. This function
+ * patches the following element properties/functions:
+ *
+ * - `src` - to determine when `src` is set
+ * - `setAttribute()` - to determine when `src` is set
+ * - `load()` - this re-triggers the source selection algorithm, and can
+ * cause a sourceset.
+ *
+ * If there is no source when we are adding `sourceset` support or during a `load()`
+ * we also patch the functions listed in `firstSourceWatch`.
+ *
+ * @param {Html5} tech
+ * The tech to patch
+ */
+
+
+var setupSourceset = function setupSourceset(tech) {
+ if (!tech.featuresSourceset) {
+ return;
+ }
+
+ var el = tech.el(); // make sure sourceset isn't set up twice.
+
+ if (el.resetSourceset_) {
+ return;
+ }
+
+ var srcDescriptor = getSrcDescriptor(tech);
+ var oldSetAttribute = el.setAttribute;
+ var oldLoad = el.load;
+ Object.defineProperty(el, 'src', mergeOptions$3(srcDescriptor, {
+ set: function set(v) {
+ var retval = srcDescriptor.set.call(el, v); // we use the getter here to get the actual value set on src
+
+ tech.triggerSourceset(el.src);
+ return retval;
+ }
+ }));
+
+ el.setAttribute = function (n, v) {
+ var retval = oldSetAttribute.call(el, n, v);
+
+ if (/src/i.test(n)) {
+ tech.triggerSourceset(el.src);
+ }
+
+ return retval;
+ };
+
+ el.load = function () {
+ var retval = oldLoad.call(el); // if load was called, but there was no source to fire
+ // sourceset on. We have to watch for a source append
+ // as that can trigger a `sourceset` when the media element
+ // has no source
+
+ if (!sourcesetLoad(tech)) {
+ tech.triggerSourceset('');
+ firstSourceWatch(tech);
+ }
+
+ return retval;
+ };
+
+ if (el.currentSrc) {
+ tech.triggerSourceset(el.currentSrc);
+ } else if (!sourcesetLoad(tech)) {
+ firstSourceWatch(tech);
+ }
+
+ el.resetSourceset_ = function () {
+ el.resetSourceset_ = null;
+ el.load = oldLoad;
+ el.setAttribute = oldSetAttribute;
+ Object.defineProperty(el, 'src', srcDescriptor);
+
+ if (el.resetSourceWatch_) {
+ el.resetSourceWatch_();
+ }
+ };
+};
+
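+/**
+ * Usage sketch for the `sourceset` handling set up above. Assumes a player
+ * whose tech reports `featuresSourceset`; the element id is illustrative.
+ *
+ *   var player = videojs('my-video');
+ *
+ *   player.on('sourceset', function (e) {
+ *     // e.src is the source that was set, or '' when it could not be determined
+ *     console.log('source changed to', e.src);
+ *   });
+ */
+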
+/**
+ * Object.defineProperty but "lazy", which means that the value is only set after
+ * it is retrieved the first time, rather than being set right away.
+ *
+ * @param {Object} obj the object to set the property on
+ * @param {string} key the key for the property to set
+ * @param {Function} getValue the function used to get the value when it is needed.
+ * @param {boolean} setter whether a setter should be allowed or not
+ */
+var defineLazyProperty = function defineLazyProperty(obj, key, getValue, setter) {
+ if (setter === void 0) {
+ setter = true;
+ }
+
+ var set = function set(value) {
+ return Object.defineProperty(obj, key, {
+ value: value,
+ enumerable: true,
+ writable: true
+ });
+ };
+
+ var options = {
+ configurable: true,
+ enumerable: true,
+ get: function get() {
+ var value = getValue();
+ set(value);
+ return value;
+ }
+ };
+
+ if (setter) {
+ options.set = set;
+ }
+
+ return Object.defineProperty(obj, key, options);
+};
+
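+/**
+ * Usage sketch for `defineLazyProperty`: the getter runs once on first access,
+ * then the computed value replaces the accessor with a plain writable property.
+ * The object and property names below are illustrative.
+ *
+ *   var cache = {};
+ *
+ *   defineLazyProperty(cache, 'mp4Support', function () {
+ *     // expensive detection work happens only on first access
+ *     return document.createElement('video').canPlayType('video/mp4');
+ *   });
+ *
+ *   cache.mp4Support; // computed here
+ *   cache.mp4Support; // plain property from now on
+ */
+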
+/**
+ * HTML5 Media Controller - Wrapper for HTML5 Media API
+ *
+ * @mixes Tech~SourceHandlerAdditions
+ * @extends Tech
+ */
+
+var Html5 = /*#__PURE__*/function (_Tech) {
+ _inheritsLoose(Html5, _Tech);
+
+ /**
+ * Create an instance of this Tech.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {Component~ReadyCallback} ready
+ * Callback function to call when the `HTML5` Tech is ready.
+ */
+ function Html5(options, ready) {
+ var _this;
+
+ _this = _Tech.call(this, options, ready) || this;
+ var source = options.source;
+ var crossoriginTracks = false;
+ _this.featuresVideoFrameCallback = _this.featuresVideoFrameCallback && _this.el_.tagName === 'VIDEO'; // Set the source if one is provided
+ // 1) Check if the source is new (if not, we want to keep the original so playback isn't interrupted)
+ // 2) Check to see if the network state of the tag was failed at init, and if so, reset the source
+ // anyway so the error gets fired.
+
+ if (source && (_this.el_.currentSrc !== source.src || options.tag && options.tag.initNetworkState_ === 3)) {
+ _this.setSource(source);
+ } else {
+ _this.handleLateInit_(_this.el_);
+ } // setup sourceset after late sourceset/init
+
+
+ if (options.enableSourceset) {
+ _this.setupSourcesetHandling_();
+ }
+
+ _this.isScrubbing_ = false;
+
+ if (_this.el_.hasChildNodes()) {
+ var nodes = _this.el_.childNodes;
+ var nodesLength = nodes.length;
+ var removeNodes = [];
+
+ while (nodesLength--) {
+ var node = nodes[nodesLength];
+ var nodeName = node.nodeName.toLowerCase();
+
+ if (nodeName === 'track') {
+ if (!_this.featuresNativeTextTracks) {
+ // Empty video tag tracks so the built-in player doesn't use them also.
+ // This may not be fast enough to stop HTML5 browsers from reading the tags
+ // so we'll need to turn off any default tracks if we're manually doing
+ // captions and subtitles. videoElement.textTracks
+ removeNodes.push(node);
+ } else {
+ // store HTMLTrackElement and TextTrack to remote list
+ _this.remoteTextTrackEls().addTrackElement_(node);
+
+ _this.remoteTextTracks().addTrack(node.track);
+
+ _this.textTracks().addTrack(node.track);
+
+ if (!crossoriginTracks && !_this.el_.hasAttribute('crossorigin') && isCrossOrigin(node.src)) {
+ crossoriginTracks = true;
+ }
+ }
+ }
+ }
+
+ for (var i = 0; i < removeNodes.length; i++) {
+ _this.el_.removeChild(removeNodes[i]);
+ }
+ }
+
+ _this.proxyNativeTracks_();
+
+ if (_this.featuresNativeTextTracks && crossoriginTracks) {
+ log$1.warn('Text Tracks are being loaded from another origin but the crossorigin attribute isn\'t used.\n' + 'This may prevent text tracks from loading.');
+ } // prevent iOS Safari from disabling metadata text tracks during native playback
+
+
+ _this.restoreMetadataTracksInIOSNativePlayer_(); // Determine if native controls should be used
+ // Our goal should be to get the custom controls on mobile solid everywhere
+ // so we can remove this all together. Right now this will block custom
+ // controls on touch enabled laptops like the Chrome Pixel
+
+
+ if ((TOUCH_ENABLED || IS_IPHONE || IS_NATIVE_ANDROID) && options.nativeControlsForTouch === true) {
+ _this.setControls(true);
+ } // on iOS, we want to proxy `webkitbeginfullscreen` and `webkitendfullscreen`
+ // into a `fullscreenchange` event
+
+
+ _this.proxyWebkitFullscreen_();
+
+ _this.triggerReady();
+
+ return _this;
+ }
+ /**
+ * Dispose of `HTML5` media element and remove all tracks.
+ */
+
+
+ var _proto = Html5.prototype;
+
+ _proto.dispose = function dispose() {
+ if (this.el_ && this.el_.resetSourceset_) {
+ this.el_.resetSourceset_();
+ }
+
+ Html5.disposeMediaElement(this.el_);
+ this.options_ = null; // tech will handle clearing of the emulated track list
+
+ _Tech.prototype.dispose.call(this);
+ }
+ /**
+ * Modify the media element so that we can detect when
+ * the source is changed. Fires `sourceset` just after the source has changed
+ */
+ ;
+
+ _proto.setupSourcesetHandling_ = function setupSourcesetHandling_() {
+ setupSourceset(this);
+ }
+ /**
+ * When a captions track is enabled in the iOS Safari native player, all other
+ * tracks are disabled (including metadata tracks), which nulls all of their
+ * associated cue points. This will restore metadata tracks to their pre-fullscreen
+ * state in those cases so that cue points are not needlessly lost.
+ *
+ * @private
+ */
+ ;
+
+ _proto.restoreMetadataTracksInIOSNativePlayer_ = function restoreMetadataTracksInIOSNativePlayer_() {
+ var textTracks = this.textTracks();
+ var metadataTracksPreFullscreenState; // captures a snapshot of every metadata track's current state
+
+ var takeMetadataTrackSnapshot = function takeMetadataTrackSnapshot() {
+ metadataTracksPreFullscreenState = [];
+
+ for (var i = 0; i < textTracks.length; i++) {
+ var track = textTracks[i];
+
+ if (track.kind === 'metadata') {
+ metadataTracksPreFullscreenState.push({
+ track: track,
+ storedMode: track.mode
+ });
+ }
+ }
+ }; // snapshot each metadata track's initial state, and update the snapshot
+ // each time there is a track 'change' event
+
+
+ takeMetadataTrackSnapshot();
+ textTracks.addEventListener('change', takeMetadataTrackSnapshot);
+ this.on('dispose', function () {
+ return textTracks.removeEventListener('change', takeMetadataTrackSnapshot);
+ });
+
+ var restoreTrackMode = function restoreTrackMode() {
+ for (var i = 0; i < metadataTracksPreFullscreenState.length; i++) {
+ var storedTrack = metadataTracksPreFullscreenState[i];
+
+ if (storedTrack.track.mode === 'disabled' && storedTrack.track.mode !== storedTrack.storedMode) {
+ storedTrack.track.mode = storedTrack.storedMode;
+ }
+ } // we only want this handler to be executed on the first 'change' event
+
+
+ textTracks.removeEventListener('change', restoreTrackMode);
+ }; // when we enter fullscreen playback, stop updating the snapshot and
+ // restore all track modes to their pre-fullscreen state
+
+
+ this.on('webkitbeginfullscreen', function () {
+ textTracks.removeEventListener('change', takeMetadataTrackSnapshot); // remove the listener before adding it just in case it wasn't previously removed
+
+ textTracks.removeEventListener('change', restoreTrackMode);
+ textTracks.addEventListener('change', restoreTrackMode);
+ }); // start updating the snapshot again after leaving fullscreen
+
+ this.on('webkitendfullscreen', function () {
+ // remove the listener before adding it just in case it wasn't previously removed
+ textTracks.removeEventListener('change', takeMetadataTrackSnapshot);
+ textTracks.addEventListener('change', takeMetadataTrackSnapshot); // remove the restoreTrackMode handler in case it wasn't triggered during fullscreen playback
+
+ textTracks.removeEventListener('change', restoreTrackMode);
+ });
+ }
+ /**
+ * Attempt to force override of tracks for the given type
+ *
+ * @param {string} type - Track type to override, possible values include 'Audio',
+ * 'Video', and 'Text'.
+ * @param {boolean} override - If set to true native audio/video will be overridden,
+ * otherwise native audio/video will potentially be used.
+ * @private
+ */
+ ;
+
+ _proto.overrideNative_ = function overrideNative_(type, override) {
+ var _this2 = this;
+
+ // If there is no behavioral change don't add/remove listeners
+ if (override !== this["featuresNative" + type + "Tracks"]) {
+ return;
+ }
+
+ var lowerCaseType = type.toLowerCase();
+
+ if (this[lowerCaseType + "TracksListeners_"]) {
+ Object.keys(this[lowerCaseType + "TracksListeners_"]).forEach(function (eventName) {
+ var elTracks = _this2.el()[lowerCaseType + "Tracks"];
+
+ elTracks.removeEventListener(eventName, _this2[lowerCaseType + "TracksListeners_"][eventName]);
+ });
+ }
+
+ this["featuresNative" + type + "Tracks"] = !override;
+ this[lowerCaseType + "TracksListeners_"] = null;
+ this.proxyNativeTracksForType_(lowerCaseType);
+ }
+ /**
+ * Attempt to force override of native audio tracks.
+ *
+ * @param {boolean} override - If set to true native audio will be overridden,
+ * otherwise native audio will potentially be used.
+ */
+ ;
+
+ _proto.overrideNativeAudioTracks = function overrideNativeAudioTracks(override) {
+ this.overrideNative_('Audio', override);
+ }
+ /**
+ * Attempt to force override of native video tracks.
+ *
+ * @param {boolean} override - If set to true native video will be overridden,
+ * otherwise native video will potentially be used.
+ */
+ ;
+
+ _proto.overrideNativeVideoTracks = function overrideNativeVideoTracks(override) {
+ this.overrideNative_('Video', override);
+ }
+ /**
+ * Proxy native track list events for the given type to our track
+ * lists if the browser we are playing in supports that type of track list.
+ *
+ * @param {string} name - Track type; values include 'audio', 'video', and 'text'
+ * @private
+ */
+ ;
+
+ _proto.proxyNativeTracksForType_ = function proxyNativeTracksForType_(name) {
+ var _this3 = this;
+
+ var props = NORMAL[name];
+ var elTracks = this.el()[props.getterName];
+ var techTracks = this[props.getterName]();
+
+ if (!this["featuresNative" + props.capitalName + "Tracks"] || !elTracks || !elTracks.addEventListener) {
+ return;
+ }
+
+ var listeners = {
+ change: function change(e) {
+ var event = {
+ type: 'change',
+ target: techTracks,
+ currentTarget: techTracks,
+ srcElement: techTracks
+ };
+ techTracks.trigger(event); // if we are a text track change event, we should also notify the
+ // remote text track list. This can potentially cause a false positive
+ // if we were to get a change event on a non-remote track and
+ // we triggered the event on the remote text track list which doesn't
+ // contain that track. However, best practices mean looping through the
+ // list of tracks and searching for the appropriate mode value, so
+ // this shouldn't pose an issue
+
+ if (name === 'text') {
+ _this3[REMOTE.remoteText.getterName]().trigger(event);
+ }
+ },
+ addtrack: function addtrack(e) {
+ techTracks.addTrack(e.track);
+ },
+ removetrack: function removetrack(e) {
+ techTracks.removeTrack(e.track);
+ }
+ };
+
+ var removeOldTracks = function removeOldTracks() {
+ var removeTracks = [];
+
+ for (var i = 0; i < techTracks.length; i++) {
+ var found = false;
+
+ for (var j = 0; j < elTracks.length; j++) {
+ if (elTracks[j] === techTracks[i]) {
+ found = true;
+ break;
+ }
+ }
+
+ if (!found) {
+ removeTracks.push(techTracks[i]);
+ }
+ }
+
+ while (removeTracks.length) {
+ techTracks.removeTrack(removeTracks.shift());
+ }
+ };
+
+ this[props.getterName + 'Listeners_'] = listeners;
+ Object.keys(listeners).forEach(function (eventName) {
+ var listener = listeners[eventName];
+ elTracks.addEventListener(eventName, listener);
+
+ _this3.on('dispose', function (e) {
+ return elTracks.removeEventListener(eventName, listener);
+ });
+ }); // Remove (native) tracks that are not used anymore
+
+ this.on('loadstart', removeOldTracks);
+ this.on('dispose', function (e) {
+ return _this3.off('loadstart', removeOldTracks);
+ });
+ }
+ /**
+ * Proxy all native track list events to our track lists if the browser we are playing
+ * in supports that type of track list.
+ *
+ * @private
+ */
+ ;
+
+ _proto.proxyNativeTracks_ = function proxyNativeTracks_() {
+ var _this4 = this;
+
+ NORMAL.names.forEach(function (name) {
+ _this4.proxyNativeTracksForType_(name);
+ });
+ }
+ /**
+ * Create the `Html5` Tech's DOM element.
+ *
+ * @return {Element}
+ * The element that gets created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ var el = this.options_.tag; // Check if this browser supports moving the element into the box.
+ // On the iPhone, video will break if you move the element,
+ // so we have to create a brand new element.
+ // If we ingested the player div, we do not need to move the media element.
+
+ if (!el || !(this.options_.playerElIngest || this.movingMediaElementInDOM)) {
+ // If the original tag is still there, clone and remove it.
+ if (el) {
+ var clone = el.cloneNode(true);
+
+ if (el.parentNode) {
+ el.parentNode.insertBefore(clone, el);
+ }
+
+ Html5.disposeMediaElement(el);
+ el = clone;
+ } else {
+ el = document.createElement('video'); // determine if native controls should be used
+
+ var tagAttributes = this.options_.tag && getAttributes(this.options_.tag);
+ var attributes = mergeOptions$3({}, tagAttributes);
+
+ if (!TOUCH_ENABLED || this.options_.nativeControlsForTouch !== true) {
+ delete attributes.controls;
+ }
+
+ setAttributes(el, assign(attributes, {
+ id: this.options_.techId,
+ "class": 'vjs-tech'
+ }));
+ }
+
+ el.playerId = this.options_.playerId;
+ }
+
+ if (typeof this.options_.preload !== 'undefined') {
+ setAttribute(el, 'preload', this.options_.preload);
+ }
+
+ if (this.options_.disablePictureInPicture !== undefined) {
+ el.disablePictureInPicture = this.options_.disablePictureInPicture;
+ } // Update specific tag settings, in case they were overridden
+ // `autoplay` has to be *last* so that `muted` and `playsinline` are present
+ // when iOS/Safari or other browsers attempt to autoplay.
+
+
+ var settingsAttrs = ['loop', 'muted', 'playsinline', 'autoplay'];
+
+ for (var i = 0; i < settingsAttrs.length; i++) {
+ var attr = settingsAttrs[i];
+ var value = this.options_[attr];
+
+ if (typeof value !== 'undefined') {
+ if (value) {
+ setAttribute(el, attr, attr);
+ } else {
+ removeAttribute(el, attr);
+ }
+
+ el[attr] = value;
+ }
+ }
+
+ return el;
+ }
+ /**
+ * This will be triggered if the loadstart event has already fired, before videojs was
+ * ready. Two known examples of when this can happen are:
+ * 1. If we're loading the playback object after it has started loading
+ * 2. The media is already playing (often with autoplay on)
+ *
+ * This function will fire another loadstart so that videojs can catch up.
+ *
+ * @fires Tech#loadstart
+ *
+ * @return {undefined}
+ * returns nothing.
+ */
+ ;
+
+ _proto.handleLateInit_ = function handleLateInit_(el) {
+ if (el.networkState === 0 || el.networkState === 3) {
+ // The video element hasn't started loading the source yet
+ // or didn't find a source
+ return;
+ }
+
+ if (el.readyState === 0) {
+ // NetworkState is set synchronously BUT loadstart is fired at the
+ // end of the current stack, usually before setInterval(fn, 0).
+ // So at this point we know loadstart may have already fired or is
+ // about to fire, and either way the player hasn't seen it yet.
+ // We don't want to fire loadstart prematurely here and cause a
+ // double loadstart so we'll wait and see if it happens between now
+ // and the next loop, and fire it if not.
+ // HOWEVER, we also want to make sure it fires before loadedmetadata
+ // which could also happen between now and the next loop, so we'll
+ // watch for that also.
+ var loadstartFired = false;
+
+ var setLoadstartFired = function setLoadstartFired() {
+ loadstartFired = true;
+ };
+
+ this.on('loadstart', setLoadstartFired);
+
+ var triggerLoadstart = function triggerLoadstart() {
+ // We did miss the original loadstart. Make sure the player
+ // sees loadstart before loadedmetadata
+ if (!loadstartFired) {
+ this.trigger('loadstart');
+ }
+ };
+
+ this.on('loadedmetadata', triggerLoadstart);
+ this.ready(function () {
+ this.off('loadstart', setLoadstartFired);
+ this.off('loadedmetadata', triggerLoadstart);
+
+ if (!loadstartFired) {
+ // We did miss the original native loadstart. Fire it now.
+ this.trigger('loadstart');
+ }
+ });
+ return;
+ } // From here on we know that loadstart already fired and we missed it.
+ // The other readyState events aren't as much of a problem if we double
+ // them, so we're not going to go to as much trouble as loadstart to prevent
+ // that unless we find reason to.
+
+
+ var eventsToTrigger = ['loadstart']; // loadedmetadata: newly equal to HAVE_METADATA (1) or greater
+
+ eventsToTrigger.push('loadedmetadata'); // loadeddata: newly increased to HAVE_CURRENT_DATA (2) or greater
+
+ if (el.readyState >= 2) {
+ eventsToTrigger.push('loadeddata');
+ } // canplay: newly increased to HAVE_FUTURE_DATA (3) or greater
+
+
+ if (el.readyState >= 3) {
+ eventsToTrigger.push('canplay');
+ } // canplaythrough: newly equal to HAVE_ENOUGH_DATA (4)
+
+
+ if (el.readyState >= 4) {
+ eventsToTrigger.push('canplaythrough');
+ } // We still need to give the player time to add event listeners
+
+
+ this.ready(function () {
+ eventsToTrigger.forEach(function (type) {
+ this.trigger(type);
+ }, this);
+ });
+ }
+ /**
+ * Set whether we are scrubbing or not.
+ * This is used to decide whether we should use `fastSeek` or not.
+ * `fastSeek` is used to provide trick play on Safari browsers.
+ *
+ * @param {boolean} isScrubbing
+ * - true for we are currently scrubbing
+ * - false for we are no longer scrubbing
+ */
+ ;
+
+ _proto.setScrubbing = function setScrubbing(isScrubbing) {
+ this.isScrubbing_ = isScrubbing;
+ }
+ /**
+ * Get whether we are scrubbing or not.
+ *
+ * @return {boolean} isScrubbing
+ * - true for we are currently scrubbing
+ * - false for we are no longer scrubbing
+ */
+ ;
+
+ _proto.scrubbing = function scrubbing() {
+ return this.isScrubbing_;
+ }
+ /**
+ * Set current time for the `HTML5` tech.
+ *
+ * @param {number} seconds
+ * Set the current time of the media to this.
+ */
+ ;
+
+ _proto.setCurrentTime = function setCurrentTime(seconds) {
+ try {
+ if (this.isScrubbing_ && this.el_.fastSeek && IS_ANY_SAFARI) {
+ this.el_.fastSeek(seconds);
+ } else {
+ this.el_.currentTime = seconds;
+ }
+ } catch (e) {
+ log$1(e, 'Video is not ready. (Video.js)'); // this.warning(VideoJS.warnings.videoNotReady);
+ }
+ }
+ /**
+ * Get the current duration of the HTML5 media element.
+ *
+ * @return {number}
+ * The duration of the media or 0 if there is no duration.
+ */
+ ;
+
+ _proto.duration = function duration() {
+ var _this5 = this;
+
+ // Android Chrome will report duration as Infinity for VOD HLS until after
+ // playback has started, which triggers the live display erroneously.
+ // Return NaN if playback has not started and trigger a durationupdate once
+ // the duration can be reliably known.
+ if (this.el_.duration === Infinity && IS_ANDROID && IS_CHROME && this.el_.currentTime === 0) {
+ // Wait for the first `timeupdate` with currentTime > 0 - there may be
+ // several with 0
+ var checkProgress = function checkProgress() {
+ if (_this5.el_.currentTime > 0) {
+ // Trigger durationchange for genuinely live video
+ if (_this5.el_.duration === Infinity) {
+ _this5.trigger('durationchange');
+ }
+
+ _this5.off('timeupdate', checkProgress);
+ }
+ };
+
+ this.on('timeupdate', checkProgress);
+ return NaN;
+ }
+
+ return this.el_.duration || NaN;
+ }
+ /**
+ * Get the current width of the HTML5 media element.
+ *
+ * @return {number}
+ * The width of the HTML5 media element.
+ */
+ ;
+
+ _proto.width = function width() {
+ return this.el_.offsetWidth;
+ }
+ /**
+ * Get the current height of the HTML5 media element.
+ *
+ * @return {number}
+ * The height of the HTML5 media element.
+ */
+ ;
+
+ _proto.height = function height() {
+ return this.el_.offsetHeight;
+ }
+ /**
+ * Proxy iOS `webkitbeginfullscreen` and `webkitendfullscreen` into
+ * `fullscreenchange` event.
+ *
+ * @private
+ * @fires fullscreenchange
+ * @listens webkitendfullscreen
+ * @listens webkitbeginfullscreen
+ */
+ ;
+
+ _proto.proxyWebkitFullscreen_ = function proxyWebkitFullscreen_() {
+ var _this6 = this;
+
+ if (!('webkitDisplayingFullscreen' in this.el_)) {
+ return;
+ }
+
+ var endFn = function endFn() {
+ this.trigger('fullscreenchange', {
+ isFullscreen: false
+ }); // Safari will sometimes set controls on the video element when exiting fullscreen.
+
+ if (this.el_.controls && !this.options_.nativeControlsForTouch && this.controls()) {
+ this.el_.controls = false;
+ }
+ };
+
+ var beginFn = function beginFn() {
+ if ('webkitPresentationMode' in this.el_ && this.el_.webkitPresentationMode !== 'picture-in-picture') {
+ this.one('webkitendfullscreen', endFn);
+ this.trigger('fullscreenchange', {
+ isFullscreen: true,
+ // set a flag in case another tech triggers fullscreenchange
+ nativeIOSFullscreen: true
+ });
+ }
+ };
+
+ this.on('webkitbeginfullscreen', beginFn);
+ this.on('dispose', function () {
+ _this6.off('webkitbeginfullscreen', beginFn);
+
+ _this6.off('webkitendfullscreen', endFn);
+ });
+ }
+ /**
+ * Check if fullscreen is supported on the current playback device.
+ *
+ * @return {boolean}
+ * - True if fullscreen is supported.
+ * - False if fullscreen is not supported.
+ */
+ ;
+
+ _proto.supportsFullScreen = function supportsFullScreen() {
+ if (typeof this.el_.webkitEnterFullScreen === 'function') {
+ var userAgent = window$1.navigator && window$1.navigator.userAgent || ''; // Seems to be broken in Chromium/Chrome && Safari in Leopard
+
+ if (/Android/.test(userAgent) || !/Chrome|Mac OS X 10.5/.test(userAgent)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+ /**
+ * Request that the `HTML5` Tech enter fullscreen.
+ */
+ ;
+
+ _proto.enterFullScreen = function enterFullScreen() {
+ var video = this.el_;
+
+ if (video.paused && video.networkState <= video.HAVE_METADATA) {
+ // attempt to prime the video element for programmatic access
+ // this isn't necessary on the desktop but shouldn't hurt
+ silencePromise(this.el_.play()); // playing and pausing synchronously during the transition to fullscreen
+ // can get iOS ~6.1 devices into a play/pause loop
+
+ this.setTimeout(function () {
+ video.pause();
+
+ try {
+ video.webkitEnterFullScreen();
+ } catch (e) {
+ this.trigger('fullscreenerror', e);
+ }
+ }, 0);
+ } else {
+ try {
+ video.webkitEnterFullScreen();
+ } catch (e) {
+ this.trigger('fullscreenerror', e);
+ }
+ }
+ }
+ /**
+ * Request that the `HTML5` Tech exit fullscreen.
+ */
+ ;
+
+ _proto.exitFullScreen = function exitFullScreen() {
+ if (!this.el_.webkitDisplayingFullscreen) {
+ this.trigger('fullscreenerror', new Error('The video is not fullscreen'));
+ return;
+ }
+
+ this.el_.webkitExitFullScreen();
+ }
+ /**
+ * Create a floating video window always on top of other windows so that users may
+ * continue consuming media while they interact with other content sites, or
+ * applications on their device.
+ *
+ * @see [Spec]{@link https://wicg.github.io/picture-in-picture}
+ *
+ * @return {Promise}
+ * A promise with a Picture-in-Picture window.
+ */
+ ;
+
+ _proto.requestPictureInPicture = function requestPictureInPicture() {
+ return this.el_.requestPictureInPicture();
+ }
+ /**
+ * Native requestVideoFrameCallback if supported by browser/tech, or fallback
+ * Don't use rVCF on Safari when DRM is playing, as it doesn't fire
+ * Needs to be checked later than the constructor
+ * This will be a false positive for clear sources loaded after a Fairplay source
+ *
+ * @param {function} cb function to call
+ * @return {number} id of request
+ */
+ ;
+
+ _proto.requestVideoFrameCallback = function requestVideoFrameCallback(cb) {
+ if (this.featuresVideoFrameCallback && !this.el_.webkitKeys) {
+ return this.el_.requestVideoFrameCallback(cb);
+ }
+
+ return _Tech.prototype.requestVideoFrameCallback.call(this, cb);
+ }
+ /**
+ * Native or fallback cancelVideoFrameCallback
+ *
+ * @param {number} id request id to cancel
+ */
+ ;
+
+ _proto.cancelVideoFrameCallback = function cancelVideoFrameCallback(id) {
+ if (this.featuresVideoFrameCallback && !this.el_.webkitKeys) {
+ this.el_.cancelVideoFrameCallback(id);
+ } else {
+ _Tech.prototype.cancelVideoFrameCallback.call(this, id);
+ }
+ }
+ /**
+ * A getter/setter for the `Html5` Tech's source object.
+ * > Note: Please use {@link Html5#setSource}
+ *
+ * @param {Tech~SourceObject} [src]
+ * The source object you want to set on the `HTML5` techs element.
+ *
+ * @return {Tech~SourceObject|undefined}
+ * - The current source object when a source is not passed in.
+ * - undefined when setting
+ *
+ * @deprecated Since version 5.
+ */
+ ;
+
+ _proto.src = function src(_src) {
+ if (_src === undefined) {
+ return this.el_.src;
+ } // Setting src through `src` instead of `setSrc` will be deprecated
+
+
+ this.setSrc(_src);
+ }
+ /**
+ * Reset the tech by removing all sources and then calling
+ * {@link Html5.resetMediaElement}.
+ */
+ ;
+
+ _proto.reset = function reset() {
+ Html5.resetMediaElement(this.el_);
+ }
+ /**
+ * Get the current source on the `HTML5` Tech. Falls back to returning the source from
+ * the HTML5 media element.
+ *
+ * @return {Tech~SourceObject}
+ * The current source object from the HTML5 tech. With a fallback to the
+ * elements source.
+ */
+ ;
+
+ _proto.currentSrc = function currentSrc() {
+ if (this.currentSource_) {
+ return this.currentSource_.src;
+ }
+
+ return this.el_.currentSrc;
+ }
+ /**
+ * Set controls attribute for the HTML5 media Element.
+ *
+ * @param {string} val
+ * Value to set the controls attribute to
+ */
+ ;
+
+ _proto.setControls = function setControls(val) {
+ this.el_.controls = !!val;
+ }
+ /**
+ * Create and returns a remote {@link TextTrack} object.
+ *
+ * @param {string} kind
+ * `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata)
+ *
+ * @param {string} [label]
+ * Label to identify the text track
+ *
+ * @param {string} [language]
+ * Two letter language abbreviation
+ *
+ * @return {TextTrack}
+ * The TextTrack that gets created.
+ */
+ ;
+
+ _proto.addTextTrack = function addTextTrack(kind, label, language) {
+ if (!this.featuresNativeTextTracks) {
+ return _Tech.prototype.addTextTrack.call(this, kind, label, language);
+ }
+
+ return this.el_.addTextTrack(kind, label, language);
+ }
+ /**
+ * Creates either native TextTrack or an emulated TextTrack depending
+ * on the value of `featuresNativeTextTracks`
+ *
+ * @param {Object} options
+ * The object should contain the options to initialize the TextTrack with.
+ *
+ * @param {string} [options.kind]
+ * `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata).
+ *
+ * @param {string} [options.label]
+ * Label to identify the text track
+ *
+ * @param {string} [options.language]
+ * Two letter language abbreviation.
+ *
+ * @param {boolean} [options.default]
+ * Default this track to on.
+ *
+ * @param {string} [options.id]
+ * The internal id to assign this track.
+ *
+ * @param {string} [options.src]
+ * A source url for the track.
+ *
+ * @return {HTMLTrackElement}
+ * The track element that gets created.
+ */
+ ;
+
+ _proto.createRemoteTextTrack = function createRemoteTextTrack(options) {
+ if (!this.featuresNativeTextTracks) {
+ return _Tech.prototype.createRemoteTextTrack.call(this, options);
+ }
+
+ var htmlTrackElement = document.createElement('track');
+
+ if (options.kind) {
+ htmlTrackElement.kind = options.kind;
+ }
+
+ if (options.label) {
+ htmlTrackElement.label = options.label;
+ }
+
+ if (options.language || options.srclang) {
+ htmlTrackElement.srclang = options.language || options.srclang;
+ }
+
+ if (options["default"]) {
+ htmlTrackElement["default"] = options["default"];
+ }
+
+ if (options.id) {
+ htmlTrackElement.id = options.id;
+ }
+
+ if (options.src) {
+ htmlTrackElement.src = options.src;
+ }
+
+ return htmlTrackElement;
+ }
+ /**
+ * Creates a remote text track object and returns an html track element.
+ *
+ * @param {Object} options The object should contain values for
+ * kind, language, label, and src (location of the WebVTT file)
+ * @param {boolean} [manualCleanup=true] if set to false, the TextTrack will be
+ * automatically removed from the video element whenever the source changes
+ * @return {HTMLTrackElement} An Html Track Element.
+ * This can be an emulated {@link HTMLTrackElement} or a native one.
+ * @deprecated The default value of the "manualCleanup" parameter will change
+ * to "false" in upcoming versions of Video.js
+ */
+ ;
+
+ _proto.addRemoteTextTrack = function addRemoteTextTrack(options, manualCleanup) {
+ var htmlTrackElement = _Tech.prototype.addRemoteTextTrack.call(this, options, manualCleanup);
+
+ if (this.featuresNativeTextTracks) {
+ this.el().appendChild(htmlTrackElement);
+ }
+
+ return htmlTrackElement;
+ }
+ /**
+ * Remove remote `TextTrack` from `TextTrackList` object
+ *
+ * @param {TextTrack} track
+ * `TextTrack` object to remove
+ */
+ ;
+
+ _proto.removeRemoteTextTrack = function removeRemoteTextTrack(track) {
+ _Tech.prototype.removeRemoteTextTrack.call(this, track);
+
+ if (this.featuresNativeTextTracks) {
+ var tracks = this.$$('track');
+ var i = tracks.length;
+
+ while (i--) {
+ if (track === tracks[i] || track === tracks[i].track) {
+ this.el().removeChild(tracks[i]);
+ }
+ }
+ }
+ }
+ /**
+ * Gets available media playback quality metrics as specified by the W3C's Media
+ * Playback Quality API.
+ *
+ * @see [Spec]{@link https://wicg.github.io/media-playback-quality}
+ *
+ * @return {Object}
+ * An object with supported media playback quality metrics
+ */
+ ;
+
+ _proto.getVideoPlaybackQuality = function getVideoPlaybackQuality() {
+ if (typeof this.el().getVideoPlaybackQuality === 'function') {
+ return this.el().getVideoPlaybackQuality();
+ }
+
+ var videoPlaybackQuality = {};
+
+ if (typeof this.el().webkitDroppedFrameCount !== 'undefined' && typeof this.el().webkitDecodedFrameCount !== 'undefined') {
+ videoPlaybackQuality.droppedVideoFrames = this.el().webkitDroppedFrameCount;
+ videoPlaybackQuality.totalVideoFrames = this.el().webkitDecodedFrameCount;
+ }
+
+ if (window$1.performance && typeof window$1.performance.now === 'function') {
+ videoPlaybackQuality.creationTime = window$1.performance.now();
+ } else if (window$1.performance && window$1.performance.timing && typeof window$1.performance.timing.navigationStart === 'number') {
+ videoPlaybackQuality.creationTime = window$1.Date.now() - window$1.performance.timing.navigationStart;
+ }
+
+ return videoPlaybackQuality;
+ };
+
+ return Html5;
+}(Tech);
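+/**
+ * Usage sketch for configuring the Html5 tech at setup time (option values are
+ * illustrative): tech options passed under the `html5` key can force the
+ * emulated track implementations instead of the native ones this tech detects.
+ *
+ *   var player = videojs('my-video', {
+ *     techOrder: ['html5'],
+ *     html5: {
+ *       nativeTextTracks: false,
+ *       nativeAudioTracks: false
+ *     }
+ *   });
+ */
+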
+/* HTML5 Support Testing ---------------------------------------------------- */
+
+/**
+ * Element for testing browser HTML5 media capabilities
+ *
+ * @type {Element}
+ * @constant
+ * @private
+ */
+
+
+defineLazyProperty(Html5, 'TEST_VID', function () {
+ if (!isReal()) {
+ return;
+ }
+
+ var video = document.createElement('video');
+ var track = document.createElement('track');
+ track.kind = 'captions';
+ track.srclang = 'en';
+ track.label = 'English';
+ video.appendChild(track);
+ return video;
+});
+/**
+ * Check if HTML5 media is supported by this browser/device.
+ *
+ * @return {boolean}
+ * - True if HTML5 media is supported.
+ * - False if HTML5 media is not supported.
+ */
+
+Html5.isSupported = function () {
+ // IE with no Media Player is a LIAR! (#984)
+ try {
+ Html5.TEST_VID.volume = 0.5;
+ } catch (e) {
+ return false;
+ }
+
+ return !!(Html5.TEST_VID && Html5.TEST_VID.canPlayType);
+};
+/**
+ * Check if the tech can support the given type
+ *
+ * @param {string} type
+ * The mimetype to check
+ * @return {string} 'probably', 'maybe', or '' (empty string)
+ */
+
+
+Html5.canPlayType = function (type) {
+ return Html5.TEST_VID.canPlayType(type);
+};
+/**
+ * Check if the tech can support the given source
+ *
+ * @param {Object} srcObj
+ * The source object
+ * @param {Object} options
+ * The options passed to the tech
+ * @return {string} 'probably', 'maybe', or '' (empty string)
+ */
+
+
+Html5.canPlaySource = function (srcObj, options) {
+ return Html5.canPlayType(srcObj.type);
+};
+/**
+ * Check if the volume can be changed in this browser/device.
+ * Volume cannot be changed in a lot of mobile devices.
+ * Specifically, it can't be changed from 1 on iOS.
+ *
+ * @return {boolean}
+ * - True if volume can be controlled
+ * - False otherwise
+ */
+
+
+Html5.canControlVolume = function () {
+ // IE will error if Windows Media Player not installed #3315
+ try {
+ var volume = Html5.TEST_VID.volume;
+ Html5.TEST_VID.volume = volume / 2 + 0.1;
+ var canControl = volume !== Html5.TEST_VID.volume; // With the introduction of iOS 15, there are cases where the volume is read as
+ // changed but reverts back to its original state at the start of the next tick.
+ // To determine whether volume can be controlled on iOS,
+ // a timeout is set and the volume is checked asynchronously.
+ // Since `features` doesn't currently work asynchronously, the value is manually set.
+
+ if (canControl && IS_IOS) {
+ window$1.setTimeout(function () {
+ if (Html5 && Html5.prototype) {
+ Html5.prototype.featuresVolumeControl = volume !== Html5.TEST_VID.volume;
+ }
+ }); // default iOS to false, which will be updated in the timeout above.
+
+ return false;
+ }
+
+ return canControl;
+ } catch (e) {
+ return false;
+ }
+};
+/**
+ * Check if the volume can be muted in this browser/device.
+ * Some devices, e.g. iOS, don't allow changing volume
+ * but permit muting/unmuting.
+ *
+ * @return {boolean}
+ * - True if volume can be muted
+ * - False otherwise
+ */
+
+
+Html5.canMuteVolume = function () {
+ try {
+ var muted = Html5.TEST_VID.muted; // in some versions of iOS muted property doesn't always
+ // work, so we want to set both property and attribute
+
+ Html5.TEST_VID.muted = !muted;
+
+ if (Html5.TEST_VID.muted) {
+ setAttribute(Html5.TEST_VID, 'muted', 'muted');
+ } else {
+ removeAttribute(Html5.TEST_VID, 'muted', 'muted');
+ }
+
+ return muted !== Html5.TEST_VID.muted;
+ } catch (e) {
+ return false;
+ }
+};
+/**
+ * Check if the playback rate can be changed in this browser/device.
+ *
+ * @return {boolean}
+ * - True if playback rate can be controlled
+ * - False otherwise
+ */
+
+
+Html5.canControlPlaybackRate = function () {
+ // Playback rate API is implemented in Android Chrome, but doesn't do anything
+ // https://github.com/videojs/video.js/issues/3180
+ if (IS_ANDROID && IS_CHROME && CHROME_VERSION < 58) {
+ return false;
+ } // IE will error if Windows Media Player not installed #3315
+
+
+ try {
+ var playbackRate = Html5.TEST_VID.playbackRate;
+ Html5.TEST_VID.playbackRate = playbackRate / 2 + 0.1;
+ return playbackRate !== Html5.TEST_VID.playbackRate;
+ } catch (e) {
+ return false;
+ }
+};
+/**
+ * Check if we can override a video/audio element's attributes with
+ * Object.defineProperty.
+ *
+ * @return {boolean}
+ * - True if builtin attributes can be overridden
+ * - False otherwise
+ */
+
+
+Html5.canOverrideAttributes = function () {
+ // if we cannot overwrite the src/innerHTML property, there is no support
+ // iOS 7 safari for instance cannot do this.
+ try {
+ var noop = function noop() {};
+
+ Object.defineProperty(document.createElement('video'), 'src', {
+ get: noop,
+ set: noop
+ });
+ Object.defineProperty(document.createElement('audio'), 'src', {
+ get: noop,
+ set: noop
+ });
+ Object.defineProperty(document.createElement('video'), 'innerHTML', {
+ get: noop,
+ set: noop
+ });
+ Object.defineProperty(document.createElement('audio'), 'innerHTML', {
+ get: noop,
+ set: noop
+ });
+ } catch (e) {
+ return false;
+ }
+
+ return true;
+};
+/**
+ * Check to see if native `TextTrack`s are supported by this browser/device.
+ *
+ * @return {boolean}
+ * - True if native `TextTrack`s are supported.
+ * - False otherwise
+ */
+
+
+Html5.supportsNativeTextTracks = function () {
+ return IS_ANY_SAFARI || IS_IOS && IS_CHROME;
+};
+/**
+ * Check to see if native `VideoTrack`s are supported by this browser/device
+ *
+ * @return {boolean}
+ * - True if native `VideoTrack`s are supported.
+ * - False otherwise
+ */
+
+
+Html5.supportsNativeVideoTracks = function () {
+ return !!(Html5.TEST_VID && Html5.TEST_VID.videoTracks);
+};
+/**
+ * Check to see if native `AudioTrack`s are supported by this browser/device
+ *
+ * @return {boolean}
+ * - True if native `AudioTrack`s are supported.
+ * - False otherwise
+ */
+
+
+Html5.supportsNativeAudioTracks = function () {
+ return !!(Html5.TEST_VID && Html5.TEST_VID.audioTracks);
+};
+/**
+ * An array of events available on the Html5 tech.
+ *
+ * @private
+ * @type {Array}
+ */
+
+
+Html5.Events = ['loadstart', 'suspend', 'abort', 'error', 'emptied', 'stalled', 'loadedmetadata', 'loadeddata', 'canplay', 'canplaythrough', 'playing', 'waiting', 'seeking', 'seeked', 'ended', 'durationchange', 'timeupdate', 'progress', 'play', 'pause', 'ratechange', 'resize', 'volumechange'];
+/**
+ * Boolean indicating whether the `Tech` supports volume control.
+ *
+ * @type {boolean}
+ * @default {@link Html5.canControlVolume}
+ */
+
+/**
+ * Boolean indicating whether the `Tech` supports muting volume.
+ *
+ * @type {boolean}
+ * @default {@link Html5.canMuteVolume}
+ */
+
+/**
+ * Boolean indicating whether the `Tech` supports changing the speed at which the media
+ * plays. Examples:
+ * - Set player to play 2x (twice) as fast
+ * - Set player to play 0.5x (half) as fast
+ *
+ * @type {boolean}
+ * @default {@link Html5.canControlPlaybackRate}
+ */
+
+/**
+ * Boolean indicating whether the `Tech` supports the `sourceset` event.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports native `TextTrack`s.
+ *
+ * @type {boolean}
+ * @default {@link Html5.supportsNativeTextTracks}
+ */
+
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports native `VideoTrack`s.
+ *
+ * @type {boolean}
+ * @default {@link Html5.supportsNativeVideoTracks}
+ */
+
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports native `AudioTrack`s.
+ *
+ * @type {boolean}
+ * @default {@link Html5.supportsNativeAudioTracks}
+ */
+
+[['featuresMuteControl', 'canMuteVolume'], ['featuresPlaybackRate', 'canControlPlaybackRate'], ['featuresSourceset', 'canOverrideAttributes'], ['featuresNativeTextTracks', 'supportsNativeTextTracks'], ['featuresNativeVideoTracks', 'supportsNativeVideoTracks'], ['featuresNativeAudioTracks', 'supportsNativeAudioTracks']].forEach(function (_ref) {
+ var key = _ref[0],
+ fn = _ref[1];
+ defineLazyProperty(Html5.prototype, key, function () {
+ return Html5[fn]();
+ }, true);
+});
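+// Note: each feature flag above is defined lazily, so the matching detection function
+// only runs on first read and the result is cached. A minimal sketch, assuming an
+// existing Html5 tech instance `tech`:
+//
+//   tech.featuresPlaybackRate; // first access runs Html5.canControlPlaybackRate()
+//   tech.featuresPlaybackRate; // subsequent reads reuse the cached boolean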
+Html5.prototype.featuresVolumeControl = Html5.canControlVolume();
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports the media element
+ * moving in the DOM. iOS breaks if you move the media element, so this is set to
+ * false there. Everywhere else this should be true.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Html5.prototype.movingMediaElementInDOM = !IS_IOS; // TODO: Previous comment: No longer appears to be used. Can probably be removed.
+// Is this true?
+
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports automatic media resize
+ * when going into fullscreen.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Html5.prototype.featuresFullscreenResize = true;
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports the progress event.
+ * If this is false, manual `progress` events will be triggered instead.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+Html5.prototype.featuresProgressEvents = true;
+/**
+ * Boolean indicating whether the `HTML5` tech currently supports the timeupdate event.
+ * If this is false, manual `timeupdate` events will be triggered instead.
+ *
+ * @default
+ */
+
+Html5.prototype.featuresTimeupdateEvents = true;
+/**
+ * Whether the HTML5 el supports `requestVideoFrameCallback`
+ *
+ * @type {boolean}
+ */
+
+Html5.prototype.featuresVideoFrameCallback = !!(Html5.TEST_VID && Html5.TEST_VID.requestVideoFrameCallback); // HTML5 Feature detection and Device Fixes --------------------------------- //
+
+var canPlayType;
+
+Html5.patchCanPlayType = function () {
+ // Android 4.0 and above can play HLS to some extent but it reports being unable to do so
+ // Firefox and Chrome report correctly
+ if (ANDROID_VERSION >= 4.0 && !IS_FIREFOX && !IS_CHROME) {
+ canPlayType = Html5.TEST_VID && Html5.TEST_VID.constructor.prototype.canPlayType;
+
+ Html5.TEST_VID.constructor.prototype.canPlayType = function (type) {
+ var mpegurlRE = /^application\/(?:x-|vnd\.apple\.)mpegurl/i;
+
+ if (type && mpegurlRE.test(type)) {
+ return 'maybe';
+ }
+
+ return canPlayType.call(this, type);
+ };
+ }
+};
+
+Html5.unpatchCanPlayType = function () {
+ var r = Html5.TEST_VID.constructor.prototype.canPlayType;
+
+ if (canPlayType) {
+ Html5.TEST_VID.constructor.prototype.canPlayType = canPlayType;
+ }
+
+ return r;
+}; // by default, patch the media element
+
+
+Html5.patchCanPlayType();
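+// Usage sketch: the patch above makes Android's canPlayType report 'maybe' for HLS
+// mime types. It can be reverted at runtime via the tech's own helper, assuming
+// `videojs` is this bundle's global export:
+//
+//   videojs.getTech('Html5').unpatchCanPlayType(); // restore native canPlayType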
+
+Html5.disposeMediaElement = function (el) {
+ if (!el) {
+ return;
+ }
+
+ if (el.parentNode) {
+ el.parentNode.removeChild(el);
+ } // remove any child track or source nodes to prevent their loading
+
+
+ while (el.hasChildNodes()) {
+ el.removeChild(el.firstChild);
+ } // remove any src reference. not setting `src=''` because that causes a warning
+ // in firefox
+
+
+ el.removeAttribute('src'); // force the media element to update its loading state by calling load()
+ // however IE on Windows 7N has a bug that throws an error so need a try/catch (#793)
+
+ if (typeof el.load === 'function') {
+ // wrapping in an iife so it's not deoptimized (#1060#discussion_r10324473)
+ (function () {
+ try {
+ el.load();
+ } catch (e) {// not supported
+ }
+ })();
+ }
+};
+
+Html5.resetMediaElement = function (el) {
+ if (!el) {
+ return;
+ }
+
+ var sources = el.querySelectorAll('source');
+ var i = sources.length;
+
+ while (i--) {
+ el.removeChild(sources[i]);
+ } // remove any src reference.
+ // not setting `src=''` because that throws an error
+
+
+ el.removeAttribute('src');
+
+ if (typeof el.load === 'function') {
+ // wrapping in an iife so it's not deoptimized (#1060#discussion_r10324473)
+ (function () {
+ try {
+ el.load();
+ } catch (e) {// satisfy linter
+ }
+ })();
+ }
+};
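+// Note on the two helpers above: `disposeMediaElement` detaches the element from the
+// DOM and strips children/src before calling load(), while `resetMediaElement` only
+// removes <source> children and the src attribute so the element can be reused.
+// Minimal sketch, assuming a media element `el`:
+//
+//   Html5.resetMediaElement(el);   // keep the element, drop its sources
+//   Html5.disposeMediaElement(el); // remove it from the DOM entirely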
+/* Native HTML5 element property wrapping ----------------------------------- */
+// Wrap native boolean attributes with getters that check both property and attribute
+// The list is as follows:
+// muted, defaultMuted, autoplay, controls, loop, playsinline
+
+
+[
+/**
+ * Get the value of `muted` from the media element. `muted` indicates
+ * that the volume for the media should be set to silent. This does not actually change
+ * the `volume` attribute.
+ *
+ * @method Html5#muted
+ * @return {boolean}
+ * - True if the value of `volume` should be ignored and the audio set to silent.
+ * - False if the value of `volume` should be used.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-muted}
+ */
+'muted',
+/**
+ * Get the value of `defaultMuted` from the media element. `defaultMuted` indicates
+ * whether the media should start muted or not. Only changes the default state of the
+ * media. `muted` and `defaultMuted` can have different values. {@link Html5#muted} indicates the
+ * current state.
+ *
+ * @method Html5#defaultMuted
+ * @return {boolean}
+ * - The value of `defaultMuted` from the media element.
+ * - True indicates that the media should start muted.
+ * - False indicates that the media should not start muted
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-defaultmuted}
+ */
+'defaultMuted',
+/**
+ * Get the value of `autoplay` from the media element. `autoplay` indicates
+ * that the media should start to play as soon as the page is ready.
+ *
+ * @method Html5#autoplay
+ * @return {boolean}
+ * - The value of `autoplay` from the media element.
+ * - True indicates that the media should start as soon as the page loads.
+ * - False indicates that the media should not start as soon as the page loads.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-autoplay}
+ */
+'autoplay',
+/**
+ * Get the value of `controls` from the media element. `controls` indicates
+ * whether the native media controls should be shown or hidden.
+ *
+ * @method Html5#controls
+ * @return {boolean}
+ * - The value of `controls` from the media element.
+ * - True indicates that native controls should be showing.
+ * - False indicates that native controls should be hidden.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-controls}
+ */
+'controls',
+/**
+ * Get the value of `loop` from the media element. `loop` indicates
+ * that the media should return to the start of the media and continue playing once
+ * it reaches the end.
+ *
+ * @method Html5#loop
+ * @return {boolean}
+ * - The value of `loop` from the media element.
+ * - True indicates that playback should seek back to start once
+ * the end of a media is reached.
+ * - False indicates that playback should not loop back to the start when the
+ * end of the media is reached.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-loop}
+ */
+'loop',
+/**
+ * Get the value of `playsinline` from the media element. `playsinline` indicates
+ * to the browser that non-fullscreen playback is preferred when fullscreen
+ * playback is the native default, such as in iOS Safari.
+ *
+ * @method Html5#playsinline
+ * @return {boolean}
+ * - The value of `playsinline` from the media element.
+ * - True indicates that the media should play inline.
+ * - False indicates that the media should not play inline.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/#attr-video-playsinline}
+ */
+'playsinline'].forEach(function (prop) {
+ Html5.prototype[prop] = function () {
+ return this.el_[prop] || this.el_.hasAttribute(prop);
+ };
+}); // Wrap native boolean attributes with setters that set both property and attribute
+// The list is as follows:
+// setMuted, setDefaultMuted, setAutoplay, setLoop, setPlaysinline
+// setControls is special-cased above
+
+[
+/**
+ * Set the value of `muted` on the media element. `muted` indicates that the current
+ * audio level should be silent.
+ *
+ * @method Html5#setMuted
+ * @param {boolean} muted
+ * - True if the audio should be set to silent
+ * - False otherwise
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-muted}
+ */
+'muted',
+/**
+ * Set the value of `defaultMuted` on the media element. `defaultMuted` indicates that the current
+ * audio level should be silent, but will only affect the muted state on initial playback.
+ *
+ * @method Html5.prototype.setDefaultMuted
+ * @param {boolean} defaultMuted
+ * - True if the audio should be set to silent
+ * - False otherwise
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-defaultmuted}
+ */
+'defaultMuted',
+/**
+ * Set the value of `autoplay` on the media element. `autoplay` indicates
+ * that the media should start to play as soon as the page is ready.
+ *
+ * @method Html5#setAutoplay
+ * @param {boolean} autoplay
+ * - True indicates that the media should start as soon as the page loads.
+ * - False indicates that the media should not start as soon as the page loads.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-autoplay}
+ */
+'autoplay',
+/**
+ * Set the value of `loop` on the media element. `loop` indicates
+ * that the media should return to the start of the media and continue playing once
+ * it reaches the end.
+ *
+ * @method Html5#setLoop
+ * @param {boolean} loop
+ * - True indicates that playback should seek back to start once
+ * the end of a media is reached.
+ * - False indicates that playback should not loop back to the start when the
+ * end of the media is reached.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-loop}
+ */
+'loop',
+/**
+ * Set the value of `playsinline` on the media element. `playsinline` indicates
+ * to the browser that non-fullscreen playback is preferred when fullscreen
+ * playback is the native default, such as in iOS Safari.
+ *
+ * @method Html5#setPlaysinline
+ * @param {boolean} playsinline
+ * - True indicates that the media should play inline.
+ * - False indicates that the media should not play inline.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/#attr-video-playsinline}
+ */
+'playsinline'].forEach(function (prop) {
+ Html5.prototype['set' + toTitleCase$1(prop)] = function (v) {
+ this.el_[prop] = v;
+
+ if (v) {
+ this.el_.setAttribute(prop, prop);
+ } else {
+ this.el_.removeAttribute(prop);
+ }
+ };
+}); // Wrap native properties with a getter
+// The list is as follows
+// paused, currentTime, buffered, volume, poster, preload, error, seeking
+// seekable, ended, playbackRate, defaultPlaybackRate, disablePictureInPicture
+// played, networkState, readyState, videoWidth, videoHeight, crossOrigin
+
+[
+/**
+ * Get the value of `paused` from the media element. `paused` indicates whether the media element
+ * is currently paused or not.
+ *
+ * @method Html5#paused
+ * @return {boolean}
+ * The value of `paused` from the media element.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-paused}
+ */
+'paused',
+/**
+ * Get the value of `currentTime` from the media element. `currentTime` indicates
+ * the current second that the media is at in playback.
+ *
+ * @method Html5#currentTime
+ * @return {number}
+ * The value of `currentTime` from the media element.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-currenttime}
+ */
+'currentTime',
+/**
+ * Get the value of `buffered` from the media element. `buffered` is a `TimeRange`
+ * object that represents the parts of the media that are already downloaded and
+ * available for playback.
+ *
+ * @method Html5#buffered
+ * @return {TimeRange}
+ * The value of `buffered` from the media element.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-buffered}
+ */
+'buffered',
+/**
+ * Get the value of `volume` from the media element. `volume` indicates
+ * the current playback volume of audio for a media. `volume` will be a value from 0
+ * (silent) to 1 (loudest and default).
+ *
+ * @method Html5#volume
+ * @return {number}
+ * The value of `volume` from the media element. Value will be between 0-1.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-a-volume}
+ */
+'volume',
+/**
+ * Get the value of `poster` from the media element. `poster` indicates
+ * the url of an image file that can/will be shown when no media data is available.
+ *
+ * @method Html5#poster
+ * @return {string}
+ * The value of `poster` from the media element. Value will be a url to an
+ * image.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-video-poster}
+ */
+'poster',
+/**
+ * Get the value of `preload` from the media element. `preload` indicates
+ * what should download before the media is interacted with. It can have the following
+ * values:
+ * - none: nothing should be downloaded
+ * - metadata: poster and the first few frames of the media may be downloaded to get
+ * media dimensions and other metadata
+ * - auto: allow the media and metadata for the media to be downloaded before
+ * interaction
+ *
+ * @method Html5#preload
+ * @return {string}
+ * The value of `preload` from the media element. Will be 'none', 'metadata',
+ * or 'auto'.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-preload}
+ */
+'preload',
+/**
+ * Get the value of the `error` from the media element. `error` indicates any
+ * MediaError that may have occurred during playback. If error returns null there is no
+ * current error.
+ *
+ * @method Html5#error
+ * @return {MediaError|null}
+ * The value of `error` from the media element. Will be `MediaError` if there
+ * is a current error and null otherwise.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-error}
+ */
+'error',
+/**
+ * Get the value of `seeking` from the media element. `seeking` indicates whether the
+ * media is currently seeking to a new position or not.
+ *
+ * @method Html5#seeking
+ * @return {boolean}
+ * - The value of `seeking` from the media element.
+ * - True indicates that the media is currently seeking to a new position.
+ * - False indicates that the media is not seeking to a new position at this time.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-seeking}
+ */
+'seeking',
+/**
+ * Get the value of `seekable` from the media element. `seekable` returns a
+ * `TimeRange` object indicating ranges of time that can currently be `seeked` to.
+ *
+ * @method Html5#seekable
+ * @return {TimeRange}
+ * The value of `seekable` from the media element. A `TimeRange` object
+ * indicating the current ranges of time that can be seeked to.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-seekable}
+ */
+'seekable',
+/**
+ * Get the value of `ended` from the media element. `ended` indicates whether
+ * the media has reached the end or not.
+ *
+ * @method Html5#ended
+ * @return {boolean}
+ * - The value of `ended` from the media element.
+ * - True indicates that the media has ended.
+ * - False indicates that the media has not ended.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-ended}
+ */
+'ended',
+/**
+ * Get the value of `playbackRate` from the media element. `playbackRate` indicates
+ * the rate at which the media is currently playing back. Examples:
+ * - if playbackRate is set to 2, media will play twice as fast.
+ * - if playbackRate is set to 0.5, media will play half as fast.
+ *
+ * @method Html5#playbackRate
+ * @return {number}
+ * The value of `playbackRate` from the media element. A number indicating
+ * the current playback speed of the media, where 1 is normal speed.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-playbackrate}
+ */
+'playbackRate',
+/**
+ * Get the value of `defaultPlaybackRate` from the media element. `defaultPlaybackRate` indicates
+ * the rate at which the media will start playing back. This value will not reflect the current
+ * `playbackRate` once playback has started; use {@link Html5#playbackRate} for that.
+ *
+ * Examples:
+ * - if defaultPlaybackRate is set to 2, media will play twice as fast.
+ * - if defaultPlaybackRate is set to 0.5, media will play half as fast.
+ *
+ * @method Html5.prototype.defaultPlaybackRate
+ * @return {number}
+ * The value of `defaultPlaybackRate` from the media element. A number indicating
+ * the current playback speed of the media, where 1 is normal speed.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-playbackrate}
+ */
+'defaultPlaybackRate',
+/**
+ * Get the value of 'disablePictureInPicture' from the video element.
+ *
+ * @method Html5#disablePictureInPicture
+ * @return {boolean} value
+ * - The value of `disablePictureInPicture` from the video element.
+ * - True indicates that the video can't be played in Picture-In-Picture mode
+ * - False indicates that the video can be played in Picture-In-Picture mode
+ *
+ * @see [Spec]{@link https://w3c.github.io/picture-in-picture/#disable-pip}
+ */
+'disablePictureInPicture',
+/**
+ * Get the value of `played` from the media element. `played` returns a `TimeRange`
+ * object representing points in the media timeline that have been played.
+ *
+ * @method Html5#played
+ * @return {TimeRange}
+ * The value of `played` from the media element. A `TimeRange` object indicating
+ * the ranges of time that have been played.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-played}
+ */
+'played',
+/**
+ * Get the value of `networkState` from the media element. `networkState` indicates
+ * the current network state. It returns an enumeration from the following list:
+ * - 0: NETWORK_EMPTY
+ * - 1: NETWORK_IDLE
+ * - 2: NETWORK_LOADING
+ * - 3: NETWORK_NO_SOURCE
+ *
+ * @method Html5#networkState
+ * @return {number}
+ * The value of `networkState` from the media element. This will be a number
+ * from the list in the description.
+ *
+ * @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-networkstate}
+ */
+'networkState',
+/**
+ * Get the value of `readyState` from the media element. `readyState` indicates
+ * the current state of the media element. It returns an enumeration from the
+ * following list:
+ * - 0: HAVE_NOTHING
+ * - 1: HAVE_METADATA
+ * - 2: HAVE_CURRENT_DATA
+ * - 3: HAVE_FUTURE_DATA
+ * - 4: HAVE_ENOUGH_DATA
+ *
+ * @method Html5#readyState
+ * @return {number}
+ * The value of `readyState` from the media element. This will be a number
+ * from the list in the description.
+ *
+ * @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#ready-states}
+ */
+'readyState',
+/**
+ * Get the value of `videoWidth` from the video element. `videoWidth` indicates
+ * the current width of the video in css pixels.
+ *
+ * @method Html5#videoWidth
+ * @return {number}
+ * The value of `videoWidth` from the video element. This will be a number
+ * in css pixels.
+ *
+ * @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-video-videowidth}
+ */
+'videoWidth',
+/**
+ * Get the value of `videoHeight` from the video element. `videoHeight` indicates
+ * the current height of the video in css pixels.
+ *
+ * @method Html5#videoHeight
+ * @return {number}
+ * The value of `videoHeight` from the video element. This will be a number
+ * in css pixels.
+ *
+ * @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-video-videowidth}
+ */
+'videoHeight',
+/**
+ * Get the value of `crossOrigin` from the media element. `crossOrigin` indicates
+ * to the browser whether it should send cookies along with the requests for the
+ * different assets/playlists.
+ *
+ * @method Html5#crossOrigin
+ * @return {string}
+ * - anonymous indicates that the media should not send cookies.
+ * - use-credentials indicates that the media should send cookies along with the requests.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/#attr-media-crossorigin}
+ */
+'crossOrigin'].forEach(function (prop) {
+ Html5.prototype[prop] = function () {
+ return this.el_[prop];
+ };
+}); // Wrap native properties with a setter in this format:
+// set + toTitleCase(name)
+// The list is as follows:
+// setVolume, setSrc, setPoster, setPreload, setPlaybackRate, setDefaultPlaybackRate,
+// setDisablePictureInPicture, setCrossOrigin
+
+[
+/**
+ * Set the value of `volume` on the media element. `volume` indicates the current
+ * audio level as a percentage in decimal form. This means that 1 is 100%, 0.5 is 50%, and
+ * so on.
+ *
+ * @method Html5#setVolume
+ * @param {number} percentAsDecimal
+ * The volume percent as a decimal. Valid range is from 0-1.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-a-volume}
+ */
+'volume',
+/**
+ * Set the value of `src` on the media element. `src` indicates the current
+ * {@link Tech~SourceObject} for the media.
+ *
+ * @method Html5#setSrc
+ * @param {Tech~SourceObject} src
+ * The source object to set as the current source.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-src}
+ */
+'src',
+/**
+ * Set the value of `poster` on the media element. `poster` is the url to
+ * an image file that can/will be shown when no media data is available.
+ *
+ * @method Html5#setPoster
+ * @param {string} poster
+ * The url to an image that should be used as the `poster` for the media
+ * element.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-poster}
+ */
+'poster',
+/**
+ * Set the value of `preload` on the media element. `preload` indicates
+ * what should download before the media is interacted with. It can have the following
+ * values:
+ * - none: nothing should be downloaded
+ * - metadata: poster and the first few frames of the media may be downloaded to get
+ * media dimensions and other metadata
+ * - auto: allow the media and metadata for the media to be downloaded before
+ * interaction
+ *
+ * @method Html5#setPreload
+ * @param {string} preload
+ * The value of `preload` to set on the media element. Must be 'none', 'metadata',
+ * or 'auto'.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-preload}
+ */
+'preload',
+/**
+ * Set the value of `playbackRate` on the media element. `playbackRate` indicates
+ * the rate at which the media should play back. Examples:
+ * - if playbackRate is set to 2, media will play twice as fast.
+ * - if playbackRate is set to 0.5, media will play half as fast.
+ *
+ * @method Html5#setPlaybackRate
+ * @param {number} playbackRate
+ *        The value of `playbackRate` to set on the media element. A number indicating
+ *        the desired playback speed of the media, where 1 is normal speed.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-playbackrate}
+ */
+'playbackRate',
+/**
+ * Set the value of `defaultPlaybackRate` on the media element. `defaultPlaybackRate` indicates
+ * the rate at which the media should play back upon initial startup. Changing this value
+ * after a video has started will do nothing. Instead you should use {@link Html5#setPlaybackRate}.
+ *
+ * Example Values:
+ * - if playbackRate is set to 2, media will play twice as fast.
+ * - if playbackRate is set to 0.5, media will play half as fast.
+ *
+ * @method Html5.prototype.setDefaultPlaybackRate
+ * @param {number} defaultPlaybackRate
+ *        The value of `defaultPlaybackRate` to set on the media element. A number indicating
+ *        the default playback speed of the media, where 1 is normal speed.
+ *
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-defaultplaybackrate}
+ */
+'defaultPlaybackRate',
+/**
+ * Prevents the browser from suggesting a Picture-in-Picture context menu
+ * or to request Picture-in-Picture automatically in some cases.
+ *
+ * @method Html5#setDisablePictureInPicture
+ * @param {boolean} value
+ * The true value will disable Picture-in-Picture mode.
+ *
+ * @see [Spec]{@link https://w3c.github.io/picture-in-picture/#disable-pip}
+ */
+'disablePictureInPicture',
+/**
+ * Set the value of `crossOrigin` on the media element. `crossOrigin` indicates
+ * to the browser whether it should send cookies along with the requests for the
+ * different assets/playlists.
+ *
+ * @method Html5#setCrossOrigin
+ * @param {string} crossOrigin
+ * - anonymous indicates that the media should not send cookies.
+ * - use-credentials indicates that the media should send cookies along with the requests.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/#attr-media-crossorigin}
+ */
+'crossOrigin'].forEach(function (prop) {
+ Html5.prototype['set' + toTitleCase$1(prop)] = function (v) {
+ this.el_[prop] = v;
+ };
+}); // wrap native functions with a function
+// The list is as follows:
+// pause, load, play
+
+[
+/**
+ * A wrapper around the media element's `pause` function. This will call the `HTML5`
+ * media element's `pause` function.
+ *
+ * @method Html5#pause
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-pause}
+ */
+'pause',
+/**
+ * A wrapper around the media element's `load` function. This will call the `HTML5`
+ * media element's `load` function.
+ *
+ * @method Html5#load
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-load}
+ */
+'load',
+/**
+ * A wrapper around the media element's `play` function. This will call the `HTML5`
+ * media element's `play` function.
+ *
+ * @method Html5#play
+ * @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-play}
+ */
+'play'].forEach(function (prop) {
+ Html5.prototype[prop] = function () {
+ return this.el_[prop]();
+ };
+});
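+// Note on the generated wrappers above: the boolean attribute setters keep the property
+// and the attribute in sync, while these last wrappers simply delegate to the native
+// element. A minimal sketch, assuming an Html5 tech instance `tech`:
+//
+//   tech.setMuted(true); // sets el.muted = true and adds the muted="muted" attribute
+//   tech.pause();        // forwards straight to el.pause()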
+Tech.withSourceHandlers(Html5);
+/**
+ * Native source handler for Html5, simply passes the source to the media element.
+ *
+ * @property {Tech~SourceObject} source
+ * The source object
+ *
+ * @property {Html5} tech
+ * The instance of the HTML5 tech.
+ */
+
+Html5.nativeSourceHandler = {};
+/**
+ * Check if the media element can play the given mime type.
+ *
+ * @param {string} type
+ * The mimetype to check
+ *
+ * @return {string}
+ * 'probably', 'maybe', or '' (empty string)
+ */
+
+Html5.nativeSourceHandler.canPlayType = function (type) {
+ // IE without MediaPlayer throws an error (#519)
+ try {
+ return Html5.TEST_VID.canPlayType(type);
+ } catch (e) {
+ return '';
+ }
+};
+/**
+ * Check if the media element can handle a source natively.
+ *
+ * @param {Tech~SourceObject} source
+ * The source object
+ *
+ * @param {Object} [options]
+ * Options to be passed to the tech.
+ *
+ * @return {string}
+ * 'probably', 'maybe', or '' (empty string).
+ */
+
+
+Html5.nativeSourceHandler.canHandleSource = function (source, options) {
+ // If a type was provided we should rely on that
+ if (source.type) {
+ return Html5.nativeSourceHandler.canPlayType(source.type); // If no type, fall back to checking 'video/[EXTENSION]'
+ } else if (source.src) {
+ var ext = getFileExtension(source.src);
+ return Html5.nativeSourceHandler.canPlayType("video/" + ext);
+ }
+
+ return '';
+};
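+// Usage sketch for the fallback above: when no MIME type is given, the file extension is
+// mapped to a 'video/<ext>' type before asking the element. For example:
+//
+//   Html5.nativeSourceHandler.canHandleSource({ src: 'clip.mp4' });    // checks 'video/mp4'
+//   Html5.nativeSourceHandler.canHandleSource({ type: 'video/webm' }); // uses the type as-is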
+/**
+ * Pass the source to the native media element.
+ *
+ * @param {Tech~SourceObject} source
+ * The source object
+ *
+ * @param {Html5} tech
+ * The instance of the Html5 tech
+ *
+ * @param {Object} [options]
+ * The options to pass to the source
+ */
+
+
+Html5.nativeSourceHandler.handleSource = function (source, tech, options) {
+ tech.setSrc(source.src);
+};
+/**
+ * A noop for the native dispose function, as cleanup is not needed.
+ */
+
+
+Html5.nativeSourceHandler.dispose = function () {}; // Register the native source handler
+
+
+Html5.registerSourceHandler(Html5.nativeSourceHandler);
+Tech.registerTech('Html5', Html5);
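+// Registration sketch: `Tech.withSourceHandlers` added `Html5.registerSourceHandler`, so
+// additional handlers can be registered ahead of the native one (index 0 gives priority).
+// `myHandler` below is a hypothetical object with canPlayType/canHandleSource/handleSource:
+//
+//   Html5.registerSourceHandler(myHandler, 0); // consulted before nativeSourceHandler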
+
+// The following tech events are simply retriggered
+// on the player when they happen
+
+var TECH_EVENTS_RETRIGGER = [
+/**
+ * Fired while the user agent is downloading media data.
+ *
+ * @event Player#progress
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Retrigger the `progress` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechProgress_
+ * @fires Player#progress
+ * @listens Tech#progress
+ */
+'progress',
+/**
+ * Fires when the loading of an audio/video is aborted.
+ *
+ * @event Player#abort
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Retrigger the `abort` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechAbort_
+ * @fires Player#abort
+ * @listens Tech#abort
+ */
+'abort',
+/**
+ * Fires when the browser is intentionally not getting media data.
+ *
+ * @event Player#suspend
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Retrigger the `suspend` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechSuspend_
+ * @fires Player#suspend
+ * @listens Tech#suspend
+ */
+'suspend',
+/**
+ * Fires when the current playlist is empty.
+ *
+ * @event Player#emptied
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Retrigger the `emptied` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechEmptied_
+ * @fires Player#emptied
+ * @listens Tech#emptied
+ */
+'emptied',
+/**
+ * Fires when the browser is trying to get media data, but data is not available.
+ *
+ * @event Player#stalled
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Retrigger the `stalled` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechStalled_
+ * @fires Player#stalled
+ * @listens Tech#stalled
+ */
+'stalled',
+/**
+ * Fires when the browser has loaded meta data for the audio/video.
+ *
+ * @event Player#loadedmetadata
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Retrigger the `loadedmetadata` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechLoadedmetadata_
+ * @fires Player#loadedmetadata
+ * @listens Tech#loadedmetadata
+ */
+'loadedmetadata',
+/**
+ * Fires when the browser has loaded the current frame of the audio/video.
+ *
+ * @event Player#loadeddata
+ * @type {event}
+ */
+
+/**
+ * Retrigger the `loadeddata` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechLoadeddata_
+ * @fires Player#loadeddata
+ * @listens Tech#loadeddata
+ */
+'loadeddata',
+/**
+ * Fires when the current playback position has changed.
+ *
+ * @event Player#timeupdate
+ * @type {event}
+ */
+
+/**
+ * Retrigger the `timeupdate` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechTimeUpdate_
+ * @fires Player#timeupdate
+ * @listens Tech#timeupdate
+ */
+'timeupdate',
+/**
+ * Fires when the video's intrinsic dimensions change
+ *
+ * @event Player#resize
+ * @type {event}
+ */
+
+/**
+ * Retrigger the `resize` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechResize_
+ * @fires Player#resize
+ * @listens Tech#resize
+ */
+'resize',
+/**
+ * Fires when the volume has been changed
+ *
+ * @event Player#volumechange
+ * @type {event}
+ */
+
+/**
+ * Retrigger the `volumechange` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechVolumechange_
+ * @fires Player#volumechange
+ * @listens Tech#volumechange
+ */
+'volumechange',
+/**
+ * Fires when the text track has been changed
+ *
+ * @event Player#texttrackchange
+ * @type {event}
+ */
+
+/**
+ * Retrigger the `texttrackchange` event that was triggered by the {@link Tech}.
+ *
+ * @private
+ * @method Player#handleTechTexttrackchange_
+ * @fires Player#texttrackchange
+ * @listens Tech#texttrackchange
+ */
+'texttrackchange']; // events to queue when playback rate is zero
+// this is a hash for the sole purpose of mapping non-camel-cased event names
+// to camel-cased function names
+
+var TECH_EVENTS_QUEUE = {
+ canplay: 'CanPlay',
+ canplaythrough: 'CanPlayThrough',
+ playing: 'Playing',
+ seeked: 'Seeked'
+};
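+// Note: when the playback rate is 0 and a seek is in progress, these events are queued
+// instead of handled immediately; their camel-cased names are used later in the bundle to
+// build the matching player handler names (e.g. 'canplay' -> handleTechCanPlay_), and the
+// queue is flushed once the playback rate is restored.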
+var BREAKPOINT_ORDER = ['tiny', 'xsmall', 'small', 'medium', 'large', 'xlarge', 'huge'];
+var BREAKPOINT_CLASSES = {}; // grep: vjs-layout-tiny
+// grep: vjs-layout-x-small
+// grep: vjs-layout-small
+// grep: vjs-layout-medium
+// grep: vjs-layout-large
+// grep: vjs-layout-x-large
+// grep: vjs-layout-huge
+
+BREAKPOINT_ORDER.forEach(function (k) {
+ var v = k.charAt(0) === 'x' ? "x-" + k.substring(1) : k;
+ BREAKPOINT_CLASSES[k] = "vjs-layout-" + v;
+});
+var DEFAULT_BREAKPOINTS = {
+ tiny: 210,
+ xsmall: 320,
+ small: 425,
+ medium: 768,
+ large: 1440,
+ xlarge: 2560,
+ huge: Infinity
+};
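+// Usage sketch: these defaults drive the vjs-layout-* classes when `responsive` is enabled,
+// and can be overridden per player (assumes `videojs` is this bundle's global export):
+//
+//   videojs('my-video', {
+//     responsive: true,
+//     breakpoints: { medium: 700, large: 1200 } // unspecified keys keep the defaults above
+//   });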
+/**
+ * An instance of the `Player` class is created when any of the Video.js setup methods
+ * are used to initialize a video.
+ *
+ * After an instance has been created it can be accessed globally in two ways:
+ * 1. By calling `videojs('example_video_1');`
+ * 2. By using it directly via `videojs.players.example_video_1;`
+ *
+ * @extends Component
+ */
+
+var Player = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(Player, _Component);
+
+ /**
+ * Create an instance of this class.
+ *
+ * @param {Element} tag
+ * The original video DOM element used for configuring options.
+ *
+ * @param {Object} [options]
+ * Object of option names and values.
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * Ready callback function.
+ */
+ function Player(tag, options, ready) {
+ var _this;
+
+ // Make sure tag ID exists
+ tag.id = tag.id || options.id || "vjs_video_" + newGUID(); // Set Options
+ // The options argument overrides options set in the video tag
+ // which overrides globally set options.
+ // This latter part coincides with the load order
+ // (tag must exist before Player)
+
+ options = assign(Player.getTagSettings(tag), options); // Delay the initialization of children because we need to set up
+ // player properties first, and can't use `this` before `super()`
+
+ options.initChildren = false; // Same with creating the element
+
+ options.createEl = false; // don't auto mixin the evented mixin
+
+ options.evented = false; // we don't want the player to report touch activity on itself
+ // see enableTouchActivity in Component
+
+ options.reportTouchActivity = false; // If language is not set, get the closest lang attribute
+
+ if (!options.language) {
+ if (typeof tag.closest === 'function') {
+ var closest = tag.closest('[lang]');
+
+ if (closest && closest.getAttribute) {
+ options.language = closest.getAttribute('lang');
+ }
+ } else {
+ var element = tag;
+
+ while (element && element.nodeType === 1) {
+ if (getAttributes(element).hasOwnProperty('lang')) {
+ options.language = element.getAttribute('lang');
+ break;
+ }
+
+ element = element.parentNode;
+ }
+ }
+ } // Run base component initializing with new options
+
+
+ _this = _Component.call(this, null, options, ready) || this; // Create bound methods for document listeners.
+
+ _this.boundDocumentFullscreenChange_ = function (e) {
+ return _this.documentFullscreenChange_(e);
+ };
+
+ _this.boundFullWindowOnEscKey_ = function (e) {
+ return _this.fullWindowOnEscKey(e);
+ };
+
+ _this.boundUpdateStyleEl_ = function (e) {
+ return _this.updateStyleEl_(e);
+ };
+
+ _this.boundApplyInitTime_ = function (e) {
+ return _this.applyInitTime_(e);
+ };
+
+ _this.boundUpdateCurrentBreakpoint_ = function (e) {
+ return _this.updateCurrentBreakpoint_(e);
+ };
+
+ _this.boundHandleTechClick_ = function (e) {
+ return _this.handleTechClick_(e);
+ };
+
+ _this.boundHandleTechDoubleClick_ = function (e) {
+ return _this.handleTechDoubleClick_(e);
+ };
+
+ _this.boundHandleTechTouchStart_ = function (e) {
+ return _this.handleTechTouchStart_(e);
+ };
+
+ _this.boundHandleTechTouchMove_ = function (e) {
+ return _this.handleTechTouchMove_(e);
+ };
+
+ _this.boundHandleTechTouchEnd_ = function (e) {
+ return _this.handleTechTouchEnd_(e);
+ };
+
+ _this.boundHandleTechTap_ = function (e) {
+ return _this.handleTechTap_(e);
+ }; // default isFullscreen_ to false
+
+
+ _this.isFullscreen_ = false; // create logger
+
+ _this.log = createLogger(_this.id_); // Hold our own reference to fullscreen api so it can be mocked in tests
+
+ _this.fsApi_ = FullscreenApi; // Tracks when a tech changes the poster
+
+ _this.isPosterFromTech_ = false; // Holds callback info that gets queued when playback rate is zero
+ // and a seek is happening
+
+ _this.queuedCallbacks_ = []; // Turn off API access because we're loading a new tech that might load asynchronously
+
+ _this.isReady_ = false; // Init state hasStarted_
+
+ _this.hasStarted_ = false; // Init state userActive_
+
+ _this.userActive_ = false; // Init debugEnabled_
+
+ _this.debugEnabled_ = false; // Init state audioOnlyMode_
+
+ _this.audioOnlyMode_ = false; // Init state audioPosterMode_
+
+ _this.audioPosterMode_ = false; // Init state audioOnlyCache_
+
+ _this.audioOnlyCache_ = {
+ playerHeight: null,
+ hiddenChildren: []
+ }; // if the global option object was accidentally blown away by
+ // someone, bail early with an informative error
+
+ if (!_this.options_ || !_this.options_.techOrder || !_this.options_.techOrder.length) {
+ throw new Error('No techOrder specified. Did you overwrite ' + 'videojs.options instead of just changing the ' + 'properties you want to override?');
+ } // Store the original tag used to set options
+
+
+ _this.tag = tag; // Store the tag attributes used to restore html5 element
+
+ _this.tagAttributes = tag && getAttributes(tag); // Update current language
+
+ _this.language(_this.options_.language); // Update Supported Languages
+
+
+ if (options.languages) {
+ // Normalise player option languages to lowercase
+ var languagesToLower = {};
+ Object.getOwnPropertyNames(options.languages).forEach(function (name) {
+ languagesToLower[name.toLowerCase()] = options.languages[name];
+ });
+ _this.languages_ = languagesToLower;
+ } else {
+ _this.languages_ = Player.prototype.options_.languages;
+ }
+
+ _this.resetCache_(); // Set poster
+
+
+ _this.poster_ = options.poster || ''; // Set controls
+
+ _this.controls_ = !!options.controls; // Original tag settings stored in options
+ // now remove immediately so native controls don't flash.
+ // May be turned back on by HTML5 tech if nativeControlsForTouch is true
+
+ tag.controls = false;
+ tag.removeAttribute('controls');
+ _this.changingSrc_ = false;
+ _this.playCallbacks_ = [];
+ _this.playTerminatedQueue_ = []; // the attribute overrides the option
+
+ if (tag.hasAttribute('autoplay')) {
+ _this.autoplay(true);
+ } else {
+ // otherwise use the setter to validate and
+ // set the correct value.
+ _this.autoplay(_this.options_.autoplay);
+ } // check plugins
+
+
+ if (options.plugins) {
+ Object.keys(options.plugins).forEach(function (name) {
+ if (typeof _this[name] !== 'function') {
+ throw new Error("plugin \"" + name + "\" does not exist");
+ }
+ });
+ }
+ /*
+ * Store the internal state of scrubbing
+ *
+ * @private
+ * @return {Boolean} True if the user is scrubbing
+ */
+
+
+ _this.scrubbing_ = false;
+ _this.el_ = _this.createEl(); // Make this an evented object and use `el_` as its event bus.
+
+ evented(_assertThisInitialized(_this), {
+ eventBusKey: 'el_'
+ }); // listen to document and player fullscreenchange handlers so we receive those events
+ // before a user can receive them so we can update isFullscreen appropriately.
+ // make sure that we listen to fullscreenchange events before everything else to make sure that
+ // our isFullscreen method is updated properly for internal components as well as external.
+
+ if (_this.fsApi_.requestFullscreen) {
+ on(document, _this.fsApi_.fullscreenchange, _this.boundDocumentFullscreenChange_);
+
+ _this.on(_this.fsApi_.fullscreenchange, _this.boundDocumentFullscreenChange_);
+ }
+
+ if (_this.fluid_) {
+ _this.on(['playerreset', 'resize'], _this.boundUpdateStyleEl_);
+ } // We also want to pass the original player options to each component and plugin
+ // as well so they don't need to reach back into the player for options later.
+ // We also need to do another copy of this.options_ so we don't end up with
+ // an infinite loop.
+
+
+ var playerOptionsCopy = mergeOptions$3(_this.options_); // Load plugins
+
+ if (options.plugins) {
+ Object.keys(options.plugins).forEach(function (name) {
+ _this[name](options.plugins[name]);
+ });
+ } // Enable debug mode to fire debugon event for all plugins.
+
+
+ if (options.debug) {
+ _this.debug(true);
+ }
+
+ _this.options_.playerOptions = playerOptionsCopy;
+ _this.middleware_ = [];
+
+ _this.playbackRates(options.playbackRates);
+
+ _this.initChildren(); // Set isAudio based on whether or not an audio tag was used
+
+
+ _this.isAudio(tag.nodeName.toLowerCase() === 'audio'); // Update controls className. Can't do this when the controls are initially
+ // set because the element doesn't exist yet.
+
+
+ if (_this.controls()) {
+ _this.addClass('vjs-controls-enabled');
+ } else {
+ _this.addClass('vjs-controls-disabled');
+ } // Set ARIA label and region role depending on player type
+
+
+ _this.el_.setAttribute('role', 'region');
+
+ if (_this.isAudio()) {
+ _this.el_.setAttribute('aria-label', _this.localize('Audio Player'));
+ } else {
+ _this.el_.setAttribute('aria-label', _this.localize('Video Player'));
+ }
+
+ if (_this.isAudio()) {
+ _this.addClass('vjs-audio');
+ }
+
+ if (_this.flexNotSupported_()) {
+ _this.addClass('vjs-no-flex');
+ } // TODO: Make this smarter. Toggle user state between touching/mousing
+ // using events, since devices can have both touch and mouse events.
+ // TODO: Make this check be performed again when the window switches between monitors
+ // (See https://github.com/videojs/video.js/issues/5683)
+
+
+ if (TOUCH_ENABLED) {
+ _this.addClass('vjs-touch-enabled');
+ } // iOS Safari has broken hover handling
+
+
+ if (!IS_IOS) {
+ _this.addClass('vjs-workinghover');
+ } // Make player easily findable by ID
+
+
+ Player.players[_this.id_] = _assertThisInitialized(_this); // Add a major version class to aid css in plugins
+
+ var majorVersion = version$5.split('.')[0];
+
+ _this.addClass("vjs-v" + majorVersion); // When the player is first initialized, trigger activity so components
+ // like the control bar show themselves if needed
+
+
+ _this.userActive(true);
+
+ _this.reportUserActivity();
+
+ _this.one('play', function (e) {
+ return _this.listenForUserActivity_(e);
+ });
+
+ _this.on('stageclick', function (e) {
+ return _this.handleStageClick_(e);
+ });
+
+ _this.on('keydown', function (e) {
+ return _this.handleKeyDown(e);
+ });
+
+ _this.on('languagechange', function (e) {
+ return _this.handleLanguagechange(e);
+ });
+
+ _this.breakpoints(_this.options_.breakpoints);
+
+ _this.responsive(_this.options_.responsive); // Calling both the audio mode methods after the player is fully
+ // setup to be able to listen to the events triggered by them
+
+
+ _this.on('ready', function () {
+ // Calling the audioPosterMode method first so that
+ // the audioOnlyMode can take precedence when both options are set to true
+ _this.audioPosterMode(_this.options_.audioPosterMode);
+
+ _this.audioOnlyMode(_this.options_.audioOnlyMode);
+ });
+
+ return _this;
+ }
+ /**
+ * Destroys the video player and does any necessary cleanup.
+ *
+ * This is especially helpful if you are dynamically adding and removing videos
+ * to/from the DOM.
+ *
+ * @fires Player#dispose
+ */
+
+
+ var _proto = Player.prototype;
+
+ _proto.dispose = function dispose() {
+ var _this2 = this;
+
+ /**
+ * Called when the player is being disposed of.
+ *
+ * @event Player#dispose
+ * @type {EventTarget~Event}
+ */
+ this.trigger('dispose'); // prevent dispose from being called twice
+
+ this.off('dispose'); // Make sure all player-specific document listeners are unbound. This is
+
+ off(document, this.fsApi_.fullscreenchange, this.boundDocumentFullscreenChange_);
+ off(document, 'keydown', this.boundFullWindowOnEscKey_);
+
+ if (this.styleEl_ && this.styleEl_.parentNode) {
+ this.styleEl_.parentNode.removeChild(this.styleEl_);
+ this.styleEl_ = null;
+ } // Kill reference to this player
+
+
+ Player.players[this.id_] = null;
+
+ if (this.tag && this.tag.player) {
+ this.tag.player = null;
+ }
+
+ if (this.el_ && this.el_.player) {
+ this.el_.player = null;
+ }
+
+ if (this.tech_) {
+ this.tech_.dispose();
+ this.isPosterFromTech_ = false;
+ this.poster_ = '';
+ }
+
+ if (this.playerElIngest_) {
+ this.playerElIngest_ = null;
+ }
+
+ if (this.tag) {
+ this.tag = null;
+ }
+
+ clearCacheForPlayer(this); // remove all event handlers for track lists
+ // all tracks and track listeners are removed on
+ // tech dispose
+
+ ALL.names.forEach(function (name) {
+ var props = ALL[name];
+
+ var list = _this2[props.getterName](); // if it is not a native list
+ // we have to manually remove event listeners
+
+
+ if (list && list.off) {
+ list.off();
+ }
+ }); // the actual .el_ is removed here, or replaced if
+
+ _Component.prototype.dispose.call(this, {
+ restoreEl: this.options_.restoreEl
+ });
+ }
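+  // Usage sketch: dispose() is the supported way to tear a player down when its element is
+  // removed from the page, e.g. during a single-page-app view teardown (assumes an existing
+  // `player` instance):
+  //
+  //   player.dispose(); // unbinds document listeners, disposes the tech, removes the element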
+ /**
+ * Create the `Player`'s DOM element.
+ *
+ * @return {Element}
+ * The DOM element that gets created.
+ */
+ ;
+
+ _proto.createEl = function createEl() {
+ var tag = this.tag;
+ var el;
+ var playerElIngest = this.playerElIngest_ = tag.parentNode && tag.parentNode.hasAttribute && tag.parentNode.hasAttribute('data-vjs-player');
+ var divEmbed = this.tag.tagName.toLowerCase() === 'video-js';
+
+ if (playerElIngest) {
+ el = this.el_ = tag.parentNode;
+ } else if (!divEmbed) {
+ el = this.el_ = _Component.prototype.createEl.call(this, 'div');
+ } // Copy over all the attributes from the tag, including ID and class
+ // ID will now reference player box, not the video tag
+
+
+ var attrs = getAttributes(tag);
+
+ if (divEmbed) {
+ el = this.el_ = tag;
+ tag = this.tag = document.createElement('video');
+
+ while (el.children.length) {
+ tag.appendChild(el.firstChild);
+ }
+
+ if (!hasClass(el, 'video-js')) {
+ addClass(el, 'video-js');
+ }
+
+ el.appendChild(tag);
+ playerElIngest = this.playerElIngest_ = el; // move properties over from our custom `video-js` element
+ // to our new `video` element. This will move things like
+ // `src` or `controls` that were set via js before the player
+ // was initialized.
+
+ Object.keys(el).forEach(function (k) {
+ try {
+ tag[k] = el[k];
+        } catch (e) {// we got a property like outerHTML which we can't actually copy, ignore it
+ }
+ });
+ } // set tabindex to -1 to remove the video element from the focus order
+
+
+ tag.setAttribute('tabindex', '-1');
+ attrs.tabindex = '-1'; // Workaround for #4583 (JAWS+IE doesn't announce BPB or play button), and
+ // for the same issue with Chrome (on Windows) with JAWS.
+ // See https://github.com/FreedomScientific/VFO-standards-support/issues/78
+ // Note that we can't detect if JAWS is being used, but this ARIA attribute
+ // doesn't change behavior of IE11 or Chrome if JAWS is not being used
+
+ if (IE_VERSION || IS_CHROME && IS_WINDOWS) {
+ tag.setAttribute('role', 'application');
+ attrs.role = 'application';
+ } // Remove width/height attrs from tag so CSS can make it 100% width/height
+
+
+ tag.removeAttribute('width');
+ tag.removeAttribute('height');
+
+ if ('width' in attrs) {
+ delete attrs.width;
+ }
+
+ if ('height' in attrs) {
+ delete attrs.height;
+ }
+
+ Object.getOwnPropertyNames(attrs).forEach(function (attr) {
+ // don't copy over the class attribute to the player element when we're in a div embed
+ // the class is already set up properly in the divEmbed case
+ // and we want to make sure that the `video-js` class doesn't get lost
+ if (!(divEmbed && attr === 'class')) {
+ el.setAttribute(attr, attrs[attr]);
+ }
+
+ if (divEmbed) {
+ tag.setAttribute(attr, attrs[attr]);
+ }
+ }); // Update tag id/class for use as HTML5 playback tech
+ // Might think we should do this after embedding in container so .vjs-tech class
+ // doesn't flash 100% width/height, but class only applies with .video-js parent
+
+ tag.playerId = tag.id;
+ tag.id += '_html5_api';
+ tag.className = 'vjs-tech'; // Make player findable on elements
+
+ tag.player = el.player = this; // Default state of video is paused
+
+ this.addClass('vjs-paused'); // Add a style element in the player that we'll use to set the width/height
+ // of the player in a way that's still overrideable by CSS, just like the
+ // video element
+
+ if (window$1.VIDEOJS_NO_DYNAMIC_STYLE !== true) {
+ this.styleEl_ = createStyleElement('vjs-styles-dimensions');
+ var defaultsStyleEl = $('.vjs-styles-defaults');
+ var head = $('head');
+ head.insertBefore(this.styleEl_, defaultsStyleEl ? defaultsStyleEl.nextSibling : head.firstChild);
+ }
+
+ this.fill_ = false;
+ this.fluid_ = false; // Pass in the width/height/aspectRatio options which will update the style el
+
+ this.width(this.options_.width);
+ this.height(this.options_.height);
+ this.fill(this.options_.fill);
+ this.fluid(this.options_.fluid);
+ this.aspectRatio(this.options_.aspectRatio); // support both crossOrigin and crossorigin to reduce confusion and issues around the name
+
+ this.crossOrigin(this.options_.crossOrigin || this.options_.crossorigin); // Hide any links within the video/audio tag,
+ // because IE doesn't hide them completely from screen readers.
+
+ var links = tag.getElementsByTagName('a');
+
+ for (var i = 0; i < links.length; i++) {
+ var linkEl = links.item(i);
+ addClass(linkEl, 'vjs-hidden');
+ linkEl.setAttribute('hidden', 'hidden');
+ } // insertElFirst seems to cause the networkState to flicker from 3 to 2, so
+ // keep track of the original for later so we can know if the source originally failed
+
+
+ tag.initNetworkState_ = tag.networkState; // Wrap video tag in div (el/box) container
+
+ if (tag.parentNode && !playerElIngest) {
+ tag.parentNode.insertBefore(el, tag);
+ } // insert the tag as the first child of the player element
+ // then manually add it to the children array so that this.addChild
+ // will work properly for other components
+ //
+ // Breaks iPhone, fixed in HTML5 setup.
+
+
+ prependTo(tag, el);
+    this.children_.unshift(tag); // Set lang attr on player to ensure CSS :lang() is consistent with player
+ // if it's been set to something different to the doc
+
+ this.el_.setAttribute('lang', this.language_);
+ this.el_.setAttribute('translate', 'no');
+ this.el_ = el;
+ return el;
+ }
+ /**
+ * Get or set the `Player`'s crossOrigin option. For the HTML5 player, this
+   * sets the `crossOrigin` property on the `<video>` tag to control the CORS
+ * behavior.
+ *
+ * @see [Video Element Attributes]{@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video#attr-crossorigin}
+ *
+ * @param {string} [value]
+ * The value to set the `Player`'s crossOrigin to. If an argument is
+ * given, must be one of `anonymous` or `use-credentials`.
+ *
+ * @return {string|undefined}
+ * - The current crossOrigin value of the `Player` when getting.
+ * - undefined when setting
+ */
+ ;
+
+ _proto.crossOrigin = function crossOrigin(value) {
+ if (!value) {
+ return this.techGet_('crossOrigin');
+ }
+
+ if (value !== 'anonymous' && value !== 'use-credentials') {
+ log$1.warn("crossOrigin must be \"anonymous\" or \"use-credentials\", given \"" + value + "\"");
+ return;
+ }
+
+ this.techCall_('setCrossOrigin', value);
+ return;
+ }
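+  // Usage sketch (assumes an existing `player` instance): only the two CORS modes are
+  // accepted; anything else is rejected with a warning by the setter above.
+  //
+  //   player.crossOrigin('use-credentials'); // send cookies with media requests
+  //   player.crossOrigin();                  // getter, returns the current value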
+ /**
+ * A getter/setter for the `Player`'s width. Returns the player's configured value.
+ * To get the current width use `currentWidth()`.
+ *
+ * @param {number} [value]
+ * The value to set the `Player`'s width to.
+ *
+ * @return {number}
+ * The current width of the `Player` when getting.
+ */
+ ;
+
+ _proto.width = function width(value) {
+ return this.dimension('width', value);
+ }
+ /**
+ * A getter/setter for the `Player`'s height. Returns the player's configured value.
+   * To get the current height use `currentHeight()`.
+   *
+   * @param {number} [value]
+   *        The value to set the `Player`'s height to.
+ *
+ * @return {number}
+ * The current height of the `Player` when getting.
+ */
+ ;
+
+ _proto.height = function height(value) {
+ return this.dimension('height', value);
+ }
+ /**
+ * A getter/setter for the `Player`'s width & height.
+ *
+ * @param {string} dimension
+ * This string can be:
+ * - 'width'
+ * - 'height'
+ *
+ * @param {number} [value]
+ * Value for dimension specified in the first argument.
+ *
+ * @return {number}
+ * The dimension arguments value when getting (width/height).
+ */
+ ;
+
+ _proto.dimension = function dimension(_dimension, value) {
+ var privDimension = _dimension + '_';
+
+ if (value === undefined) {
+ return this[privDimension] || 0;
+ }
+
+ if (value === '' || value === 'auto') {
+ // If an empty string is given, reset the dimension to be automatic
+ this[privDimension] = undefined;
+ this.updateStyleEl_();
+ return;
+ }
+
+ var parsedVal = parseFloat(value);
+
+ if (isNaN(parsedVal)) {
+ log$1.error("Improper value \"" + value + "\" supplied for for " + _dimension);
+ return;
+ }
+
+ this[privDimension] = parsedVal;
+ this.updateStyleEl_();
+ }
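+  // Usage sketch (assumes an existing `player` instance): width()/height() route through
+  // dimension() above, so numbers set a fixed size and '' or 'auto' clears it.
+  //
+  //   player.width(640);    // fixed 640px width via the dynamic style element
+  //   player.width('auto'); // back to automatic sizing
+  //   player.width();       // getter, returns the configured number (or 0)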
+ /**
+ * A getter/setter/toggler for the vjs-fluid `className` on the `Player`.
+ *
+ * Turning this on will turn off fill mode.
+ *
+ * @param {boolean} [bool]
+ * - A value of true adds the class.
+ * - A value of false removes the class.
+ * - No value will be a getter.
+ *
+ * @return {boolean|undefined}
+ * - The value of fluid when getting.
+ * - `undefined` when setting.
+ */
+ ;
+
+ _proto.fluid = function fluid(bool) {
+ var _this3 = this;
+
+ if (bool === undefined) {
+ return !!this.fluid_;
+ }
+
+ this.fluid_ = !!bool;
+
+ if (isEvented(this)) {
+ this.off(['playerreset', 'resize'], this.boundUpdateStyleEl_);
+ }
+
+ if (bool) {
+ this.addClass('vjs-fluid');
+ this.fill(false);
+ addEventedCallback(this, function () {
+ _this3.on(['playerreset', 'resize'], _this3.boundUpdateStyleEl_);
+ });
+ } else {
+ this.removeClass('vjs-fluid');
+ }
+
+ this.updateStyleEl_();
+ }
+ /**
+ * A getter/setter/toggler for the vjs-fill `className` on the `Player`.
+ *
+ * Turning this on will turn off fluid mode.
+ *
+ * @param {boolean} [bool]
+ * - A value of true adds the class.
+ * - A value of false removes the class.
+ * - No value will be a getter.
+ *
+ * @return {boolean|undefined}
+   *         - The value of fill when getting.
+ * - `undefined` when setting.
+ */
+ ;
+
+ _proto.fill = function fill(bool) {
+ if (bool === undefined) {
+ return !!this.fill_;
+ }
+
+ this.fill_ = !!bool;
+
+ if (bool) {
+ this.addClass('vjs-fill');
+ this.fluid(false);
+ } else {
+ this.removeClass('vjs-fill');
+ }
+ }
+ /**
+ * Get/Set the aspect ratio
+ *
+ * @param {string} [ratio]
+ * Aspect ratio for player
+ *
+ * @return {string|undefined}
+ * returns the current aspect ratio when getting
+ */
+
+ /**
+ * A getter/setter for the `Player`'s aspect ratio.
+ *
+ * @param {string} [ratio]
+ * The value to set the `Player`'s aspect ratio to.
+ *
+ * @return {string|undefined}
+ * - The current aspect ratio of the `Player` when getting.
+ * - undefined when setting
+ */
+ ;
+
+ _proto.aspectRatio = function aspectRatio(ratio) {
+ if (ratio === undefined) {
+ return this.aspectRatio_;
+ } // Check for width:height format
+
+
+ if (!/^\d+\:\d+$/.test(ratio)) {
+ throw new Error('Improper value supplied for aspect ratio. The format should be width:height, for example 16:9.');
+ }
+
+ this.aspectRatio_ = ratio; // We're assuming if you set an aspect ratio you want fluid mode,
+ // because in fixed mode you could calculate width and height yourself.
+
+ this.fluid(true);
+ this.updateStyleEl_();
+ }
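+  // Usage sketch (assumes an existing `player` instance): the ratio must be 'width:height',
+  // and setting it switches the player into fluid mode as noted above.
+  //
+  //   player.aspectRatio('16:9'); // also implies player.fluid(true)
+  //   player.aspectRatio();       // getter, e.g. '16:9'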
+ /**
+ * Update styles of the `Player` element (height, width and aspect ratio).
+ *
+ * @private
+ * @listens Tech#loadedmetadata
+ */
+ ;
+
+ _proto.updateStyleEl_ = function updateStyleEl_() {
+ if (window$1.VIDEOJS_NO_DYNAMIC_STYLE === true) {
+ var _width = typeof this.width_ === 'number' ? this.width_ : this.options_.width;
+
+ var _height = typeof this.height_ === 'number' ? this.height_ : this.options_.height;
+
+ var techEl = this.tech_ && this.tech_.el();
+
+ if (techEl) {
+ if (_width >= 0) {
+ techEl.width = _width;
+ }
+
+ if (_height >= 0) {
+ techEl.height = _height;
+ }
+ }
+
+ return;
+ }
+
+ var width;
+ var height;
+ var aspectRatio;
+ var idClass; // The aspect ratio is either used directly or to calculate width and height.
+
+ if (this.aspectRatio_ !== undefined && this.aspectRatio_ !== 'auto') {
+ // Use any aspectRatio that's been specifically set
+ aspectRatio = this.aspectRatio_;
+ } else if (this.videoWidth() > 0) {
+ // Otherwise try to get the aspect ratio from the video metadata
+ aspectRatio = this.videoWidth() + ':' + this.videoHeight();
+ } else {
+ // Or use a default. The video element's is 2:1, but 16:9 is more common.
+ aspectRatio = '16:9';
+ } // Get the ratio as a decimal we can use to calculate dimensions
+
+
+ var ratioParts = aspectRatio.split(':');
+ var ratioMultiplier = ratioParts[1] / ratioParts[0];
+
+ if (this.width_ !== undefined) {
+ // Use any width that's been specifically set
+ width = this.width_;
+ } else if (this.height_ !== undefined) {
+ // Or calculate the width from the aspect ratio if a height has been set
+ width = this.height_ / ratioMultiplier;
+ } else {
+ // Or use the video's metadata, or use the video el's default of 300
+ width = this.videoWidth() || 300;
+ }
+
+ if (this.height_ !== undefined) {
+ // Use any height that's been specifically set
+ height = this.height_;
+ } else {
+ // Otherwise calculate the height from the ratio and the width
+ height = width * ratioMultiplier;
+ } // Ensure the CSS class is valid by starting with an alpha character
+
+
+ if (/^[^a-zA-Z]/.test(this.id())) {
+ idClass = 'dimensions-' + this.id();
+ } else {
+ idClass = this.id() + '-dimensions';
+ } // Ensure the right class is still on the player for the style element
+
+
+ this.addClass(idClass);
+ setTextContent(this.styleEl_, "\n ." + idClass + " {\n width: " + width + "px;\n height: " + height + "px;\n }\n\n ." + idClass + ".vjs-fluid:not(.vjs-audio-only-mode) {\n padding-top: " + ratioMultiplier * 100 + "%;\n }\n ");
+ }
+ /**
+ * Load/Create an instance of playback {@link Tech} including element
+ * and API methods. Then append the `Tech` element in `Player` as a child.
+ *
+ * @param {string} techName
+ * name of the playback technology
+ *
+ * @param {string} source
+ * video source
+ *
+ * @private
+ */
+ ;
+
+ _proto.loadTech_ = function loadTech_(techName, source) {
+ var _this4 = this;
+
+ // Pause and remove current playback technology
+ if (this.tech_) {
+ this.unloadTech_();
+ }
+
+ var titleTechName = toTitleCase$1(techName);
+ var camelTechName = techName.charAt(0).toLowerCase() + techName.slice(1); // get rid of the HTML5 video tag as soon as we are using another tech
+
+ if (titleTechName !== 'Html5' && this.tag) {
+ Tech.getTech('Html5').disposeMediaElement(this.tag);
+ this.tag.player = null;
+ this.tag = null;
+ }
+
+ this.techName_ = titleTechName; // Turn off API access because we're loading a new tech that might load asynchronously
+
+ this.isReady_ = false;
+ var autoplay = this.autoplay(); // if autoplay is a string (or `true` with normalizeAutoplay: true) we pass false to the tech
+ // because the player is going to handle autoplay on `loadstart`
+
+ if (typeof this.autoplay() === 'string' || this.autoplay() === true && this.options_.normalizeAutoplay) {
+ autoplay = false;
+ } // Grab tech-specific options from player options and add source and parent element to use.
+
+
+ var techOptions = {
+ source: source,
+ autoplay: autoplay,
+ 'nativeControlsForTouch': this.options_.nativeControlsForTouch,
+ 'playerId': this.id(),
+ 'techId': this.id() + "_" + camelTechName + "_api",
+ 'playsinline': this.options_.playsinline,
+ 'preload': this.options_.preload,
+ 'loop': this.options_.loop,
+ 'disablePictureInPicture': this.options_.disablePictureInPicture,
+ 'muted': this.options_.muted,
+ 'poster': this.poster(),
+ 'language': this.language(),
+ 'playerElIngest': this.playerElIngest_ || false,
+ 'vtt.js': this.options_['vtt.js'],
+ 'canOverridePoster': !!this.options_.techCanOverridePoster,
+ 'enableSourceset': this.options_.enableSourceset,
+ 'Promise': this.options_.Promise
+ };
+ ALL.names.forEach(function (name) {
+ var props = ALL[name];
+ techOptions[props.getterName] = _this4[props.privateName];
+ });
+ assign(techOptions, this.options_[titleTechName]);
+ assign(techOptions, this.options_[camelTechName]);
+ assign(techOptions, this.options_[techName.toLowerCase()]);
+
+ if (this.tag) {
+ techOptions.tag = this.tag;
+ }
+
+ if (source && source.src === this.cache_.src && this.cache_.currentTime > 0) {
+ techOptions.startTime = this.cache_.currentTime;
+ } // Initialize tech instance
+
+
+ var TechClass = Tech.getTech(techName);
+
+ if (!TechClass) {
+ throw new Error("No Tech named '" + titleTechName + "' exists! '" + titleTechName + "' should be registered using videojs.registerTech()'");
+ }
+
+ this.tech_ = new TechClass(techOptions); // player.triggerReady is always async, so don't need this to be async
+
+ this.tech_.ready(bind(this, this.handleTechReady_), true);
+ textTrackConverter.jsonToTextTracks(this.textTracksJson_ || [], this.tech_); // Listen to all HTML5-defined events and trigger them on the player
+
+ TECH_EVENTS_RETRIGGER.forEach(function (event) {
+ _this4.on(_this4.tech_, event, function (e) {
+ return _this4["handleTech" + toTitleCase$1(event) + "_"](e);
+ });
+ });
+ Object.keys(TECH_EVENTS_QUEUE).forEach(function (event) {
+ _this4.on(_this4.tech_, event, function (eventObj) {
+ if (_this4.tech_.playbackRate() === 0 && _this4.tech_.seeking()) {
+ _this4.queuedCallbacks_.push({
+ callback: _this4["handleTech" + TECH_EVENTS_QUEUE[event] + "_"].bind(_this4),
+ event: eventObj
+ });
+
+ return;
+ }
+
+ _this4["handleTech" + TECH_EVENTS_QUEUE[event] + "_"](eventObj);
+ });
+ });
+ this.on(this.tech_, 'loadstart', function (e) {
+ return _this4.handleTechLoadStart_(e);
+ });
+ this.on(this.tech_, 'sourceset', function (e) {
+ return _this4.handleTechSourceset_(e);
+ });
+ this.on(this.tech_, 'waiting', function (e) {
+ return _this4.handleTechWaiting_(e);
+ });
+ this.on(this.tech_, 'ended', function (e) {
+ return _this4.handleTechEnded_(e);
+ });
+ this.on(this.tech_, 'seeking', function (e) {
+ return _this4.handleTechSeeking_(e);
+ });
+ this.on(this.tech_, 'play', function (e) {
+ return _this4.handleTechPlay_(e);
+ });
+ this.on(this.tech_, 'firstplay', function (e) {
+ return _this4.handleTechFirstPlay_(e);
+ });
+ this.on(this.tech_, 'pause', function (e) {
+ return _this4.handleTechPause_(e);
+ });
+ this.on(this.tech_, 'durationchange', function (e) {
+ return _this4.handleTechDurationChange_(e);
+ });
+ this.on(this.tech_, 'fullscreenchange', function (e, data) {
+ return _this4.handleTechFullscreenChange_(e, data);
+ });
+ this.on(this.tech_, 'fullscreenerror', function (e, err) {
+ return _this4.handleTechFullscreenError_(e, err);
+ });
+ this.on(this.tech_, 'enterpictureinpicture', function (e) {
+ return _this4.handleTechEnterPictureInPicture_(e);
+ });
+ this.on(this.tech_, 'leavepictureinpicture', function (e) {
+ return _this4.handleTechLeavePictureInPicture_(e);
+ });
+ this.on(this.tech_, 'error', function (e) {
+ return _this4.handleTechError_(e);
+ });
+ this.on(this.tech_, 'posterchange', function (e) {
+ return _this4.handleTechPosterChange_(e);
+ });
+ this.on(this.tech_, 'textdata', function (e) {
+ return _this4.handleTechTextData_(e);
+ });
+ this.on(this.tech_, 'ratechange', function (e) {
+ return _this4.handleTechRateChange_(e);
+ });
+ this.on(this.tech_, 'loadedmetadata', this.boundUpdateStyleEl_);
+ this.usingNativeControls(this.techGet_('controls'));
+
+ if (this.controls() && !this.usingNativeControls()) {
+ this.addTechControlsListeners_();
+ } // Add the tech element in the DOM if it was not already there
+ // Make sure to not insert the original video element if using Html5
+
+
+ if (this.tech_.el().parentNode !== this.el() && (titleTechName !== 'Html5' || !this.tag)) {
+ prependTo(this.tech_.el(), this.el());
+ } // Get rid of the original video tag reference after the first tech is loaded
+
+
+ if (this.tag) {
+ this.tag.player = null;
+ this.tag = null;
+ }
+ }
+ /**
+ * Unload and dispose of the current playback {@link Tech}.
+ *
+ * @private
+ */
+ ;
+
+ _proto.unloadTech_ = function unloadTech_() {
+ var _this5 = this;
+
+ // Save the current text tracks so that we can reuse the same text tracks with the next tech
+ ALL.names.forEach(function (name) {
+ var props = ALL[name];
+ _this5[props.privateName] = _this5[props.getterName]();
+ });
+ this.textTracksJson_ = textTrackConverter.textTracksToJson(this.tech_);
+ this.isReady_ = false;
+ this.tech_.dispose();
+ this.tech_ = false;
+
+ if (this.isPosterFromTech_) {
+ this.poster_ = '';
+ this.trigger('posterchange');
+ }
+
+ this.isPosterFromTech_ = false;
+ }
+ /**
+ * Return a reference to the current {@link Tech}.
+ * It will print a warning by default about the danger of using the tech directly
+ * but any argument that is passed in will silence the warning.
+ *
+ * @param {*} [safety]
+ * Anything passed in to silence the warning
+ *
+ * @return {Tech}
+ * The Tech
+ */
+ ;
+
+ _proto.tech = function tech(safety) {
+ if (safety === undefined) {
+ log$1.warn('Using the tech directly can be dangerous. I hope you know what you\'re doing.\n' + 'See https://github.com/videojs/video.js/issues/2617 for more info.\n');
+ }
+
+ return this.tech_;
+ }
+ /**
+ * Set up click and touch listeners for the playback element
+ *
+ * - On desktops: a click on the video itself will toggle playback
+ * - On mobile devices: a click on the video toggles controls
+ * which is done by toggling the user state between active and
+ * inactive
+ * - A tap can signal that a user has become active or has become inactive
+ * e.g. a quick tap on an iPhone movie should reveal the controls. Another
+ * quick tap should hide them again (signaling the user is in an inactive
+ * viewing state)
+ * - In addition to this, we still want the user to be considered inactive after
+ * a few seconds of inactivity.
+ *
+ * > Note: the only part of iOS interaction we can't mimic with this setup
+ * is a touch and hold on the video element counting as activity in order to
+ * keep the controls showing, but that shouldn't be an issue. A touch and hold
+ * on any controls will still keep the user active
+ *
+ * @private
+ */
+ ;
+
+ _proto.addTechControlsListeners_ = function addTechControlsListeners_() {
+ // Make sure to remove all the previous listeners in case we are called multiple times.
+ this.removeTechControlsListeners_();
+ this.on(this.tech_, 'click', this.boundHandleTechClick_);
+ this.on(this.tech_, 'dblclick', this.boundHandleTechDoubleClick_); // If the controls were hidden we don't want that to change without a tap event
+ // so we'll check if the controls were already showing before reporting user
+ // activity
+
+ this.on(this.tech_, 'touchstart', this.boundHandleTechTouchStart_);
+ this.on(this.tech_, 'touchmove', this.boundHandleTechTouchMove_);
+ this.on(this.tech_, 'touchend', this.boundHandleTechTouchEnd_); // The tap listener needs to come after the touchend listener because the tap
+ // listener cancels out any reportedUserActivity when setting userActive(false)
+
+ this.on(this.tech_, 'tap', this.boundHandleTechTap_);
+ }
+ /**
+ * Remove the listeners used for click and tap controls. This is needed for
+ * toggling to controls disabled, where a tap/touch should do nothing.
+ *
+ * @private
+ */
+ ;
+
+ _proto.removeTechControlsListeners_ = function removeTechControlsListeners_() {
+ // We don't want to just use `this.off()` because there might be other needed
+ // listeners added by techs that extend this.
+ this.off(this.tech_, 'tap', this.boundHandleTechTap_);
+ this.off(this.tech_, 'touchstart', this.boundHandleTechTouchStart_);
+ this.off(this.tech_, 'touchmove', this.boundHandleTechTouchMove_);
+ this.off(this.tech_, 'touchend', this.boundHandleTechTouchEnd_);
+ this.off(this.tech_, 'click', this.boundHandleTechClick_);
+ this.off(this.tech_, 'dblclick', this.boundHandleTechDoubleClick_);
+ }
+ /**
+ * Player waits for the tech to be ready
+ *
+ * @private
+ */
+ ;
+
+ _proto.handleTechReady_ = function handleTechReady_() {
+ this.triggerReady(); // Keep the same volume as before
+
+ if (this.cache_.volume) {
+ this.techCall_('setVolume', this.cache_.volume);
+ } // Look if the tech found a higher resolution poster while loading
+
+
+ this.handleTechPosterChange_(); // Update the duration if available
+
+ this.handleTechDurationChange_();
+ }
+ /**
+ * Retrigger the `loadstart` event that was triggered by the {@link Tech}. This
+ * function will also trigger {@link Player#firstplay} if it is the first loadstart
+ * for a video.
+ *
+ * @fires Player#loadstart
+ * @fires Player#firstplay
+ * @listens Tech#loadstart
+ * @private
+ */
+ ;
+
+ _proto.handleTechLoadStart_ = function handleTechLoadStart_() {
+ // TODO: Update to use `emptied` event instead. See #1277.
+ this.removeClass('vjs-ended');
+ this.removeClass('vjs-seeking'); // reset the error state
+
+ this.error(null); // Update the duration
+
+ this.handleTechDurationChange_(); // If it's already playing we want to trigger a firstplay event now.
+ // The firstplay event relies on both the play and loadstart events
+ // which can happen in any order for a new source
+
+ if (!this.paused()) {
+ /**
+ * Fired when the user agent begins looking for media data
+ *
+ * @event Player#loadstart
+ * @type {EventTarget~Event}
+ */
+ this.trigger('loadstart');
+ this.trigger('firstplay');
+ } else {
+ // reset the hasStarted state
+ this.hasStarted(false);
+ this.trigger('loadstart');
+ } // autoplay happens after loadstart for the browser,
+ // so we mimic that behavior
+
+
+ this.manualAutoplay_(this.autoplay() === true && this.options_.normalizeAutoplay ? 'play' : this.autoplay());
+ }
+ /**
+ * Handle autoplay string values, rather than the typical boolean
+ * values that should be handled by the tech. Note that this is not
+ * part of any specification. Valid values and what they do can be
+ * found on the autoplay getter at Player#autoplay()
+ */
+ ;
+
+ _proto.manualAutoplay_ = function manualAutoplay_(type) {
+ var _this6 = this;
+
+ if (!this.tech_ || typeof type !== 'string') {
+ return;
+ } // Save original muted() value, set muted to true, and attempt to play().
+ // On promise rejection, restore muted from saved value
+
+
+ var resolveMuted = function resolveMuted() {
+ var previouslyMuted = _this6.muted();
+
+ _this6.muted(true);
+
+ var restoreMuted = function restoreMuted() {
+ _this6.muted(previouslyMuted);
+ }; // restore muted on play termination
+
+
+ _this6.playTerminatedQueue_.push(restoreMuted);
+
+ var mutedPromise = _this6.play();
+
+ if (!isPromise(mutedPromise)) {
+ return;
+ }
+
+ return mutedPromise["catch"](function (err) {
+ restoreMuted();
+ throw new Error("Rejection at manualAutoplay. Restoring muted value. " + (err ? err : ''));
+ });
+ };
+
+ var promise; // if muted defaults to true
+ // the only thing we can do is call play
+
+ if (type === 'any' && !this.muted()) {
+ promise = this.play();
+
+ if (isPromise(promise)) {
+ promise = promise["catch"](resolveMuted);
+ }
+ } else if (type === 'muted' && !this.muted()) {
+ promise = resolveMuted();
+ } else {
+ promise = this.play();
+ }
+
+ if (!isPromise(promise)) {
+ return;
+ }
+
+ return promise.then(function () {
+ _this6.trigger({
+ type: 'autoplay-success',
+ autoplay: type
+ });
+ })["catch"](function () {
+ _this6.trigger({
+ type: 'autoplay-failure',
+ autoplay: type
+ });
+ });
+ }
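+ /*
+ * The string autoplay values handled above correspond to the player's `autoplay`
+ * option; a configuration sketch ('my-video' is a hypothetical id):
+ *
+ *   videojs('my-video', { autoplay: 'muted' });  // mute, then try to play
+ *   videojs('my-video', { autoplay: 'any' });    // try unmuted first, fall back to muted
+ *   videojs('my-video', { autoplay: 'play' });   // just call play(); may be rejected
+ */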
+ /**
+ * Update the internal source caches so that we return the correct source from
+ * `src()`, `currentSource()`, and `currentSources()`.
+ *
+ * > Note: `currentSources` will not be updated if the source that is passed in exists
+ * in the current `currentSources` cache.
+ *
+ *
+ * @param {Tech~SourceObject} srcObj
+ * A string or object source to update our caches to.
+ */
+ ;
+
+ _proto.updateSourceCaches_ = function updateSourceCaches_(srcObj) {
+ if (srcObj === void 0) {
+ srcObj = '';
+ }
+
+ var src = srcObj;
+ var type = '';
+
+ if (typeof src !== 'string') {
+ src = srcObj.src;
+ type = srcObj.type;
+ } // make sure all the caches are set to default values
+ // to prevent null checking
+
+
+ this.cache_.source = this.cache_.source || {};
+ this.cache_.sources = this.cache_.sources || []; // try to get the type of the src that was passed in
+
+ if (src && !type) {
+ type = findMimetype(this, src);
+ } // update `currentSource` cache always
+
+
+ this.cache_.source = mergeOptions$3({}, srcObj, {
+ src: src,
+ type: type
+ });
+ var matchingSources = this.cache_.sources.filter(function (s) {
+ return s.src && s.src === src;
+ });
+ var sourceElSources = [];
+ var sourceEls = this.$$('source');
+ var matchingSourceEls = [];
+
+ for (var i = 0; i < sourceEls.length; i++) {
+ var sourceObj = getAttributes(sourceEls[i]);
+ sourceElSources.push(sourceObj);
+
+ if (sourceObj.src && sourceObj.src === src) {
+ matchingSourceEls.push(sourceObj.src);
+ }
+ } // if we have matching source els but not matching sources
+ // the current source cache is not up to date
+
+
+ if (matchingSourceEls.length && !matchingSources.length) {
+ this.cache_.sources = sourceElSources; // if we don't have matching source or source els set the
+ // sources cache to the `currentSource` cache
+ } else if (!matchingSources.length) {
+ this.cache_.sources = [this.cache_.source];
+ } // update the tech `src` cache
+
+
+ this.cache_.src = src;
+ }
+ /**
+ * *EXPERIMENTAL* Fired when the source is set or changed on the {@link Tech}
+ * causing the media element to reload.
+ *
+ * It will fire for the initial source and each subsequent source.
+ * This event is a custom event from Video.js and is triggered by the {@link Tech}.
+ *
+ * The event object for this event contains a `src` property that will contain the source
+ * that was available when the event was triggered. This is generally only necessary if Video.js
+ * is switching techs while the source was being changed.
+ *
+ * It is also fired when `load` is called on the player (or media element)
+ * because the {@link https://html.spec.whatwg.org/multipage/media.html#dom-media-load|specification for `load`}
+ * says that the resource selection algorithm needs to be aborted and restarted.
+ * In this case, it is very likely that the `src` property will be set to the
+ * empty string `""` to indicate we do not know what the source will be but
+ * that it is changing.
+ *
+ * *This event is currently still experimental and may change in minor releases.*
+ * __To use this, pass `enableSourceset` option to the player.__
+ *
+ * @event Player#sourceset
+ * @type {EventTarget~Event}
+ * @prop {string} src
+ * The source url available when the `sourceset` was triggered.
+ * It will be an empty string if we cannot know what the source is
+ * but know that the source will change.
+ */
+
+ /**
+ * Retrigger the `sourceset` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#sourceset
+ * @listens Tech#sourceset
+ * @private
+ */
+ ;
+
+ _proto.handleTechSourceset_ = function handleTechSourceset_(event) {
+ var _this7 = this;
+
+ // only update the source cache when the source
+ // was not updated using the player api
+ if (!this.changingSrc_) {
+ var updateSourceCaches = function updateSourceCaches(src) {
+ return _this7.updateSourceCaches_(src);
+ };
+
+ var playerSrc = this.currentSource().src;
+ var eventSrc = event.src; // if we have a playerSrc that is not a blob, and a tech src that is a blob
+
+ if (playerSrc && !/^blob:/.test(playerSrc) && /^blob:/.test(eventSrc)) {
+ // if both the tech source and the player source were updated we assume
+ // something like @videojs/http-streaming did the sourceset and skip updating the source cache.
+ if (!this.lastSource_ || this.lastSource_.tech !== eventSrc && this.lastSource_.player !== playerSrc) {
+ updateSourceCaches = function updateSourceCaches() {};
+ }
+ } // update the source to the initial source right away
+ // in some cases this will be empty string
+
+
+ updateSourceCaches(eventSrc); // if the `sourceset` `src` was an empty string
+ // wait for a `loadstart` to update the cache to `currentSrc`.
+ // If a sourceset happens before a `loadstart`, we reset the state
+
+ if (!event.src) {
+ this.tech_.any(['sourceset', 'loadstart'], function (e) {
+ // if a sourceset happens before a `loadstart` there
+ // is nothing to do as this `handleTechSourceset_`
+ // will be called again and this will be handled there.
+ if (e.type === 'sourceset') {
+ return;
+ }
+
+ var techSrc = _this7.techGet('currentSrc');
+
+ _this7.lastSource_.tech = techSrc;
+
+ _this7.updateSourceCaches_(techSrc);
+ });
+ }
+ }
+
+ this.lastSource_ = {
+ player: this.currentSource().src,
+ tech: event.src
+ };
+ this.trigger({
+ src: event.src,
+ type: 'sourceset'
+ });
+ }
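+ /*
+ * Listening for the experimental event documented above requires the
+ * `enableSourceset` option; a sketch ('my-video' is a hypothetical id):
+ *
+ *   var player = videojs('my-video', { enableSourceset: true });
+ *   player.on('sourceset', function (e) {
+ *     console.log('source changing to:', e.src); // may be '' if not yet known
+ *   });
+ */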
+ /**
+ * Add/remove the vjs-has-started class
+ *
+ * @fires Player#firstplay
+ *
+ * @param {boolean} request
+ * - true: adds the class
+ * - false: remove the class
+ *
+ * @return {boolean}
+ * the boolean value of hasStarted_
+ */
+ ;
+
+ _proto.hasStarted = function hasStarted(request) {
+ if (request === undefined) {
+ // act as getter, if we have no request to change
+ return this.hasStarted_;
+ }
+
+ if (request === this.hasStarted_) {
+ return;
+ }
+
+ this.hasStarted_ = request;
+
+ if (this.hasStarted_) {
+ this.addClass('vjs-has-started');
+ this.trigger('firstplay');
+ } else {
+ this.removeClass('vjs-has-started');
+ }
+ }
+ /**
+ * Fired whenever the media begins or resumes playback
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-play}
+ * @fires Player#play
+ * @listens Tech#play
+ * @private
+ */
+ ;
+
+ _proto.handleTechPlay_ = function handleTechPlay_() {
+ this.removeClass('vjs-ended');
+ this.removeClass('vjs-paused');
+ this.addClass('vjs-playing'); // hide the poster when the user hits play
+
+ this.hasStarted(true);
+ /**
+ * Triggered whenever an {@link Tech#play} event happens. Indicates that
+ * playback has started or resumed.
+ *
+ * @event Player#play
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('play');
+ }
+ /**
+ * Retrigger the `ratechange` event that was triggered by the {@link Tech}.
+ *
+ * If there were any events queued while the playback rate was zero, fire
+ * those events now.
+ *
+ * @private
+ * @method Player#handleTechRateChange_
+ * @fires Player#ratechange
+ * @listens Tech#ratechange
+ */
+ ;
+
+ _proto.handleTechRateChange_ = function handleTechRateChange_() {
+ if (this.tech_.playbackRate() > 0 && this.cache_.lastPlaybackRate === 0) {
+ this.queuedCallbacks_.forEach(function (queued) {
+ return queued.callback(queued.event);
+ });
+ this.queuedCallbacks_ = [];
+ }
+
+ this.cache_.lastPlaybackRate = this.tech_.playbackRate();
+ /**
+ * Fires when the playing speed of the audio/video is changed
+ *
+ * @event Player#ratechange
+ * @type {event}
+ */
+
+ this.trigger('ratechange');
+ }
+ /**
+ * Retrigger the `waiting` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#waiting
+ * @listens Tech#waiting
+ * @private
+ */
+ ;
+
+ _proto.handleTechWaiting_ = function handleTechWaiting_() {
+ var _this8 = this;
+
+ this.addClass('vjs-waiting');
+ /**
+ * A readyState change on the DOM element has caused playback to stop.
+ *
+ * @event Player#waiting
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('waiting'); // Browsers may emit a timeupdate event after a waiting event. In order to prevent
+ // premature removal of the waiting class, wait for the time to change.
+
+ var timeWhenWaiting = this.currentTime();
+
+ var timeUpdateListener = function timeUpdateListener() {
+ if (timeWhenWaiting !== _this8.currentTime()) {
+ _this8.removeClass('vjs-waiting');
+
+ _this8.off('timeupdate', timeUpdateListener);
+ }
+ };
+
+ this.on('timeupdate', timeUpdateListener);
+ }
+ /**
+ * Retrigger the `canplay` event that was triggered by the {@link Tech}.
+ * > Note: This is not consistent between browsers. See #1351
+ *
+ * @fires Player#canplay
+ * @listens Tech#canplay
+ * @private
+ */
+ ;
+
+ _proto.handleTechCanPlay_ = function handleTechCanPlay_() {
+ this.removeClass('vjs-waiting');
+ /**
+ * The media has a readyState of HAVE_FUTURE_DATA or greater.
+ *
+ * @event Player#canplay
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('canplay');
+ }
+ /**
+ * Retrigger the `canplaythrough` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#canplaythrough
+ * @listens Tech#canplaythrough
+ * @private
+ */
+ ;
+
+ _proto.handleTechCanPlayThrough_ = function handleTechCanPlayThrough_() {
+ this.removeClass('vjs-waiting');
+ /**
+ * The media has a readyState of HAVE_ENOUGH_DATA or greater. This means that the
+ * entire media file can be played without buffering.
+ *
+ * @event Player#canplaythrough
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('canplaythrough');
+ }
+ /**
+ * Retrigger the `playing` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#playing
+ * @listens Tech#playing
+ * @private
+ */
+ ;
+
+ _proto.handleTechPlaying_ = function handleTechPlaying_() {
+ this.removeClass('vjs-waiting');
+ /**
+ * The media is no longer blocked from playback, and has started playing.
+ *
+ * @event Player#playing
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('playing');
+ }
+ /**
+ * Retrigger the `seeking` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#seeking
+ * @listens Tech#seeking
+ * @private
+ */
+ ;
+
+ _proto.handleTechSeeking_ = function handleTechSeeking_() {
+ this.addClass('vjs-seeking');
+ /**
+ * Fired whenever the player is jumping to a new time
+ *
+ * @event Player#seeking
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('seeking');
+ }
+ /**
+ * Retrigger the `seeked` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#seeked
+ * @listens Tech#seeked
+ * @private
+ */
+ ;
+
+ _proto.handleTechSeeked_ = function handleTechSeeked_() {
+ this.removeClass('vjs-seeking');
+ this.removeClass('vjs-ended');
+ /**
+ * Fired when the player has finished jumping to a new time
+ *
+ * @event Player#seeked
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('seeked');
+ }
+ /**
+ * Retrigger the `firstplay` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#firstplay
+ * @listens Tech#firstplay
+ * @deprecated As of 6.0 firstplay event is deprecated.
+ * As of 6.0 passing the `starttime` option to the player and the firstplay event are deprecated.
+ * @private
+ */
+ ;
+
+ _proto.handleTechFirstPlay_ = function handleTechFirstPlay_() {
+ // If the first starttime attribute is specified
+ // then we will start at the given offset in seconds
+ if (this.options_.starttime) {
+ log$1.warn('Passing the `starttime` option to the player will be deprecated in 6.0');
+ this.currentTime(this.options_.starttime);
+ }
+
+ this.addClass('vjs-has-started');
+ /**
+ * Fired the first time a video is played. Not part of the HLS spec, and this is
+ * probably not the best implementation yet, so use sparingly. If you don't have a
+ * reason to prevent playback, use `myPlayer.one('play');` instead.
+ *
+ * @event Player#firstplay
+ * @deprecated As of 6.0 firstplay event is deprecated.
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('firstplay');
+ }
+ /**
+ * Retrigger the `pause` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#pause
+ * @listens Tech#pause
+ * @private
+ */
+ ;
+
+ _proto.handleTechPause_ = function handleTechPause_() {
+ this.removeClass('vjs-playing');
+ this.addClass('vjs-paused');
+ /**
+ * Fired whenever the media has been paused
+ *
+ * @event Player#pause
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('pause');
+ }
+ /**
+ * Retrigger the `ended` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#ended
+ * @listens Tech#ended
+ * @private
+ */
+ ;
+
+ _proto.handleTechEnded_ = function handleTechEnded_() {
+ this.addClass('vjs-ended');
+ this.removeClass('vjs-waiting');
+
+ if (this.options_.loop) {
+ this.currentTime(0);
+ this.play();
+ } else if (!this.paused()) {
+ this.pause();
+ }
+ /**
+ * Fired when the end of the media resource is reached (currentTime == duration)
+ *
+ * @event Player#ended
+ * @type {EventTarget~Event}
+ */
+
+
+ this.trigger('ended');
+ }
+ /**
+ * Fired when the duration of the media resource is first known or changed
+ *
+ * @listens Tech#durationchange
+ * @private
+ */
+ ;
+
+ _proto.handleTechDurationChange_ = function handleTechDurationChange_() {
+ this.duration(this.techGet_('duration'));
+ }
+ /**
+ * Handle a click on the media element to play/pause
+ *
+ * @param {EventTarget~Event} event
+ * the event that caused this function to trigger
+ *
+ * @listens Tech#click
+ * @private
+ */
+ ;
+
+ _proto.handleTechClick_ = function handleTechClick_(event) {
+ // When controls are disabled a click should not toggle playback because
+ // the click is considered a control
+ if (!this.controls_) {
+ return;
+ }
+
+ if (this.options_ === undefined || this.options_.userActions === undefined || this.options_.userActions.click === undefined || this.options_.userActions.click !== false) {
+ if (this.options_ !== undefined && this.options_.userActions !== undefined && typeof this.options_.userActions.click === 'function') {
+ this.options_.userActions.click.call(this, event);
+ } else if (this.paused()) {
+ silencePromise(this.play());
+ } else {
+ this.pause();
+ }
+ }
+ }
+ /**
+ * Handle a double-click on the media element to enter/exit fullscreen
+ *
+ * @param {EventTarget~Event} event
+ * the event that caused this function to trigger
+ *
+ * @listens Tech#dblclick
+ * @private
+ */
+ ;
+
+ _proto.handleTechDoubleClick_ = function handleTechDoubleClick_(event) {
+ if (!this.controls_) {
+ return;
+ } // we do not want to toggle fullscreen state
+ // when double-clicking inside a control bar or a modal
+
+
+ var inAllowedEls = Array.prototype.some.call(this.$$('.vjs-control-bar, .vjs-modal-dialog'), function (el) {
+ return el.contains(event.target);
+ });
+
+ if (!inAllowedEls) {
+ /*
+ * options.userActions.doubleClick
+ *
+ * If `undefined` or `true`, double-click toggles fullscreen if controls are present
+ * Set to `false` to disable double-click handling
+ * Set to a function to substitute an external double-click handler
+ */
+ if (this.options_ === undefined || this.options_.userActions === undefined || this.options_.userActions.doubleClick === undefined || this.options_.userActions.doubleClick !== false) {
+ if (this.options_ !== undefined && this.options_.userActions !== undefined && typeof this.options_.userActions.doubleClick === 'function') {
+ this.options_.userActions.doubleClick.call(this, event);
+ } else if (this.isFullscreen()) {
+ this.exitFullscreen();
+ } else {
+ this.requestFullscreen();
+ }
+ }
+ }
+ }
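+ /*
+ * Both click handlers above honour the `userActions` player options; a configuration
+ * sketch ('my-video' is a hypothetical id):
+ *
+ *   videojs('my-video', {
+ *     userActions: {
+ *       click: false,                               // clicks no longer toggle playback
+ *       doubleClick: function (e) { this.pause(); } // replace the fullscreen toggle
+ *     }
+ *   });
+ */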
+ /**
+ * Handle a tap on the media element. It will toggle the user
+ * activity state, which hides and shows the controls.
+ *
+ * @listens Tech#tap
+ * @private
+ */
+ ;
+
+ _proto.handleTechTap_ = function handleTechTap_() {
+ this.userActive(!this.userActive());
+ }
+ /**
+ * Handle touch to start
+ *
+ * @listens Tech#touchstart
+ * @private
+ */
+ ;
+
+ _proto.handleTechTouchStart_ = function handleTechTouchStart_() {
+ this.userWasActive = this.userActive();
+ }
+ /**
+ * Handle touch to move
+ *
+ * @listens Tech#touchmove
+ * @private
+ */
+ ;
+
+ _proto.handleTechTouchMove_ = function handleTechTouchMove_() {
+ if (this.userWasActive) {
+ this.reportUserActivity();
+ }
+ }
+ /**
+ * Handle touch to end
+ *
+ * @param {EventTarget~Event} event
+ * the touchend event that triggered
+ * this function
+ *
+ * @listens Tech#touchend
+ * @private
+ */
+ ;
+
+ _proto.handleTechTouchEnd_ = function handleTechTouchEnd_(event) {
+ // Stop the mouse events from also happening
+ if (event.cancelable) {
+ event.preventDefault();
+ }
+ }
+ /**
+ * native click events on the SWF aren't triggered on IE11, Win8.1RT
+ * use stageclick events triggered from inside the SWF instead
+ *
+ * @private
+ * @listens stageclick
+ */
+ ;
+
+ _proto.handleStageClick_ = function handleStageClick_() {
+ this.reportUserActivity();
+ }
+ /**
+ * @private
+ */
+ ;
+
+ _proto.toggleFullscreenClass_ = function toggleFullscreenClass_() {
+ if (this.isFullscreen()) {
+ this.addClass('vjs-fullscreen');
+ } else {
+ this.removeClass('vjs-fullscreen');
+ }
+ }
+ /**
+ * when the document fschange event triggers it calls this
+ */
+ ;
+
+ _proto.documentFullscreenChange_ = function documentFullscreenChange_(e) {
+ var targetPlayer = e.target.player; // if another player was fullscreen
+ // do a null check for targetPlayer because older Firefox versions would set document as e.target
+
+ if (targetPlayer && targetPlayer !== this) {
+ return;
+ }
+
+ var el = this.el();
+ var isFs = document[this.fsApi_.fullscreenElement] === el;
+
+ if (!isFs && el.matches) {
+ isFs = el.matches(':' + this.fsApi_.fullscreen);
+ } else if (!isFs && el.msMatchesSelector) {
+ isFs = el.msMatchesSelector(':' + this.fsApi_.fullscreen);
+ }
+
+ this.isFullscreen(isFs);
+ }
+ /**
+ * Handle Tech Fullscreen Change
+ *
+ * @param {EventTarget~Event} event
+ * the fullscreenchange event that triggered this function
+ *
+ * @param {Object} data
+ * the data that was sent with the event
+ *
+ * @private
+ * @listens Tech#fullscreenchange
+ * @fires Player#fullscreenchange
+ */
+ ;
+
+ _proto.handleTechFullscreenChange_ = function handleTechFullscreenChange_(event, data) {
+ var _this9 = this;
+
+ if (data) {
+ if (data.nativeIOSFullscreen) {
+ this.addClass('vjs-ios-native-fs');
+ this.tech_.one('webkitendfullscreen', function () {
+ _this9.removeClass('vjs-ios-native-fs');
+ });
+ }
+
+ this.isFullscreen(data.isFullscreen);
+ }
+ };
+
+ _proto.handleTechFullscreenError_ = function handleTechFullscreenError_(event, err) {
+ this.trigger('fullscreenerror', err);
+ }
+ /**
+ * @private
+ */
+ ;
+
+ _proto.togglePictureInPictureClass_ = function togglePictureInPictureClass_() {
+ if (this.isInPictureInPicture()) {
+ this.addClass('vjs-picture-in-picture');
+ } else {
+ this.removeClass('vjs-picture-in-picture');
+ }
+ }
+ /**
+ * Handle Tech Enter Picture-in-Picture.
+ *
+ * @param {EventTarget~Event} event
+ * the enterpictureinpicture event that triggered this function
+ *
+ * @private
+ * @listens Tech#enterpictureinpicture
+ */
+ ;
+
+ _proto.handleTechEnterPictureInPicture_ = function handleTechEnterPictureInPicture_(event) {
+ this.isInPictureInPicture(true);
+ }
+ /**
+ * Handle Tech Leave Picture-in-Picture.
+ *
+ * @param {EventTarget~Event} event
+ * the leavepictureinpicture event that triggered this function
+ *
+ * @private
+ * @listens Tech#leavepictureinpicture
+ */
+ ;
+
+ _proto.handleTechLeavePictureInPicture_ = function handleTechLeavePictureInPicture_(event) {
+ this.isInPictureInPicture(false);
+ }
+ /**
+ * Fires when an error occurred during the loading of an audio/video.
+ *
+ * @private
+ * @listens Tech#error
+ */
+ ;
+
+ _proto.handleTechError_ = function handleTechError_() {
+ var error = this.tech_.error();
+ this.error(error);
+ }
+ /**
+ * Retrigger the `textdata` event that was triggered by the {@link Tech}.
+ *
+ * @fires Player#textdata
+ * @listens Tech#textdata
+ * @private
+ */
+ ;
+
+ _proto.handleTechTextData_ = function handleTechTextData_() {
+ var data = null;
+
+ if (arguments.length > 1) {
+ data = arguments[1];
+ }
+ /**
+ * Fires when we get a textdata event from tech
+ *
+ * @event Player#textdata
+ * @type {EventTarget~Event}
+ */
+
+
+ this.trigger('textdata', data);
+ }
+ /**
+ * Get object for cached values.
+ *
+ * @return {Object}
+ * get the current object cache
+ */
+ ;
+
+ _proto.getCache = function getCache() {
+ return this.cache_;
+ }
+ /**
+ * Resets the internal cache object.
+ *
+ * Using this function outside the player constructor or reset method may
+ * have unintended side-effects.
+ *
+ * @private
+ */
+ ;
+
+ _proto.resetCache_ = function resetCache_() {
+ this.cache_ = {
+ // Right now, the currentTime is not _really_ cached because it is always
+ // retrieved from the tech (see: currentTime). However, for completeness,
+ // we set it to zero here to ensure that if we do start actually caching
+ // it, we reset it along with everything else.
+ currentTime: 0,
+ initTime: 0,
+ inactivityTimeout: this.options_.inactivityTimeout,
+ duration: NaN,
+ lastVolume: 1,
+ lastPlaybackRate: this.defaultPlaybackRate(),
+ media: null,
+ src: '',
+ source: {},
+ sources: [],
+ playbackRates: [],
+ volume: 1
+ };
+ }
+ /**
+ * Pass values to the playback tech
+ *
+ * @param {string} [method]
+ * the method to call
+ *
+ * @param {Object} arg
+ * the argument to pass
+ *
+ * @private
+ */
+ ;
+
+ _proto.techCall_ = function techCall_(method, arg) {
+ // If it's not ready yet, call method when it is
+ this.ready(function () {
+ if (method in allowedSetters) {
+ return set(this.middleware_, this.tech_, method, arg);
+ } else if (method in allowedMediators) {
+ return mediate(this.middleware_, this.tech_, method, arg);
+ }
+
+ try {
+ if (this.tech_) {
+ this.tech_[method](arg);
+ }
+ } catch (e) {
+ log$1(e);
+ throw e;
+ }
+ }, true);
+ }
+ /**
+ * Get calls can't wait for the tech, and sometimes don't need to.
+ *
+ * @param {string} method
+ * Tech method
+ *
+ * @return {*}
+ * the return value of the tech method, or undefined if the tech is not ready
+ *
+ * @private
+ */
+ ;
+
+ _proto.techGet_ = function techGet_(method) {
+ if (!this.tech_ || !this.tech_.isReady_) {
+ return;
+ }
+
+ if (method in allowedGetters) {
+ return get(this.middleware_, this.tech_, method);
+ } else if (method in allowedMediators) {
+ return mediate(this.middleware_, this.tech_, method);
+ } // Flash likes to die and reload when you hide or reposition it.
+ // In these cases the object methods go away and we get errors.
+ // TODO: Is this needed for techs other than Flash?
+ // When that happens we'll catch the errors and inform tech that it's not ready any more.
+
+
+ try {
+ return this.tech_[method]();
+ } catch (e) {
+ // When building additional tech libs, an expected method may not be defined yet
+ if (this.tech_[method] === undefined) {
+ log$1("Video.js: " + method + " method not defined for " + this.techName_ + " playback technology.", e);
+ throw e;
+ } // When a method isn't available on the object it throws a TypeError
+
+
+ if (e.name === 'TypeError') {
+ log$1("Video.js: " + method + " unavailable on " + this.techName_ + " playback technology element.", e);
+ this.tech_.isReady_ = false;
+ throw e;
+ } // If error unknown, just log and throw
+
+
+ log$1(e);
+ throw e;
+ }
+ }
+ /**
+ * Attempt to begin playback at the first opportunity.
+ *
+ * @return {Promise|undefined}
+ * Returns a promise if the browser supports Promises (or one
+ * was passed in as an option). The promise is resolved with the
+ * return value of the tech's play call: if that value is undefined,
+ * the promise resolves immediately; otherwise it is fulfilled when
+ * the promise from play is fulfilled.
+ */
+ ;
+
+ _proto.play = function play() {
+ var _this10 = this;
+
+ var PromiseClass = this.options_.Promise || window$1.Promise;
+
+ if (PromiseClass) {
+ return new PromiseClass(function (resolve) {
+ _this10.play_(resolve);
+ });
+ }
+
+ return this.play_();
+ }
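+ /*
+ * Because play() above returns a promise when one is available, callers should
+ * handle rejection (e.g. blocked autoplay); a sketch ('my-video' is a hypothetical id):
+ *
+ *   var promise = videojs('my-video').play();
+ *   if (promise !== undefined) {
+ *     promise.catch(function (err) {
+ *       // playback was prevented; e.g. show a "tap to play" UI instead
+ *     });
+ *   }
+ */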
+ /**
+ * The actual logic for play, takes a callback that will be resolved on the
+ * return value of play. This allows us to resolve to the play promise if there
+ * is one on modern browsers.
+ *
+ * @private
+ * @param {Function} [callback]
+ * The callback that should be called when the techs play is actually called
+ */
+ ;
+
+ _proto.play_ = function play_(callback) {
+ var _this11 = this;
+
+ if (callback === void 0) {
+ callback = silencePromise;
+ }
+
+ this.playCallbacks_.push(callback);
+ var isSrcReady = Boolean(!this.changingSrc_ && (this.src() || this.currentSrc())); // treat calls to play_ somewhat like the `one` event function
+
+ if (this.waitToPlay_) {
+ this.off(['ready', 'loadstart'], this.waitToPlay_);
+ this.waitToPlay_ = null;
+ } // if the player/tech is not ready or the src itself is not ready
+ // queue up a call to play on `ready` or `loadstart`
+
+
+ if (!this.isReady_ || !isSrcReady) {
+ this.waitToPlay_ = function (e) {
+ _this11.play_();
+ };
+
+ this.one(['ready', 'loadstart'], this.waitToPlay_); // if we are in Safari, there is a high chance that loadstart will trigger after the gesture timeperiod
+ // in that case, we need to prime the video element by calling load so it'll be ready in time
+
+ if (!isSrcReady && (IS_ANY_SAFARI || IS_IOS)) {
+ this.load();
+ }
+
+ return;
+ } // If the player/tech is ready and we have a source, we can attempt playback.
+
+
+ var val = this.techGet_('play'); // play was terminated if the returned value is null
+
+ if (val === null) {
+ this.runPlayTerminatedQueue_();
+ } else {
+ this.runPlayCallbacks_(val);
+ }
+ }
+ /**
+ * These functions will be run if play is terminated. If runPlayCallbacks_
+ * is run instead, these functions will not be run. This allows us to
+ * differentiate between a terminated play and an actual call to play.
+ */
+ ;
+
+ _proto.runPlayTerminatedQueue_ = function runPlayTerminatedQueue_() {
+ var queue = this.playTerminatedQueue_.slice(0);
+ this.playTerminatedQueue_ = [];
+ queue.forEach(function (q) {
+ q();
+ });
+ }
+ /**
+ * When a callback to play is delayed we have to run these
+ * callbacks when play is actually called on the tech. This function
+ * runs the callbacks that were delayed and accepts the return value
+ * from the tech.
+ *
+ * @param {undefined|Promise} val
+ * The return value from the tech.
+ */
+ ;
+
+ _proto.runPlayCallbacks_ = function runPlayCallbacks_(val) {
+ var callbacks = this.playCallbacks_.slice(0);
+ this.playCallbacks_ = []; // clear play terminatedQueue since we finished a real play
+
+ this.playTerminatedQueue_ = [];
+ callbacks.forEach(function (cb) {
+ cb(val);
+ });
+ }
+ /**
+ * Pause the video playback
+ *
+ * @return {Player}
+ * A reference to the player object this function was called on
+ */
+ ;
+
+ _proto.pause = function pause() {
+ this.techCall_('pause');
+ }
+ /**
+ * Check if the player is paused or has yet to play
+ *
+ * @return {boolean}
+ * - false: if the media is currently playing
+ * - true: if media is not currently playing
+ */
+ ;
+
+ _proto.paused = function paused() {
+ // The initial state of paused should be true (in Safari it's actually false)
+ return this.techGet_('paused') === false ? false : true;
+ }
+ /**
+ * Get a TimeRange object representing the current ranges of time that the user
+ * has played.
+ *
+ * @return {TimeRange}
+ * A time range object that represents all the increments of time that have
+ * been played.
+ */
+ ;
+
+ _proto.played = function played() {
+ return this.techGet_('played') || createTimeRanges(0, 0);
+ }
+ /**
+ * Returns whether or not the user is "scrubbing". Scrubbing is
+ * when the user has clicked the progress bar handle and is
+ * dragging it along the progress bar.
+ *
+ * @param {boolean} [isScrubbing]
+ * whether the user is or is not scrubbing
+ *
+ * @return {boolean}
+ * The value of scrubbing when getting
+ */
+ ;
+
+ _proto.scrubbing = function scrubbing(isScrubbing) {
+ if (typeof isScrubbing === 'undefined') {
+ return this.scrubbing_;
+ }
+
+ this.scrubbing_ = !!isScrubbing;
+ this.techCall_('setScrubbing', this.scrubbing_);
+
+ if (isScrubbing) {
+ this.addClass('vjs-scrubbing');
+ } else {
+ this.removeClass('vjs-scrubbing');
+ }
+ }
+ /**
+ * Get or set the current time (in seconds)
+ *
+ * @param {number|string} [seconds]
+ * The time to seek to in seconds
+ *
+ * @return {number}
+ * - the current time in seconds when getting
+ */
+ ;
+
+ _proto.currentTime = function currentTime(seconds) {
+ if (typeof seconds !== 'undefined') {
+ if (seconds < 0) {
+ seconds = 0;
+ }
+
+ if (!this.isReady_ || this.changingSrc_ || !this.tech_ || !this.tech_.isReady_) {
+ this.cache_.initTime = seconds;
+ this.off('canplay', this.boundApplyInitTime_);
+ this.one('canplay', this.boundApplyInitTime_);
+ return;
+ }
+
+ this.techCall_('setCurrentTime', seconds);
+ this.cache_.initTime = 0;
+ return;
+ } // cache last currentTime and return. default to 0 seconds
+ //
+ // Caching the currentTime is meant to prevent a massive amount of reads on the tech's
+ // currentTime when scrubbing, but may not provide much performance benefit after all.
+ // Should be tested. Also something has to read the actual current time or the cache will
+ // never get updated.
+
+
+ this.cache_.currentTime = this.techGet_('currentTime') || 0;
+ return this.cache_.currentTime;
+ }
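+ /*
+ * Usage sketch for the currentTime() getter/setter above ('my-video' is a
+ * hypothetical id):
+ *
+ *   var player = videojs('my-video');
+ *   player.currentTime(120); // seek to 2 minutes (values below 0 are clamped to 0)
+ *   player.currentTime();    // => current playback position in seconds
+ */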
+ /**
+ * Apply the value of initTime stored in cache as currentTime.
+ *
+ * @private
+ */
+ ;
+
+ _proto.applyInitTime_ = function applyInitTime_() {
+ this.currentTime(this.cache_.initTime);
+ }
+ /**
+ * Normally gets the length in time of the video in seconds;
+ * in all but the rarest use cases an argument will NOT be passed to the method
+ *
+ * > **NOTE**: The video must have started loading before the duration can be
+ * known, and depending on preload behaviour may not be known until the video starts
+ * playing.
+ *
+ * @fires Player#durationchange
+ *
+ * @param {number} [seconds]
+ * The duration of the video to set in seconds
+ *
+ * @return {number}
+ * - The duration of the video in seconds when getting
+ */
+ ;
+
+ _proto.duration = function duration(seconds) {
+ if (seconds === undefined) {
+ // return NaN if the duration is not known
+ return this.cache_.duration !== undefined ? this.cache_.duration : NaN;
+ }
+
+ seconds = parseFloat(seconds); // Standardize on Infinity for signaling video is live
+
+ if (seconds < 0) {
+ seconds = Infinity;
+ }
+
+ if (seconds !== this.cache_.duration) {
+ // Cache the last set value for optimized scrubbing (esp. Flash)
+ // TODO: Required for techs other than Flash?
+ this.cache_.duration = seconds;
+
+ if (seconds === Infinity) {
+ this.addClass('vjs-live');
+ } else {
+ this.removeClass('vjs-live');
+ }
+
+ if (!isNaN(seconds)) {
+ // Do not fire durationchange unless the duration value is known.
+ // @see [Spec]{@link https://www.w3.org/TR/2011/WD-html5-20110113/video.html#media-element-load-algorithm}
+
+ /**
+ * @event Player#durationchange
+ * @type {EventTarget~Event}
+ */
+ this.trigger('durationchange');
+ }
+ }
+ }
+ /**
+ * Calculates how much time is left in the video. Not part
+ * of the native video API.
+ *
+ * @return {number}
+ * The time remaining in seconds
+ */
+ ;
+
+ _proto.remainingTime = function remainingTime() {
+ return this.duration() - this.currentTime();
+ }
+ /**
+ * A remaining time function that is intended to be used when
+ * the time is to be displayed directly to the user.
+ *
+ * @return {number}
+ * The rounded time remaining in seconds
+ */
+ ;
+
+ _proto.remainingTimeDisplay = function remainingTimeDisplay() {
+ return Math.floor(this.duration()) - Math.floor(this.currentTime());
+ } //
+ // Kind of like an array of portions of the video that have been downloaded.
+
+ /**
+ * Get a TimeRange object with an array of the times of the video
+ * that have been downloaded. If you just want the percent of the
+ * video that's been downloaded, use bufferedPercent.
+ *
+ * @see [Buffered Spec]{@link http://dev.w3.org/html5/spec/video.html#dom-media-buffered}
+ *
+ * @return {TimeRange}
+ * A mock TimeRange object (following HTML spec)
+ */
+ ;
+
+ _proto.buffered = function buffered() {
+ var buffered = this.techGet_('buffered');
+
+ if (!buffered || !buffered.length) {
+ buffered = createTimeRanges(0, 0);
+ }
+
+ return buffered;
+ }
+ /**
+ * Get the percent (as a decimal) of the video that's been downloaded.
+ * This method is not a part of the native HTML video API.
+ *
+ * @return {number}
+ * A decimal between 0 and 1 representing the percent
+ * that is buffered 0 being 0% and 1 being 100%
+ */
+ ;
+
+ _proto.bufferedPercent = function bufferedPercent$1() {
+ return bufferedPercent(this.buffered(), this.duration());
+ }
+ /**
+ * Get the ending time of the last buffered time range
+ * This is used in the progress bar to encapsulate all time ranges.
+ *
+ * @return {number}
+ * The end of the last buffered time range
+ */
+ ;
+
+ _proto.bufferedEnd = function bufferedEnd() {
+ var buffered = this.buffered();
+ var duration = this.duration();
+ var end = buffered.end(buffered.length - 1);
+
+ if (end > duration) {
+ end = duration;
+ }
+
+ return end;
+ }
+ /**
+ * Get or set the current volume of the media
+ *
+ * @param {number} [percentAsDecimal]
+ * The new volume as a decimal percent:
+ * - 0 is muted/0%/off
+ * - 1.0 is 100%/full
+ * - 0.5 is half volume or 50%
+ *
+ * @return {number}
+ * The current volume as a percent when getting
+ */
+ ;
+
+ _proto.volume = function volume(percentAsDecimal) {
+ var vol;
+
+ if (percentAsDecimal !== undefined) {
+ // Force value to between 0 and 1
+ vol = Math.max(0, Math.min(1, parseFloat(percentAsDecimal)));
+ this.cache_.volume = vol;
+ this.techCall_('setVolume', vol);
+
+ if (vol > 0) {
+ this.lastVolume_(vol);
+ }
+
+ return;
+ } // Default to 1 when returning current volume.
+
+
+ vol = parseFloat(this.techGet_('volume'));
+ return isNaN(vol) ? 1 : vol;
+ }
+ /**
+ * Get the current muted state, or turn mute on or off
+ *
+ * @param {boolean} [muted]
+ * - true to mute
+ * - false to unmute
+ *
+ * @return {boolean}
+ * - true if mute is on and getting
+ * - false if mute is off and getting
+ */
+ ;
+
+ _proto.muted = function muted(_muted) {
+ if (_muted !== undefined) {
+ this.techCall_('setMuted', _muted);
+ return;
+ }
+
+ return this.techGet_('muted') || false;
+ }
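+ /*
+ * Usage sketch for the volume() and muted() getter/setters above ('my-video' is a
+ * hypothetical id):
+ *
+ *   var player = videojs('my-video');
+ *   player.volume(0.5);  // values are clamped to the 0 to 1 range
+ *   player.volume();     // => 0.5
+ *   player.muted(true);  // mute without changing the stored volume
+ *   player.muted();      // => true
+ */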
+ /**
+ * Get the current defaultMuted state, or turn defaultMuted on or off. defaultMuted
+ * indicates the state of muted on initial playback.
+ *
+ * ```js
+ * var myPlayer = videojs('some-player-id');
+ *
+ * myPlayer.src("http://www.example.com/path/to/video.mp4");
+ *
+ * // get, should be false
+ * console.log(myPlayer.defaultMuted());
+ * // set to true
+ * myPlayer.defaultMuted(true);
+ * // get should be true
+ * console.log(myPlayer.defaultMuted());
+ * ```
+ *
+ * @param {boolean} [defaultMuted]
+ * - true to mute
+ * - false to unmute
+ *
+ * @return {boolean|Player}
+ * - true if defaultMuted is on and getting
+ * - false if defaultMuted is off and getting
+ * - A reference to the current player when setting
+ */
+ ;
+
+ _proto.defaultMuted = function defaultMuted(_defaultMuted) {
+ if (_defaultMuted !== undefined) {
+ return this.techCall_('setDefaultMuted', _defaultMuted);
+ }
+
+ return this.techGet_('defaultMuted') || false;
+ }
+ /**
+ * Get the last volume, or set it
+ *
+ * @param {number} [percentAsDecimal]
+ * The new last volume as a decimal percent:
+ * - 0 is muted/0%/off
+ * - 1.0 is 100%/full
+ * - 0.5 is half volume or 50%
+ *
+ * @return {number}
+ * the current value of lastVolume as a percent when getting
+ *
+ * @private
+ */
+ ;
+
+ _proto.lastVolume_ = function lastVolume_(percentAsDecimal) {
+ if (percentAsDecimal !== undefined && percentAsDecimal !== 0) {
+ this.cache_.lastVolume = percentAsDecimal;
+ return;
+ }
+
+ return this.cache_.lastVolume;
+ }
+ /**
+ * Check if current tech can support native fullscreen
+ * (e.g. with built in controls like iOS)
+ *
+ * @return {boolean}
+ * if native fullscreen is supported
+ */
+ ;
+
+ _proto.supportsFullScreen = function supportsFullScreen() {
+ return this.techGet_('supportsFullScreen') || false;
+ }
+ /**
+ * Check if the player is in fullscreen mode or tell the player that it
+ * is or is not in fullscreen mode.
+ *
+ * > NOTE: As of the latest HTML5 spec, isFullscreen is no longer an official
+ * property and instead document.fullscreenElement is used. But isFullscreen is
+ * still a valuable property for internal player workings.
+ *
+ * @param {boolean} [isFS]
+ * Set the players current fullscreen state
+ *
+ * @return {boolean}
+ * - true if fullscreen is on and getting
+ * - false if fullscreen is off and getting
+ */
+ ;
+
+ _proto.isFullscreen = function isFullscreen(isFS) {
+ if (isFS !== undefined) {
+ var oldValue = this.isFullscreen_;
+ this.isFullscreen_ = Boolean(isFS); // if we changed fullscreen state and we're in prefixed mode, trigger fullscreenchange
+ // this is the only place where we trigger fullscreenchange events for older browsers
+ // fullWindow mode is treated as a prefixed event and will get a fullscreenchange event as well
+
+ if (this.isFullscreen_ !== oldValue && this.fsApi_.prefixed) {
+ /**
+ * @event Player#fullscreenchange
+ * @type {EventTarget~Event}
+ */
+ this.trigger('fullscreenchange');
+ }
+
+ this.toggleFullscreenClass_();
+ return;
+ }
+
+ return this.isFullscreen_;
+ }
+ /**
+ * Increase the size of the video to full screen
+ * In some browsers, full screen is not supported natively, so it enters
+ * "full window mode", where the video fills the browser window.
+ * In browsers and devices that support native full screen, sometimes the
+ * browser's default controls will be shown, and not the Video.js custom skin.
+ * This includes most mobile devices (iOS, Android) and older versions of
+ * Safari.
+ *
+ * @param {Object} [fullscreenOptions]
+ * Override the player fullscreen options
+ *
+ * @fires Player#fullscreenchange
+ */
+ ;
+
+ _proto.requestFullscreen = function requestFullscreen(fullscreenOptions) {
+ var PromiseClass = this.options_.Promise || window$1.Promise;
+
+ if (PromiseClass) {
+ var self = this;
+ return new PromiseClass(function (resolve, reject) {
+ function offHandler() {
+ self.off('fullscreenerror', errorHandler);
+ self.off('fullscreenchange', changeHandler);
+ }
+
+ function changeHandler() {
+ offHandler();
+ resolve();
+ }
+
+ function errorHandler(e, err) {
+ offHandler();
+ reject(err);
+ }
+
+ self.one('fullscreenchange', changeHandler);
+ self.one('fullscreenerror', errorHandler);
+ var promise = self.requestFullscreenHelper_(fullscreenOptions);
+
+ if (promise) {
+ promise.then(offHandler, offHandler);
+ promise.then(resolve, reject);
+ }
+ });
+ }
+
+ return this.requestFullscreenHelper_();
+ };
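+ /*
+ * requestFullscreen() above also returns a promise when one is available, resolving
+ * on fullscreenchange and rejecting on fullscreenerror; a sketch ('my-video' is a
+ * hypothetical id):
+ *
+ *   var p = videojs('my-video').requestFullscreen();
+ *   if (p) {
+ *     p.then(function () {
+ *       // player is now fullscreen (or full-window as a fallback)
+ *     }, function (err) {
+ *       videojs.log.warn('fullscreen request failed', err);
+ *     });
+ *   }
+ */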
+
+ _proto.requestFullscreenHelper_ = function requestFullscreenHelper_(fullscreenOptions) {
+ var _this12 = this;
+
+ var fsOptions; // Only pass fullscreen options to requestFullscreen in spec-compliant browsers.
+ // Use defaults or player configured option unless passed directly to this method.
+
+ if (!this.fsApi_.prefixed) {
+ fsOptions = this.options_.fullscreen && this.options_.fullscreen.options || {};
+
+ if (fullscreenOptions !== undefined) {
+ fsOptions = fullscreenOptions;
+ }
+ } // This method works as follows:
+ // 1. if a fullscreen api is available, use it
+ // 1. call requestFullscreen with potential options
+ // 2. if we got a promise from above, use it to update isFullscreen()
+ // 2. otherwise, if the tech supports fullscreen, call `enterFullScreen` on it.
+ // This is particularly used for iPhone, older iPads, and non-Safari browsers on iOS.
+ // 3. otherwise, use "fullWindow" mode
+
+
+ if (this.fsApi_.requestFullscreen) {
+ var promise = this.el_[this.fsApi_.requestFullscreen](fsOptions);
+
+ if (promise) {
+ promise.then(function () {
+ return _this12.isFullscreen(true);
+ }, function () {
+ return _this12.isFullscreen(false);
+ });
+ }
+
+ return promise;
+ } else if (this.tech_.supportsFullScreen() && !this.options_.preferFullWindow === true) {
+ // we can't take the video.js controls fullscreen but we can go fullscreen
+ // with native controls
+ this.techCall_('enterFullScreen');
+ } else {
+ // fullscreen isn't supported so we'll just stretch the video element to
+ // fill the viewport
+ this.enterFullWindow();
+ }
+ }
+ /**
+ * Return the video to its normal size after having been in full screen mode
+ *
+ * @fires Player#fullscreenchange
+ */
+ ;
+
+ _proto.exitFullscreen = function exitFullscreen() {
+ var PromiseClass = this.options_.Promise || window$1.Promise;
+
+ if (PromiseClass) {
+ var self = this;
+ return new PromiseClass(function (resolve, reject) {
+ function offHandler() {
+ self.off('fullscreenerror', errorHandler);
+ self.off('fullscreenchange', changeHandler);
+ }
+
+ function changeHandler() {
+ offHandler();
+ resolve();
+ }
+
+ function errorHandler(e, err) {
+ offHandler();
+ reject(err);
+ }
+
+ self.one('fullscreenchange', changeHandler);
+ self.one('fullscreenerror', errorHandler);
+ var promise = self.exitFullscreenHelper_();
+
+ if (promise) {
+ promise.then(offHandler, offHandler); // map the promise to our resolve/reject methods
+
+ promise.then(resolve, reject);
+ }
+ });
+ }
+
+ return this.exitFullscreenHelper_();
+ };
+
+ _proto.exitFullscreenHelper_ = function exitFullscreenHelper_() {
+ var _this13 = this;
+
+ if (this.fsApi_.requestFullscreen) {
+ var promise = document[this.fsApi_.exitFullscreen]();
+
+ if (promise) {
+ // we're splitting the promise here, so, we want to catch the
+ // potential error so that this chain doesn't have unhandled errors
+ silencePromise(promise.then(function () {
+ return _this13.isFullscreen(false);
+ }));
+ }
+
+ return promise;
+ } else if (this.tech_.supportsFullScreen() && !this.options_.preferFullWindow === true) {
+ this.techCall_('exitFullScreen');
+ } else {
+ this.exitFullWindow();
+ }
+ }
+ /**
+ * When fullscreen isn't supported we can stretch the
+ * video container to as wide as the browser will let us.
+ *
+ * @fires Player#enterFullWindow
+ */
+ ;
+
+ _proto.enterFullWindow = function enterFullWindow() {
+ this.isFullscreen(true);
+ this.isFullWindow = true; // Storing original doc overflow value to return to when fullscreen is off
+
+ this.docOrigOverflow = document.documentElement.style.overflow; // Add listener for esc key to exit fullscreen
+
+ on(document, 'keydown', this.boundFullWindowOnEscKey_); // Hide any scroll bars
+
+ document.documentElement.style.overflow = 'hidden'; // Apply fullscreen styles
+
+ addClass(document.body, 'vjs-full-window');
+ /**
+ * @event Player#enterFullWindow
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('enterFullWindow');
+ }
+ /**
+ * Check for call to either exit full window or
+ * full screen on ESC key
+ *
+ * @param {Event} event
+ *        The `keydown` event to check for the Esc key
+ */
+ ;
+
+ _proto.fullWindowOnEscKey = function fullWindowOnEscKey(event) {
+ if (keycode.isEventKey(event, 'Esc')) {
+ if (this.isFullscreen() === true) {
+ if (!this.isFullWindow) {
+ this.exitFullscreen();
+ } else {
+ this.exitFullWindow();
+ }
+ }
+ }
+ }
+ /**
+ * Exit full window
+ *
+ * @fires Player#exitFullWindow
+ */
+ ;
+
+ _proto.exitFullWindow = function exitFullWindow() {
+ this.isFullscreen(false);
+ this.isFullWindow = false;
+ off(document, 'keydown', this.boundFullWindowOnEscKey_); // Unhide scroll bars.
+
+ document.documentElement.style.overflow = this.docOrigOverflow; // Remove fullscreen styles
+
+ removeClass(document.body, 'vjs-full-window'); // Resize the box, controller, and poster to original sizes
+ // this.positionAll();
+
+ /**
+ * @event Player#exitFullWindow
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('exitFullWindow');
+ }
+ /**
+ * Disable Picture-in-Picture mode.
+ *
+ * @param {boolean} value
+ * - true will disable Picture-in-Picture mode
+ * - false will enable Picture-in-Picture mode
+ */
+ ;
+
+ _proto.disablePictureInPicture = function disablePictureInPicture(value) {
+ if (value === undefined) {
+ return this.techGet_('disablePictureInPicture');
+ }
+
+ this.techCall_('setDisablePictureInPicture', value);
+ this.options_.disablePictureInPicture = value;
+ this.trigger('disablepictureinpicturechanged');
+ }
+ /**
+ * Check if the player is in Picture-in-Picture mode or tell the player that it
+ * is or is not in Picture-in-Picture mode.
+ *
+ * @param {boolean} [isPiP]
+ * Set the players current Picture-in-Picture state
+ *
+ * @return {boolean}
+ * - true if Picture-in-Picture is on and getting
+ * - false if Picture-in-Picture is off and getting
+ */
+ ;
+
+ _proto.isInPictureInPicture = function isInPictureInPicture(isPiP) {
+ if (isPiP !== undefined) {
+ this.isInPictureInPicture_ = !!isPiP;
+ this.togglePictureInPictureClass_();
+ return;
+ }
+
+ return !!this.isInPictureInPicture_;
+ }
+ /**
+ * Create a floating video window always on top of other windows so that users may
+ * continue consuming media while they interact with other content sites, or
+ * applications on their device.
+ *
+ * @see [Spec]{@link https://wicg.github.io/picture-in-picture}
+ *
+ * @fires Player#enterpictureinpicture
+ *
+ * @return {Promise}
+ * A promise with a Picture-in-Picture window.
+ */
+ ;
+
+ _proto.requestPictureInPicture = function requestPictureInPicture() {
+ if ('pictureInPictureEnabled' in document && this.disablePictureInPicture() === false) {
+ /**
+ * This event fires when the player enters picture in picture mode
+ *
+ * @event Player#enterpictureinpicture
+ * @type {EventTarget~Event}
+ */
+ return this.techGet_('requestPictureInPicture');
+ }
+ }
+ /**
+ * Exit Picture-in-Picture mode.
+ *
+ * @see [Spec]{@link https://wicg.github.io/picture-in-picture}
+ *
+ * @fires Player#leavepictureinpicture
+ *
+ * @return {Promise}
+ * A promise.
+ */
+ ;
+
+ _proto.exitPictureInPicture = function exitPictureInPicture() {
+ if ('pictureInPictureEnabled' in document) {
+ /**
+ * This event fires when the player leaves picture in picture mode
+ *
+ * @event Player#leavepictureinpicture
+ * @type {EventTarget~Event}
+ */
+ return document.exitPictureInPicture();
+ }
+ }
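+ /*
+  * Usage sketch (illustrative only): entering and leaving Picture-in-Picture with the methods
+  * above. Assumes `player` was created elsewhere; the feature test mirrors the guard used by
+  * requestPictureInPicture() itself.
+  *
+  *   if ('pictureInPictureEnabled' in document && !player.disablePictureInPicture()) {
+  *     player.requestPictureInPicture().then(function (pipWindow) {
+  *       console.log('PiP window is', pipWindow.width, 'x', pipWindow.height);
+  *     });
+  *   }
+  *
+  *   player.on('leavepictureinpicture', function () {
+  *     console.log('back to inline playback');
+  *   });
+  *
+  *   // later, to leave programmatically:
+  *   player.exitPictureInPicture();
+  */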
+ /**
+ * Called when this Player has focus and a key gets pressed down, or when
+ * any Component of this player receives a key press that it doesn't handle.
+ * This allows player-wide hotkeys (either as defined below, or optionally
+ * by an external function).
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ *
+ * @listens keydown
+ */
+ ;
+
+ _proto.handleKeyDown = function handleKeyDown(event) {
+ var userActions = this.options_.userActions; // Bail out if hotkeys are not configured.
+
+ if (!userActions || !userActions.hotkeys) {
+ return;
+ } // Function that determines whether or not to exclude an element from
+ // hotkeys handling.
+
+
+ var excludeElement = function excludeElement(el) {
+ var tagName = el.tagName.toLowerCase(); // The first and easiest test is for `contenteditable` elements.
+
+ if (el.isContentEditable) {
+ return true;
+ } // Inputs matching these types will still trigger hotkey handling as
+ // they are not text inputs.
+
+
+ var allowedInputTypes = ['button', 'checkbox', 'hidden', 'radio', 'reset', 'submit'];
+
+ if (tagName === 'input') {
+ return allowedInputTypes.indexOf(el.type) === -1;
+ } // The final test is by tag name. These tags will be excluded entirely.
+
+
+ var excludedTags = ['textarea'];
+ return excludedTags.indexOf(tagName) !== -1;
+ }; // Bail out if the user is focused on an interactive form element.
+
+
+ if (excludeElement(this.el_.ownerDocument.activeElement)) {
+ return;
+ }
+
+ if (typeof userActions.hotkeys === 'function') {
+ userActions.hotkeys.call(this, event);
+ } else {
+ this.handleHotkeys(event);
+ }
+ }
+ /**
+ * Called when this Player receives a hotkey keydown event.
+ * Supported player-wide hotkeys are:
+ *
+ * f - toggle fullscreen
+ * m - toggle mute
+ * k or Space - toggle play/pause
+ *
+ * @param {EventTarget~Event} event
+ * The `keydown` event that caused this function to be called.
+ */
+ ;
+
+ _proto.handleHotkeys = function handleHotkeys(event) {
+ var hotkeys = this.options_.userActions ? this.options_.userActions.hotkeys : {}; // set fullscreenKey, muteKey, playPauseKey from `hotkeys`, use defaults if not set
+
+ var _hotkeys$fullscreenKe = hotkeys.fullscreenKey,
+ fullscreenKey = _hotkeys$fullscreenKe === void 0 ? function (keydownEvent) {
+ return keycode.isEventKey(keydownEvent, 'f');
+ } : _hotkeys$fullscreenKe,
+ _hotkeys$muteKey = hotkeys.muteKey,
+ muteKey = _hotkeys$muteKey === void 0 ? function (keydownEvent) {
+ return keycode.isEventKey(keydownEvent, 'm');
+ } : _hotkeys$muteKey,
+ _hotkeys$playPauseKey = hotkeys.playPauseKey,
+ playPauseKey = _hotkeys$playPauseKey === void 0 ? function (keydownEvent) {
+ return keycode.isEventKey(keydownEvent, 'k') || keycode.isEventKey(keydownEvent, 'Space');
+ } : _hotkeys$playPauseKey;
+
+ if (fullscreenKey.call(this, event)) {
+ event.preventDefault();
+ event.stopPropagation();
+ var FSToggle = Component$1.getComponent('FullscreenToggle');
+
+ if (document[this.fsApi_.fullscreenEnabled] !== false) {
+ FSToggle.prototype.handleClick.call(this, event);
+ }
+ } else if (muteKey.call(this, event)) {
+ event.preventDefault();
+ event.stopPropagation();
+ var MuteToggle = Component$1.getComponent('MuteToggle');
+ MuteToggle.prototype.handleClick.call(this, event);
+ } else if (playPauseKey.call(this, event)) {
+ event.preventDefault();
+ event.stopPropagation();
+ var PlayToggle = Component$1.getComponent('PlayToggle');
+ PlayToggle.prototype.handleClick.call(this, event);
+ }
+ }
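+ /*
+  * Configuration sketch (illustrative only): enabling the hotkeys described above through the
+  * `userActions.hotkeys` option. The element id and key codes below are just examples.
+  *
+  *   var player = videojs('my-video', {
+  *     userActions: {
+  *       hotkeys: {
+  *         muteKey: function (event) { return event.which === 77; },       // m
+  *         fullscreenKey: function (event) { return event.which === 70; }, // f
+  *         playPauseKey: function (event) {
+  *           return event.which === 32 || event.which === 75;              // Space or k
+  *         }
+  *       }
+  *     }
+  *   });
+  *
+  *   // passing `hotkeys: true` keeps the default f / m / k-or-Space bindings,
+  *   // and passing a function receives every keydown event instead.
+  */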
+ /**
+ * Check whether the player can play a given mimetype
+ *
+ * @see https://www.w3.org/TR/2011/WD-html5-20110113/video.html#dom-navigator-canplaytype
+ *
+ * @param {string} type
+ * The mimetype to check
+ *
+ * @return {string}
+ * 'probably', 'maybe', or '' (empty string)
+ */
+ ;
+
+ _proto.canPlayType = function canPlayType(type) {
+ var can; // Loop through each playback technology in the options order
+
+ for (var i = 0, j = this.options_.techOrder; i < j.length; i++) {
+ var techName = j[i];
+ var tech = Tech.getTech(techName); // Support old behavior of techs being registered as components.
+ // Remove once that deprecated behavior is removed.
+
+ if (!tech) {
+ tech = Component$1.getComponent(techName);
+ } // Check if the current tech is defined before continuing
+
+
+ if (!tech) {
+ log$1.error("The \"" + techName + "\" tech is undefined. Skipped browser support check for that tech.");
+ continue;
+ } // Check if the browser supports this technology
+
+
+ if (tech.isSupported()) {
+ can = tech.canPlayType(type);
+
+ if (can) {
+ return can;
+ }
+ }
+ }
+
+ return '';
+ }
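+ /*
+  * Example (illustrative): probing MIME type support before choosing a source. Results depend
+  * on the registered techs and the browser; the types below are just common examples.
+  *
+  *   player.canPlayType('video/mp4');  // usually 'maybe' or 'probably'
+  *   player.canPlayType('video/webm'); // '' where WebM is unsupported
+  */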
+ /**
+ * Select source based on tech-order or source-order
+ * Uses source-order selection if `options.sourceOrder` is truthy. Otherwise,
+ * defaults to tech-order selection
+ *
+ * @param {Array} sources
+ * The sources for a media asset
+ *
+ * @return {Object|boolean}
+ * Object of source and tech order or false
+ */
+ ;
+
+ _proto.selectSource = function selectSource(sources) {
+ var _this14 = this;
+
+ // Get only the techs specified in `techOrder` that exist and are supported by the
+ // current platform
+ var techs = this.options_.techOrder.map(function (techName) {
+ return [techName, Tech.getTech(techName)];
+ }).filter(function (_ref) {
+ var techName = _ref[0],
+ tech = _ref[1];
+
+ // Check if the current tech is defined before continuing
+ if (tech) {
+ // Check if the browser supports this technology
+ return tech.isSupported();
+ }
+
+ log$1.error("The \"" + techName + "\" tech is undefined. Skipped browser support check for that tech.");
+ return false;
+ }); // Iterate over each `innerArray` element once per `outerArray` element and execute
+ // `tester` with both. If `tester` returns a non-falsy value, exit early and return
+ // that value.
+
+ var findFirstPassingTechSourcePair = function findFirstPassingTechSourcePair(outerArray, innerArray, tester) {
+ var found;
+ outerArray.some(function (outerChoice) {
+ return innerArray.some(function (innerChoice) {
+ found = tester(outerChoice, innerChoice);
+
+ if (found) {
+ return true;
+ }
+ });
+ });
+ return found;
+ };
+
+ var foundSourceAndTech;
+
+ var flip = function flip(fn) {
+ return function (a, b) {
+ return fn(b, a);
+ };
+ };
+
+ var finder = function finder(_ref2, source) {
+ var techName = _ref2[0],
+ tech = _ref2[1];
+
+ if (tech.canPlaySource(source, _this14.options_[techName.toLowerCase()])) {
+ return {
+ source: source,
+ tech: techName
+ };
+ }
+ }; // Depending on the truthiness of `options.sourceOrder`, we swap the order of techs and sources
+ // to select from them based on their priority.
+
+
+ if (this.options_.sourceOrder) {
+ // Source-first ordering
+ foundSourceAndTech = findFirstPassingTechSourcePair(sources, techs, flip(finder));
+ } else {
+ // Tech-first ordering
+ foundSourceAndTech = findFirstPassingTechSourcePair(techs, sources, finder);
+ }
+
+ return foundSourceAndTech || false;
+ }
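+ /*
+  * Configuration sketch (illustrative only): with `sourceOrder: true` the order of the sources
+  * wins over the order of the techs when selectSource() picks a source/tech pair. The URLs and
+  * the id below are placeholders.
+  *
+  *   var player = videojs('my-video', {
+  *     techOrder: ['html5'],
+  *     sourceOrder: true
+  *   });
+  *
+  *   player.src([
+  *     { src: 'https://example.com/stream.m3u8', type: 'application/x-mpegURL' },
+  *     { src: 'https://example.com/video.mp4', type: 'video/mp4' }
+  *   ]);
+  */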
+ /**
+ * Executes source setting and getting logic
+ *
+ * @param {Tech~SourceObject|Tech~SourceObject[]|string} [source]
+ * A SourceObject, an array of SourceObjects, or a string referencing
+ * a URL to a media source. It is _highly recommended_ that an object
+ * or array of objects is used here, so that source selection
+ * algorithms can take the `type` into account.
+ *
+ * If not provided, this method acts as a getter.
+ * @param {boolean} isRetry
+ * Indicates whether this is being called internally as a result of a retry
+ *
+ * @return {string|undefined}
+ * If the `source` argument is missing, returns the current source
+ * URL. Otherwise, returns nothing/undefined.
+ */
+ ;
+
+ _proto.handleSrc_ = function handleSrc_(source, isRetry) {
+ var _this15 = this;
+
+ // getter usage
+ if (typeof source === 'undefined') {
+ return this.cache_.src || '';
+ } // Reset retry behavior for new source
+
+
+ if (this.resetRetryOnError_) {
+ this.resetRetryOnError_();
+ } // filter out invalid sources and turn our source into
+ // an array of source objects
+
+
+ var sources = filterSource(source); // if a source was passed in then it is invalid because
+ // it was filtered to a zero length Array. So we have to
+ // show an error
+
+ if (!sources.length) {
+ this.setTimeout(function () {
+ this.error({
+ code: 4,
+ message: this.options_.notSupportedMessage
+ });
+ }, 0);
+ return;
+ } // initial sources
+
+
+ this.changingSrc_ = true; // Only update the cached source list if we are not retrying a new source after error,
+ // since in that case we want to include the failed source(s) in the cache
+
+ if (!isRetry) {
+ this.cache_.sources = sources;
+ }
+
+ this.updateSourceCaches_(sources[0]); // middlewareSource is the source after it has been changed by middleware
+
+ setSource(this, sources[0], function (middlewareSource, mws) {
+ _this15.middleware_ = mws; // since sourceSet is async we have to update the cache again after we select a source since
+ // the source that is selected could be out of order from the cache update above this callback.
+
+ if (!isRetry) {
+ _this15.cache_.sources = sources;
+ }
+
+ _this15.updateSourceCaches_(middlewareSource);
+
+ var err = _this15.src_(middlewareSource);
+
+ if (err) {
+ if (sources.length > 1) {
+ return _this15.handleSrc_(sources.slice(1));
+ }
+
+ _this15.changingSrc_ = false; // We need to wrap this in a timeout to give folks a chance to add error event handlers
+
+ _this15.setTimeout(function () {
+ this.error({
+ code: 4,
+ message: this.options_.notSupportedMessage
+ });
+ }, 0); // we could not find an appropriate tech, but let's still notify the delegate that this is it
+ // this needs a better comment about why this is needed
+
+
+ _this15.triggerReady();
+
+ return;
+ }
+
+ setTech(mws, _this15.tech_);
+ }); // Try another available source if this one fails before playback.
+
+ if (this.options_.retryOnError && sources.length > 1) {
+ var retry = function retry() {
+ // Remove the error modal
+ _this15.error(null);
+
+ _this15.handleSrc_(sources.slice(1), true);
+ };
+
+ var stopListeningForErrors = function stopListeningForErrors() {
+ _this15.off('error', retry);
+ };
+
+ this.one('error', retry);
+ this.one('playing', stopListeningForErrors);
+
+ this.resetRetryOnError_ = function () {
+ _this15.off('error', retry);
+
+ _this15.off('playing', stopListeningForErrors);
+ };
+ }
+ }
+ /**
+ * Get or set the video source.
+ *
+ * @param {Tech~SourceObject|Tech~SourceObject[]|string} [source]
+ * A SourceObject, an array of SourceObjects, or a string referencing
+ * a URL to a media source. It is _highly recommended_ that an object
+ * or array of objects is used here, so that source selection
+ * algorithms can take the `type` into account.
+ *
+ * If not provided, this method acts as a getter.
+ *
+ * @return {string|undefined}
+ * If the `source` argument is missing, returns the current source
+ * URL. Otherwise, returns nothing/undefined.
+ */
+ ;
+
+ _proto.src = function src(source) {
+ return this.handleSrc_(source, false);
+ }
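+ /*
+  * Usage sketch (illustrative only): setting and reading the source. The paths are placeholders;
+  * passing typed source objects lets the selection logic above take `type` into account, and with
+  * the `retryOnError` option the remaining sources are tried if the first errors before playback.
+  *
+  *   player.src([
+  *     { src: '/media/clip.webm', type: 'video/webm' },
+  *     { src: '/media/clip.mp4', type: 'video/mp4' }
+  *   ]);
+  *
+  *   player.src(); // getter: the currently selected source URL (or '')
+  */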
+ /**
+ * Set the source object on the tech, returns a boolean that indicates whether
+ * there is a tech that can play the source or not
+ *
+ * @param {Tech~SourceObject} source
+ * The source object to set on the Tech
+ *
+ * @return {boolean}
+ * - True if there is no Tech to playback this source
+ * - False otherwise
+ *
+ * @private
+ */
+ ;
+
+ _proto.src_ = function src_(source) {
+ var _this16 = this;
+
+ var sourceTech = this.selectSource([source]);
+
+ if (!sourceTech) {
+ return true;
+ }
+
+ if (!titleCaseEquals(sourceTech.tech, this.techName_)) {
+ this.changingSrc_ = true; // load this technology with the chosen source
+
+ this.loadTech_(sourceTech.tech, sourceTech.source);
+ this.tech_.ready(function () {
+ _this16.changingSrc_ = false;
+ });
+ return false;
+ } // wait until the tech is ready to set the source
+ // and set it synchronously if possible (#2326)
+
+
+ this.ready(function () {
+ // The setSource tech method was added with source handlers
+ // so older techs won't support it
+ // We need to check the direct prototype for the case where subclasses
+ // of the tech do not support source handlers
+ if (this.tech_.constructor.prototype.hasOwnProperty('setSource')) {
+ this.techCall_('setSource', source);
+ } else {
+ this.techCall_('src', source.src);
+ }
+
+ this.changingSrc_ = false;
+ }, true);
+ return false;
+ }
+ /**
+ * Begin loading the src data.
+ */
+ ;
+
+ _proto.load = function load() {
+ this.techCall_('load');
+ }
+ /**
+ * Reset the player. Loads the first tech in the techOrder,
+ * removes all the text tracks in the existing `tech`,
+ * and calls `reset` on the `tech`.
+ */
+ ;
+
+ _proto.reset = function reset() {
+ var _this17 = this;
+
+ var PromiseClass = this.options_.Promise || window$1.Promise;
+
+ if (this.paused() || !PromiseClass) {
+ this.doReset_();
+ } else {
+ var playPromise = this.play();
+ silencePromise(playPromise.then(function () {
+ return _this17.doReset_();
+ }));
+ }
+ };
+
+ _proto.doReset_ = function doReset_() {
+ if (this.tech_) {
+ this.tech_.clearTracks('text');
+ }
+
+ this.resetCache_();
+ this.poster('');
+ this.loadTech_(this.options_.techOrder[0], null);
+ this.techCall_('reset');
+ this.resetControlBarUI_();
+
+ if (isEvented(this)) {
+ this.trigger('playerreset');
+ }
+ }
+ /**
+ * Reset Control Bar's UI by calling sub-methods that reset
+ * all of Control Bar's components
+ */
+ ;
+
+ _proto.resetControlBarUI_ = function resetControlBarUI_() {
+ this.resetProgressBar_();
+ this.resetPlaybackRate_();
+ this.resetVolumeBar_();
+ }
+ /**
+ * Reset tech's progress so progress bar is reset in the UI
+ */
+ ;
+
+ _proto.resetProgressBar_ = function resetProgressBar_() {
+ this.currentTime(0);
+
+ var _ref3 = this.controlBar || {},
+ durationDisplay = _ref3.durationDisplay,
+ remainingTimeDisplay = _ref3.remainingTimeDisplay;
+
+ if (durationDisplay) {
+ durationDisplay.updateContent();
+ }
+
+ if (remainingTimeDisplay) {
+ remainingTimeDisplay.updateContent();
+ }
+ }
+ /**
+ * Reset the playback rate
+ */
+ ;
+
+ _proto.resetPlaybackRate_ = function resetPlaybackRate_() {
+ this.playbackRate(this.defaultPlaybackRate());
+ this.handleTechRateChange_();
+ }
+ /**
+ * Reset Volume bar
+ */
+ ;
+
+ _proto.resetVolumeBar_ = function resetVolumeBar_() {
+ this.volume(1.0);
+ this.trigger('volumechange');
+ }
+ /**
+ * Returns all of the current source objects.
+ *
+ * @return {Tech~SourceObject[]}
+ * The current source objects
+ */
+ ;
+
+ _proto.currentSources = function currentSources() {
+ var source = this.currentSource();
+ var sources = []; // assume `{}` or `{ src }`
+
+ if (Object.keys(source).length !== 0) {
+ sources.push(source);
+ }
+
+ return this.cache_.sources || sources;
+ }
+ /**
+ * Returns the current source object.
+ *
+ * @return {Tech~SourceObject}
+ * The current source object
+ */
+ ;
+
+ _proto.currentSource = function currentSource() {
+ return this.cache_.source || {};
+ }
+ /**
+ * Returns the fully qualified URL of the current source value e.g. http://mysite.com/video.mp4
+ * Can be used in conjunction with `currentType` to assist in rebuilding the current source object.
+ *
+ * @return {string}
+ * The current source
+ */
+ ;
+
+ _proto.currentSrc = function currentSrc() {
+ return this.currentSource() && this.currentSource().src || '';
+ }
+ /**
+ * Get the current source type e.g. video/mp4
+ * This can allow you to rebuild the current source object so that you could load the same
+ * source and tech later
+ *
+ * @return {string}
+ * The source MIME type
+ */
+ ;
+
+ _proto.currentType = function currentType() {
+ return this.currentSource() && this.currentSource().type || '';
+ }
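+ /*
+  * Example (illustrative): rebuilding the current source object from currentSrc() and
+  * currentType(), e.g. to restore it later. Assumes `player` exists.
+  *
+  *   var saved = { src: player.currentSrc(), type: player.currentType() };
+  *   // ... later ...
+  *   player.src(saved);
+  */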
+ /**
+ * Get or set the preload attribute
+ *
+ * @param {boolean} [value]
+ * - true means that we should preload
+ * - false means that we should not preload
+ *
+ * @return {string}
+ * The preload attribute value when getting
+ */
+ ;
+
+ _proto.preload = function preload(value) {
+ if (value !== undefined) {
+ this.techCall_('setPreload', value);
+ this.options_.preload = value;
+ return;
+ }
+
+ return this.techGet_('preload');
+ }
+ /**
+ * Get or set the autoplay option. When this is a boolean it will
+ * modify the attribute on the tech. When this is a string the attribute on
+ * the tech will be removed and `Player` will handle autoplay on loadstarts.
+ *
+ * @param {boolean|string} [value]
+ * - true: autoplay using the browser behavior
+ * - false: do not autoplay
+ * - 'play': call play() on every loadstart
+ * - 'muted': call muted() then play() on every loadstart
+ * - 'any': call play() on every loadstart. if that fails call muted() then play().
+ * - *: values other than those listed here will set `autoplay` to true
+ *
+ * @return {boolean|string}
+ * The current value of autoplay when getting
+ */
+ ;
+
+ _proto.autoplay = function autoplay(value) {
+ // getter usage
+ if (value === undefined) {
+ return this.options_.autoplay || false;
+ }
+
+ var techAutoplay; // if the value is a valid string set it to that, or normalize `true` to 'play', if need be
+
+ if (typeof value === 'string' && /(any|play|muted)/.test(value) || value === true && this.options_.normalizeAutoplay) {
+ this.options_.autoplay = value;
+ this.manualAutoplay_(typeof value === 'string' ? value : 'play');
+ techAutoplay = false; // any falsy value sets autoplay to false in the browser,
+ // lets do the same
+ } else if (!value) {
+ this.options_.autoplay = false; // any other value (ie truthy) sets autoplay to true
+ } else {
+ this.options_.autoplay = true;
+ }
+
+ techAutoplay = typeof techAutoplay === 'undefined' ? this.options_.autoplay : techAutoplay; // if we don't have a tech then we do not queue up
+ // a setAutoplay call on tech ready. We do this because the
+ // autoplay option will be passed in the constructor and we
+ // do not need to set it twice
+
+ if (this.tech_) {
+ this.techCall_('setAutoplay', techAutoplay);
+ }
+ }
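+ /*
+  * Usage sketch (illustrative only): the string autoplay modes handled by the player rather than
+  * the browser. The id is a placeholder.
+  *
+  *   var player = videojs('my-video', { autoplay: 'muted' }); // muted() then play() on loadstart
+  *
+  *   player.autoplay('any'); // try play(); if it is rejected, mute and try again
+  *   player.autoplay();      // getter: current autoplay value
+  */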
+ /**
+ * Set or unset the playsinline attribute.
+ * Playsinline tells the browser that non-fullscreen playback is preferred.
+ *
+ * @param {boolean} [value]
+ * - true means that we should try to play inline by default
+ * - false means that we should use the browser's default playback mode,
+ * which in most cases is inline. iOS Safari is a notable exception
+ * and plays fullscreen by default.
+ *
+ * @return {string|Player}
+ * - the current value of playsinline
+ * - the player when setting
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/#attr-video-playsinline}
+ */
+ ;
+
+ _proto.playsinline = function playsinline(value) {
+ if (value !== undefined) {
+ this.techCall_('setPlaysinline', value);
+ this.options_.playsinline = value;
+ return this;
+ }
+
+ return this.techGet_('playsinline');
+ }
+ /**
+ * Get or set the loop attribute on the video element.
+ *
+ * @param {boolean} [value]
+ * - true means that we should loop the video
+ * - false means that we should not loop the video
+ *
+ * @return {boolean}
+ * The current value of loop when getting
+ */
+ ;
+
+ _proto.loop = function loop(value) {
+ if (value !== undefined) {
+ this.techCall_('setLoop', value);
+ this.options_.loop = value;
+ return;
+ }
+
+ return this.techGet_('loop');
+ }
+ /**
+ * Get or set the poster image source url
+ *
+ * @fires Player#posterchange
+ *
+ * @param {string} [src]
+ * Poster image source URL
+ *
+ * @return {string}
+ * The current value of poster when getting
+ */
+ ;
+
+ _proto.poster = function poster(src) {
+ if (src === undefined) {
+ return this.poster_;
+ } // The correct way to remove a poster is to set as an empty string
+ // other falsey values will throw errors
+
+
+ if (!src) {
+ src = '';
+ }
+
+ if (src === this.poster_) {
+ return;
+ } // update the internal poster variable
+
+
+ this.poster_ = src; // update the tech's poster
+
+ this.techCall_('setPoster', src);
+ this.isPosterFromTech_ = false; // alert components that the poster has been set
+
+ /**
+ * This event fires when the poster image is changed on the player.
+ *
+ * @event Player#posterchange
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('posterchange');
+ }
+ /**
+ * Some techs (e.g. YouTube) can provide a poster source in an
+ * asynchronous way. We want the poster component to use this
+ * poster source so that it covers up the tech's controls
+ * (e.g. YouTube's play button). However, we only want to use this
+ * source if the player user hasn't set a poster through
+ * the normal APIs.
+ *
+ * @fires Player#posterchange
+ * @listens Tech#posterchange
+ * @private
+ */
+ ;
+
+ _proto.handleTechPosterChange_ = function handleTechPosterChange_() {
+ if ((!this.poster_ || this.options_.techCanOverridePoster) && this.tech_ && this.tech_.poster) {
+ var newPoster = this.tech_.poster() || '';
+
+ if (newPoster !== this.poster_) {
+ this.poster_ = newPoster;
+ this.isPosterFromTech_ = true; // Let components know the poster has changed
+
+ this.trigger('posterchange');
+ }
+ }
+ }
+ /**
+ * Get or set whether or not the controls are showing.
+ *
+ * @fires Player#controlsenabled
+ *
+ * @param {boolean} [bool]
+ * - true to turn controls on
+ * - false to turn controls off
+ *
+ * @return {boolean}
+ * The current value of controls when getting
+ */
+ ;
+
+ _proto.controls = function controls(bool) {
+ if (bool === undefined) {
+ return !!this.controls_;
+ }
+
+ bool = !!bool; // Don't trigger a change event unless it actually changed
+
+ if (this.controls_ === bool) {
+ return;
+ }
+
+ this.controls_ = bool;
+
+ if (this.usingNativeControls()) {
+ this.techCall_('setControls', bool);
+ }
+
+ if (this.controls_) {
+ this.removeClass('vjs-controls-disabled');
+ this.addClass('vjs-controls-enabled');
+ /**
+ * @event Player#controlsenabled
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('controlsenabled');
+
+ if (!this.usingNativeControls()) {
+ this.addTechControlsListeners_();
+ }
+ } else {
+ this.removeClass('vjs-controls-enabled');
+ this.addClass('vjs-controls-disabled');
+ /**
+ * @event Player#controlsdisabled
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('controlsdisabled');
+
+ if (!this.usingNativeControls()) {
+ this.removeTechControlsListeners_();
+ }
+ }
+ }
+ /**
+ * Toggle native controls on/off. Native controls are the controls built into
+ * devices (e.g. default iPhone controls) or other techs
+ * (e.g. Vimeo Controls)
+ * **This should only be set by the current tech, because only the tech knows
+ * if it can support native controls**
+ *
+ * @fires Player#usingnativecontrols
+ * @fires Player#usingcustomcontrols
+ *
+ * @param {boolean} [bool]
+ * - true to turn native controls on
+ * - false to turn native controls off
+ *
+ * @return {boolean}
+ * The current value of native controls when getting
+ */
+ ;
+
+ _proto.usingNativeControls = function usingNativeControls(bool) {
+ if (bool === undefined) {
+ return !!this.usingNativeControls_;
+ }
+
+ bool = !!bool; // Don't trigger a change event unless it actually changed
+
+ if (this.usingNativeControls_ === bool) {
+ return;
+ }
+
+ this.usingNativeControls_ = bool;
+
+ if (this.usingNativeControls_) {
+ this.addClass('vjs-using-native-controls');
+ /**
+ * player is using the native device controls
+ *
+ * @event Player#usingnativecontrols
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('usingnativecontrols');
+ } else {
+ this.removeClass('vjs-using-native-controls');
+ /**
+ * player is using the custom HTML controls
+ *
+ * @event Player#usingcustomcontrols
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('usingcustomcontrols');
+ }
+ }
+ /**
+ * Set or get the current MediaError
+ *
+ * @fires Player#error
+ *
+ * @param {MediaError|string|number} [err]
+ * A MediaError or a string/number to be turned
+ * into a MediaError
+ *
+ * @return {MediaError|null}
+ * The current MediaError when getting (or null)
+ */
+ ;
+
+ _proto.error = function error(err) {
+ var _this18 = this;
+
+ if (err === undefined) {
+ return this.error_ || null;
+ } // allow hooks to modify error object
+
+
+ hooks('beforeerror').forEach(function (hookFunction) {
+ var newErr = hookFunction(_this18, err);
+
+ if (!(isObject(newErr) && !Array.isArray(newErr) || typeof newErr === 'string' || typeof newErr === 'number' || newErr === null)) {
+ _this18.log.error('please return a value that MediaError expects in beforeerror hooks');
+
+ return;
+ }
+
+ err = newErr;
+ }); // Suppress the first error message for no compatible source until
+ // user interaction
+
+ if (this.options_.suppressNotSupportedError && err && err.code === 4) {
+ var triggerSuppressedError = function triggerSuppressedError() {
+ this.error(err);
+ };
+
+ this.options_.suppressNotSupportedError = false;
+ this.any(['click', 'touchstart'], triggerSuppressedError);
+ this.one('loadstart', function () {
+ this.off(['click', 'touchstart'], triggerSuppressedError);
+ });
+ return;
+ } // restoring to default
+
+
+ if (err === null) {
+ this.error_ = err;
+ this.removeClass('vjs-error');
+
+ if (this.errorDisplay) {
+ this.errorDisplay.close();
+ }
+
+ return;
+ }
+
+ this.error_ = new MediaError(err); // add the vjs-error classname to the player
+
+ this.addClass('vjs-error'); // log the name of the error type and any message
+    // IE11 logs "[object object]" and requires you to expand the message to see the error object
+
+ log$1.error("(CODE:" + this.error_.code + " " + MediaError.errorTypes[this.error_.code] + ")", this.error_.message, this.error_);
+ /**
+ * @event Player#error
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('error'); // notify hooks of the per player error
+
+ hooks('error').forEach(function (hookFunction) {
+ return hookFunction(_this18, _this18.error_);
+ });
+ return;
+ }
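+ /*
+  * Usage sketch (illustrative only): reacting to and clearing player errors, plus a
+  * `beforeerror` hook as consumed by the hooks() call above (registered through videojs.hook,
+  * the public hook API).
+  *
+  *   player.on('error', function () {
+  *     var err = player.error(); // MediaError or null
+  *     console.log('playback failed:', err && err.code, err && err.message);
+  *   });
+  *
+  *   player.error(null); // clear the error and close the error display
+  *
+  *   videojs.hook('beforeerror', function (p, err) {
+  *     // must return something MediaError accepts: object, string, number or null
+  *     return err;
+  *   });
+  */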
+ /**
+ * Report user activity
+ *
+ * @param {Object} event
+ * Event object
+ */
+ ;
+
+ _proto.reportUserActivity = function reportUserActivity(event) {
+ this.userActivity_ = true;
+ }
+ /**
+ * Get/set if user is active
+ *
+ * @fires Player#useractive
+ * @fires Player#userinactive
+ *
+ * @param {boolean} [bool]
+ * - true if the user is active
+ * - false if the user is inactive
+ *
+ * @return {boolean}
+ * The current value of userActive when getting
+ */
+ ;
+
+ _proto.userActive = function userActive(bool) {
+ if (bool === undefined) {
+ return this.userActive_;
+ }
+
+ bool = !!bool;
+
+ if (bool === this.userActive_) {
+ return;
+ }
+
+ this.userActive_ = bool;
+
+ if (this.userActive_) {
+ this.userActivity_ = true;
+ this.removeClass('vjs-user-inactive');
+ this.addClass('vjs-user-active');
+ /**
+ * @event Player#useractive
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('useractive');
+ return;
+ } // Chrome/Safari/IE have bugs where when you change the cursor it can
+ // trigger a mousemove event. This causes an issue when you're hiding
+ // the cursor when the user is inactive, and a mousemove signals user
+ // activity. Making it impossible to go into inactive mode. Specifically
+ // this happens in fullscreen when we really need to hide the cursor.
+ //
+ // When this gets resolved in ALL browsers it can be removed
+ // https://code.google.com/p/chromium/issues/detail?id=103041
+
+
+ if (this.tech_) {
+ this.tech_.one('mousemove', function (e) {
+ e.stopPropagation();
+ e.preventDefault();
+ });
+ }
+
+ this.userActivity_ = false;
+ this.removeClass('vjs-user-active');
+ this.addClass('vjs-user-inactive');
+ /**
+ * @event Player#userinactive
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('userinactive');
+ }
+ /**
+ * Listen for user activity based on timeout value
+ *
+ * @private
+ */
+ ;
+
+ _proto.listenForUserActivity_ = function listenForUserActivity_() {
+ var mouseInProgress;
+ var lastMoveX;
+ var lastMoveY;
+ var handleActivity = bind(this, this.reportUserActivity);
+
+ var handleMouseMove = function handleMouseMove(e) {
+ // #1068 - Prevent mousemove spamming
+ // Chrome Bug: https://code.google.com/p/chromium/issues/detail?id=366970
+ if (e.screenX !== lastMoveX || e.screenY !== lastMoveY) {
+ lastMoveX = e.screenX;
+ lastMoveY = e.screenY;
+ handleActivity();
+ }
+ };
+
+ var handleMouseDown = function handleMouseDown() {
+      handleActivity(); // For as long as they are touching the device or have their mouse down,
+ // we consider them active even if they're not moving their finger or mouse.
+ // So we want to continue to update that they are active
+
+ this.clearInterval(mouseInProgress); // Setting userActivity=true now and setting the interval to the same time
+ // as the activityCheck interval (250) should ensure we never miss the
+ // next activityCheck
+
+ mouseInProgress = this.setInterval(handleActivity, 250);
+ };
+
+ var handleMouseUpAndMouseLeave = function handleMouseUpAndMouseLeave(event) {
+ handleActivity(); // Stop the interval that maintains activity if the mouse/touch is down
+
+ this.clearInterval(mouseInProgress);
+ }; // Any mouse movement will be considered user activity
+
+
+ this.on('mousedown', handleMouseDown);
+ this.on('mousemove', handleMouseMove);
+ this.on('mouseup', handleMouseUpAndMouseLeave);
+ this.on('mouseleave', handleMouseUpAndMouseLeave);
+ var controlBar = this.getChild('controlBar'); // Fixes bug on Android & iOS where when tapping progressBar (when control bar is displayed)
+ // controlBar would no longer be hidden by default timeout.
+
+ if (controlBar && !IS_IOS && !IS_ANDROID) {
+ controlBar.on('mouseenter', function (event) {
+ if (this.player().options_.inactivityTimeout !== 0) {
+ this.player().cache_.inactivityTimeout = this.player().options_.inactivityTimeout;
+ }
+
+ this.player().options_.inactivityTimeout = 0;
+ });
+ controlBar.on('mouseleave', function (event) {
+ this.player().options_.inactivityTimeout = this.player().cache_.inactivityTimeout;
+ });
+ } // Listen for keyboard navigation
+ // Shouldn't need to use inProgress interval because of key repeat
+
+
+ this.on('keydown', handleActivity);
+ this.on('keyup', handleActivity); // Run an interval every 250 milliseconds instead of stuffing everything into
+ // the mousemove/touchmove function itself, to prevent performance degradation.
+ // `this.reportUserActivity` simply sets this.userActivity_ to true, which
+ // then gets picked up by this loop
+ // http://ejohn.org/blog/learning-from-twitter/
+
+ var inactivityTimeout;
+ this.setInterval(function () {
+ // Check to see if mouse/touch activity has happened
+ if (!this.userActivity_) {
+ return;
+ } // Reset the activity tracker
+
+
+ this.userActivity_ = false; // If the user state was inactive, set the state to active
+
+ this.userActive(true); // Clear any existing inactivity timeout to start the timer over
+
+ this.clearTimeout(inactivityTimeout);
+ var timeout = this.options_.inactivityTimeout;
+
+ if (timeout <= 0) {
+ return;
+ } // In milliseconds, if no more activity has occurred the
+ // user will be considered inactive
+
+
+ inactivityTimeout = this.setTimeout(function () {
+ // Protect against the case where the inactivityTimeout can trigger just
+ // before the next user activity is picked up by the activity check loop
+ // causing a flicker
+ if (!this.userActivity_) {
+ this.userActive(false);
+ }
+ }, timeout);
+ }, 250);
+ }
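+ /*
+  * Configuration sketch (illustrative only): tuning the inactivity timer that drives the
+  * useractive/userinactive events above. The values and id are examples.
+  *
+  *   var player = videojs('my-video', { inactivityTimeout: 4000 }); // ms, 0 disables the timeout
+  *
+  *   player.on('userinactive', function () { console.log('controls are about to fade'); });
+  *   player.on('useractive', function () { console.log('user is back'); });
+  *
+  *   player.userActive(true); // force the active state programmatically
+  */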
+ /**
+ * Gets or sets the current playback rate. A playback rate of
+ * 1.0 represents normal speed and 0.5 would indicate half-speed
+ * playback, for instance.
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-playbackrate
+ *
+ * @param {number} [rate]
+ * New playback rate to set.
+ *
+ * @return {number}
+ * The current playback rate when getting or 1.0
+ */
+ ;
+
+ _proto.playbackRate = function playbackRate(rate) {
+ if (rate !== undefined) {
+ // NOTE: this.cache_.lastPlaybackRate is set from the tech handler
+ // that is registered above
+ this.techCall_('setPlaybackRate', rate);
+ return;
+ }
+
+ if (this.tech_ && this.tech_.featuresPlaybackRate) {
+ return this.cache_.lastPlaybackRate || this.techGet_('playbackRate');
+ }
+
+ return 1.0;
+ }
+ /**
+ * Gets or sets the current default playback rate. A default playback rate of
+ * 1.0 represents normal speed and 0.5 would indicate half-speed playback, for instance.
+ * defaultPlaybackRate will only represent what the initial playbackRate of a video was, not
+ * the current playbackRate.
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-defaultplaybackrate
+ *
+ * @param {number} [rate]
+ * New default playback rate to set.
+ *
+ * @return {number|Player}
+ * - The default playback rate when getting or 1.0
+ * - the player when setting
+ */
+ ;
+
+ _proto.defaultPlaybackRate = function defaultPlaybackRate(rate) {
+ if (rate !== undefined) {
+ return this.techCall_('setDefaultPlaybackRate', rate);
+ }
+
+ if (this.tech_ && this.tech_.featuresPlaybackRate) {
+ return this.techGet_('defaultPlaybackRate');
+ }
+
+ return 1.0;
+ }
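+ /*
+  * Usage sketch (illustrative only): working with playback rates. The rates array is an example;
+  * it feeds the control-bar menu mentioned in playbackRates() further below.
+  *
+  *   var player = videojs('my-video', { playbackRates: [0.5, 1, 1.5, 2] });
+  *
+  *   player.on('ratechange', function () {
+  *     console.log('rate is now', player.playbackRate());
+  *   });
+  *
+  *   player.playbackRate(1.5); // setter
+  */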
+ /**
+ * Gets or sets the audio flag
+ *
+ * @param {boolean} bool
+ * - true signals that this is an audio player
+ * - false signals that this is not an audio player
+ *
+ * @return {boolean}
+ * The current value of isAudio when getting
+ */
+ ;
+
+ _proto.isAudio = function isAudio(bool) {
+ if (bool !== undefined) {
+ this.isAudio_ = !!bool;
+ return;
+ }
+
+ return !!this.isAudio_;
+ };
+
+ _proto.enableAudioOnlyUI_ = function enableAudioOnlyUI_() {
+ var _this19 = this;
+
+ // Update styling immediately to show the control bar so we can get its height
+ this.addClass('vjs-audio-only-mode');
+ var playerChildren = this.children();
+ var controlBar = this.getChild('ControlBar');
+ var controlBarHeight = controlBar && controlBar.currentHeight(); // Hide all player components except the control bar. Control bar components
+ // needed only for video are hidden with CSS
+
+ playerChildren.forEach(function (child) {
+ if (child === controlBar) {
+ return;
+ }
+
+ if (child.el_ && !child.hasClass('vjs-hidden')) {
+ child.hide();
+
+ _this19.audioOnlyCache_.hiddenChildren.push(child);
+ }
+ });
+ this.audioOnlyCache_.playerHeight = this.currentHeight(); // Set the player height the same as the control bar
+
+ this.height(controlBarHeight);
+ this.trigger('audioonlymodechange');
+ };
+
+ _proto.disableAudioOnlyUI_ = function disableAudioOnlyUI_() {
+ this.removeClass('vjs-audio-only-mode'); // Show player components that were previously hidden
+
+ this.audioOnlyCache_.hiddenChildren.forEach(function (child) {
+ return child.show();
+ }); // Reset player height
+
+ this.height(this.audioOnlyCache_.playerHeight);
+ this.trigger('audioonlymodechange');
+ }
+ /**
+ * Get the current audioOnlyMode state or set audioOnlyMode to true or false.
+ *
+ * Setting this to `true` will hide all player components except the control bar,
+ * as well as control bar components needed only for video.
+ *
+ * @param {boolean} [value]
+ * The value to set audioOnlyMode to.
+ *
+ * @return {Promise|boolean}
+ * A Promise is returned when setting the state, and a boolean when getting
+ * the present state
+ */
+ ;
+
+ _proto.audioOnlyMode = function audioOnlyMode(value) {
+ var _this20 = this;
+
+ if (typeof value !== 'boolean' || value === this.audioOnlyMode_) {
+ return this.audioOnlyMode_;
+ }
+
+ this.audioOnlyMode_ = value;
+ var PromiseClass = this.options_.Promise || window$1.Promise;
+
+ if (PromiseClass) {
+ // Enable Audio Only Mode
+ if (value) {
+ var exitPromises = []; // Fullscreen and PiP are not supported in audioOnlyMode, so exit if we need to.
+
+ if (this.isInPictureInPicture()) {
+ exitPromises.push(this.exitPictureInPicture());
+ }
+
+ if (this.isFullscreen()) {
+ exitPromises.push(this.exitFullscreen());
+ }
+
+ if (this.audioPosterMode()) {
+ exitPromises.push(this.audioPosterMode(false));
+ }
+
+ return PromiseClass.all(exitPromises).then(function () {
+ return _this20.enableAudioOnlyUI_();
+ });
+ } // Disable Audio Only Mode
+
+
+ return PromiseClass.resolve().then(function () {
+ return _this20.disableAudioOnlyUI_();
+ });
+ }
+
+ if (value) {
+ if (this.isInPictureInPicture()) {
+ this.exitPictureInPicture();
+ }
+
+ if (this.isFullscreen()) {
+ this.exitFullscreen();
+ }
+
+ this.enableAudioOnlyUI_();
+ } else {
+ this.disableAudioOnlyUI_();
+ }
+ };
+
+ _proto.enablePosterModeUI_ = function enablePosterModeUI_() {
+ // Hide the video element and show the poster image to enable posterModeUI
+ var tech = this.tech_ && this.tech_;
+ tech.hide();
+ this.addClass('vjs-audio-poster-mode');
+ this.trigger('audiopostermodechange');
+ };
+
+ _proto.disablePosterModeUI_ = function disablePosterModeUI_() {
+ // Show the video element and hide the poster image to disable posterModeUI
+ var tech = this.tech_ && this.tech_;
+ tech.show();
+ this.removeClass('vjs-audio-poster-mode');
+ this.trigger('audiopostermodechange');
+ }
+ /**
+ * Get the current audioPosterMode state or set audioPosterMode to true or false
+ *
+ * @param {boolean} [value]
+ * The value to set audioPosterMode to.
+ *
+ * @return {Promise|boolean}
+ * A Promise is returned when setting the state, and a boolean when getting
+ * the present state
+ */
+ ;
+
+ _proto.audioPosterMode = function audioPosterMode(value) {
+ var _this21 = this;
+
+ if (typeof value !== 'boolean' || value === this.audioPosterMode_) {
+ return this.audioPosterMode_;
+ }
+
+ this.audioPosterMode_ = value;
+ var PromiseClass = this.options_.Promise || window$1.Promise;
+
+ if (PromiseClass) {
+ if (value) {
+ if (this.audioOnlyMode()) {
+ var audioOnlyModePromise = this.audioOnlyMode(false);
+ return audioOnlyModePromise.then(function () {
+ // enable audio poster mode after audio only mode is disabled
+ _this21.enablePosterModeUI_();
+ });
+ }
+
+ return PromiseClass.resolve().then(function () {
+ // enable audio poster mode
+ _this21.enablePosterModeUI_();
+ });
+ }
+
+ return PromiseClass.resolve().then(function () {
+ // disable audio poster mode
+ _this21.disablePosterModeUI_();
+ });
+ }
+
+ if (value) {
+ if (this.audioOnlyMode()) {
+ this.audioOnlyMode(false);
+ }
+
+ this.enablePosterModeUI_();
+ return;
+ }
+
+ this.disablePosterModeUI_();
+ }
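+ /*
+  * Usage sketch (illustrative only): switching between the audio-only and audio-poster
+  * presentations. When a Promise implementation is available the setters resolve once the UI
+  * has been updated.
+  *
+  *   player.audioOnlyMode(true).then(function () {
+  *     console.log('only the control bar is visible now');
+  *   });
+  *
+  *   player.audioOnlyMode();       // getter -> boolean
+  *   player.audioPosterMode(true); // hide the video element, keep the poster
+  */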
+ /**
+ * A helper method for adding a {@link TextTrack} to our
+ * {@link TextTrackList}.
+ *
+ * In addition to the W3C settings we allow adding additional info through options.
+ *
+ * @see http://www.w3.org/html/wg/drafts/html/master/embedded-content-0.html#dom-media-addtexttrack
+ *
+ * @param {string} [kind]
+ * the kind of TextTrack you are adding
+ *
+ * @param {string} [label]
+ *        the label to give the TextTrack
+ *
+ * @param {string} [language]
+ * the language to set on the TextTrack
+ *
+ * @return {TextTrack|undefined}
+ * the TextTrack that was added or undefined
+ * if there is no tech
+ */
+ ;
+
+ _proto.addTextTrack = function addTextTrack(kind, label, language) {
+ if (this.tech_) {
+ return this.tech_.addTextTrack(kind, label, language);
+ }
+ }
+ /**
+ * Create a remote {@link TextTrack} and an {@link HTMLTrackElement}.
+ * When manualCleanup is set to false, the track will be automatically removed
+ * on source changes.
+ *
+ * @param {Object} options
+ * Options to pass to {@link HTMLTrackElement} during creation. See
+ * {@link HTMLTrackElement} for object properties that you should use.
+ *
+ * @param {boolean} [manualCleanup=true] if set to false, the TextTrack will be
+ * removed on a source change
+ *
+ * @return {HtmlTrackElement}
+ * the HTMLTrackElement that was created and added
+ * to the HtmlTrackElementList and the remote
+ * TextTrackList
+ *
+ * @deprecated The default value of the "manualCleanup" parameter will change
+ *             to "false" in upcoming versions of Video.js
+ */
+ ;
+
+ _proto.addRemoteTextTrack = function addRemoteTextTrack(options, manualCleanup) {
+ if (this.tech_) {
+ return this.tech_.addRemoteTextTrack(options, manualCleanup);
+ }
+ }
+ /**
+ * Remove a remote {@link TextTrack} from the respective
+ * {@link TextTrackList} and {@link HtmlTrackElementList}.
+ *
+ * @param {Object} track
+ * Remote {@link TextTrack} to remove
+ *
+ * @return {undefined}
+ * does not return anything
+ */
+ ;
+
+ _proto.removeRemoteTextTrack = function removeRemoteTextTrack(obj) {
+ if (obj === void 0) {
+ obj = {};
+ }
+
+ var _obj = obj,
+ track = _obj.track;
+
+ if (!track) {
+ track = obj;
+ } // destructure the input into an object with a track argument, defaulting to arguments[0]
+ // default the whole argument to an empty object if nothing was passed in
+
+
+ if (this.tech_) {
+ return this.tech_.removeRemoteTextTrack(track);
+ }
+ }
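+ /*
+  * Usage sketch (illustrative only): adding and removing a remote caption track. The file path,
+  * language and label are placeholders.
+  *
+  *   var trackEl = player.addRemoteTextTrack({
+  *     kind: 'captions',
+  *     src: '/media/captions.en.vtt',
+  *     srclang: 'en',
+  *     label: 'English'
+  *   }, false); // false: remove the track automatically on source changes
+  *
+  *   // later, remove it explicitly:
+  *   player.removeRemoteTextTrack(trackEl.track);
+  */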
+ /**
+ * Gets available media playback quality metrics as specified by the W3C's Media
+ * Playback Quality API.
+ *
+ * @see [Spec]{@link https://wicg.github.io/media-playback-quality}
+ *
+ * @return {Object|undefined}
+ * An object with supported media playback quality metrics or undefined if there
+ * is no tech or the tech does not support it.
+ */
+ ;
+
+ _proto.getVideoPlaybackQuality = function getVideoPlaybackQuality() {
+ return this.techGet_('getVideoPlaybackQuality');
+ }
+ /**
+ * Get video width
+ *
+ * @return {number}
+ * current video width
+ */
+ ;
+
+ _proto.videoWidth = function videoWidth() {
+ return this.tech_ && this.tech_.videoWidth && this.tech_.videoWidth() || 0;
+ }
+ /**
+ * Get video height
+ *
+ * @return {number}
+ * current video height
+ */
+ ;
+
+ _proto.videoHeight = function videoHeight() {
+ return this.tech_ && this.tech_.videoHeight && this.tech_.videoHeight() || 0;
+ }
+ /**
+ * The player's language code.
+ *
+ * Changing the language will trigger
+ * [languagechange]{@link Player#event:languagechange}
+ * which Components can use to update control text.
+ * ClickableComponent will update its control text by default on
+ * [languagechange]{@link Player#event:languagechange}.
+ *
+ * @fires Player#languagechange
+ *
+ * @param {string} [code]
+ * the language code to set the player to
+ *
+ * @return {string}
+ * The current language code when getting
+ */
+ ;
+
+ _proto.language = function language(code) {
+ if (code === undefined) {
+ return this.language_;
+ }
+
+ if (this.language_ !== String(code).toLowerCase()) {
+ this.language_ = String(code).toLowerCase(); // during first init, it's possible some things won't be evented
+
+ if (isEvented(this)) {
+ /**
+ * fires when the player language change
+ *
+ * @event Player#languagechange
+ * @type {EventTarget~Event}
+ */
+ this.trigger('languagechange');
+ }
+ }
+ }
+ /**
+ * Get the player's language dictionary
+ * Merge every time, because a newly added plugin might call videojs.addLanguage() at any time
+ * Languages specified directly in the player options have precedence
+ *
+ * @return {Array}
+ *         An array of supported languages
+ */
+ ;
+
+ _proto.languages = function languages() {
+ return mergeOptions$3(Player.prototype.options_.languages, this.languages_);
+ }
+ /**
+ * Returns a JavaScript object representing the current track
+ * information. **Does not return it as JSON.**
+ *
+ * @return {Object}
+ *         Object representing the current track information
+ */
+ ;
+
+ _proto.toJSON = function toJSON() {
+ var options = mergeOptions$3(this.options_);
+ var tracks = options.tracks;
+ options.tracks = [];
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i]; // deep merge tracks and null out player so no circular references
+
+ track = mergeOptions$3(track);
+ track.player = undefined;
+ options.tracks[i] = track;
+ }
+
+ return options;
+ }
+ /**
+ * Creates a simple modal dialog (an instance of the {@link ModalDialog}
+ * component) that immediately overlays the player with arbitrary
+ * content and removes itself when closed.
+ *
+ * @param {string|Function|Element|Array|null} content
+ * Same as {@link ModalDialog#content}'s param of the same name.
+ * The most straight-forward usage is to provide a string or DOM
+ * element.
+ *
+ * @param {Object} [options]
+ * Extra options which will be passed on to the {@link ModalDialog}.
+ *
+ * @return {ModalDialog}
+ * the {@link ModalDialog} that was created
+ */
+ ;
+
+ _proto.createModal = function createModal(content, options) {
+ var _this22 = this;
+
+ options = options || {};
+ options.content = content || '';
+ var modal = new ModalDialog(this, options);
+ this.addChild(modal);
+ modal.on('dispose', function () {
+ _this22.removeChild(modal);
+ });
+ modal.open();
+ return modal;
+ }
+ /**
+ * Change breakpoint classes when the player resizes.
+ *
+ * @private
+ */
+ ;
+
+ _proto.updateCurrentBreakpoint_ = function updateCurrentBreakpoint_() {
+ if (!this.responsive()) {
+ return;
+ }
+
+ var currentBreakpoint = this.currentBreakpoint();
+ var currentWidth = this.currentWidth();
+
+ for (var i = 0; i < BREAKPOINT_ORDER.length; i++) {
+ var candidateBreakpoint = BREAKPOINT_ORDER[i];
+ var maxWidth = this.breakpoints_[candidateBreakpoint];
+
+ if (currentWidth <= maxWidth) {
+ // The current breakpoint did not change, nothing to do.
+ if (currentBreakpoint === candidateBreakpoint) {
+ return;
+ } // Only remove a class if there is a current breakpoint.
+
+
+ if (currentBreakpoint) {
+ this.removeClass(BREAKPOINT_CLASSES[currentBreakpoint]);
+ }
+
+ this.addClass(BREAKPOINT_CLASSES[candidateBreakpoint]);
+ this.breakpoint_ = candidateBreakpoint;
+ break;
+ }
+ }
+ }
+ /**
+ * Removes the current breakpoint.
+ *
+ * @private
+ */
+ ;
+
+ _proto.removeCurrentBreakpoint_ = function removeCurrentBreakpoint_() {
+ var className = this.currentBreakpointClass();
+ this.breakpoint_ = '';
+
+ if (className) {
+ this.removeClass(className);
+ }
+ }
+ /**
+ * Get or set breakpoints on the player.
+ *
+ * Calling this method with an object or `true` will remove any previous
+ * custom breakpoints and start from the defaults again.
+ *
+ * @param {Object|boolean} [breakpoints]
+ * If an object is given, it can be used to provide custom
+ * breakpoints. If `true` is given, will set default breakpoints.
+ * If this argument is not given, will simply return the current
+ * breakpoints.
+ *
+ * @param {number} [breakpoints.tiny]
+ * The maximum width for the "vjs-layout-tiny" class.
+ *
+ * @param {number} [breakpoints.xsmall]
+ * The maximum width for the "vjs-layout-x-small" class.
+ *
+ * @param {number} [breakpoints.small]
+ * The maximum width for the "vjs-layout-small" class.
+ *
+ * @param {number} [breakpoints.medium]
+ * The maximum width for the "vjs-layout-medium" class.
+ *
+ * @param {number} [breakpoints.large]
+ * The maximum width for the "vjs-layout-large" class.
+ *
+ * @param {number} [breakpoints.xlarge]
+ * The maximum width for the "vjs-layout-x-large" class.
+ *
+ * @param {number} [breakpoints.huge]
+ * The maximum width for the "vjs-layout-huge" class.
+ *
+ * @return {Object}
+ * An object mapping breakpoint names to maximum width values.
+ */
+ ;
+
+ _proto.breakpoints = function breakpoints(_breakpoints) {
+ // Used as a getter.
+ if (_breakpoints === undefined) {
+ return assign(this.breakpoints_);
+ }
+
+ this.breakpoint_ = '';
+ this.breakpoints_ = assign({}, DEFAULT_BREAKPOINTS, _breakpoints); // When breakpoint definitions change, we need to update the currently
+ // selected breakpoint.
+
+ this.updateCurrentBreakpoint_(); // Clone the breakpoints before returning.
+
+ return assign(this.breakpoints_);
+ }
+ /**
+ * Get or set a flag indicating whether or not this player should adjust
+ * its UI based on its dimensions.
+ *
+ * @param {boolean} value
+ * Should be `true` if the player should adjust its UI based on its
+ * dimensions; otherwise, should be `false`.
+ *
+ * @return {boolean}
+ * Will be `true` if this player should adjust its UI based on its
+ * dimensions; otherwise, will be `false`.
+ */
+ ;
+
+ _proto.responsive = function responsive(value) {
+ // Used as a getter.
+ if (value === undefined) {
+ return this.responsive_;
+ }
+
+ value = Boolean(value);
+ var current = this.responsive_; // Nothing changed.
+
+ if (value === current) {
+ return;
+ } // The value actually changed, set it.
+
+
+ this.responsive_ = value; // Start listening for breakpoints and set the initial breakpoint if the
+ // player is now responsive.
+
+ if (value) {
+ this.on('playerresize', this.boundUpdateCurrentBreakpoint_);
+ this.updateCurrentBreakpoint_(); // Stop listening for breakpoints if the player is no longer responsive.
+ } else {
+ this.off('playerresize', this.boundUpdateCurrentBreakpoint_);
+ this.removeCurrentBreakpoint_();
+ }
+
+ return value;
+ }
+ /**
+ * Get current breakpoint name, if any.
+ *
+ * @return {string}
+ *         If there is currently a breakpoint set, returns the key from the
+ * breakpoints object matching it. Otherwise, returns an empty string.
+ */
+ ;
+
+ _proto.currentBreakpoint = function currentBreakpoint() {
+ return this.breakpoint_;
+ }
+ /**
+ * Get the current breakpoint class name.
+ *
+ * @return {string}
+ * The matching class name (e.g. `"vjs-layout-tiny"` or
+ * `"vjs-layout-large"`) for the current breakpoint. Empty string if
+ * there is no current breakpoint.
+ */
+ ;
+
+ _proto.currentBreakpointClass = function currentBreakpointClass() {
+ return BREAKPOINT_CLASSES[this.breakpoint_] || '';
+ }
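+ /*
+  * Configuration sketch (illustrative only): responsive layout classes driven by the breakpoints
+  * above. The widths are examples; omitted keys keep their defaults.
+  *
+  *   var player = videojs('my-video', { responsive: true });
+  *
+  *   player.breakpoints({ tiny: 300, small: 600, large: 1000 });
+  *
+  *   player.on('playerresize', function () {
+  *     console.log(player.currentBreakpoint(), player.currentBreakpointClass());
+  *   });
+  */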
+ /**
+ * An object that describes a single piece of media.
+ *
+ * Properties that are not part of this type description will be retained; so,
+ * this can be viewed as a generic metadata storage mechanism as well.
+ *
+ * @see {@link https://wicg.github.io/mediasession/#the-mediametadata-interface}
+ * @typedef {Object} Player~MediaObject
+ *
+ * @property {string} [album]
+ * Unused, except if this object is passed to the `MediaSession`
+ * API.
+ *
+ * @property {string} [artist]
+ * Unused, except if this object is passed to the `MediaSession`
+ * API.
+ *
+ * @property {Object[]} [artwork]
+ * Unused, except if this object is passed to the `MediaSession`
+ * API. If not specified, will be populated via the `poster`, if
+ * available.
+ *
+ * @property {string} [poster]
+ * URL to an image that will display before playback.
+ *
+ * @property {Tech~SourceObject|Tech~SourceObject[]|string} [src]
+ * A single source object, an array of source objects, or a string
+ * referencing a URL to a media source. It is _highly recommended_
+ * that an object or array of objects is used here, so that source
+ * selection algorithms can take the `type` into account.
+ *
+ * @property {string} [title]
+ * Unused, except if this object is passed to the `MediaSession`
+ * API.
+ *
+ * @property {Object[]} [textTracks]
+ * An array of objects to be used to create text tracks, following
+ * the {@link https://www.w3.org/TR/html50/embedded-content-0.html#the-track-element|native track element format}.
+ * For ease of removal, these will be created as "remote" text
+ * tracks and set to automatically clean up on source changes.
+ *
+ * These objects may have properties like `src`, `kind`, `label`,
+ * and `language`, see {@link Tech#createRemoteTextTrack}.
+ */
+
+ /**
+ * Populate the player using a {@link Player~MediaObject|MediaObject}.
+ *
+ * @param {Player~MediaObject} media
+ * A media object.
+ *
+ * @param {Function} ready
+ * A callback to be called when the player is ready.
+ */
+ ;
+
+ _proto.loadMedia = function loadMedia(media, ready) {
+ var _this23 = this;
+
+ if (!media || typeof media !== 'object') {
+ return;
+ }
+
+ this.reset(); // Clone the media object so it cannot be mutated from outside.
+
+ this.cache_.media = mergeOptions$3(media);
+ var _this$cache_$media = this.cache_.media,
+ artwork = _this$cache_$media.artwork,
+ poster = _this$cache_$media.poster,
+ src = _this$cache_$media.src,
+ textTracks = _this$cache_$media.textTracks; // If `artwork` is not given, create it using `poster`.
+
+ if (!artwork && poster) {
+ this.cache_.media.artwork = [{
+ src: poster,
+ type: getMimetype(poster)
+ }];
+ }
+
+ if (src) {
+ this.src(src);
+ }
+
+ if (poster) {
+ this.poster(poster);
+ }
+
+ if (Array.isArray(textTracks)) {
+ textTracks.forEach(function (tt) {
+ return _this23.addRemoteTextTrack(tt, false);
+ });
+ }
+
+ this.ready(ready);
+ }
+ /**
+ * Get a clone of the current {@link Player~MediaObject} for this player.
+ *
+ * If the `loadMedia` method has not been used, will attempt to return a
+ * {@link Player~MediaObject} based on the current state of the player.
+ *
+ * @return {Player~MediaObject}
+ */
+ ;
+
+ _proto.getMedia = function getMedia() {
+ if (!this.cache_.media) {
+ var poster = this.poster();
+ var src = this.currentSources();
+ var textTracks = Array.prototype.map.call(this.remoteTextTracks(), function (tt) {
+ return {
+ kind: tt.kind,
+ label: tt.label,
+ language: tt.language,
+ src: tt.src
+ };
+ });
+ var media = {
+ src: src,
+ textTracks: textTracks
+ };
+
+ if (poster) {
+ media.poster = poster;
+ media.artwork = [{
+ src: media.poster,
+ type: getMimetype(media.poster)
+ }];
+ }
+
+ return media;
+ }
+
+ return mergeOptions$3(this.cache_.media);
+ }
+ /**
+ * Gets tag settings
+ *
+ * @param {Element} tag
+ * The player tag
+ *
+ * @return {Object}
+ * An object containing all of the settings
+ * for a player tag
+ */
+ ;
+
+ Player.getTagSettings = function getTagSettings(tag) {
+ var baseOptions = {
+ sources: [],
+ tracks: []
+ };
+ var tagOptions = getAttributes(tag);
+ var dataSetup = tagOptions['data-setup'];
+
+ if (hasClass(tag, 'vjs-fill')) {
+ tagOptions.fill = true;
+ }
+
+ if (hasClass(tag, 'vjs-fluid')) {
+ tagOptions.fluid = true;
+ } // Check if data-setup attr exists.
+
+
+ if (dataSetup !== null) {
+ // Parse options JSON
+ // If empty string, make it a parsable json object.
+ var _safeParseTuple = safeParseTuple(dataSetup || '{}'),
+ err = _safeParseTuple[0],
+ data = _safeParseTuple[1];
+
+ if (err) {
+ log$1.error(err);
+ }
+
+ assign(tagOptions, data);
+ }
+
+ assign(baseOptions, tagOptions); // Get tag children settings
+
+ if (tag.hasChildNodes()) {
+ var children = tag.childNodes;
+
+ for (var i = 0, j = children.length; i < j; i++) {
+ var child = children[i]; // Change case needed: http://ejohn.org/blog/nodename-case-sensitivity/
+
+ var childName = child.nodeName.toLowerCase();
+
+ if (childName === 'source') {
+ baseOptions.sources.push(getAttributes(child));
+ } else if (childName === 'track') {
+ baseOptions.tracks.push(getAttributes(child));
+ }
+ }
+ }
+
+ return baseOptions;
+ }
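+  // Sketch of what `getTagSettings` gathers from markup (the element id and
+  // source URL are placeholders). Given:
+  //
+  //   <video id="my-video" class="video-js vjs-fluid" data-setup='{"controls": true}'>
+  //     <source src="//example.com/video.mp4" type="video/mp4">
+  //   </video>
+  //
+  // Player.getTagSettings(document.getElementById('my-video')) returns an
+  // object roughly like:
+  //
+  //   { sources: [{ src: '//example.com/video.mp4', type: 'video/mp4' }],
+  //     tracks: [], fluid: true, controls: true, id: 'my-video', ... }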
+ /**
+ * Determine whether or not flexbox is supported
+ *
+ * @return {boolean}
+ * - true if flexbox is supported
+ * - false if flexbox is not supported
+ */
+ ;
+
+ _proto.flexNotSupported_ = function flexNotSupported_() {
+ var elem = document.createElement('i'); // Note: We don't actually use flexBasis (or flexOrder), but it's one of the more
+ // common flex features that we can rely on when checking for flex support.
+
+ return !('flexBasis' in elem.style || 'webkitFlexBasis' in elem.style || 'mozFlexBasis' in elem.style || 'msFlexBasis' in elem.style || // IE10-specific (2012 flex spec), available for completeness
+ 'msFlexOrder' in elem.style);
+ }
+ /**
+ * Set debug mode to enable/disable logs at info level.
+ *
+ * @param {boolean} enabled
+ * @fires Player#debugon
+ * @fires Player#debugoff
+ */
+ ;
+
+ _proto.debug = function debug(enabled) {
+ if (enabled === undefined) {
+ return this.debugEnabled_;
+ }
+
+ if (enabled) {
+ this.trigger('debugon');
+      this.previousLogLevel_ = this.log.level();
+ this.log.level('debug');
+ this.debugEnabled_ = true;
+ } else {
+ this.trigger('debugoff');
+ this.log.level(this.previousLogLevel_);
+ this.previousLogLevel_ = undefined;
+ this.debugEnabled_ = false;
+ }
+ }
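+  // Usage sketch for `debug`, assuming an existing Player instance `player`:
+  //
+  //   player.debug(true);   // fires "debugon" and switches player.log to the "debug" level
+  //   player.debug();       // getter form, returns true
+  //   player.debug(false);  // fires "debugoff" and restores the previous log level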
+ /**
+ * Set or get current playback rates.
+ * Takes an array and updates the playback rates menu with the new items.
+ * Pass in an empty array to hide the menu.
+ * Values other than arrays are ignored.
+ *
+ * @fires Player#playbackrateschange
+ * @param {number[]} newRates
+ * The new rates that the playback rates menu should update to.
+ * An empty array will hide the menu
+ * @return {number[]} When used as a getter will return the current playback rates
+ */
+ ;
+
+ _proto.playbackRates = function playbackRates(newRates) {
+ if (newRates === undefined) {
+ return this.cache_.playbackRates;
+ } // ignore any value that isn't an array
+
+
+ if (!Array.isArray(newRates)) {
+ return;
+ } // ignore any arrays that don't only contain numbers
+
+
+ if (!newRates.every(function (rate) {
+ return typeof rate === 'number';
+ })) {
+ return;
+ }
+
+ this.cache_.playbackRates = newRates;
+ /**
+ * fires when the playback rates in a player are changed
+ *
+ * @event Player#playbackrateschange
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('playbackrateschange');
+ };
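+  // Usage sketch for `playbackRates`, assuming an existing Player instance `player`:
+  //
+  //   player.playbackRates([0.5, 1, 1.5, 2]); // updates the menu, fires "playbackrateschange"
+  //   player.playbackRates();                 // getter form, returns [0.5, 1, 1.5, 2]
+  //   player.playbackRates([]);               // an empty array hides the menu
+  //   player.playbackRates('2x');             // non-array values are ignored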
+
+ return Player;
+}(Component$1);
+/**
+ * Get the {@link VideoTrackList}
+ * @link https://html.spec.whatwg.org/multipage/embedded-content.html#videotracklist
+ *
+ * @return {VideoTrackList}
+ * the current video track list
+ *
+ * @method Player.prototype.videoTracks
+ */
+
+/**
+ * Get the {@link AudioTrackList}
+ * @link https://html.spec.whatwg.org/multipage/embedded-content.html#audiotracklist
+ *
+ * @return {AudioTrackList}
+ * the current audio track list
+ *
+ * @method Player.prototype.audioTracks
+ */
+
+/**
+ * Get the {@link TextTrackList}
+ *
+ * @link http://www.w3.org/html/wg/drafts/html/master/embedded-content-0.html#dom-media-texttracks
+ *
+ * @return {TextTrackList}
+ * the current text track list
+ *
+ * @method Player.prototype.textTracks
+ */
+
+/**
+ * Get the remote {@link TextTrackList}
+ *
+ * @return {TextTrackList}
+ * The current remote text track list
+ *
+ * @method Player.prototype.remoteTextTracks
+ */
+
+/**
+ * Get the remote {@link HtmlTrackElementList} tracks.
+ *
+ * @return {HtmlTrackElementList}
+ * The current remote text track element list
+ *
+ * @method Player.prototype.remoteTextTrackEls
+ */
+
+
+ALL.names.forEach(function (name) {
+ var props = ALL[name];
+
+ Player.prototype[props.getterName] = function () {
+ if (this.tech_) {
+ return this.tech_[props.getterName]();
+ } // if we have not yet loadTech_, we create {video,audio,text}Tracks_
+ // these will be passed to the tech during loading
+
+
+ this[props.privateName] = this[props.privateName] || new props.ListClass();
+ return this[props.privateName];
+ };
+});
+/**
+ * Get or set the `Player`'s crossorigin option. For the HTML5 player, this
+ * sets the `crossOrigin` property on the `<video>` tag to control the CORS
+ * behavior.
+ *
+ * @see [Video Element Attributes]{@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video#attr-crossorigin}
+ *
+ * @param {string} [value]
+ * The value to set the `Player`'s crossorigin to. If an argument is
+ * given, must be one of `anonymous` or `use-credentials`.
+ *
+ * @return {string|undefined}
+ * - The current crossorigin value of the `Player` when getting.
+ * - undefined when setting
+ */
+
+Player.prototype.crossorigin = Player.prototype.crossOrigin;
+/**
+ * Global enumeration of players.
+ *
+ * The keys are the player IDs and the values are either the {@link Player}
+ * instance or `null` for disposed players.
+ *
+ * @type {Object}
+ */
+
+Player.players = {};
+var navigator = window$1.navigator;
+/*
+ * Player instance options, surfaced using options
+ * options = Player.prototype.options_
+ * Make changes in options, not here.
+ *
+ * @type {Object}
+ * @private
+ */
+
+Player.prototype.options_ = {
+ // Default order of fallback technology
+ techOrder: Tech.defaultTechOrder_,
+ html5: {},
+ // default inactivity timeout
+ inactivityTimeout: 2000,
+ // default playback rates
+ playbackRates: [],
+ // Add playback rate selection by adding rates
+ // 'playbackRates': [0.5, 1, 1.5, 2],
+ liveui: false,
+ // Included control sets
+ children: ['mediaLoader', 'posterImage', 'textTrackDisplay', 'loadingSpinner', 'bigPlayButton', 'liveTracker', 'controlBar', 'errorDisplay', 'textTrackSettings', 'resizeManager'],
+ language: navigator && (navigator.languages && navigator.languages[0] || navigator.userLanguage || navigator.language) || 'en',
+ // locales and their language translations
+ languages: {},
+ // Default message to show when a video cannot be played.
+ notSupportedMessage: 'No compatible source was found for this media.',
+ normalizeAutoplay: false,
+ fullscreen: {
+ options: {
+ navigationUI: 'hide'
+ }
+ },
+ breakpoints: {},
+ responsive: false,
+ audioOnlyMode: false,
+ audioPosterMode: false
+};
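+// Sketch of how these defaults are typically overridden (the element id and
+// values below are placeholders): per player, via the options argument, e.g.
+//
+//   videojs('my-video', { playbackRates: [0.5, 1, 1.5, 2], inactivityTimeout: 4000, liveui: true });
+//
+// or globally, before any players are created, via `videojs.options`, e.g.
+//
+//   videojs.options.notSupportedMessage = 'This video is unavailable.';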
+[
+/**
+ * Returns whether or not the player is in the "ended" state.
+ *
+ * @return {Boolean} True if the player is in the ended state, false if not.
+ * @method Player#ended
+ */
+'ended',
+/**
+ * Returns whether or not the player is in the "seeking" state.
+ *
+ * @return {Boolean} True if the player is in the seeking state, false if not.
+ * @method Player#seeking
+ */
+'seeking',
+/**
+ * Returns the TimeRanges of the media that are currently available
+ * for seeking to.
+ *
+ * @return {TimeRanges} the seekable intervals of the media timeline
+ * @method Player#seekable
+ */
+'seekable',
+/**
+ * Returns the current state of network activity for the element, from
+ * the codes in the list below.
+ * - NETWORK_EMPTY (numeric value 0)
+ * The element has not yet been initialised. All attributes are in
+ * their initial states.
+ * - NETWORK_IDLE (numeric value 1)
+ * The element's resource selection algorithm is active and has
+ * selected a resource, but it is not actually using the network at
+ * this time.
+ * - NETWORK_LOADING (numeric value 2)
+ * The user agent is actively trying to download data.
+ * - NETWORK_NO_SOURCE (numeric value 3)
+ * The element's resource selection algorithm is active, but it has
+ * not yet found a resource to use.
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#network-states
+ * @return {number} the current network activity state
+ * @method Player#networkState
+ */
+'networkState',
+/**
+ * Returns a value that expresses the current state of the element
+ * with respect to rendering the current playback position, from the
+ * codes in the list below.
+ * - HAVE_NOTHING (numeric value 0)
+ * No information regarding the media resource is available.
+ * - HAVE_METADATA (numeric value 1)
+ * Enough of the resource has been obtained that the duration of the
+ * resource is available.
+ * - HAVE_CURRENT_DATA (numeric value 2)
+ * Data for the immediate current playback position is available.
+ * - HAVE_FUTURE_DATA (numeric value 3)
+ * Data for the immediate current playback position is available, as
+ * well as enough data for the user agent to advance the current
+ * playback position in the direction of playback.
+ * - HAVE_ENOUGH_DATA (numeric value 4)
+ * The user agent estimates that enough data is available for
+ * playback to proceed uninterrupted.
+ *
+ * @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-readystate
+ * @return {number} the current playback rendering state
+ * @method Player#readyState
+ */
+'readyState'].forEach(function (fn) {
+ Player.prototype[fn] = function () {
+ return this.techGet_(fn);
+ };
+});
+TECH_EVENTS_RETRIGGER.forEach(function (event) {
+ Player.prototype["handleTech" + toTitleCase$1(event) + "_"] = function () {
+ return this.trigger(event);
+ };
+});
+/**
+ * Fired when the player has initial duration and dimension information
+ *
+ * @event Player#loadedmetadata
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Fired when the player has downloaded data at the current playback position
+ *
+ * @event Player#loadeddata
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Fired when the current playback position has changed.
+ * During playback this is fired every 15-250 milliseconds, depending on the
+ * playback technology in use.
+ *
+ * @event Player#timeupdate
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Fired when the volume changes
+ *
+ * @event Player#volumechange
+ * @type {EventTarget~Event}
+ */
+
+/**
+ * Reports whether or not a player has a plugin available.
+ *
+ * This does not report whether or not the plugin has ever been initialized
+ * on this player. For that, [usingPlugin]{@link Player#usingPlugin}.
+ *
+ * @method Player#hasPlugin
+ * @param {string} name
+ * The name of a plugin.
+ *
+ * @return {boolean}
+ * Whether or not this player has the requested plugin available.
+ */
+
+/**
+ * Reports whether or not a player is using a plugin by name.
+ *
+ * For basic plugins, this only reports whether the plugin has _ever_ been
+ * initialized on this player.
+ *
+ * @method Player#usingPlugin
+ * @param {string} name
+ * The name of a plugin.
+ *
+ * @return {boolean}
+ * Whether or not this player is using the requested plugin.
+ */
+
+Component$1.registerComponent('Player', Player);
+
+/**
+ * The base plugin name.
+ *
+ * @private
+ * @constant
+ * @type {string}
+ */
+
+var BASE_PLUGIN_NAME = 'plugin';
+/**
+ * The key on which a player's active plugins cache is stored.
+ *
+ * @private
+ * @constant
+ * @type {string}
+ */
+
+var PLUGIN_CACHE_KEY = 'activePlugins_';
+/**
+ * Stores registered plugins in a private space.
+ *
+ * @private
+ * @type {Object}
+ */
+
+var pluginStorage = {};
+/**
+ * Reports whether or not a plugin has been registered.
+ *
+ * @private
+ * @param {string} name
+ * The name of a plugin.
+ *
+ * @return {boolean}
+ * Whether or not the plugin has been registered.
+ */
+
+var pluginExists = function pluginExists(name) {
+ return pluginStorage.hasOwnProperty(name);
+};
+/**
+ * Get a single registered plugin by name.
+ *
+ * @private
+ * @param {string} name
+ * The name of a plugin.
+ *
+ * @return {Function|undefined}
+ * The plugin (or undefined).
+ */
+
+
+var getPlugin = function getPlugin(name) {
+ return pluginExists(name) ? pluginStorage[name] : undefined;
+};
+/**
+ * Marks a plugin as "active" on a player.
+ *
+ * Also, ensures that the player has an object for tracking active plugins.
+ *
+ * @private
+ * @param {Player} player
+ * A Video.js player instance.
+ *
+ * @param {string} name
+ * The name of a plugin.
+ */
+
+
+var markPluginAsActive = function markPluginAsActive(player, name) {
+ player[PLUGIN_CACHE_KEY] = player[PLUGIN_CACHE_KEY] || {};
+ player[PLUGIN_CACHE_KEY][name] = true;
+};
+/**
+ * Triggers a pair of plugin setup events.
+ *
+ * @private
+ * @param {Player} player
+ * A Video.js player instance.
+ *
+ * @param {Plugin~PluginEventHash} hash
+ * A plugin event hash.
+ *
+ * @param {boolean} [before]
+ * If true, prefixes the event name with "before". In other words,
+ * use this to trigger "beforepluginsetup" instead of "pluginsetup".
+ */
+
+
+var triggerSetupEvent = function triggerSetupEvent(player, hash, before) {
+ var eventName = (before ? 'before' : '') + 'pluginsetup';
+ player.trigger(eventName, hash);
+ player.trigger(eventName + ':' + hash.name, hash);
+};
+/**
+ * Takes a basic plugin function and returns a wrapper function which marks
+ * on the player that the plugin has been activated.
+ *
+ * @private
+ * @param {string} name
+ * The name of the plugin.
+ *
+ * @param {Function} plugin
+ * The basic plugin.
+ *
+ * @return {Function}
+ * A wrapper function for the given plugin.
+ */
+
+
+var createBasicPlugin = function createBasicPlugin(name, plugin) {
+ var basicPluginWrapper = function basicPluginWrapper() {
+ // We trigger the "beforepluginsetup" and "pluginsetup" events on the player
+ // regardless, but we want the hash to be consistent with the hash provided
+ // for advanced plugins.
+ //
+ // The only potentially counter-intuitive thing here is the `instance` in
+ // the "pluginsetup" event is the value returned by the `plugin` function.
+ triggerSetupEvent(this, {
+ name: name,
+ plugin: plugin,
+ instance: null
+ }, true);
+ var instance = plugin.apply(this, arguments);
+ markPluginAsActive(this, name);
+ triggerSetupEvent(this, {
+ name: name,
+ plugin: plugin,
+ instance: instance
+ });
+ return instance;
+ };
+
+ Object.keys(plugin).forEach(function (prop) {
+ basicPluginWrapper[prop] = plugin[prop];
+ });
+ return basicPluginWrapper;
+};
+/**
+ * Takes a plugin sub-class and returns a factory function for generating
+ * instances of it.
+ *
+ * This factory function will replace itself with an instance of the requested
+ * sub-class of Plugin.
+ *
+ * @private
+ * @param {string} name
+ * The name of the plugin.
+ *
+ * @param {Plugin} PluginSubClass
+ * The advanced plugin.
+ *
+ * @return {Function}
+ */
+
+
+var createPluginFactory = function createPluginFactory(name, PluginSubClass) {
+ // Add a `name` property to the plugin prototype so that each plugin can
+ // refer to itself by name.
+ PluginSubClass.prototype.name = name;
+ return function () {
+ triggerSetupEvent(this, {
+ name: name,
+ plugin: PluginSubClass,
+ instance: null
+ }, true);
+
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ var instance = _construct(PluginSubClass, [this].concat(args)); // The plugin is replaced by a function that returns the current instance.
+
+
+ this[name] = function () {
+ return instance;
+ };
+
+ triggerSetupEvent(this, instance.getEventHash());
+ return instance;
+ };
+};
+/**
+ * Parent class for all advanced plugins.
+ *
+ * @mixes module:evented~EventedMixin
+ * @mixes module:stateful~StatefulMixin
+ * @fires Player#beforepluginsetup
+ * @fires Player#beforepluginsetup:$name
+ * @fires Player#pluginsetup
+ * @fires Player#pluginsetup:$name
+ * @listens Player#dispose
+ * @throws {Error}
+ * If attempting to instantiate the base {@link Plugin} class
+ * directly instead of via a sub-class.
+ */
+
+
+var Plugin = /*#__PURE__*/function () {
+ /**
+ * Creates an instance of this class.
+ *
+ * Sub-classes should call `super` to ensure plugins are properly initialized.
+ *
+ * @param {Player} player
+ * A Video.js player instance.
+ */
+ function Plugin(player) {
+ if (this.constructor === Plugin) {
+ throw new Error('Plugin must be sub-classed; not directly instantiated.');
+ }
+
+ this.player = player;
+
+ if (!this.log) {
+ this.log = this.player.log.createLogger(this.name);
+ } // Make this object evented, but remove the added `trigger` method so we
+ // use the prototype version instead.
+
+
+ evented(this);
+ delete this.trigger;
+ stateful(this, this.constructor.defaultState);
+ markPluginAsActive(player, this.name); // Auto-bind the dispose method so we can use it as a listener and unbind
+ // it later easily.
+
+ this.dispose = this.dispose.bind(this); // If the player is disposed, dispose the plugin.
+
+ player.on('dispose', this.dispose);
+ }
+ /**
+ * Get the version of the plugin that was set on .VERSION
+ */
+
+
+ var _proto = Plugin.prototype;
+
+ _proto.version = function version() {
+ return this.constructor.VERSION;
+ }
+ /**
+ * Each event triggered by plugins includes a hash of additional data with
+ * conventional properties.
+ *
+ * This returns that object or mutates an existing hash.
+ *
+ * @param {Object} [hash={}]
+   *        An object to be used as an event hash.
+ *
+ * @return {Plugin~PluginEventHash}
+ * An event hash object with provided properties mixed-in.
+ */
+ ;
+
+ _proto.getEventHash = function getEventHash(hash) {
+ if (hash === void 0) {
+ hash = {};
+ }
+
+ hash.name = this.name;
+ hash.plugin = this.constructor;
+ hash.instance = this;
+ return hash;
+ }
+ /**
+ * Triggers an event on the plugin object and overrides
+ * {@link module:evented~EventedMixin.trigger|EventedMixin.trigger}.
+ *
+ * @param {string|Object} event
+ * An event type or an object with a type property.
+ *
+ * @param {Object} [hash={}]
+ * Additional data hash to merge with a
+ * {@link Plugin~PluginEventHash|PluginEventHash}.
+ *
+ * @return {boolean}
+ * Whether or not default was prevented.
+ */
+ ;
+
+ _proto.trigger = function trigger$1(event, hash) {
+ if (hash === void 0) {
+ hash = {};
+ }
+
+ return trigger(this.eventBusEl_, event, this.getEventHash(hash));
+ }
+ /**
+ * Handles "statechanged" events on the plugin. No-op by default, override by
+ * subclassing.
+ *
+ * @abstract
+ * @param {Event} e
+ * An event object provided by a "statechanged" event.
+ *
+ * @param {Object} e.changes
+ * An object describing changes that occurred with the "statechanged"
+ * event.
+ */
+ ;
+
+ _proto.handleStateChanged = function handleStateChanged(e) {}
+ /**
+ * Disposes a plugin.
+ *
+ * Subclasses can override this if they want, but for the sake of safety,
+   * it's probably best to subscribe to the "dispose" event.
+ *
+ * @fires Plugin#dispose
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ var name = this.name,
+ player = this.player;
+ /**
+     * Signals that an advanced plugin is about to be disposed.
+ *
+ * @event Plugin#dispose
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('dispose');
+ this.off();
+ player.off('dispose', this.dispose); // Eliminate any possible sources of leaking memory by clearing up
+ // references between the player and the plugin instance and nulling out
+ // the plugin's state and replacing methods with a function that throws.
+
+ player[PLUGIN_CACHE_KEY][name] = false;
+ this.player = this.state = null; // Finally, replace the plugin name on the player with a new factory
+ // function, so that the plugin is ready to be set up again.
+
+ player[name] = createPluginFactory(name, pluginStorage[name]);
+ }
+ /**
+ * Determines if a plugin is a basic plugin (i.e. not a sub-class of `Plugin`).
+ *
+ * @param {string|Function} plugin
+ * If a string, matches the name of a plugin. If a function, will be
+ * tested directly.
+ *
+ * @return {boolean}
+ * Whether or not a plugin is a basic plugin.
+ */
+ ;
+
+ Plugin.isBasic = function isBasic(plugin) {
+ var p = typeof plugin === 'string' ? getPlugin(plugin) : plugin;
+ return typeof p === 'function' && !Plugin.prototype.isPrototypeOf(p.prototype);
+ }
+ /**
+ * Register a Video.js plugin.
+ *
+ * @param {string} name
+ * The name of the plugin to be registered. Must be a string and
+ * must not match an existing plugin or a method on the `Player`
+ * prototype.
+ *
+ * @param {Function} plugin
+ * A sub-class of `Plugin` or a function for basic plugins.
+ *
+ * @return {Function}
+ * For advanced plugins, a factory function for that plugin. For
+ * basic plugins, a wrapper function that initializes the plugin.
+ */
+ ;
+
+ Plugin.registerPlugin = function registerPlugin(name, plugin) {
+ if (typeof name !== 'string') {
+ throw new Error("Illegal plugin name, \"" + name + "\", must be a string, was " + typeof name + ".");
+ }
+
+ if (pluginExists(name)) {
+ log$1.warn("A plugin named \"" + name + "\" already exists. You may want to avoid re-registering plugins!");
+ } else if (Player.prototype.hasOwnProperty(name)) {
+ throw new Error("Illegal plugin name, \"" + name + "\", cannot share a name with an existing player method!");
+ }
+
+ if (typeof plugin !== 'function') {
+ throw new Error("Illegal plugin for \"" + name + "\", must be a function, was " + typeof plugin + ".");
+ }
+
+ pluginStorage[name] = plugin; // Add a player prototype method for all sub-classed plugins (but not for
+ // the base Plugin class).
+
+ if (name !== BASE_PLUGIN_NAME) {
+ if (Plugin.isBasic(plugin)) {
+ Player.prototype[name] = createBasicPlugin(name, plugin);
+ } else {
+ Player.prototype[name] = createPluginFactory(name, plugin);
+ }
+ }
+
+ return plugin;
+ }
+ /**
+ * De-register a Video.js plugin.
+ *
+ * @param {string} name
+ * The name of the plugin to be de-registered. Must be a string that
+ * matches an existing plugin.
+ *
+ * @throws {Error}
+ * If an attempt is made to de-register the base plugin.
+ */
+ ;
+
+ Plugin.deregisterPlugin = function deregisterPlugin(name) {
+ if (name === BASE_PLUGIN_NAME) {
+ throw new Error('Cannot de-register base plugin.');
+ }
+
+ if (pluginExists(name)) {
+ delete pluginStorage[name];
+ delete Player.prototype[name];
+ }
+ }
+ /**
+ * Gets an object containing multiple Video.js plugins.
+ *
+ * @param {Array} [names]
+ * If provided, should be an array of plugin names. Defaults to _all_
+ * plugin names.
+ *
+ * @return {Object|undefined}
+ * An object containing plugin(s) associated with their name(s) or
+   *         `undefined` if no matching plugins exist.
+ */
+ ;
+
+ Plugin.getPlugins = function getPlugins(names) {
+ if (names === void 0) {
+ names = Object.keys(pluginStorage);
+ }
+
+ var result;
+ names.forEach(function (name) {
+ var plugin = getPlugin(name);
+
+ if (plugin) {
+ result = result || {};
+ result[name] = plugin;
+ }
+ });
+ return result;
+ }
+ /**
+ * Gets a plugin's version, if available
+ *
+ * @param {string} name
+ * The name of a plugin.
+ *
+ * @return {string}
+ * The plugin's version or an empty string.
+ */
+ ;
+
+ Plugin.getPluginVersion = function getPluginVersion(name) {
+ var plugin = getPlugin(name);
+ return plugin && plugin.VERSION || '';
+ };
+
+ return Plugin;
+}();
+/**
+ * Gets a plugin by name if it exists.
+ *
+ * @static
+ * @method getPlugin
+ * @memberOf Plugin
+ * @param {string} name
+ * The name of a plugin.
+ *
+ * @returns {Function|undefined}
+ * The plugin (or `undefined`).
+ */
+
+
+Plugin.getPlugin = getPlugin;
+/**
+ * The name of the base plugin class as it is registered.
+ *
+ * @type {string}
+ */
+
+Plugin.BASE_PLUGIN_NAME = BASE_PLUGIN_NAME;
+Plugin.registerPlugin(BASE_PLUGIN_NAME, Plugin);
+/**
+ * Documented in player.js
+ *
+ * @ignore
+ */
+
+Player.prototype.usingPlugin = function (name) {
+ return !!this[PLUGIN_CACHE_KEY] && this[PLUGIN_CACHE_KEY][name] === true;
+};
+/**
+ * Documented in player.js
+ *
+ * @ignore
+ */
+
+
+Player.prototype.hasPlugin = function (name) {
+ return !!pluginExists(name);
+};
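+// Usage sketch for the plugin API above. "examplePlugin" is a placeholder name;
+// a basic plugin is any plain function, while an advanced plugin subclasses the
+// base Plugin class (available as `videojs.getPlugin('plugin')`):
+//
+//   videojs.registerPlugin('examplePlugin', function(options) {
+//     this.addClass('vjs-example-plugin'); // `this` is the Player for basic plugins
+//   });
+//
+//   var player = videojs('my-video');
+//   player.hasPlugin('examplePlugin');   // true once registered
+//   player.usingPlugin('examplePlugin'); // false until...
+//   player.examplePlugin({ some: 'option' });
+//   player.usingPlugin('examplePlugin'); // ...now true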
+/**
+ * Signals that a plugin is about to be set up on a player.
+ *
+ * @event Player#beforepluginsetup
+ * @type {Plugin~PluginEventHash}
+ */
+
+/**
+ * Signals that a plugin is about to be set up on a player - by name. The name
+ * is the name of the plugin.
+ *
+ * @event Player#beforepluginsetup:$name
+ * @type {Plugin~PluginEventHash}
+ */
+
+/**
+ * Signals that a plugin has just been set up on a player.
+ *
+ * @event Player#pluginsetup
+ * @type {Plugin~PluginEventHash}
+ */
+
+/**
+ * Signals that a plugin has just been set up on a player - by name. The name
+ * is the name of the plugin.
+ *
+ * @event Player#pluginsetup:$name
+ * @type {Plugin~PluginEventHash}
+ */
+
+/**
+ * @typedef {Object} Plugin~PluginEventHash
+ *
+ * @property {string} instance
+ * For basic plugins, the return value of the plugin function. For
+ * advanced plugins, the plugin instance on which the event is fired.
+ *
+ * @property {string} name
+ * The name of the plugin.
+ *
+ * @property {string} plugin
+ * For basic plugins, the plugin function. For advanced plugins, the
+ * plugin class/constructor.
+ */
+
+/**
+ * @file extend.js
+ * @module extend
+ */
+/**
+ * Used to subclass an existing class by emulating ES subclassing using the
+ * `extends` keyword.
+ *
+ * @function
+ * @example
+ * var MyComponent = videojs.extend(videojs.getComponent('Component'), {
+ * myCustomMethod: function() {
+ * // Do things in my method.
+ * }
+ * });
+ *
+ * @param {Function} superClass
+ * The class to inherit from
+ *
+ * @param {Object} [subClassMethods={}]
+ * Methods of the new class
+ *
+ * @return {Function}
+ * The new class with subClassMethods that inherited superClass.
+ */
+
+var extend = function extend(superClass, subClassMethods) {
+ if (subClassMethods === void 0) {
+ subClassMethods = {};
+ }
+
+ var subClass = function subClass() {
+ superClass.apply(this, arguments);
+ };
+
+ var methods = {};
+
+ if (typeof subClassMethods === 'object') {
+ if (subClassMethods.constructor !== Object.prototype.constructor) {
+ subClass = subClassMethods.constructor;
+ }
+
+ methods = subClassMethods;
+ } else if (typeof subClassMethods === 'function') {
+ subClass = subClassMethods;
+ }
+
+ _inherits(subClass, superClass); // this is needed for backward-compatibility and node compatibility.
+
+
+ if (superClass) {
+ subClass.super_ = superClass;
+ } // Extend subObj's prototype with functions and other properties from props
+
+
+ for (var name in methods) {
+ if (methods.hasOwnProperty(name)) {
+ subClass.prototype[name] = methods[name];
+ }
+ }
+
+ return subClass;
+};
+
+/**
+ * @file video.js
+ * @module videojs
+ */
+/**
+ * Normalize an `id` value by trimming off a leading `#`
+ *
+ * @private
+ * @param {string} id
+ * A string, maybe with a leading `#`.
+ *
+ * @return {string}
+ * The string, without any leading `#`.
+ */
+
+var normalizeId = function normalizeId(id) {
+ return id.indexOf('#') === 0 ? id.slice(1) : id;
+};
+/**
+ * The `videojs()` function doubles as the main function for users to create a
+ * {@link Player} instance as well as the main library namespace.
+ *
+ * It can also be used as a getter for a pre-existing {@link Player} instance.
+ * However, we _strongly_ recommend using `videojs.getPlayer()` for this
+ * purpose because it avoids any potential for unintended initialization.
+ *
+ * Due to [limitations](https://github.com/jsdoc3/jsdoc/issues/955#issuecomment-313829149)
+ * of our JSDoc template, we cannot properly document this as both a function
+ * and a namespace, so its function signature is documented here.
+ *
+ * #### Arguments
+ * ##### id
+ * string|Element, **required**
+ *
+ * Video element or video element ID.
+ *
+ * ##### options
+ * Object, optional
+ *
+ * Options object for providing settings.
+ * See: [Options Guide](https://docs.videojs.com/tutorial-options.html).
+ *
+ * ##### ready
+ * {@link Component~ReadyCallback}, optional
+ *
+ * A function to be called when the {@link Player} and {@link Tech} are ready.
+ *
+ * #### Return Value
+ *
+ * The `videojs()` function returns a {@link Player} instance.
+ *
+ * @namespace
+ *
+ * @borrows AudioTrack as AudioTrack
+ * @borrows Component.getComponent as getComponent
+ * @borrows module:computed-style~computedStyle as computedStyle
+ * @borrows module:events.on as on
+ * @borrows module:events.one as one
+ * @borrows module:events.off as off
+ * @borrows module:events.trigger as trigger
+ * @borrows EventTarget as EventTarget
+ * @borrows module:extend~extend as extend
+ * @borrows module:fn.bind as bind
+ * @borrows module:format-time.formatTime as formatTime
+ * @borrows module:format-time.resetFormatTime as resetFormatTime
+ * @borrows module:format-time.setFormatTime as setFormatTime
+ * @borrows module:merge-options.mergeOptions as mergeOptions
+ * @borrows module:middleware.use as use
+ * @borrows Player.players as players
+ * @borrows Plugin.registerPlugin as registerPlugin
+ * @borrows Plugin.deregisterPlugin as deregisterPlugin
+ * @borrows Plugin.getPlugins as getPlugins
+ * @borrows Plugin.getPlugin as getPlugin
+ * @borrows Plugin.getPluginVersion as getPluginVersion
+ * @borrows Tech.getTech as getTech
+ * @borrows Tech.registerTech as registerTech
+ * @borrows TextTrack as TextTrack
+ * @borrows module:time-ranges.createTimeRanges as createTimeRange
+ * @borrows module:time-ranges.createTimeRanges as createTimeRanges
+ * @borrows module:url.isCrossOrigin as isCrossOrigin
+ * @borrows module:url.parseUrl as parseUrl
+ * @borrows VideoTrack as VideoTrack
+ *
+ * @param {string|Element} id
+ * Video element or video element ID.
+ *
+ * @param {Object} [options]
+ * Options object for providing settings.
+ * See: [Options Guide](https://docs.videojs.com/tutorial-options.html).
+ *
+ * @param {Component~ReadyCallback} [ready]
+ * A function to be called when the {@link Player} and {@link Tech} are
+ * ready.
+ *
+ * @return {Player}
+ * The `videojs()` function returns a {@link Player|Player} instance.
+ */
+
+
+function videojs(id, options, ready) {
+ var player = videojs.getPlayer(id);
+
+ if (player) {
+ if (options) {
+ log$1.warn("Player \"" + id + "\" is already initialised. Options will not be applied.");
+ }
+
+ if (ready) {
+ player.ready(ready);
+ }
+
+ return player;
+ }
+
+ var el = typeof id === 'string' ? $('#' + normalizeId(id)) : id;
+
+ if (!isEl(el)) {
+ throw new TypeError('The element or ID supplied is not valid. (videojs)');
+ } // document.body.contains(el) will only check if el is contained within that one document.
+ // This causes problems for elements in iframes.
+ // Instead, use the element's ownerDocument instead of the global document.
+ // This will make sure that the element is indeed in the dom of that document.
+ // Additionally, check that the document in question has a default view.
+ // If the document is no longer attached to the dom, the defaultView of the document will be null.
+
+
+ if (!el.ownerDocument.defaultView || !el.ownerDocument.body.contains(el)) {
+ log$1.warn('The element supplied is not included in the DOM');
+ }
+
+ options = options || {}; // Store a copy of the el before modification, if it is to be restored in destroy()
+ // If div ingest, store the parent div
+
+ if (options.restoreEl === true) {
+ options.restoreEl = (el.parentNode && el.parentNode.hasAttribute('data-vjs-player') ? el.parentNode : el).cloneNode(true);
+ }
+
+ hooks('beforesetup').forEach(function (hookFunction) {
+ var opts = hookFunction(el, mergeOptions$3(options));
+
+ if (!isObject(opts) || Array.isArray(opts)) {
+ log$1.error('please return an object in beforesetup hooks');
+ return;
+ }
+
+ options = mergeOptions$3(options, opts);
+ }); // We get the current "Player" component here in case an integration has
+ // replaced it with a custom player.
+
+ var PlayerComponent = Component$1.getComponent('Player');
+ player = new PlayerComponent(el, options, ready);
+ hooks('setup').forEach(function (hookFunction) {
+ return hookFunction(player);
+ });
+ return player;
+}
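+// Usage sketch for `videojs()` and the setup hooks it runs (the element id and
+// option values are placeholders):
+//
+//   videojs.hook('beforesetup', function(videoEl, options) {
+//     // must return a plain options object; it is merged into the player options
+//     return { autoplay: false };
+//   });
+//
+//   var player = videojs('my-video', { controls: true }, function() {
+//     // `this` is the ready player
+//     this.play();
+//   });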
+
+videojs.hooks_ = hooks_;
+videojs.hooks = hooks;
+videojs.hook = hook;
+videojs.hookOnce = hookOnce;
+videojs.removeHook = removeHook; // Add default styles
+
+if (window$1.VIDEOJS_NO_DYNAMIC_STYLE !== true && isReal()) {
+ var style = $('.vjs-styles-defaults');
+
+ if (!style) {
+ style = createStyleElement('vjs-styles-defaults');
+ var head = $('head');
+
+ if (head) {
+ head.insertBefore(style, head.firstChild);
+ }
+
+ setTextContent(style, "\n .video-js {\n width: 300px;\n height: 150px;\n }\n\n .vjs-fluid:not(.vjs-audio-only-mode) {\n padding-top: 56.25%\n }\n ");
+ }
+} // Run Auto-load players
+// You have to wait at least once in case this script is loaded after your
+// video in the DOM (weird behavior only with minified version)
+
+
+autoSetupTimeout(1, videojs);
+/**
+ * Current Video.js version. Follows [semantic versioning](https://semver.org/).
+ *
+ * @type {string}
+ */
+
+videojs.VERSION = version$5;
+/**
+ * The global options object. These are the settings that take effect
+ * if no overrides are specified when the player is created.
+ *
+ * @type {Object}
+ */
+
+videojs.options = Player.prototype.options_;
+/**
+ * Get an object with the currently created players, keyed by player ID
+ *
+ * @return {Object}
+ * The created players
+ */
+
+videojs.getPlayers = function () {
+ return Player.players;
+};
+/**
+ * Get a single player based on an ID or DOM element.
+ *
+ * This is useful if you want to check if an element or ID has an associated
+ * Video.js player, but not create one if it doesn't.
+ *
+ * @param {string|Element} id
+ *        An HTML element - `<video>`, `<audio>`, or `<video-js>` -
+ * or a string matching the `id` of such an element.
+ *
+ * @return {Player|undefined}
+ * A player instance or `undefined` if there is no player instance
+ * matching the argument.
+ */
+
+
+videojs.getPlayer = function (id) {
+ var players = Player.players;
+ var tag;
+
+ if (typeof id === 'string') {
+ var nId = normalizeId(id);
+ var player = players[nId];
+
+ if (player) {
+ return player;
+ }
+
+ tag = $('#' + nId);
+ } else {
+ tag = id;
+ }
+
+ if (isEl(tag)) {
+ var _tag = tag,
+ _player = _tag.player,
+ playerId = _tag.playerId; // Element may have a `player` property referring to an already created
+ // player instance. If so, return that.
+
+ if (_player || players[playerId]) {
+ return _player || players[playerId];
+ }
+ }
+};
+/**
+ * Returns an array of all current players.
+ *
+ * @return {Array}
+ * An array of all players. The array will be in the order that
+ * `Object.keys` provides, which could potentially vary between
+ * JavaScript engines.
+ *
+ */
+
+
+videojs.getAllPlayers = function () {
+ return (// Disposed players leave a key with a `null` value, so we need to make sure
+ // we filter those out.
+ Object.keys(Player.players).map(function (k) {
+ return Player.players[k];
+ }).filter(Boolean)
+ );
+};
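+// Usage sketch for the player lookup helpers (the id is a placeholder):
+//
+//   videojs.getPlayer('my-video');  // the Player for that id/element, or undefined
+//   videojs.getPlayer('#my-video'); // a leading "#" is also accepted
+//   videojs.getAllPlayers();        // array of all live (non-disposed) Player instances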
+
+videojs.players = Player.players;
+videojs.getComponent = Component$1.getComponent;
+/**
+ * Register a component so it can be referred to by name. Used when adding to other
+ * components, either through addChild `component.addChild('myComponent')` or through
+ * default children options `{ children: ['myComponent'] }`.
+ *
+ * > NOTE: You could also just initialize the component before adding.
+ * `component.addChild(new MyComponent());`
+ *
+ * @param {string} name
+ * The class name of the component
+ *
+ * @param {Component} comp
+ * The component class
+ *
+ * @return {Component}
+ * The newly registered component
+ */
+
+videojs.registerComponent = function (name, comp) {
+ if (Tech.isTech(comp)) {
+ log$1.warn("The " + name + " tech was registered as a component. It should instead be registered using videojs.registerTech(name, tech)");
+ }
+
+ Component$1.registerComponent.call(Component$1, name, comp);
+};
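+// Usage sketch combining `videojs.extend` and `videojs.registerComponent`
+// ("TitleBar" is a placeholder component, not part of the library):
+//
+//   var Component = videojs.getComponent('Component');
+//   var TitleBar = videojs.extend(Component, {
+//     createEl: function() {
+//       return videojs.dom.createEl('div', { className: 'vjs-title-bar' });
+//     }
+//   });
+//   videojs.registerComponent('TitleBar', TitleBar);
+//   videojs('my-video').addChild('TitleBar');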
+
+videojs.getTech = Tech.getTech;
+videojs.registerTech = Tech.registerTech;
+videojs.use = use;
+/**
+ * An object that can be returned by a middleware to signify
+ * that the middleware is being terminated.
+ *
+ * @type {object}
+ * @property {object} middleware.TERMINATOR
+ */
+
+Object.defineProperty(videojs, 'middleware', {
+ value: {},
+  writable: false,
+ enumerable: true
+});
+Object.defineProperty(videojs.middleware, 'TERMINATOR', {
+ value: TERMINATOR,
+  writable: false,
+ enumerable: true
+});
+/**
+ * A reference to the {@link module:browser|browser utility module} as an object.
+ *
+ * @type {Object}
+ * @see {@link module:browser|browser}
+ */
+
+videojs.browser = browser;
+/**
+ * Use {@link module:browser.TOUCH_ENABLED|browser.TOUCH_ENABLED} instead; only
+ * included for backward-compatibility with 4.x.
+ *
+ * @deprecated Since version 5.0, use {@link module:browser.TOUCH_ENABLED|browser.TOUCH_ENABLED} instead.
+ * @type {boolean}
+ */
+
+videojs.TOUCH_ENABLED = TOUCH_ENABLED;
+videojs.extend = extend;
+videojs.mergeOptions = mergeOptions$3;
+videojs.bind = bind;
+videojs.registerPlugin = Plugin.registerPlugin;
+videojs.deregisterPlugin = Plugin.deregisterPlugin;
+/**
+ * Deprecated method to register a plugin with Video.js
+ *
+ * @deprecated videojs.plugin() is deprecated; use videojs.registerPlugin() instead
+ *
+ * @param {string} name
+ * The plugin name
+ *
+ * @param {Plugin|Function} plugin
+ * The plugin sub-class or function
+ */
+
+videojs.plugin = function (name, plugin) {
+ log$1.warn('videojs.plugin() is deprecated; use videojs.registerPlugin() instead');
+ return Plugin.registerPlugin(name, plugin);
+};
+
+videojs.getPlugins = Plugin.getPlugins;
+videojs.getPlugin = Plugin.getPlugin;
+videojs.getPluginVersion = Plugin.getPluginVersion;
+/**
+ * Adding languages so that they're available to all players.
+ * Example: `videojs.addLanguage('es', { 'Hello': 'Hola' });`
+ *
+ * @param {string} code
+ * The language code or dictionary property
+ *
+ * @param {Object} data
+ * The data values to be translated
+ *
+ * @return {Object}
+ * The resulting language dictionary object
+ */
+
+videojs.addLanguage = function (code, data) {
+ var _mergeOptions;
+
+ code = ('' + code).toLowerCase();
+ videojs.options.languages = mergeOptions$3(videojs.options.languages, (_mergeOptions = {}, _mergeOptions[code] = data, _mergeOptions));
+ return videojs.options.languages[code];
+};
+/**
+ * A reference to the {@link module:log|log utility module} as an object.
+ *
+ * @type {Function}
+ * @see {@link module:log|log}
+ */
+
+
+videojs.log = log$1;
+videojs.createLogger = createLogger;
+videojs.createTimeRange = videojs.createTimeRanges = createTimeRanges;
+videojs.formatTime = formatTime;
+videojs.setFormatTime = setFormatTime;
+videojs.resetFormatTime = resetFormatTime;
+videojs.parseUrl = parseUrl;
+videojs.isCrossOrigin = isCrossOrigin;
+videojs.EventTarget = EventTarget$2;
+videojs.on = on;
+videojs.one = one;
+videojs.off = off;
+videojs.trigger = trigger;
+/**
+ * A cross-browser XMLHttpRequest wrapper.
+ *
+ * @function
+ * @param {Object} options
+ * Settings for the request.
+ *
+ * @return {XMLHttpRequest|XDomainRequest}
+ * The request object.
+ *
+ * @see https://github.com/Raynos/xhr
+ */
+
+videojs.xhr = XHR;
+videojs.TextTrack = TextTrack;
+videojs.AudioTrack = AudioTrack;
+videojs.VideoTrack = VideoTrack;
+['isEl', 'isTextNode', 'createEl', 'hasClass', 'addClass', 'removeClass', 'toggleClass', 'setAttributes', 'getAttributes', 'emptyEl', 'appendContent', 'insertContent'].forEach(function (k) {
+ videojs[k] = function () {
+ log$1.warn("videojs." + k + "() is deprecated; use videojs.dom." + k + "() instead");
+ return Dom[k].apply(null, arguments);
+ };
+});
+videojs.computedStyle = computedStyle;
+/**
+ * A reference to the {@link module:dom|DOM utility module} as an object.
+ *
+ * @type {Object}
+ * @see {@link module:dom|dom}
+ */
+
+videojs.dom = Dom;
+/**
+ * A reference to the {@link module:url|URL utility module} as an object.
+ *
+ * @type {Object}
+ * @see {@link module:url|url}
+ */
+
+videojs.url = Url;
+videojs.defineLazyProperty = defineLazyProperty; // Adding less ambiguous text for fullscreen button.
+// In a major update this could become the default text and key.
+
+videojs.addLanguage('en', {
+ 'Non-Fullscreen': 'Exit Fullscreen'
+});
+
+/*! @name @videojs/http-streaming @version 2.14.2 @license Apache-2.0 */
+/**
+ * @file resolve-url.js - Handling how URLs are resolved and manipulated
+ */
+
+var resolveUrl = _resolveUrl;
+/**
+ * Checks whether xhr request was redirected and returns correct url depending
+ * on `handleManifestRedirects` option
+ *
+ * @api private
+ *
+ * @param {string} url - a URL being requested
+ * @param {XMLHttpRequest} req - xhr request result
+ *
+ * @return {string}
+ */
+
+var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
+ // To understand how the responseURL below is set and generated:
+ // - https://fetch.spec.whatwg.org/#concept-response-url
+ // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
+ if (handleManifestRedirect && req && req.responseURL && url !== req.responseURL) {
+ return req.responseURL;
+ }
+
+ return url;
+};
+
+var logger = function logger(source) {
+ if (videojs.log.debug) {
+ return videojs.log.debug.bind(videojs, 'VHS:', source + " >");
+ }
+
+ return function () {};
+};
+/**
+ * ranges
+ *
+ * Utilities for working with TimeRanges.
+ *
+ */
+
+
+var TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
+// can be misleading because of precision differences or when the current media has poorly
+// aligned audio and video, which can cause values to be slightly off from what you would
+// expect. This value is what we consider to be safe to use in such comparisons to account
+// for these scenarios.
+
+var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
+
+var filterRanges = function filterRanges(timeRanges, predicate) {
+ var results = [];
+ var i;
+
+ if (timeRanges && timeRanges.length) {
+ // Search for ranges that match the predicate
+ for (i = 0; i < timeRanges.length; i++) {
+ if (predicate(timeRanges.start(i), timeRanges.end(i))) {
+ results.push([timeRanges.start(i), timeRanges.end(i)]);
+ }
+ }
+ }
+
+ return videojs.createTimeRanges(results);
+};
+/**
+ * Attempts to find the buffered TimeRange that contains the specified
+ * time.
+ *
+ * @param {TimeRanges} buffered - the TimeRanges object to query
+ * @param {number} time - the time to filter on.
+ * @return {TimeRanges} a new TimeRanges object
+ */
+
+
+var findRange = function findRange(buffered, time) {
+ return filterRanges(buffered, function (start, end) {
+ return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
+ });
+};
+/**
+ * Returns the TimeRanges that begin later than the specified time.
+ *
+ * @param {TimeRanges} timeRanges - the TimeRanges object to query
+ * @param {number} time - the time to filter on.
+ * @return {TimeRanges} a new TimeRanges object.
+ */
+
+
+var findNextRange = function findNextRange(timeRanges, time) {
+ return filterRanges(timeRanges, function (start) {
+ return start - TIME_FUDGE_FACTOR >= time;
+ });
+};
+/**
+ * Returns gaps within a list of TimeRanges
+ *
+ * @param {TimeRanges} buffered - the TimeRanges object
+ * @return {TimeRanges} a TimeRanges object of gaps
+ */
+
+
+var findGaps = function findGaps(buffered) {
+ if (buffered.length < 2) {
+ return videojs.createTimeRanges();
+ }
+
+ var ranges = [];
+
+ for (var i = 1; i < buffered.length; i++) {
+ var start = buffered.end(i - 1);
+ var end = buffered.start(i);
+ ranges.push([start, end]);
+ }
+
+ return videojs.createTimeRanges(ranges);
+};
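+// Sketch of how these range helpers behave, using videojs.createTimeRanges to
+// build a buffered-like object:
+//
+//   var buffered = videojs.createTimeRanges([[0, 10], [20, 30]]);
+//   findRange(buffered, 5);      // a TimeRanges containing only [0, 10]
+//   findNextRange(buffered, 12); // a TimeRanges containing only [20, 30]
+//   findGaps(buffered);          // a TimeRanges containing the gap [10, 20]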
+/**
+ * Calculate the intersection of two TimeRanges
+ *
+ * @param {TimeRanges} bufferA
+ * @param {TimeRanges} bufferB
+ * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
+ */
+
+
+var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
+ var start = null;
+ var end = null;
+ var arity = 0;
+ var extents = [];
+ var ranges = [];
+
+ if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
+ return videojs.createTimeRange();
+ } // Handle the case where we have both buffers and create an
+ // intersection of the two
+
+
+ var count = bufferA.length; // A) Gather up all start and end times
+
+ while (count--) {
+ extents.push({
+ time: bufferA.start(count),
+ type: 'start'
+ });
+ extents.push({
+ time: bufferA.end(count),
+ type: 'end'
+ });
+ }
+
+ count = bufferB.length;
+
+ while (count--) {
+ extents.push({
+ time: bufferB.start(count),
+ type: 'start'
+ });
+ extents.push({
+ time: bufferB.end(count),
+ type: 'end'
+ });
+ } // B) Sort them by time
+
+
+ extents.sort(function (a, b) {
+ return a.time - b.time;
+ }); // C) Go along one by one incrementing arity for start and decrementing
+ // arity for ends
+
+ for (count = 0; count < extents.length; count++) {
+ if (extents[count].type === 'start') {
+ arity++; // D) If arity is ever incremented to 2 we are entering an
+ // overlapping range
+
+ if (arity === 2) {
+ start = extents[count].time;
+ }
+ } else if (extents[count].type === 'end') {
+      arity--; // E) If arity is ever decremented to 1 we are leaving an
+ // overlapping range
+
+ if (arity === 1) {
+ end = extents[count].time;
+ }
+ } // F) Record overlapping ranges
+
+
+ if (start !== null && end !== null) {
+ ranges.push([start, end]);
+ start = null;
+ end = null;
+ }
+ }
+
+ return videojs.createTimeRanges(ranges);
+};
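+// Sketch of `bufferIntersection` on two overlapping buffered ranges:
+//
+//   var a = videojs.createTimeRanges([[0, 10]]);
+//   var b = videojs.createTimeRanges([[5, 15]]);
+//   bufferIntersection(a, b); // a TimeRanges containing only [5, 10]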
+/**
+ * Gets a human readable string for a TimeRange
+ *
+ * @param {TimeRange} range
+ * @return {string} a human readable string
+ */
+
+
+var printableRange = function printableRange(range) {
+ var strArr = [];
+
+ if (!range || !range.length) {
+ return '';
+ }
+
+ for (var i = 0; i < range.length; i++) {
+ strArr.push(range.start(i) + ' => ' + range.end(i));
+ }
+
+ return strArr.join(', ');
+};
+/**
+ * Calculates the amount of time left in seconds until the player hits the end of the
+ * buffer and causes a rebuffer
+ *
+ * @param {TimeRange} buffered
+ * The state of the buffer
+ * @param {number} currentTime
+ * The current time of the player
+ * @param {number} playbackRate
+ * The current playback rate of the player. Defaults to 1.
+ * @return {number}
+ * Time until the player has to start rebuffering in seconds.
+ * @function timeUntilRebuffer
+ */
+
+
+var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
+ if (playbackRate === void 0) {
+ playbackRate = 1;
+ }
+
+ var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
+ return (bufferedEnd - currentTime) / playbackRate;
+};
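+// Worked example for `timeUntilRebuffer`: with 0-30s buffered, a current time
+// of 20s and a playback rate of 2, (30 - 20) / 2 leaves 5 seconds until the
+// buffer runs out:
+//
+//   timeUntilRebuffer(videojs.createTimeRanges([[0, 30]]), 20, 2); // 5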
+/**
+ * Converts a TimeRanges object into an array representation
+ *
+ * @param {TimeRanges} timeRanges
+ * @return {Array}
+ */
+
+
+var timeRangesToArray = function timeRangesToArray(timeRanges) {
+ var timeRangesList = [];
+
+ for (var i = 0; i < timeRanges.length; i++) {
+ timeRangesList.push({
+ start: timeRanges.start(i),
+ end: timeRanges.end(i)
+ });
+ }
+
+ return timeRangesList;
+};
+/**
+ * Determines if two time range objects are different.
+ *
+ * @param {TimeRange} a
+ * the first time range object to check
+ *
+ * @param {TimeRange} b
+ * the second time range object to check
+ *
+ * @return {Boolean}
+ * Whether the time range objects differ
+ */
+
+
+var isRangeDifferent = function isRangeDifferent(a, b) {
+ // same object
+ if (a === b) {
+ return false;
+ } // one or the other is undefined
+
+
+ if (!a && b || !b && a) {
+ return true;
+ } // length is different
+
+
+ if (a.length !== b.length) {
+ return true;
+ } // see if any start/end pair is different
+
+
+ for (var i = 0; i < a.length; i++) {
+ if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
+ return true;
+ }
+ } // if the length and every pair is the same
+ // this is the same time range
+
+
+ return false;
+};
+
+var lastBufferedEnd = function lastBufferedEnd(a) {
+ if (!a || !a.length || !a.end) {
+ return;
+ }
+
+ return a.end(a.length - 1);
+};
+/**
+ * A utility function to add up the amount of time in a timeRange
+ * after a specified startTime.
+ * e.g. [[0, 10], [20, 40], [50, 60]] with a startTime of 0
+ * would return 40 as there are 40 seconds after 0 in the timeRange
+ *
+ * @param {TimeRange} range
+ * The range to check against
+ * @param {number} startTime
+ * The time in the time range that you should start counting from
+ *
+ * @return {number}
+ *         The number of seconds in the buffer past the specified time.
+ */
+
+
+var timeAheadOf = function timeAheadOf(range, startTime) {
+ var time = 0;
+
+ if (!range || !range.length) {
+ return time;
+ }
+
+ for (var i = 0; i < range.length; i++) {
+ var start = range.start(i);
+ var end = range.end(i); // startTime is after this range entirely
+
+ if (startTime > end) {
+ continue;
+ } // startTime is within this range
+
+
+ if (startTime > start && startTime <= end) {
+ time += end - startTime;
+ continue;
+ } // startTime is before this range.
+
+
+ time += end - start;
+ }
+
+ return time;
+};
+/**
+ * @file playlist.js
+ *
+ * Playlist related utilities.
+ */
+
+
+var createTimeRange = videojs.createTimeRange;
+/**
+ * Get the duration of a segment, with special cases for
+ * llhls segments that do not have a duration yet.
+ *
+ * @param {Object} playlist
+ * the playlist that the segment belongs to.
+ * @param {Object} segment
+ * the segment to get a duration for.
+ *
+ * @return {number}
+ * the segment duration
+ */
+
+var segmentDurationWithParts = function segmentDurationWithParts(playlist, segment) {
+ // if this isn't a preload segment
+ // then we will have a segment duration that is accurate.
+ if (!segment.preload) {
+ return segment.duration;
+ } // otherwise we have to add up parts and preload hints
+ // to get an up to date duration.
+
+
+ var result = 0;
+ (segment.parts || []).forEach(function (p) {
+ result += p.duration;
+ }); // for preload hints we have to use partTargetDuration
+ // as they won't even have a duration yet.
+
+ (segment.preloadHints || []).forEach(function (p) {
+ if (p.type === 'PART') {
+ result += playlist.partTargetDuration;
+ }
+ });
+ return result;
+};
+/**
+ * A function to get a combined list of parts and segments with durations
+ * and indexes.
+ *
+ * @param {Playlist} playlist the playlist to get the list for.
+ *
+ * @return {Array} The part/segment list.
+ */
+
+
+var getPartsAndSegments = function getPartsAndSegments(playlist) {
+ return (playlist.segments || []).reduce(function (acc, segment, si) {
+ if (segment.parts) {
+ segment.parts.forEach(function (part, pi) {
+ acc.push({
+ duration: part.duration,
+ segmentIndex: si,
+ partIndex: pi,
+ part: part,
+ segment: segment
+ });
+ });
+ } else {
+ acc.push({
+ duration: segment.duration,
+ segmentIndex: si,
+ partIndex: null,
+ segment: segment,
+ part: null
+ });
+ }
+
+ return acc;
+ }, []);
+};
+
+var getLastParts = function getLastParts(media) {
+ var lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
+ return lastSegment && lastSegment.parts || [];
+};
+
+var getKnownPartCount = function getKnownPartCount(_ref) {
+ var preloadSegment = _ref.preloadSegment;
+
+ if (!preloadSegment) {
+ return;
+ }
+
+ var parts = preloadSegment.parts,
+ preloadHints = preloadSegment.preloadHints;
+ var partCount = (preloadHints || []).reduce(function (count, hint) {
+ return count + (hint.type === 'PART' ? 1 : 0);
+ }, 0);
+ partCount += parts && parts.length ? parts.length : 0;
+ return partCount;
+};
+/**
+ * Get the number of seconds to delay from the end of a
+ * live playlist.
+ *
+ * @param {Playlist} master the master playlist
+ * @param {Playlist} media the media playlist
+ * @return {number} the hold back in seconds.
+ */
+
+
+var liveEdgeDelay = function liveEdgeDelay(master, media) {
+ if (media.endList) {
+ return 0;
+ } // dash suggestedPresentationDelay trumps everything
+
+
+ if (master && master.suggestedPresentationDelay) {
+ return master.suggestedPresentationDelay;
+ }
+
+ var hasParts = getLastParts(media).length > 0; // look for "part" delays from ll-hls first
+
+ if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
+ return media.serverControl.partHoldBack;
+ } else if (hasParts && media.partTargetDuration) {
+ return media.partTargetDuration * 3; // finally look for full segment delays
+ } else if (media.serverControl && media.serverControl.holdBack) {
+ return media.serverControl.holdBack;
+ } else if (media.targetDuration) {
+ return media.targetDuration * 3;
+ }
+
+ return 0;
+};
+/**
+ * walk backward until we find a duration we can use
+ * or return a failure
+ *
+ * @param {Playlist} playlist the playlist to walk through
+ * @param {Number} endSequence the mediaSequence to stop walking on
+ */
+
+
+var backwardDuration = function backwardDuration(playlist, endSequence) {
+ var result = 0;
+ var i = endSequence - playlist.mediaSequence; // if a start time is available for segment immediately following
+ // the interval, use it
+
+ var segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
+ // information that is earlier than endSequence
+
+ if (segment) {
+ if (typeof segment.start !== 'undefined') {
+ return {
+ result: segment.start,
+ precise: true
+ };
+ }
+
+ if (typeof segment.end !== 'undefined') {
+ return {
+ result: segment.end - segment.duration,
+ precise: true
+ };
+ }
+ }
+
+ while (i--) {
+ segment = playlist.segments[i];
+
+ if (typeof segment.end !== 'undefined') {
+ return {
+ result: result + segment.end,
+ precise: true
+ };
+ }
+
+ result += segmentDurationWithParts(playlist, segment);
+
+ if (typeof segment.start !== 'undefined') {
+ return {
+ result: result + segment.start,
+ precise: true
+ };
+ }
+ }
+
+ return {
+ result: result,
+ precise: false
+ };
+};
+/**
+ * walk forward until we find a duration we can use
+ * or return a failure
+ *
+ * @param {Playlist} playlist the playlist to walk through
+ * @param {number} endSequence the mediaSequence to stop walking on
+ */
+
+
+var forwardDuration = function forwardDuration(playlist, endSequence) {
+ var result = 0;
+ var segment;
+ var i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
+ // information
+
+ for (; i < playlist.segments.length; i++) {
+ segment = playlist.segments[i];
+
+ if (typeof segment.start !== 'undefined') {
+ return {
+ result: segment.start - result,
+ precise: true
+ };
+ }
+
+ result += segmentDurationWithParts(playlist, segment);
+
+ if (typeof segment.end !== 'undefined') {
+ return {
+ result: segment.end - result,
+ precise: true
+ };
+ }
+ } // indicate we didn't find a useful duration estimate
+
+
+ return {
+ result: -1,
+ precise: false
+ };
+};
+/**
+ * Calculate the media duration from the segments associated with a
+ * playlist. The duration of a subinterval of the available segments
+ * may be calculated by specifying an end index.
+ *
+ * @param {Object} playlist a media playlist object
+ * @param {number=} endSequence an exclusive upper boundary
+ * for the playlist. Defaults to playlist length.
+ * @param {number} expired the amount of time that has dropped
+ * off the front of the playlist in a live scenario
+ * @return {number} the duration between the first available segment
+ * and end index.
+ */
+
+
+var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
+ if (typeof endSequence === 'undefined') {
+ endSequence = playlist.mediaSequence + playlist.segments.length;
+ }
+
+ if (endSequence < playlist.mediaSequence) {
+ return 0;
+ } // do a backward walk to estimate the duration
+
+
+ var backward = backwardDuration(playlist, endSequence);
+
+ if (backward.precise) {
+ // if we were able to base our duration estimate on timing
+ // information provided directly from the Media Source, return
+ // it
+ return backward.result;
+ } // walk forward to see if a precise duration estimate can be made
+ // that way
+
+
+ var forward = forwardDuration(playlist, endSequence);
+
+ if (forward.precise) {
+    // we found a segment that has been buffered and so its
+ // position is known precisely
+ return forward.result;
+ } // return the less-precise, playlist-based duration estimate
+
+
+ return backward.result + expired;
+};
+/**
+ * Calculates the duration of a playlist. If a start and end index
+ * are specified, the duration will be for the subset of the media
+ * timeline between those two indices. The total duration for live
+ * playlists is always Infinity.
+ *
+ * @param {Object} playlist a media playlist object
+ * @param {number=} endSequence an exclusive upper
+ * boundary for the playlist. Defaults to the playlist media
+ * sequence number plus its length.
+ * @param {number=} expired the amount of time that has
+ * dropped off the front of the playlist in a live scenario
+ * @return {number} the duration between the start index and end
+ * index.
+ */
+
+
+var duration = function duration(playlist, endSequence, expired) {
+ if (!playlist) {
+ return 0;
+ }
+
+ if (typeof expired !== 'number') {
+ expired = 0;
+ } // if a slice of the total duration is not requested, use
+ // playlist-level duration indicators when they're present
+
+
+ if (typeof endSequence === 'undefined') {
+ // if present, use the duration specified in the playlist
+ if (playlist.totalDuration) {
+ return playlist.totalDuration;
+ } // duration should be Infinity for live playlists
+
+
+ if (!playlist.endList) {
+ return window$1.Infinity;
+ }
+ } // calculate the total duration based on the segment durations
+
+
+ return intervalDuration(playlist, endSequence, expired);
+};
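+
+// Editorial usage sketch, not part of the upstream VHS code: the playlist below
+// is an assumed example input. Without precise per-segment timing information,
+// duration() falls back to summing the manifest segment durations; a live
+// playlist (endList === false) would report Infinity instead.
+var exampleVodPlaylist = {
+  mediaSequence: 0,
+  endList: true,
+  segments: [{ duration: 10 }, { duration: 10 }, { duration: 9.5 }]
+};
+var exampleVodDuration = duration(exampleVodPlaylist); // 29.5 seconds
+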
+/**
+ * Calculate the time between two indexes in the current playlist.
+ * Neither the start index nor the end index needs to be within the
+ * current playlist; in that case, the targetDuration of the playlist
+ * is used to approximate the durations of the segments.
+ *
+ * @param {Array} options.durationList list to iterate over for durations.
+ * @param {number} options.defaultDuration duration to use for elements before or after the durationList
+ * @param {number} options.startIndex partsAndSegments index to start
+ * @param {number} options.endIndex partsAndSegments index to end.
+ * @return {number} the number of seconds between startIndex and endIndex
+ */
+
+
+var sumDurations = function sumDurations(_ref2) {
+ var defaultDuration = _ref2.defaultDuration,
+ durationList = _ref2.durationList,
+ startIndex = _ref2.startIndex,
+ endIndex = _ref2.endIndex;
+ var durations = 0;
+
+ if (startIndex > endIndex) {
+ var _ref3 = [endIndex, startIndex];
+ startIndex = _ref3[0];
+ endIndex = _ref3[1];
+ }
+
+ if (startIndex < 0) {
+ for (var i = startIndex; i < Math.min(0, endIndex); i++) {
+ durations += defaultDuration;
+ }
+
+ startIndex = 0;
+ }
+
+ for (var _i = startIndex; _i < endIndex; _i++) {
+ durations += durationList[_i].duration;
+ }
+
+ return durations;
+};
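+
+// Editorial usage sketch with assumed inputs, not upstream code: indices that
+// fall before the start of durationList are approximated with defaultDuration,
+// while indices inside the list use the listed durations.
+var exampleSummedSeconds = sumDurations({
+  defaultDuration: 4,
+  durationList: [{ duration: 4 }, { duration: 4 }, { duration: 4 }, { duration: 4 }],
+  startIndex: -2,
+  endIndex: 1
+}); // 12: two approximated entries (2 * 4) plus the listed duration of index 0
+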
+/**
+ * Calculates the playlist end time
+ *
+ * @param {Object} playlist a media playlist object
+ * @param {number=} expired the amount of time that has
+ * dropped off the front of the playlist in a live scenario
+ * @param {boolean|false} useSafeLiveEnd a boolean value indicating whether or not the
+ * playlist end calculation should consider the safe live end
+ * (truncate the playlist end by three segments). This is normally
+ * used for calculating the end of the playlist's seekable range.
+ * This takes into account the value of liveEdgePadding.
+ * Setting liveEdgePadding to 0 is equivalent to setting this to false.
+ * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
+ * If this is provided, it is used in the safe live end calculation.
+ * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
+ * Corresponds to suggestedPresentationDelay in DASH manifests.
+ * @return {number} the end time of playlist
+ * @function playlistEnd
+ */
+
+
+var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
+ if (!playlist || !playlist.segments) {
+ return null;
+ }
+
+ if (playlist.endList) {
+ return duration(playlist);
+ }
+
+ if (expired === null) {
+ return null;
+ }
+
+ expired = expired || 0;
+ var lastSegmentEndTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);
+
+ if (useSafeLiveEnd) {
+ liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
+ lastSegmentEndTime -= liveEdgePadding;
+ } // don't return a time less than zero
+
+
+ return Math.max(0, lastSegmentEndTime);
+};
+/**
+ * Calculates the interval of time that is currently seekable in a
+ * playlist. The returned time ranges are relative to the earliest
+ * moment in the specified playlist that is still available. A full
+ * seekable implementation for live streams would need to offset
+ * these values by the duration of content that has expired from the
+ * stream.
+ *
+ * @param {Object} playlist a media playlist object
+ * @param {number=} expired the amount of time that has
+ * dropped off the front of the playlist in a live scenario
+ * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
+ * Corresponds to suggestedPresentationDelay in DASH manifests.
+ * @return {TimeRanges} the periods of time that are valid targets
+ * for seeking
+ */
+
+
+var seekable = function seekable(playlist, expired, liveEdgePadding) {
+ var useSafeLiveEnd = true;
+ var seekableStart = expired || 0;
+ var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
+
+ if (seekableEnd === null) {
+ return createTimeRange();
+ }
+
+ return createTimeRange(seekableStart, seekableEnd);
+};
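+
+// Editorial usage sketch with an assumed live playlist, not upstream code.
+// With six 10-second segments, 5 seconds already expired and no explicit
+// liveEdgePadding, the safe live edge is 3 * targetDuration = 30 seconds.
+var exampleSeekableRange = seekable({
+  mediaSequence: 10,
+  targetDuration: 10,
+  endList: false,
+  segments: [
+    { duration: 10 }, { duration: 10 }, { duration: 10 },
+    { duration: 10 }, { duration: 10 }, { duration: 10 }
+  ]
+}, 5); // a TimeRanges whose start(0) is 5 and whose end(0) is 5 + 60 - 30 = 35
+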
+/**
+ * Determine the index and estimated starting time of the segment that
+ * contains a specified playback position in a media playlist.
+ *
+ * @param {Object} options.playlist the media playlist to query
+ * @param {number} options.currentTime The number of seconds since the earliest
+ * possible position to determine the containing segment for
+ * @param {number} options.startTime the time when the segment/part starts
+ * @param {number} options.startingSegmentIndex the segment index to start looking at.
+ * @param {number?} [options.startingPartIndex] the part index to look at within the segment.
+ *
+ * @return {Object} an object with partIndex, segmentIndex, and startTime.
+ */
+
+
+var getMediaInfoForTime = function getMediaInfoForTime(_ref4) {
+ var playlist = _ref4.playlist,
+ currentTime = _ref4.currentTime,
+ startingSegmentIndex = _ref4.startingSegmentIndex,
+ startingPartIndex = _ref4.startingPartIndex,
+ startTime = _ref4.startTime,
+ experimentalExactManifestTimings = _ref4.experimentalExactManifestTimings;
+ var time = currentTime - startTime;
+ var partsAndSegments = getPartsAndSegments(playlist);
+ var startIndex = 0;
+
+ for (var i = 0; i < partsAndSegments.length; i++) {
+ var partAndSegment = partsAndSegments[i];
+
+ if (startingSegmentIndex !== partAndSegment.segmentIndex) {
+ continue;
+ } // skip this if part index does not match.
+
+
+ if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
+ continue;
+ }
+
+ startIndex = i;
+ break;
+ }
+
+ if (time < 0) {
+ // Walk backward from startIndex in the playlist, adding durations
+ // until we find a segment that contains `time` and return it
+ if (startIndex > 0) {
+ for (var _i2 = startIndex - 1; _i2 >= 0; _i2--) {
+ var _partAndSegment = partsAndSegments[_i2];
+ time += _partAndSegment.duration;
+
+ if (experimentalExactManifestTimings) {
+ if (time < 0) {
+ continue;
+ }
+ } else if (time + TIME_FUDGE_FACTOR <= 0) {
+ continue;
+ }
+
+ return {
+ partIndex: _partAndSegment.partIndex,
+ segmentIndex: _partAndSegment.segmentIndex,
+ startTime: startTime - sumDurations({
+ defaultDuration: playlist.targetDuration,
+ durationList: partsAndSegments,
+ startIndex: startIndex,
+ endIndex: _i2
+ })
+ };
+ }
+ } // We were unable to find a good segment within the playlist
+ // so select the first segment
+
+
+ return {
+ partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
+ segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
+ startTime: currentTime
+ };
+ } // When startIndex is negative, we first walk forward to first segment
+ // adding target durations. If we "run out of time" before getting to
+ // the first segment, return the first segment
+
+
+ if (startIndex < 0) {
+ for (var _i3 = startIndex; _i3 < 0; _i3++) {
+ time -= playlist.targetDuration;
+
+ if (time < 0) {
+ return {
+ partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
+ segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
+ startTime: currentTime
+ };
+ }
+ }
+
+ startIndex = 0;
+ } // Walk forward from startIndex in the playlist, subtracting durations
+ // until we find a segment that contains `time` and return it
+
+
+ for (var _i4 = startIndex; _i4 < partsAndSegments.length; _i4++) {
+ var _partAndSegment2 = partsAndSegments[_i4];
+ time -= _partAndSegment2.duration;
+
+ if (experimentalExactManifestTimings) {
+ if (time > 0) {
+ continue;
+ }
+ } else if (time - TIME_FUDGE_FACTOR >= 0) {
+ continue;
+ }
+
+ return {
+ partIndex: _partAndSegment2.partIndex,
+ segmentIndex: _partAndSegment2.segmentIndex,
+ startTime: startTime + sumDurations({
+ defaultDuration: playlist.targetDuration,
+ durationList: partsAndSegments,
+ startIndex: startIndex,
+ endIndex: _i4
+ })
+ };
+ } // We are out of possible candidates so load the last one...
+
+
+ return {
+ segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
+ partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
+ startTime: currentTime
+ };
+};
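+
+// Editorial usage sketch with assumed inputs, not upstream code: ask which
+// segment of a simple three-segment playlist contains playback time 25, given
+// that segment index 0 is known to start at time 0.
+var exampleMediaInfo = getMediaInfoForTime({
+  playlist: {
+    targetDuration: 10,
+    segments: [{ duration: 10 }, { duration: 10 }, { duration: 10 }]
+  },
+  currentTime: 25,
+  startingSegmentIndex: 0,
+  startTime: 0
+}); // { partIndex: null, segmentIndex: 2, startTime: 20 }
+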
+/**
+ * Check whether the playlist is blacklisted or not.
+ *
+ * @param {Object} playlist the media playlist object
+ * @return {boolean} whether the playlist is blacklisted or not
+ * @function isBlacklisted
+ */
+
+
+var isBlacklisted = function isBlacklisted(playlist) {
+ return playlist.excludeUntil && playlist.excludeUntil > Date.now();
+};
+/**
+ * Check whether the playlist is compatible with current playback configuration or has
+ * been blacklisted permanently for being incompatible.
+ *
+ * @param {Object} playlist the media playlist object
+ * @return {boolean} whether the playlist is incompatible or not
+ * @function isIncompatible
+ */
+
+
+var isIncompatible = function isIncompatible(playlist) {
+ return playlist.excludeUntil && playlist.excludeUntil === Infinity;
+};
+/**
+ * Check whether the playlist is enabled or not.
+ *
+ * @param {Object} playlist the media playlist object
+ * @return {boolean} whether the playlist is enabled or not
+ * @function isEnabled
+ */
+
+
+var isEnabled = function isEnabled(playlist) {
+ var blacklisted = isBlacklisted(playlist);
+ return !playlist.disabled && !blacklisted;
+};
+/**
+ * Check whether the playlist has been manually disabled through the representations api.
+ *
+ * @param {Object} playlist the media playlist object
+ * @return {boolean} whether the playlist is disabled manually or not
+ * @function isDisabled
+ */
+
+
+var isDisabled = function isDisabled(playlist) {
+ return playlist.disabled;
+};
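+
+// Editorial usage sketch with assumed playlist objects, not upstream code: a
+// playlist excluded "forever" is both blacklisted and incompatible, while a
+// manually disabled playlist is simply not enabled.
+var examplePermanentlyExcluded = { excludeUntil: Infinity };
+var exampleManuallyDisabled = { disabled: true };
+
+isBlacklisted(examplePermanentlyExcluded); // true (Infinity > Date.now())
+isIncompatible(examplePermanentlyExcluded); // true
+isEnabled(exampleManuallyDisabled); // false
+isDisabled(exampleManuallyDisabled); // true
+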
+/**
+ * Returns whether the current playlist is an AES encrypted HLS stream
+ *
+ * @return {boolean} true if it's an AES encrypted HLS stream
+ */
+
+
+var isAes = function isAes(media) {
+ for (var i = 0; i < media.segments.length; i++) {
+ if (media.segments[i].key) {
+ return true;
+ }
+ }
+
+ return false;
+};
+/**
+ * Checks if the playlist has a value for the specified attribute
+ *
+ * @param {string} attr
+ * Attribute to check for
+ * @param {Object} playlist
+ * The media playlist object
+ * @return {boolean}
+ * Whether the playlist contains a value for the attribute or not
+ * @function hasAttribute
+ */
+
+
+var hasAttribute = function hasAttribute(attr, playlist) {
+ return playlist.attributes && playlist.attributes[attr];
+};
+/**
+ * Estimates the time required to complete a segment download from the specified playlist
+ *
+ * @param {number} segmentDuration
+ * Duration of requested segment
+ * @param {number} bandwidth
+ * Current measured bandwidth of the player
+ * @param {Object} playlist
+ * The media playlist object
+ * @param {number=} bytesReceived
+ * Number of bytes already received for the request. Defaults to 0
+ * @return {number|NaN}
+ * The estimated time to request the segment. NaN if bandwidth information for
+ * the given playlist is unavailable
+ * @function estimateSegmentRequestTime
+ */
+
+
+var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
+ if (bytesReceived === void 0) {
+ bytesReceived = 0;
+ }
+
+ if (!hasAttribute('BANDWIDTH', playlist)) {
+ return NaN;
+ }
+
+ var size = segmentDuration * playlist.attributes.BANDWIDTH;
+ return (size - bytesReceived * 8) / bandwidth;
+};
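+
+// Editorial usage sketch with assumed numbers, not upstream code: a 4-second
+// segment from a 5 Mbps rendition is roughly 4 * 5e6 = 20,000,000 bits, so over
+// a measured 10 Mbps connection the request is estimated to take 2 seconds.
+var exampleRequestSeconds = estimateSegmentRequestTime(4, 10e6, {
+  attributes: { BANDWIDTH: 5e6 }
+}); // (4 * 5e6 - 0) / 10e6 === 2
+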
+/**
+ * Returns whether the current playlist is the lowest rendition
+ *
+ * @return {boolean} true if on lowest rendition
+ */
+
+
+var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
+ if (master.playlists.length === 1) {
+ return true;
+ }
+
+ var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
+ return master.playlists.filter(function (playlist) {
+ if (!isEnabled(playlist)) {
+ return false;
+ }
+
+ return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
+ }).length === 0;
+};
+
+var playlistMatch = function playlistMatch(a, b) {
+  // both playlists are null
+ // or only one playlist is non-null
+ // no match
+ if (!a && !b || !a && b || a && !b) {
+ return false;
+ } // playlist objects are the same, match
+
+
+ if (a === b) {
+ return true;
+ } // first try to use id as it should be the most
+ // accurate
+
+
+ if (a.id && b.id && a.id === b.id) {
+ return true;
+  } // next try to use resolvedUri as it should be the
+ // second most accurate.
+
+
+ if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
+ return true;
+ } // finally try to use uri as it should be accurate
+ // but might miss a few cases for relative uris
+
+
+ if (a.uri && b.uri && a.uri === b.uri) {
+ return true;
+ }
+
+ return false;
+};
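+
+// Editorial usage sketch with assumed playlist objects, not upstream code:
+// identity, id, resolvedUri and uri are tried in that order.
+playlistMatch({ id: '0-media.m3u8' }, { id: '0-media.m3u8' }); // true
+playlistMatch({ uri: 'media.m3u8' }, { uri: 'other.m3u8' }); // false
+playlistMatch(null, { uri: 'media.m3u8' }); // false
+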
+
+var someAudioVariant = function someAudioVariant(master, callback) {
+ var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};
+ var found = false;
+
+ for (var groupName in AUDIO) {
+ for (var label in AUDIO[groupName]) {
+ found = callback(AUDIO[groupName][label]);
+
+ if (found) {
+ break;
+ }
+ }
+
+ if (found) {
+ break;
+ }
+ }
+
+ return !!found;
+};
+
+var isAudioOnly = function isAudioOnly(master) {
+ // we are audio only if we have no main playlists but do
+ // have media group playlists.
+ if (!master || !master.playlists || !master.playlists.length) {
+ // without audio variants or playlists this
+ // is not an audio only master.
+ var found = someAudioVariant(master, function (variant) {
+ return variant.playlists && variant.playlists.length || variant.uri;
+ });
+ return found;
+ } // if every playlist has only an audio codec it is audio only
+
+
+ var _loop = function _loop(i) {
+ var playlist = master.playlists[i];
+ var CODECS = playlist.attributes && playlist.attributes.CODECS; // all codecs are audio, this is an audio playlist.
+
+ if (CODECS && CODECS.split(',').every(function (c) {
+ return isAudioCodec(c);
+ })) {
+ return "continue";
+    } // if the playlist is in an audio group, it is audio only
+
+
+ var found = someAudioVariant(master, function (variant) {
+ return playlistMatch(playlist, variant);
+ });
+
+ if (found) {
+ return "continue";
+ } // if we make it here this playlist isn't audio and we
+ // are not audio only
+
+
+ return {
+ v: false
+ };
+ };
+
+ for (var i = 0; i < master.playlists.length; i++) {
+ var _ret = _loop(i);
+
+ if (_ret === "continue") continue;
+ if (typeof _ret === "object") return _ret.v;
+ } // if we make it past every playlist without returning, then
+ // this is an audio only playlist.
+
+
+ return true;
+}; // exports
+
+
+var Playlist = {
+ liveEdgeDelay: liveEdgeDelay,
+ duration: duration,
+ seekable: seekable,
+ getMediaInfoForTime: getMediaInfoForTime,
+ isEnabled: isEnabled,
+ isDisabled: isDisabled,
+ isBlacklisted: isBlacklisted,
+ isIncompatible: isIncompatible,
+ playlistEnd: playlistEnd,
+ isAes: isAes,
+ hasAttribute: hasAttribute,
+ estimateSegmentRequestTime: estimateSegmentRequestTime,
+ isLowestEnabledRendition: isLowestEnabledRendition,
+ isAudioOnly: isAudioOnly,
+ playlistMatch: playlistMatch,
+ segmentDurationWithParts: segmentDurationWithParts
+};
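+
+// Editorial usage sketch with assumed inputs, not upstream code: a master whose
+// only rendition advertises just an AAC codec is treated as audio only, while a
+// muxed audio+video rendition is not.
+Playlist.isAudioOnly({
+  playlists: [{ attributes: { CODECS: 'mp4a.40.2' } }]
+}); // true
+Playlist.isAudioOnly({
+  playlists: [{ attributes: { CODECS: 'avc1.4d400d,mp4a.40.2' } }]
+}); // false
+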
+var log = videojs.log;
+
+var createPlaylistID = function createPlaylistID(index, uri) {
+ return index + "-" + uri;
+};
+/**
+ * Parses a given m3u8 playlist
+ *
+ * @param {Function} [onwarn]
+ * a function to call when the parser triggers a warning event.
+ * @param {Function} [oninfo]
+ * a function to call when the parser triggers an info event.
+ * @param {string} manifestString
+ * The downloaded manifest string
+ * @param {Object[]} [customTagParsers]
+ * An array of custom tag parsers for the m3u8-parser instance
+ * @param {Object[]} [customTagMappers]
+ * An array of custom tag mappers for the m3u8-parser instance
+ * @param {boolean} [experimentalLLHLS=false]
+ * Whether to keep ll-hls features in the manifest after parsing.
+ * @return {Object}
+ * The manifest object
+ */
+
+
+var parseManifest = function parseManifest(_ref) {
+ var onwarn = _ref.onwarn,
+ oninfo = _ref.oninfo,
+ manifestString = _ref.manifestString,
+ _ref$customTagParsers = _ref.customTagParsers,
+ customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
+ _ref$customTagMappers = _ref.customTagMappers,
+ customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
+ experimentalLLHLS = _ref.experimentalLLHLS;
+ var parser = new Parser();
+
+ if (onwarn) {
+ parser.on('warn', onwarn);
+ }
+
+ if (oninfo) {
+ parser.on('info', oninfo);
+ }
+
+ customTagParsers.forEach(function (customParser) {
+ return parser.addParser(customParser);
+ });
+ customTagMappers.forEach(function (mapper) {
+ return parser.addTagMapper(mapper);
+ });
+ parser.push(manifestString);
+ parser.end();
+ var manifest = parser.manifest; // remove llhls features from the parsed manifest
+ // if we don't want llhls support.
+
+ if (!experimentalLLHLS) {
+ ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
+ if (manifest.hasOwnProperty(k)) {
+ delete manifest[k];
+ }
+ });
+
+ if (manifest.segments) {
+ manifest.segments.forEach(function (segment) {
+ ['parts', 'preloadHints'].forEach(function (k) {
+ if (segment.hasOwnProperty(k)) {
+ delete segment[k];
+ }
+ });
+ });
+ }
+ }
+
+ if (!manifest.targetDuration) {
+ var targetDuration = 10;
+
+ if (manifest.segments && manifest.segments.length) {
+ targetDuration = manifest.segments.reduce(function (acc, s) {
+ return Math.max(acc, s.duration);
+ }, 0);
+ }
+
+ if (onwarn) {
+ onwarn("manifest has no targetDuration defaulting to " + targetDuration);
+ }
+
+ manifest.targetDuration = targetDuration;
+ }
+
+ var parts = getLastParts(manifest);
+
+ if (parts.length && !manifest.partTargetDuration) {
+ var partTargetDuration = parts.reduce(function (acc, p) {
+ return Math.max(acc, p.duration);
+ }, 0);
+
+ if (onwarn) {
+ onwarn("manifest has no partTargetDuration defaulting to " + partTargetDuration);
+ log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
+ }
+
+ manifest.partTargetDuration = partTargetDuration;
+ }
+
+ return manifest;
+};
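+
+// Editorial usage sketch with an assumed manifest string, not upstream code:
+// parse a tiny finished media playlist and read a few normalized properties.
+var exampleManifest = parseManifest({
+  manifestString: [
+    '#EXTM3U',
+    '#EXT-X-TARGETDURATION:10',
+    '#EXTINF:10,',
+    'segment0.ts',
+    '#EXT-X-ENDLIST',
+    ''
+  ].join('\n')
+});
+// exampleManifest.targetDuration === 10, exampleManifest.endList === true and
+// exampleManifest.segments[0].uri === 'segment0.ts'
+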
+/**
+ * Loops through all supported media groups in master and calls the provided
+ * callback for each group
+ *
+ * @param {Object} master
+ * The parsed master manifest object
+ * @param {Function} callback
+ * Callback to call for each media group
+ */
+
+
+var forEachMediaGroup = function forEachMediaGroup(master, callback) {
+ if (!master.mediaGroups) {
+ return;
+ }
+
+ ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
+ if (!master.mediaGroups[mediaType]) {
+ return;
+ }
+
+ for (var groupKey in master.mediaGroups[mediaType]) {
+ for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
+ var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
+ callback(mediaProperties, mediaType, groupKey, labelKey);
+ }
+ }
+ });
+};
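+
+// Editorial usage sketch with an assumed master object, not upstream code: the
+// callback receives every AUDIO and SUBTITLES variant along with its group and
+// label keys.
+var exampleGroupLabels = [];
+
+forEachMediaGroup({
+  mediaGroups: {
+    AUDIO: { aud1: { English: { uri: 'audio-en.m3u8' }, French: { uri: 'audio-fr.m3u8' } } }
+  }
+}, function (properties, mediaType, groupKey, labelKey) {
+  exampleGroupLabels.push(mediaType + '/' + groupKey + '/' + labelKey);
+});
+// exampleGroupLabels is ['AUDIO/aud1/English', 'AUDIO/aud1/French']
+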
+/**
+ * Adds properties and attributes to the playlist to keep consistent functionality for
+ * playlists throughout VHS.
+ *
+ * @param {Object} config
+ * Arguments object
+ * @param {Object} config.playlist
+ * The media playlist
+ * @param {string} [config.uri]
+ * The uri to the media playlist (if media playlist is not from within a master
+ * playlist)
+ * @param {string} config.id
+ * ID to use for the playlist
+ */
+
+
+var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
+ var playlist = _ref2.playlist,
+ uri = _ref2.uri,
+ id = _ref2.id;
+ playlist.id = id;
+ playlist.playlistErrors_ = 0;
+
+ if (uri) {
+ // For media playlists, m3u8-parser does not have access to a URI, as HLS media
+ // playlists do not contain their own source URI, but one is needed for consistency in
+ // VHS.
+ playlist.uri = uri;
+ } // For HLS master playlists, even though certain attributes MUST be defined, the
+ // stream may still be played without them.
+ // For HLS media playlists, m3u8-parser does not attach an attributes object to the
+ // manifest.
+ //
+ // To avoid undefined reference errors through the project, and make the code easier
+ // to write/read, add an empty attributes object for these cases.
+
+
+ playlist.attributes = playlist.attributes || {};
+};
+/**
+ * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
+ * necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
+ * playlist references to the playlists array.
+ *
+ * @param {Object} master
+ * The master playlist
+ */
+
+
+var setupMediaPlaylists = function setupMediaPlaylists(master) {
+ var i = master.playlists.length;
+
+ while (i--) {
+ var playlist = master.playlists[i];
+ setupMediaPlaylist({
+ playlist: playlist,
+ id: createPlaylistID(i, playlist.uri)
+ });
+ playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
+ master.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility
+
+ master.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
+ // the stream can be played without it. Although an attributes property may have been
+ // added to the playlist to prevent undefined references, issue a warning to fix the
+ // manifest.
+
+ if (!playlist.attributes.BANDWIDTH) {
+ log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
+ }
+ }
+};
+/**
+ * Adds resolvedUri properties to each media group.
+ *
+ * @param {Object} master
+ * The master playlist
+ */
+
+
+var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
+ forEachMediaGroup(master, function (properties) {
+ if (properties.uri) {
+ properties.resolvedUri = resolveUrl(master.uri, properties.uri);
+ }
+ });
+};
+/**
+ * Creates a master playlist wrapper to insert a sole media playlist into.
+ *
+ * @param {Object} media
+ * Media playlist
+ * @param {string} uri
+ * The media URI
+ *
+ * @return {Object}
+ * Master playlist
+ */
+
+
+var masterForMedia = function masterForMedia(media, uri) {
+ var id = createPlaylistID(0, uri);
+ var master = {
+ mediaGroups: {
+ 'AUDIO': {},
+ 'VIDEO': {},
+ 'CLOSED-CAPTIONS': {},
+ 'SUBTITLES': {}
+ },
+ uri: window$1.location.href,
+ resolvedUri: window$1.location.href,
+ playlists: [{
+ uri: uri,
+ id: id,
+ resolvedUri: uri,
+ // m3u8-parser does not attach an attributes property to media playlists so make
+ // sure that the property is attached to avoid undefined reference errors
+ attributes: {}
+ }]
+ }; // set up ID reference
+
+ master.playlists[id] = master.playlists[0]; // URI reference added for backwards compatibility
+
+ master.playlists[uri] = master.playlists[0];
+ return master;
+};
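+
+// Editorial usage sketch, not upstream code: wrapping a lone media playlist
+// produces a one-entry master whose playlist is reachable by index, by id and
+// by uri (the media playlist argument itself is merged in later by the loader).
+var exampleWrappedMaster = masterForMedia({ segments: [] }, 'media.m3u8');
+// exampleWrappedMaster.playlists[0].id === '0-media.m3u8'
+// exampleWrappedMaster.playlists['0-media.m3u8'] === exampleWrappedMaster.playlists[0]
+// exampleWrappedMaster.playlists['media.m3u8'] === exampleWrappedMaster.playlists[0]
+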
+/**
+ * Does an in-place update of the master manifest to add updated playlist URI references
+ * as well as other properties needed by VHS that aren't included by the parser.
+ *
+ * @param {Object} master
+ * Master manifest object
+ * @param {string} uri
+ * The source URI
+ */
+
+
+var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
+ master.uri = uri;
+
+ for (var i = 0; i < master.playlists.length; i++) {
+ if (!master.playlists[i].uri) {
+ // Set up phony URIs for the playlists since playlists are referenced by their URIs
+ // throughout VHS, but some formats (e.g., DASH) don't have external URIs
+ // TODO: consider adding dummy URIs in mpd-parser
+ var phonyUri = "placeholder-uri-" + i;
+ master.playlists[i].uri = phonyUri;
+ }
+ }
+
+ var audioOnlyMaster = isAudioOnly(master);
+ forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
+ var groupId = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey; // add a playlist array under properties
+
+ if (!properties.playlists || !properties.playlists.length) {
+ // If the manifest is audio only and this media group does not have a uri, check
+ // if the media group is located in the main list of playlists. If it is, don't add
+ // placeholder properties as it shouldn't be considered an alternate audio track.
+ if (audioOnlyMaster && mediaType === 'AUDIO' && !properties.uri) {
+ for (var _i = 0; _i < master.playlists.length; _i++) {
+ var p = master.playlists[_i];
+
+ if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
+ return;
+ }
+ }
+ }
+
+ properties.playlists = [_extends({}, properties)];
+ }
+
+ properties.playlists.forEach(function (p, i) {
+ var id = createPlaylistID(i, groupId);
+
+ if (p.uri) {
+ p.resolvedUri = p.resolvedUri || resolveUrl(master.uri, p.uri);
+ } else {
+        // DEPRECATED, this has been added to prevent a breaking change.
+        // Previously we only ever had a single media group playlist, so
+        // we mark the first playlist uri without prepending the index, as we used to.
+        // Ideally we would handle all of the playlists the same way.
+ p.uri = i === 0 ? groupId : id; // don't resolve a placeholder uri to an absolute url, just use
+ // the placeholder again
+
+ p.resolvedUri = p.uri;
+ }
+
+ p.id = p.id || id; // add an empty attributes object, all playlists are
+ // expected to have this.
+
+ p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)
+
+ master.playlists[p.id] = p;
+ master.playlists[p.uri] = p;
+ });
+ });
+ setupMediaPlaylists(master);
+ resolveMediaGroupUris(master);
+};
+
+var mergeOptions$2 = videojs.mergeOptions,
+ EventTarget$1 = videojs.EventTarget;
+
+var addLLHLSQueryDirectives = function addLLHLSQueryDirectives(uri, media) {
+ if (media.endList || !media.serverControl) {
+ return uri;
+ }
+
+ var parameters = {};
+
+ if (media.serverControl.canBlockReload) {
+ var preloadSegment = media.preloadSegment; // next msn is a zero based value, length is not.
+
+ var nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
+ // that we are going to request a part of that preload segment.
+ // the logic below is used to determine that.
+
+ if (preloadSegment) {
+ var parts = preloadSegment.parts || []; // _HLS_part is a zero based index
+
+ var nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to just the
+ // length of parts, then we know we had part preload hints
+ // and we need to add the _HLS_part= query
+
+ if (nextPart > -1 && nextPart !== parts.length - 1) {
+ // add existing parts to our preload hints
+ // eslint-disable-next-line
+ parameters._HLS_part = nextPart;
+ } // this if statement makes sure that we request the msn
+ // of the preload segment if:
+ // 1. the preload segment had parts (and was not yet a full segment)
+ // but was added to our segments array
+ // 2. the preload segment had preload hints for parts that are not in
+ // the manifest yet.
+ // in all other cases we want the segment after the preload segment
+ // which will be given by using media.segments.length because it is 1 based
+ // rather than 0 based.
+
+
+ if (nextPart > -1 || parts.length) {
+ nextMSN--;
+ }
+ } // add _HLS_msn= in front of any _HLS_part query
+ // eslint-disable-next-line
+
+
+ parameters._HLS_msn = nextMSN;
+ }
+
+ if (media.serverControl && media.serverControl.canSkipUntil) {
+    // add _HLS_skip= in front of all other queries.
+ // eslint-disable-next-line
+ parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
+ }
+
+ if (Object.keys(parameters).length) {
+ var parsedUri = new window$1.URL(uri);
+ ['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
+ if (!parameters.hasOwnProperty(name)) {
+ return;
+ }
+
+ parsedUri.searchParams.set(name, parameters[name]);
+ });
+ uri = parsedUri.toString();
+ }
+
+ return uri;
+};
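+
+// Editorial usage sketch with an assumed LL-HLS media playlist, not upstream
+// code: blocking reload requests the next media sequence number and skip
+// support adds the _HLS_skip directive.
+var exampleLLHLSUri = addLLHLSQueryDirectives('https://example.com/media.m3u8', {
+  endList: false,
+  mediaSequence: 100,
+  segments: [{ duration: 4 }, { duration: 4 }, { duration: 4 }],
+  serverControl: { canBlockReload: true, canSkipUntil: 30 }
+}); // 'https://example.com/media.m3u8?_HLS_skip=YES&_HLS_msn=103'
+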
+/**
+ * Returns a new segment object with properties and
+ * the parts array merged.
+ *
+ * @param {Object} a the old segment
+ * @param {Object} b the new segment
+ *
+ * @return {Object} the merged segment
+ */
+
+
+var updateSegment = function updateSegment(a, b) {
+ if (!a) {
+ return b;
+ }
+
+ var result = mergeOptions$2(a, b); // if only the old segment has preload hints
+ // and the new one does not, remove preload hints.
+
+ if (a.preloadHints && !b.preloadHints) {
+ delete result.preloadHints;
+ } // if only the old segment has parts
+ // then the parts are no longer valid
+
+
+ if (a.parts && !b.parts) {
+ delete result.parts; // if both segments have parts
+    // copy part properties from the old segment
+ // to the new one.
+ } else if (a.parts && b.parts) {
+ for (var i = 0; i < b.parts.length; i++) {
+ if (a.parts && a.parts[i]) {
+ result.parts[i] = mergeOptions$2(a.parts[i], b.parts[i]);
+ }
+ }
+  } // set skipped to false for segments that have
+  // had information merged from the old segment.
+
+
+ if (!a.skipped && b.skipped) {
+ result.skipped = false;
+ } // set preload to false for segments that have
+ // had information added in the new segment.
+
+
+ if (a.preload && !b.preload) {
+ result.preload = false;
+ }
+
+ return result;
+};
+/**
+ * Returns a new array of segments that is the result of merging
+ * properties from an older list of segments onto an updated
+ * list. No properties on the updated playlist will be overwritten.
+ *
+ * @param {Array} original the outdated list of segments
+ * @param {Array} update the updated list of segments
+ * @param {number=} offset the index of the first update
+ * segment in the original segment list. For non-live playlists,
+ * this should always be zero and does not need to be
+ * specified. For live playlists, it should be the difference
+ * between the media sequence numbers in the original and updated
+ * playlists.
+ * @return {Array} a list of merged segment objects
+ */
+
+
+var updateSegments = function updateSegments(original, update, offset) {
+ var oldSegments = original.slice();
+ var newSegments = update.slice();
+ offset = offset || 0;
+ var result = [];
+ var currentMap;
+
+ for (var newIndex = 0; newIndex < newSegments.length; newIndex++) {
+ var oldSegment = oldSegments[newIndex + offset];
+ var newSegment = newSegments[newIndex];
+
+ if (oldSegment) {
+ currentMap = oldSegment.map || currentMap;
+ result.push(updateSegment(oldSegment, newSegment));
+ } else {
+ // carry over map to new segment if it is missing
+ if (currentMap && !newSegment.map) {
+ newSegment.map = currentMap;
+ }
+
+ result.push(newSegment);
+ }
+ }
+
+ return result;
+};
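+
+// Editorial usage sketch with assumed segment lists, not upstream code: after a
+// live refresh drops the first segment (offset 1), timing information that was
+// already resolved on overlapping segments is carried over.
+var exampleMergedSegments = updateSegments(
+  [{ uri: '0.ts', start: 0 }, { uri: '1.ts', start: 10 }, { uri: '2.ts' }],
+  [{ uri: '1.ts' }, { uri: '2.ts' }, { uri: '3.ts' }],
+  1
+); // [{ uri: '1.ts', start: 10 }, { uri: '2.ts' }, { uri: '3.ts' }]
+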
+
+var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
+ // preloadSegment will not have a uri at all
+ // as the segment isn't actually in the manifest yet, only parts
+ if (!segment.resolvedUri && segment.uri) {
+ segment.resolvedUri = resolveUrl(baseUri, segment.uri);
+ }
+
+ if (segment.key && !segment.key.resolvedUri) {
+ segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
+ }
+
+ if (segment.map && !segment.map.resolvedUri) {
+ segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
+ }
+
+ if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
+ segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
+ }
+
+ if (segment.parts && segment.parts.length) {
+ segment.parts.forEach(function (p) {
+ if (p.resolvedUri) {
+ return;
+ }
+
+ p.resolvedUri = resolveUrl(baseUri, p.uri);
+ });
+ }
+
+ if (segment.preloadHints && segment.preloadHints.length) {
+ segment.preloadHints.forEach(function (p) {
+ if (p.resolvedUri) {
+ return;
+ }
+
+ p.resolvedUri = resolveUrl(baseUri, p.uri);
+ });
+ }
+};
+
+var getAllSegments = function getAllSegments(media) {
+ var segments = media.segments || [];
+ var preloadSegment = media.preloadSegment; // a preloadSegment with only preloadHints is not currently
+ // a usable segment, only include a preloadSegment that has
+ // parts.
+
+ if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
+ // if preloadHints has a MAP that means that the
+ // init segment is going to change. We cannot use any of the parts
+ // from this preload segment.
+ if (preloadSegment.preloadHints) {
+ for (var i = 0; i < preloadSegment.preloadHints.length; i++) {
+ if (preloadSegment.preloadHints[i].type === 'MAP') {
+ return segments;
+ }
+ }
+ } // set the duration for our preload segment to target duration.
+
+
+ preloadSegment.duration = media.targetDuration;
+ preloadSegment.preload = true;
+ segments.push(preloadSegment);
+ }
+
+ return segments;
+}; // consider the playlist unchanged if the playlist object is the same or
+// the number of segments is equal, the media sequence number is unchanged,
+// and this playlist hasn't become the end of the playlist
+
+
+var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
+ return a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence && a.preloadSegment === b.preloadSegment;
+};
+/**
+ * Returns a new master playlist that is the result of merging an
+ * updated media playlist into the original version. If the
+ * updated media playlist does not match any of the playlist
+ * entries in the original master playlist, null is returned.
+ *
+ * @param {Object} master a parsed master M3U8 object
+ * @param {Object} media a parsed media M3U8 object
+ * @return {Object} a new object that represents the original
+ * master playlist with the updated media playlist merged in, or
+ * null if the merge produced no change.
+ */
+
+
+var updateMaster$1 = function updateMaster(master, newMedia, unchangedCheck) {
+ if (unchangedCheck === void 0) {
+ unchangedCheck = isPlaylistUnchanged;
+ }
+
+ var result = mergeOptions$2(master, {});
+ var oldMedia = result.playlists[newMedia.id];
+
+ if (!oldMedia) {
+ return null;
+ }
+
+ if (unchangedCheck(oldMedia, newMedia)) {
+ return null;
+ }
+
+ newMedia.segments = getAllSegments(newMedia);
+ var mergedPlaylist = mergeOptions$2(oldMedia, newMedia); // always use the new media's preload segment
+
+ if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
+ delete mergedPlaylist.preloadSegment;
+ } // if the update could overlap existing segment information, merge the two segment lists
+
+
+ if (oldMedia.segments) {
+ if (newMedia.skip) {
+ newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
+ // old properties into the new segments
+
+ for (var i = 0; i < newMedia.skip.skippedSegments; i++) {
+ newMedia.segments.unshift({
+ skipped: true
+ });
+ }
+ }
+
+ mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
+ } // resolve any segment URIs to prevent us from having to do it later
+
+
+ mergedPlaylist.segments.forEach(function (segment) {
+ resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
+ }); // TODO Right now in the playlists array there are two references to each playlist, one
+ // that is referenced by index, and one by URI. The index reference may no longer be
+ // necessary.
+
+ for (var _i = 0; _i < result.playlists.length; _i++) {
+ if (result.playlists[_i].id === newMedia.id) {
+ result.playlists[_i] = mergedPlaylist;
+ }
+ }
+
+ result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility
+
+ result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.
+
+ forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
+ if (!properties.playlists) {
+ return;
+ }
+
+ for (var _i2 = 0; _i2 < properties.playlists.length; _i2++) {
+ if (newMedia.id === properties.playlists[_i2].id) {
+ properties.playlists[_i2] = mergedPlaylist;
+ }
+ }
+ });
+ return result;
+};
+/**
+ * Calculates the time to wait before refreshing a live playlist
+ *
+ * @param {Object} media
+ * The current media
+ * @param {boolean} update
+ * True if there were any updates from the last refresh, false otherwise
+ * @return {number}
+ * The time in ms to wait before refreshing the live playlist
+ */
+
+
+var refreshDelay = function refreshDelay(media, update) {
+ var segments = media.segments || [];
+ var lastSegment = segments[segments.length - 1];
+ var lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
+ var lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
+
+ if (update && lastDuration) {
+ return lastDuration * 1000;
+ } // if the playlist is unchanged since the last reload or last segment duration
+ // cannot be determined, try again after half the target duration
+
+
+ return (media.partTargetDuration || media.targetDuration || 10) * 500;
+};
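+
+// Editorial usage sketch with assumed numbers, not upstream code: when the last
+// refresh changed the playlist, wait roughly one segment duration before the
+// next poll; otherwise retry after half the target duration.
+var exampleLiveMedia = { targetDuration: 6, segments: [{ duration: 6 }, { duration: 5.5 }] };
+
+refreshDelay(exampleLiveMedia, true); // 5500 ms (duration of the last segment)
+refreshDelay(exampleLiveMedia, false); // 3000 ms (targetDuration * 500)
+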
+/**
+ * Load a playlist from a remote location
+ *
+ * @class PlaylistLoader
+ * @extends Stream
+ * @param {string|Object} src url or object of manifest
+ * @param {boolean} withCredentials the withCredentials xhr option
+ */
+
+
+var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
+ _inheritsLoose(PlaylistLoader, _EventTarget);
+
+ function PlaylistLoader(src, vhs, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _EventTarget.call(this) || this;
+
+ if (!src) {
+ throw new Error('A non-empty playlist URL or object is required');
+ }
+
+ _this.logger_ = logger('PlaylistLoader');
+ var _options = options,
+ _options$withCredenti = _options.withCredentials,
+ withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
+ _options$handleManife = _options.handleManifestRedirects,
+ handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
+ _this.src = src;
+ _this.vhs_ = vhs;
+ _this.withCredentials = withCredentials;
+ _this.handleManifestRedirects = handleManifestRedirects;
+ var vhsOptions = vhs.options_;
+ _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
+ _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
+ _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // force experimentalLLHLS for IE 11
+
+ if (videojs.browser.IE_VERSION) {
+ _this.experimentalLLHLS = false;
+ } // initialize the loader state
+
+
+ _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout
+
+ _this.handleMediaupdatetimeout_ = _this.handleMediaupdatetimeout_.bind(_assertThisInitialized(_this));
+
+ _this.on('mediaupdatetimeout', _this.handleMediaupdatetimeout_);
+
+ return _this;
+ }
+
+ var _proto = PlaylistLoader.prototype;
+
+ _proto.handleMediaupdatetimeout_ = function handleMediaupdatetimeout_() {
+ var _this2 = this;
+
+ if (this.state !== 'HAVE_METADATA') {
+ // only refresh the media playlist if no other activity is going on
+ return;
+ }
+
+ var media = this.media();
+ var uri = resolveUrl(this.master.uri, media.uri);
+
+ if (this.experimentalLLHLS) {
+ uri = addLLHLSQueryDirectives(uri, media);
+ }
+
+ this.state = 'HAVE_CURRENT_METADATA';
+ this.request = this.vhs_.xhr({
+ uri: uri,
+ withCredentials: this.withCredentials
+ }, function (error, req) {
+ // disposed
+ if (!_this2.request) {
+ return;
+ }
+
+ if (error) {
+ return _this2.playlistRequestError(_this2.request, _this2.media(), 'HAVE_METADATA');
+ }
+
+ _this2.haveMetadata({
+ playlistString: _this2.request.responseText,
+ url: _this2.media().uri,
+ id: _this2.media().id
+ });
+ });
+ };
+
+ _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
+ var uri = playlist.uri,
+ id = playlist.id; // any in-flight request is now finished
+
+ this.request = null;
+
+ if (startingState) {
+ this.state = startingState;
+ }
+
+ this.error = {
+ playlist: this.master.playlists[id],
+ status: xhr.status,
+ message: "HLS playlist request error at URL: " + uri + ".",
+ responseText: xhr.responseText,
+ code: xhr.status >= 500 ? 4 : 2
+ };
+ this.trigger('error');
+ };
+
+ _proto.parseManifest_ = function parseManifest_(_ref) {
+ var _this3 = this;
+
+ var url = _ref.url,
+ manifestString = _ref.manifestString;
+ return parseManifest({
+ onwarn: function onwarn(_ref2) {
+ var message = _ref2.message;
+ return _this3.logger_("m3u8-parser warn for " + url + ": " + message);
+ },
+ oninfo: function oninfo(_ref3) {
+ var message = _ref3.message;
+ return _this3.logger_("m3u8-parser info for " + url + ": " + message);
+ },
+ manifestString: manifestString,
+ customTagParsers: this.customTagParsers,
+ customTagMappers: this.customTagMappers,
+ experimentalLLHLS: this.experimentalLLHLS
+ });
+ }
+ /**
+ * Update the playlist loader's state in response to a new or updated playlist.
+ *
+ * @param {string} [playlistString]
+ * Playlist string (if playlistObject is not provided)
+ * @param {Object} [playlistObject]
+ * Playlist object (if playlistString is not provided)
+ * @param {string} url
+ * URL of playlist
+ * @param {string} id
+ * ID to use for playlist
+ */
+ ;
+
+ _proto.haveMetadata = function haveMetadata(_ref4) {
+ var playlistString = _ref4.playlistString,
+ playlistObject = _ref4.playlistObject,
+ url = _ref4.url,
+ id = _ref4.id; // any in-flight request is now finished
+
+ this.request = null;
+ this.state = 'HAVE_METADATA';
+ var playlist = playlistObject || this.parseManifest_({
+ url: url,
+ manifestString: playlistString
+ });
+ playlist.lastRequest = Date.now();
+ setupMediaPlaylist({
+ playlist: playlist,
+ uri: url,
+ id: id
+ }); // merge this playlist into the master
+
+ var update = updateMaster$1(this.master, playlist);
+ this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
+ this.pendingMedia_ = null;
+
+ if (update) {
+ this.master = update;
+ this.media_ = this.master.playlists[id];
+ } else {
+ this.trigger('playlistunchanged');
+ }
+
+ this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
+ this.trigger('loadedplaylist');
+ }
+ /**
+ * Abort any outstanding work and clean up.
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ this.trigger('dispose');
+ this.stopRequest();
+ window$1.clearTimeout(this.mediaUpdateTimeout);
+ window$1.clearTimeout(this.finalRenditionTimeout);
+ this.off();
+ };
+
+ _proto.stopRequest = function stopRequest() {
+ if (this.request) {
+ var oldRequest = this.request;
+ this.request = null;
+ oldRequest.onreadystatechange = null;
+ oldRequest.abort();
+ }
+ }
+ /**
+ * When called without any arguments, returns the currently
+ * active media playlist. When called with a single argument,
+ * triggers the playlist loader to asynchronously switch to the
+ * specified media playlist. Calling this method while the
+   * loader is in the HAVE_NOTHING state causes an error to be emitted
+ * but otherwise has no effect.
+ *
+ * @param {Object=} playlist the parsed media playlist
+ * object to switch to
+ * @param {boolean=} shouldDelay whether we should delay the request by half target duration
+ *
+ * @return {Playlist} the current loaded media
+ */
+ ;
+
+ _proto.media = function media(playlist, shouldDelay) {
+ var _this4 = this; // getter
+
+
+ if (!playlist) {
+ return this.media_;
+ } // setter
+
+
+ if (this.state === 'HAVE_NOTHING') {
+ throw new Error('Cannot switch media playlist from ' + this.state);
+ } // find the playlist object if the target playlist has been
+ // specified by URI
+
+
+ if (typeof playlist === 'string') {
+ if (!this.master.playlists[playlist]) {
+ throw new Error('Unknown playlist URI: ' + playlist);
+ }
+
+ playlist = this.master.playlists[playlist];
+ }
+
+ window$1.clearTimeout(this.finalRenditionTimeout);
+
+ if (shouldDelay) {
+ var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
+ this.finalRenditionTimeout = window$1.setTimeout(this.media.bind(this, playlist, false), delay);
+ return;
+ }
+
+ var startingState = this.state;
+ var mediaChange = !this.media_ || playlist.id !== this.media_.id;
+ var masterPlaylistRef = this.master.playlists[playlist.id]; // switch to fully loaded playlists immediately
+
+ if (masterPlaylistRef && masterPlaylistRef.endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
+ // media playlist or, for the case of demuxed audio, a resolved audio media group)
+ playlist.endList && playlist.segments.length) {
+ // abort outstanding playlist requests
+ if (this.request) {
+ this.request.onreadystatechange = null;
+ this.request.abort();
+ this.request = null;
+ }
+
+ this.state = 'HAVE_METADATA';
+ this.media_ = playlist; // trigger media change if the active media has been updated
+
+ if (mediaChange) {
+ this.trigger('mediachanging');
+
+ if (startingState === 'HAVE_MASTER') {
+ // The initial playlist was a master manifest, and the first media selected was
+ // also provided (in the form of a resolved playlist object) as part of the
+ // source object (rather than just a URL). Therefore, since the media playlist
+ // doesn't need to be requested, loadedmetadata won't trigger as part of the
+ // normal flow, and needs an explicit trigger here.
+ this.trigger('loadedmetadata');
+ } else {
+ this.trigger('mediachange');
+ }
+ }
+
+ return;
+ } // We update/set the timeout here so that live playlists
+ // that are not a media change will "start" the loader as expected.
+ // We expect that this function will start the media update timeout
+ // cycle again. This also prevents a playlist switch failure from
+ // causing us to stall during live.
+
+
+ this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op
+
+ if (!mediaChange) {
+ return;
+ }
+
+ this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request
+
+ if (this.request) {
+ if (playlist.resolvedUri === this.request.url) {
+ // requesting to switch to the same playlist multiple times
+ // has no effect after the first
+ return;
+ }
+
+ this.request.onreadystatechange = null;
+ this.request.abort();
+ this.request = null;
+ } // request the new playlist
+
+
+ if (this.media_) {
+ this.trigger('mediachanging');
+ }
+
+ this.pendingMedia_ = playlist;
+ this.request = this.vhs_.xhr({
+ uri: playlist.resolvedUri,
+ withCredentials: this.withCredentials
+ }, function (error, req) {
+ // disposed
+ if (!_this4.request) {
+ return;
+ }
+
+ playlist.lastRequest = Date.now();
+ playlist.resolvedUri = resolveManifestRedirect(_this4.handleManifestRedirects, playlist.resolvedUri, req);
+
+ if (error) {
+ return _this4.playlistRequestError(_this4.request, playlist, startingState);
+ }
+
+ _this4.haveMetadata({
+ playlistString: req.responseText,
+ url: playlist.uri,
+ id: playlist.id
+ }); // fire loadedmetadata the first time a media playlist is loaded
+
+
+ if (startingState === 'HAVE_MASTER') {
+ _this4.trigger('loadedmetadata');
+ } else {
+ _this4.trigger('mediachange');
+ }
+ });
+ }
+ /**
+ * pause loading of the playlist
+ */
+ ;
+
+ _proto.pause = function pause() {
+ if (this.mediaUpdateTimeout) {
+ window$1.clearTimeout(this.mediaUpdateTimeout);
+ this.mediaUpdateTimeout = null;
+ }
+
+ this.stopRequest();
+
+ if (this.state === 'HAVE_NOTHING') {
+      // If we pause the loader before any data has been retrieved, it's as if we never
+ // started, so reset to an unstarted state.
+ this.started = false;
+ } // Need to restore state now that no activity is happening
+
+
+ if (this.state === 'SWITCHING_MEDIA') {
+ // if the loader was in the process of switching media, it should either return to
+ // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
+ // playlist yet. This is determined by the existence of loader.media_
+ if (this.media_) {
+ this.state = 'HAVE_METADATA';
+ } else {
+ this.state = 'HAVE_MASTER';
+ }
+ } else if (this.state === 'HAVE_CURRENT_METADATA') {
+ this.state = 'HAVE_METADATA';
+ }
+ }
+ /**
+ * start loading of the playlist
+ */
+ ;
+
+ _proto.load = function load(shouldDelay) {
+ var _this5 = this;
+
+ if (this.mediaUpdateTimeout) {
+ window$1.clearTimeout(this.mediaUpdateTimeout);
+ this.mediaUpdateTimeout = null;
+ }
+
+ var media = this.media();
+
+ if (shouldDelay) {
+ var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
+ this.mediaUpdateTimeout = window$1.setTimeout(function () {
+ _this5.mediaUpdateTimeout = null;
+
+ _this5.load();
+ }, delay);
+ return;
+ }
+
+ if (!this.started) {
+ this.start();
+ return;
+ }
+
+ if (media && !media.endList) {
+ this.trigger('mediaupdatetimeout');
+ } else {
+ this.trigger('loadedplaylist');
+ }
+ };
+
+ _proto.updateMediaUpdateTimeout_ = function updateMediaUpdateTimeout_(delay) {
+ var _this6 = this;
+
+ if (this.mediaUpdateTimeout) {
+ window$1.clearTimeout(this.mediaUpdateTimeout);
+ this.mediaUpdateTimeout = null;
+    } // we only use mediaupdatetimeout for live playlists.
+
+
+ if (!this.media() || this.media().endList) {
+ return;
+ }
+
+ this.mediaUpdateTimeout = window$1.setTimeout(function () {
+ _this6.mediaUpdateTimeout = null;
+
+ _this6.trigger('mediaupdatetimeout');
+
+ _this6.updateMediaUpdateTimeout_(delay);
+ }, delay);
+ }
+ /**
+ * start loading of the playlist
+ */
+ ;
+
+ _proto.start = function start() {
+ var _this7 = this;
+
+ this.started = true;
+
+ if (typeof this.src === 'object') {
+ // in the case of an entirely constructed manifest object (meaning there's no actual
+ // manifest on a server), default the uri to the page's href
+ if (!this.src.uri) {
+ this.src.uri = window$1.location.href;
+ } // resolvedUri is added on internally after the initial request. Since there's no
+ // request for pre-resolved manifests, add on resolvedUri here.
+
+
+ this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
+ // request can be skipped (since the top level of the manifest, at a minimum, is
+ // already available as a parsed manifest object). However, if the manifest object
+ // represents a master playlist, some media playlists may need to be resolved before
+ // the starting segment list is available. Therefore, go directly to setup of the
+ // initial playlist, and let the normal flow continue from there.
+ //
+ // Note that the call to setup is asynchronous, as other sections of VHS may assume
+ // that the first request is asynchronous.
+
+ setTimeout(function () {
+ _this7.setupInitialPlaylist(_this7.src);
+ }, 0);
+ return;
+ } // request the specified URL
+
+
+ this.request = this.vhs_.xhr({
+ uri: this.src,
+ withCredentials: this.withCredentials
+ }, function (error, req) {
+ // disposed
+ if (!_this7.request) {
+ return;
+ } // clear the loader's request reference
+
+
+ _this7.request = null;
+
+ if (error) {
+ _this7.error = {
+ status: req.status,
+ message: "HLS playlist request error at URL: " + _this7.src + ".",
+ responseText: req.responseText,
+ // MEDIA_ERR_NETWORK
+ code: 2
+ };
+
+ if (_this7.state === 'HAVE_NOTHING') {
+ _this7.started = false;
+ }
+
+ return _this7.trigger('error');
+ }
+
+ _this7.src = resolveManifestRedirect(_this7.handleManifestRedirects, _this7.src, req);
+
+ var manifest = _this7.parseManifest_({
+ manifestString: req.responseText,
+ url: _this7.src
+ });
+
+ _this7.setupInitialPlaylist(manifest);
+ });
+ };
+
+ _proto.srcUri = function srcUri() {
+ return typeof this.src === 'string' ? this.src : this.src.uri;
+ }
+ /**
+ * Given a manifest object that's either a master or media playlist, trigger the proper
+ * events and set the state of the playlist loader.
+ *
+ * If the manifest object represents a master playlist, `loadedplaylist` will be
+ * triggered to allow listeners to select a playlist. If none is selected, the loader
+ * will default to the first one in the playlists array.
+ *
+ * If the manifest object represents a media playlist, `loadedplaylist` will be
+ * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
+ *
+ * In the case of a media playlist, a master playlist object wrapper with one playlist
+ * will be created so that all logic can handle playlists in the same fashion (as an
+ * assumed manifest object schema).
+ *
+ * @param {Object} manifest
+ * The parsed manifest object
+ */
+ ;
+
+ _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
+ this.state = 'HAVE_MASTER';
+
+ if (manifest.playlists) {
+ this.master = manifest;
+      addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists with segments already resolved,
+ // then resolve URIs in advance, as they are usually done after a playlist request,
+ // which may not happen if the playlist is resolved.
+
+ manifest.playlists.forEach(function (playlist) {
+ playlist.segments = getAllSegments(playlist);
+ playlist.segments.forEach(function (segment) {
+ resolveSegmentUris(segment, playlist.resolvedUri);
+ });
+ });
+ this.trigger('loadedplaylist');
+
+ if (!this.request) {
+ // no media playlist was specifically selected so start
+ // from the first listed one
+ this.media(this.master.playlists[0]);
+ }
+
+ return;
+ } // In order to support media playlists passed in as vhs-json, the case where the uri
+ // is not provided as part of the manifest should be considered, and an appropriate
+ // default used.
+
+
+ var uri = this.srcUri() || window$1.location.href;
+ this.master = masterForMedia(manifest, uri);
+ this.haveMetadata({
+ playlistObject: manifest,
+ url: uri,
+ id: this.master.playlists[0].id
+ });
+ this.trigger('loadedmetadata');
+ };
+
+ return PlaylistLoader;
+}(EventTarget$1);
+/**
+ * @file xhr.js
+ */
+
+
+var videojsXHR = videojs.xhr,
+ mergeOptions$1 = videojs.mergeOptions;
+
+var callbackWrapper = function callbackWrapper(request, error, response, callback) {
+ var reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
+
+ if (!error && reqResponse) {
+ request.responseTime = Date.now();
+ request.roundTripTime = request.responseTime - request.requestTime;
+ request.bytesReceived = reqResponse.byteLength || reqResponse.length;
+
+ if (!request.bandwidth) {
+ request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
+ }
+ }
+
+ if (response.headers) {
+ request.responseHeaders = response.headers;
+ } // videojs.xhr now uses a specific code on the error
+ // object to signal that a request has timed out instead
+ // of setting a boolean on the request object
+
+
+ if (error && error.code === 'ETIMEDOUT') {
+ request.timedout = true;
+ } // videojs.xhr no longer considers status codes outside of 200 and 0
+ // (for file uris) to be errors, but the old XHR did, so emulate that
+ // behavior. Status 206 may be used in response to byterange requests.
+
+
+ if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
+ error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
+ }
+
+ callback(error, request);
+};
+
+var xhrFactory = function xhrFactory() {
+ var xhr = function XhrFunction(options, callback) {
+ // Add a default timeout
+ options = mergeOptions$1({
+ timeout: 45e3
+ }, options); // Allow an optional user-specified function to modify the option
+ // object before we construct the xhr request
+
+ var beforeRequest = XhrFunction.beforeRequest || videojs.Vhs.xhr.beforeRequest;
+
+ if (beforeRequest && typeof beforeRequest === 'function') {
+ var newOptions = beforeRequest(options);
+
+ if (newOptions) {
+ options = newOptions;
+ }
+    } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
+ // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
+
+
+ var xhrMethod = videojs.Vhs.xhr.original === true ? videojsXHR : videojs.Vhs.xhr;
+ var request = xhrMethod(options, function (error, response) {
+ return callbackWrapper(request, error, response, callback);
+ });
+ var originalAbort = request.abort;
+
+ request.abort = function () {
+ request.aborted = true;
+ return originalAbort.apply(request, arguments);
+ };
+
+ request.uri = options.uri;
+ request.requestTime = Date.now();
+ return request;
+ };
+
+ xhr.original = true;
+ return xhr;
+};
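+// Illustrative sketch of the `beforeRequest` hook consulted above (not part of
+// this bundle): a player integration could rewrite the request options before
+// each playlist/segment request, e.g. to append an auth token to the URI. The
+// token value below is a placeholder.
+//
+//   videojs.Vhs.xhr.beforeRequest = function (options) {
+//     options.uri = options.uri + '?token=PLACEHOLDER';
+//     return options;
+//   };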
+/**
+ * Turns segment byterange into a string suitable for use in
+ * HTTP Range requests
+ *
+ * @param {Object} byterange - an object with two values defining the start and end
+ * of a byte-range
+ */
+
+
+var byterangeStr = function byterangeStr(byterange) {
+ // `byterangeEnd` is one less than `offset + length` because the HTTP range
+ // header uses inclusive ranges
+ var byterangeEnd;
+ var byterangeStart = byterange.offset;
+
+ if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
+ byterangeEnd = window$1.BigInt(byterange.offset) + window$1.BigInt(byterange.length) - window$1.BigInt(1);
+ } else {
+ byterangeEnd = byterange.offset + byterange.length - 1;
+ }
+
+ return 'bytes=' + byterangeStart + '-' + byterangeEnd;
+};
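+// For example, a segment byterange of { offset: 0, length: 188 } yields the
+// header value 'bytes=0-187', and { offset: 188, length: 188 } yields
+// 'bytes=188-375', because HTTP Range end positions are inclusive.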
+/**
+ * Defines headers for use in the xhr request for a particular segment.
+ *
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ */
+
+
+var segmentXhrHeaders = function segmentXhrHeaders(segment) {
+ var headers = {};
+
+ if (segment.byterange) {
+ headers.Range = byterangeStr(segment.byterange);
+ }
+
+ return headers;
+};
+/**
+ * @file bin-utils.js
+ */
+
+/**
+ * convert a TimeRange to text
+ *
+ * @param {TimeRange} range the timerange to use for conversion
+ * @param {number} i the iterator on the range to convert
+ * @return {string} the range in string format
+ */
+
+
+var textRange = function textRange(range, i) {
+ return range.start(i) + '-' + range.end(i);
+};
+/**
+ * format a number as hex string
+ *
+ * @param {number} e The number
+ * @param {number} i the iterator
+ * @return {string} the hex formatted number as a string
+ */
+
+
+var formatHexString = function formatHexString(e, i) {
+ var value = e.toString(16);
+ return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
+};
+
+var formatAsciiString = function formatAsciiString(e) {
+ if (e >= 0x20 && e < 0x7e) {
+ return String.fromCharCode(e);
+ }
+
+ return '.';
+};
+/**
+ * Creates an object for sending to a web worker modifying properties that are TypedArrays
+ * into a new object with separated properties for the buffer, byteOffset, and byteLength.
+ *
+ * @param {Object} message
+ * Object of properties and values to send to the web worker
+ * @return {Object}
+ * Modified message with TypedArray values expanded
+ * @function createTransferableMessage
+ */
+
+
+var createTransferableMessage = function createTransferableMessage(message) {
+ var transferable = {};
+ Object.keys(message).forEach(function (key) {
+ var value = message[key];
+
+ if (isArrayBufferView(value)) {
+ transferable[key] = {
+ bytes: value.buffer,
+ byteOffset: value.byteOffset,
+ byteLength: value.byteLength
+ };
+ } else {
+ transferable[key] = value;
+ }
+ });
+ return transferable;
+};
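+// For example, { action: 'push', data: new Uint8Array([1, 2, 3]) } becomes
+// { action: 'push', data: { bytes: <ArrayBuffer>, byteOffset: 0, byteLength: 3 } },
+// while plain values such as the 'push' string pass through untouched.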
+/**
+ * Returns a unique string identifier for a media initialization
+ * segment.
+ *
+ * @param {Object} initSegment
+ * the init segment object.
+ *
+ * @return {string} the generated init segment id
+ */
+
+
+var initSegmentId = function initSegmentId(initSegment) {
+ var byterange = initSegment.byterange || {
+ length: Infinity,
+ offset: 0
+ };
+ return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
+};
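+// For example, an init segment with byterange { offset: 0, length: 720 } and
+// resolvedUri 'https://example.com/init.mp4' (a hypothetical URL) maps to the
+// id '720,0,https://example.com/init.mp4'; when no byterange is present, the
+// Infinity/0 defaults are used instead.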
+/**
+ * Returns a unique string identifier for a media segment key.
+ *
+ * @param {Object} key the encryption key
+ * @return {string} the unique id for the media segment key.
+ */
+
+
+var segmentKeyId = function segmentKeyId(key) {
+ return key.resolvedUri;
+};
+/**
+ * utils to help dump binary data to the console
+ *
+ * @param {Array|TypedArray} data
+ * data to dump to a string
+ *
+ * @return {string} the data as a hex string.
+ */
+
+
+var hexDump = function hexDump(data) {
+ var bytes = Array.prototype.slice.call(data);
+ var step = 16;
+ var result = '';
+ var hex;
+ var ascii;
+
+ for (var j = 0; j < bytes.length / step; j++) {
+ hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
+ ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
+ result += hex + ' ' + ascii + '\n';
+ }
+
+ return result;
+};
+
+var tagDump = function tagDump(_ref) {
+ var bytes = _ref.bytes;
+ return hexDump(bytes);
+};
+
+var textRanges = function textRanges(ranges) {
+ var result = '';
+ var i;
+
+ for (i = 0; i < ranges.length; i++) {
+ result += textRange(ranges, i) + ' ';
+ }
+
+ return result;
+};
+
+var utils = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ createTransferableMessage: createTransferableMessage,
+ initSegmentId: initSegmentId,
+ segmentKeyId: segmentKeyId,
+ hexDump: hexDump,
+ tagDump: tagDump,
+ textRanges: textRanges
+}); // TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
+// 25% was arbitrarily chosen, and may need to be refined over time.
+
+var SEGMENT_END_FUDGE_PERCENT = 0.25;
+/**
+ * Converts a player time (any time that can be gotten/set from player.currentTime(),
+ * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
+ * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
+ *
+ * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
+ * point" (a point where we have a mapping from program time to player time, with player
+ * time being the post transmux start of the segment).
+ *
+ * For more details, see [this doc](../../docs/program-time-from-player-time.md).
+ *
+ * @param {number} playerTime the player time
+ * @param {Object} segment the segment which contains the player time
+ * @return {Date} program time
+ */
+
+var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
+ if (!segment.dateTimeObject) {
+ // Can't convert without an "anchor point" for the program time (i.e., a time that can
+ // be used to map the start of a segment with a real world time).
+ return null;
+ }
+
+ var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
+ var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prepended
+
+ var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
+ var offsetFromSegmentStart = playerTime - startOfSegment;
+ return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
+};
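+// Worked example (illustrative values): for a segment whose
+// EXT-X-PROGRAM-DATE-TIME is 2022-01-01T00:00:00Z, with a
+// transmuxedPresentationStart of 100 and transmuxerPrependedSeconds of 2, the
+// post-transmux start is 102, so a playerTime of 105 maps to the program time
+// 2022-01-01T00:00:03Z.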
+
+var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
+ return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
+};
+/**
+ * Finds a segment that contains the time requested given as an ISO-8601 string. The
+ * returned segment might be an estimate or an accurate match.
+ *
+ * @param {string} programTime The ISO-8601 programTime to find a match for
+ * @param {Object} playlist A playlist object to search within
+ */
+
+
+var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
+ // Assumptions:
+ // - verifyProgramDateTimeTags has already been run
+ // - live streams have been started
+ var dateTimeObject;
+
+ try {
+ dateTimeObject = new Date(programTime);
+ } catch (e) {
+ return null;
+ }
+
+ if (!playlist || !playlist.segments || playlist.segments.length === 0) {
+ return null;
+ }
+
+ var segment = playlist.segments[0];
+
+ if (dateTimeObject < segment.dateTimeObject) {
+ // Requested time is before stream start.
+ return null;
+ }
+
+ for (var i = 0; i < playlist.segments.length - 1; i++) {
+ segment = playlist.segments[i];
+ var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
+
+ if (dateTimeObject < nextSegmentStart) {
+ break;
+ }
+ }
+
+ var lastSegment = playlist.segments[playlist.segments.length - 1];
+ var lastSegmentStart = lastSegment.dateTimeObject;
+ var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
+ var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
+
+ if (dateTimeObject > lastSegmentEnd) {
+ // Beyond the end of the stream, or our best guess of the end of the stream.
+ return null;
+ }
+
+ if (dateTimeObject > lastSegmentStart) {
+ segment = lastSegment;
+ }
+
+ return {
+ segment: segment,
+ estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
+ // Although, given that all segments have accurate date time objects, the segment
+ // selected should be accurate, unless the video has been transmuxed at some point
+ // (determined by the presence of the videoTimingInfo object), the segment's "player
+ // time" (the start time in the player) can't be considered accurate.
+ type: segment.videoTimingInfo ? 'accurate' : 'estimate'
+ };
+};
+/**
+ * Finds a segment that contains the given player time(in seconds).
+ *
+ * @param {number} time The player time to find a match for
+ * @param {Object} playlist A playlist object to search within
+ */
+
+
+var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
+ // Assumptions:
+ // - there will always be a segment.duration
+ // - we can start from zero
+ // - segments are in time order
+ if (!playlist || !playlist.segments || playlist.segments.length === 0) {
+ return null;
+ }
+
+ var segmentEnd = 0;
+ var segment;
+
+ for (var i = 0; i < playlist.segments.length; i++) {
+ segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
+ // should contain the most accurate values we have for the segment's player times.
+ //
+ // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
+ // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
+ // calculate an end value.
+
+ segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
+
+ if (time <= segmentEnd) {
+ break;
+ }
+ }
+
+ var lastSegment = playlist.segments[playlist.segments.length - 1];
+
+ if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
+ // The time requested is beyond the stream end.
+ return null;
+ }
+
+ if (time > segmentEnd) {
+ // The time is within or beyond the last segment.
+ //
+ // Check to see if the time is beyond a reasonable guess of the end of the stream.
+ if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
+ // Technically, because the duration value is only an estimate, the time may still
+ // exist in the last segment, however, there isn't enough information to make even
+ // a reasonable estimate.
+ return null;
+ }
+
+ segment = lastSegment;
+ }
+
+ return {
+ segment: segment,
+ estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
+ // Because videoTimingInfo is only set after transmux, it is the only way to get
+ // accurate timing values.
+ type: segment.videoTimingInfo ? 'accurate' : 'estimate'
+ };
+};
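+// For example, if the last segment's estimated end is 100 seconds and its
+// manifest duration is 10 seconds, a requested time of up to
+// 100 + 10 * SEGMENT_END_FUDGE_PERCENT = 102.5 seconds is still attributed to
+// that segment as an 'estimate'; anything later returns null.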
+/**
+ * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
+ * If the offset returned is positive, the programTime occurs after the
+ * comparisonTimestamp.
+ * If the offset is negative, the programTime occurs before the comparisonTimestamp.
+ *
+ * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
+ * @param {string} programTime The programTime as an ISO-8601 string
+ * @return {number} offset
+ */
+
+
+var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
+ var segmentDateTime;
+ var programDateTime;
+
+ try {
+ segmentDateTime = new Date(comparisonTimeStamp);
+ programDateTime = new Date(programTime);
+ } catch (e) {// TODO handle error
+ }
+
+ var segmentTimeEpoch = segmentDateTime.getTime();
+ var programTimeEpoch = programDateTime.getTime();
+ return (programTimeEpoch - segmentTimeEpoch) / 1000;
+};
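+// For example, a comparisonTimeStamp of '2022-01-01T00:00:00Z' and a
+// programTime of '2022-01-01T00:00:05Z' give an offset of +5 seconds (the
+// programTime occurs after the comparison timestamp); swapping the two
+// arguments gives -5.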
+/**
+ * Checks that all segments in this playlist have programDateTime tags.
+ *
+ * @param {Object} playlist A playlist object
+ */
+
+
+var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
+ if (!playlist.segments || playlist.segments.length === 0) {
+ return false;
+ }
+
+ for (var i = 0; i < playlist.segments.length; i++) {
+ var segment = playlist.segments[i];
+
+ if (!segment.dateTimeObject) {
+ return false;
+ }
+ }
+
+ return true;
+};
+/**
+ * Returns the programTime of the media given a playlist and a playerTime.
+ * The playlist must have programDateTime tags for a programDateTime tag to be returned.
+ * If the segments containing the time requested have not been buffered yet, an estimate
+ * may be returned to the callback.
+ *
+ * @param {Object} args
+ * @param {Object} args.playlist A playlist object to search within
+ * @param {number} time A playerTime in seconds
+ * @param {Function} callback(err, programTime)
+ * @return {string} err.message A detailed error message
+ * @return {Object} programTime
+ * @return {number} programTime.mediaSeconds The streamTime in seconds
+ * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
+ */
+
+
+var getProgramTime = function getProgramTime(_ref) {
+ var playlist = _ref.playlist,
+ _ref$time = _ref.time,
+ time = _ref$time === void 0 ? undefined : _ref$time,
+ callback = _ref.callback;
+
+ if (!callback) {
+ throw new Error('getProgramTime: callback must be provided');
+ }
+
+ if (!playlist || time === undefined) {
+ return callback({
+ message: 'getProgramTime: playlist and time must be provided'
+ });
+ }
+
+ var matchedSegment = findSegmentForPlayerTime(time, playlist);
+
+ if (!matchedSegment) {
+ return callback({
+ message: 'valid programTime was not found'
+ });
+ }
+
+ if (matchedSegment.type === 'estimate') {
+ return callback({
+ message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
+ seekTime: matchedSegment.estimatedStart
+ });
+ }
+
+ var programTimeObject = {
+ mediaSeconds: time
+ };
+ var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
+
+ if (programTime) {
+ programTimeObject.programDateTime = programTime.toISOString();
+ }
+
+ return callback(null, programTimeObject);
+};
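+// Illustrative call pattern for getProgramTime (a sketch; `playlist` and the
+// player instance come from the surrounding integration, not this file):
+//
+//   getProgramTime({
+//     playlist: playlist,
+//     time: player.currentTime(),
+//     callback: function (err, programTime) {
+//       if (err) {
+//         // err.seekTime is present when only an estimate was available
+//         return;
+//       }
+//       // programTime.mediaSeconds / programTime.programDateTime are now set
+//     }
+//   });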
+/**
+ * Seeks in the player to a time that matches the given programTime ISO-8601 string.
+ *
+ * @param {Object} args
+ * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
+ * @param {Object} args.playlist A playlist to look within
+ * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
+ * @param {Function} args.seekTo A method to perform a seek
+ * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
+ * @param {Object} args.tech The tech to seek on
+ * @param {Function} args.callback(err, newTime) A callback to return the new time to
+ * @return {string} err.message A detailed error message
+ * @return {number} newTime The exact time that was seeked to in seconds
+ */
+
+
+var seekToProgramTime = function seekToProgramTime(_ref2) {
+ var programTime = _ref2.programTime,
+ playlist = _ref2.playlist,
+ _ref2$retryCount = _ref2.retryCount,
+ retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
+ seekTo = _ref2.seekTo,
+ _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
+ pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
+ tech = _ref2.tech,
+ callback = _ref2.callback;
+
+ if (!callback) {
+ throw new Error('seekToProgramTime: callback must be provided');
+ }
+
+ if (typeof programTime === 'undefined' || !playlist || !seekTo) {
+ return callback({
+ message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
+ });
+ }
+
+ if (!playlist.endList && !tech.hasStarted_) {
+ return callback({
+ message: 'player must be playing a live stream to start buffering'
+ });
+ }
+
+ if (!verifyProgramDateTimeTags(playlist)) {
+ return callback({
+ message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
+ });
+ }
+
+ var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match
+
+ if (!matchedSegment) {
+ return callback({
+ message: programTime + " was not found in the stream"
+ });
+ }
+
+ var segment = matchedSegment.segment;
+ var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
+
+ if (matchedSegment.type === 'estimate') {
+ // we've run out of retries
+ if (retryCount === 0) {
+ return callback({
+ message: programTime + " is not buffered yet. Try again"
+ });
+ }
+
+ seekTo(matchedSegment.estimatedStart + mediaOffset);
+ tech.one('seeked', function () {
+ seekToProgramTime({
+ programTime: programTime,
+ playlist: playlist,
+ retryCount: retryCount - 1,
+ seekTo: seekTo,
+ pauseAfterSeek: pauseAfterSeek,
+ tech: tech,
+ callback: callback
+ });
+ });
+ return;
+ } // Since the segment.start value is determined from the buffered end or ending time
+ // of the prior segment, the seekToTime doesn't need to account for any transmuxer
+ // modifications.
+
+
+ var seekToTime = segment.start + mediaOffset;
+
+ var seekedCallback = function seekedCallback() {
+ return callback(null, tech.currentTime());
+ }; // listen for seeked event
+
+
+ tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state
+
+ if (pauseAfterSeek) {
+ tech.pause();
+ }
+
+ seekTo(seekToTime);
+}; // which will only happen if the request is complete.
+
+
+var callbackOnCompleted = function callbackOnCompleted(request, cb) {
+ if (request.readyState === 4) {
+ return cb();
+ }
+
+ return;
+};
+
+var containerRequest = function containerRequest(uri, xhr, cb) {
+ var bytes = [];
+ var id3Offset;
+ var finished = false;
+
+ var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
+ req.abort();
+ finished = true;
+ return cb(err, req, type, _bytes);
+ };
+
+ var progressListener = function progressListener(error, request) {
+ if (finished) {
+ return;
+ }
+
+ if (error) {
+ return endRequestAndCallback(error, request, '', bytes);
+    } // grab the new part of content that was just downloaded
+
+
+ var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes
+
+ bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
+ id3Offset = id3Offset || getId3Offset(bytes); // we need at least 10 bytes to determine a type
+ // or we need at least two bytes after an id3Offset
+
+ if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
+ return callbackOnCompleted(request, function () {
+ return endRequestAndCallback(error, request, '', bytes);
+ });
+ }
+
+ var type = detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
+ // to see the second sync byte, wait until we have enough data
+ // before declaring it ts
+
+ if (type === 'ts' && bytes.length < 188) {
+ return callbackOnCompleted(request, function () {
+ return endRequestAndCallback(error, request, '', bytes);
+ });
+ } // this may be an unsynced ts segment
+ // wait for 376 bytes before detecting no container
+
+
+ if (!type && bytes.length < 376) {
+ return callbackOnCompleted(request, function () {
+ return endRequestAndCallback(error, request, '', bytes);
+ });
+ }
+
+ return endRequestAndCallback(null, request, type, bytes);
+ };
+
+ var options = {
+ uri: uri,
+ beforeSend: function beforeSend(request) {
+ // this forces the browser to pass the bytes to us unprocessed
+ request.overrideMimeType('text/plain; charset=x-user-defined');
+ request.addEventListener('progress', function (_ref) {
+ _ref.total;
+ _ref.loaded;
+ return callbackWrapper(request, null, {
+ statusCode: request.status
+ }, progressListener);
+ });
+ }
+ };
+ var request = xhr(options, function (error, response) {
+ return callbackWrapper(request, error, response, progressListener);
+ });
+ return request;
+};
+
+var EventTarget = videojs.EventTarget,
+ mergeOptions = videojs.mergeOptions;
+
+var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
+ if (!isPlaylistUnchanged(a, b)) {
+ return false;
+ } // for dash the above check will often return true in scenarios where
+ // the playlist actually has changed because mediaSequence isn't a
+  // dash thing, and we often set it to 1. So if the playlists have the same number
+  // of segments, that check alone will report them as unchanged.
+ // So for dash we need to make sure that the underlying segments are different.
+ // if sidx changed then the playlists are different.
+
+
+ if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
+ return false;
+ } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
+ return false;
+ } // one or the other does not have segments
+ // there was a change.
+
+
+ if (a.segments && !b.segments || !a.segments && b.segments) {
+ return false;
+ } // neither has segments nothing changed
+
+
+ if (!a.segments && !b.segments) {
+ return true;
+ } // check segments themselves
+
+
+ for (var i = 0; i < a.segments.length; i++) {
+ var aSegment = a.segments[i];
+ var bSegment = b.segments[i]; // if uris are different between segments there was a change
+
+ if (aSegment.uri !== bSegment.uri) {
+ return false;
+ } // neither segment has a byterange, there will be no byterange change.
+
+
+ if (!aSegment.byterange && !bSegment.byterange) {
+ continue;
+ }
+
+ var aByterange = aSegment.byterange;
+ var bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.
+
+ if (aByterange && !bByterange || !aByterange && bByterange) {
+ return false;
+ } // if both segments have byterange with different offsets, there was a change.
+
+
+ if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
+ return false;
+ }
+ } // if everything was the same with segments, this is the same playlist.
+
+
+ return true;
+};
+/**
+ * Parses the master XML string and updates playlist URI references.
+ *
+ * @param {Object} config
+ * Object of arguments
+ * @param {string} config.masterXml
+ * The mpd XML
+ * @param {string} config.srcUrl
+ * The mpd URL
+ * @param {Date} config.clientOffset
+ * A time difference between server and client
+ * @param {Object} config.sidxMapping
+ * SIDX mappings for moof/mdat URIs and byte ranges
+ * @return {Object}
+ * The parsed mpd manifest object
+ */
+
+
+var parseMasterXml = function parseMasterXml(_ref) {
+ var masterXml = _ref.masterXml,
+ srcUrl = _ref.srcUrl,
+ clientOffset = _ref.clientOffset,
+ sidxMapping = _ref.sidxMapping,
+ previousManifest = _ref.previousManifest;
+ var manifest = parse(masterXml, {
+ manifestUri: srcUrl,
+ clientOffset: clientOffset,
+ sidxMapping: sidxMapping,
+ previousManifest: previousManifest
+ });
+ addPropertiesToMaster(manifest, srcUrl);
+ return manifest;
+};
+/**
+ * Returns a new master manifest that is the result of merging an updated master manifest
+ * into the original version.
+ *
+ * @param {Object} oldMaster
+ * The old parsed mpd object
+ * @param {Object} newMaster
+ * The updated parsed mpd object
+ * @return {Object}
+ * A new object representing the original master manifest with the updated media
+ * playlists merged in
+ */
+
+
+var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
+ var noChanges = true;
+ var update = mergeOptions(oldMaster, {
+ // These are top level properties that can be updated
+ duration: newMaster.duration,
+ minimumUpdatePeriod: newMaster.minimumUpdatePeriod,
+ timelineStarts: newMaster.timelineStarts
+ }); // First update the playlists in playlist list
+
+ for (var i = 0; i < newMaster.playlists.length; i++) {
+ var playlist = newMaster.playlists[i];
+
+ if (playlist.sidx) {
+ var sidxKey = generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info already
+
+ if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
+ addSidxSegmentsToPlaylist(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
+ }
+ }
+
+ var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);
+
+ if (playlistUpdate) {
+ update = playlistUpdate;
+ noChanges = false;
+ }
+ } // Then update media group playlists
+
+
+ forEachMediaGroup(newMaster, function (properties, type, group, label) {
+ if (properties.playlists && properties.playlists.length) {
+ var id = properties.playlists[0].id;
+
+ var _playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);
+
+ if (_playlistUpdate) {
+ update = _playlistUpdate; // update the playlist reference within media groups
+
+ update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
+ noChanges = false;
+ }
+ }
+ });
+
+ if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
+ noChanges = false;
+ }
+
+ if (noChanges) {
+ return null;
+ }
+
+ return update;
+}; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
+// If the SIDXs have maps, the two maps should match,
+// and both `a` and `b` lacking a map is also considered a match.
+// If `a` or `b` but not both have a map, they aren't matching.
+
+
+var equivalentSidx = function equivalentSidx(a, b) {
+ var neitherMap = Boolean(!a.map && !b.map);
+ var equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
+ return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
+}; // exported for testing
+
+
+var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
+ var newSidxMapping = {};
+
+ for (var id in playlists) {
+ var playlist = playlists[id];
+ var currentSidxInfo = playlist.sidx;
+
+ if (currentSidxInfo) {
+ var key = generateSidxKey(currentSidxInfo);
+
+ if (!oldSidxMapping[key]) {
+ break;
+ }
+
+ var savedSidxInfo = oldSidxMapping[key].sidxInfo;
+
+ if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
+ newSidxMapping[key] = oldSidxMapping[key];
+ }
+ }
+ }
+
+ return newSidxMapping;
+};
+/**
+ * A function that filters out changed items as they need to be requested separately.
+ *
+ * The method is exported for testing
+ *
+ * @param {Object} master the parsed mpd XML returned via mpd-parser
+ * @param {Object} oldSidxMapping the SIDX to compare against
+ */
+
+
+var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
+ var videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
+ var mediaGroupSidx = videoSidx;
+ forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
+ if (properties.playlists && properties.playlists.length) {
+ var playlists = properties.playlists;
+ mediaGroupSidx = mergeOptions(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
+ }
+ });
+ return mediaGroupSidx;
+};
+
+var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
+ _inheritsLoose(DashPlaylistLoader, _EventTarget); // DashPlaylistLoader must accept either a src url or a playlist because subsequent
+ // playlist loader setups from media groups will expect to be able to pass a playlist
+ // (since there aren't external URLs to media playlists with DASH)
+
+
+ function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _EventTarget.call(this) || this;
+ _this.masterPlaylistLoader_ = masterPlaylistLoader || _assertThisInitialized(_this);
+
+ if (!masterPlaylistLoader) {
+ _this.isMaster_ = true;
+ }
+
+ var _options = options,
+ _options$withCredenti = _options.withCredentials,
+ withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
+ _options$handleManife = _options.handleManifestRedirects,
+ handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
+ _this.vhs_ = vhs;
+ _this.withCredentials = withCredentials;
+ _this.handleManifestRedirects = handleManifestRedirects;
+
+ if (!srcUrlOrPlaylist) {
+ throw new Error('A non-empty playlist URL or object is required');
+ } // event naming?
+
+
+ _this.on('minimumUpdatePeriod', function () {
+ _this.refreshXml_();
+ }); // live playlist staleness timeout
+
+
+ _this.on('mediaupdatetimeout', function () {
+ _this.refreshMedia_(_this.media().id);
+ });
+
+ _this.state = 'HAVE_NOTHING';
+ _this.loadedPlaylists_ = {};
+ _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
+ // The masterPlaylistLoader will be created with a string
+
+ if (_this.isMaster_) {
+ _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
+ // once multi-period is refactored
+
+ _this.masterPlaylistLoader_.sidxMapping_ = {};
+ } else {
+ _this.childPlaylist_ = srcUrlOrPlaylist;
+ }
+
+ return _this;
+ }
+
+ var _proto = DashPlaylistLoader.prototype;
+
+ _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
+ // disposed
+ if (!this.request) {
+ return true;
+ } // pending request is cleared
+
+
+ this.request = null;
+
+ if (err) {
+ // use the provided error object or create one
+ // based on the request/response
+ this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
+ status: request.status,
+ message: 'DASH request error at URL: ' + request.uri,
+ response: request.response,
+ // MEDIA_ERR_NETWORK
+ code: 2
+ };
+
+ if (startingState) {
+ this.state = startingState;
+ }
+
+ this.trigger('error');
+ return true;
+ }
+ }
+ /**
+ * Verify that the container of the sidx segment can be parsed
+ * and if it can, get and parse that segment.
+ */
+ ;
+
+ _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
+ var _this2 = this;
+
+ var sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.
+
+ if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
+ // keep this function async
+ this.mediaRequest_ = window$1.setTimeout(function () {
+ return cb(false);
+ }, 0);
+ return;
+ } // resolve the segment URL relative to the playlist
+
+
+ var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);
+
+ var fin = function fin(err, request) {
+ if (_this2.requestErrored_(err, request, startingState)) {
+ return;
+ }
+
+ var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
+ var sidx;
+
+ try {
+ sidx = parseSidx(toUint8(request.response).subarray(8));
+ } catch (e) {
+ // sidx parsing failed.
+ _this2.requestErrored_(e, request, startingState);
+
+ return;
+ }
+
+ sidxMapping[sidxKey] = {
+ sidxInfo: playlist.sidx,
+ sidx: sidx
+ };
+ addSidxSegmentsToPlaylist(playlist, sidx, playlist.sidx.resolvedUri);
+ return cb(true);
+ };
+
+ this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
+ if (err) {
+ return fin(err, request);
+ }
+
+ if (!container || container !== 'mp4') {
+ return fin({
+ status: request.status,
+ message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
+ // response is just bytes in this case
+ // but we really don't want to return that.
+ response: '',
+ playlist: playlist,
+ internal: true,
+ blacklistDuration: Infinity,
+ // MEDIA_ERR_NETWORK
+ code: 2
+ }, request);
+ } // if we already downloaded the sidx bytes in the container request, use them
+
+
+ var _playlist$sidx$bytera = playlist.sidx.byterange,
+ offset = _playlist$sidx$bytera.offset,
+ length = _playlist$sidx$bytera.length;
+
+ if (bytes.length >= length + offset) {
+ return fin(err, {
+ response: bytes.subarray(offset, offset + length),
+ status: request.status,
+ uri: request.uri
+ });
+ } // otherwise request sidx bytes
+
+
+ _this2.request = _this2.vhs_.xhr({
+ uri: uri,
+ responseType: 'arraybuffer',
+ headers: segmentXhrHeaders({
+ byterange: playlist.sidx.byterange
+ })
+ }, fin);
+ });
+ };
+
+ _proto.dispose = function dispose() {
+ this.trigger('dispose');
+ this.stopRequest();
+ this.loadedPlaylists_ = {};
+ window$1.clearTimeout(this.minimumUpdatePeriodTimeout_);
+ window$1.clearTimeout(this.mediaRequest_);
+ window$1.clearTimeout(this.mediaUpdateTimeout);
+ this.mediaUpdateTimeout = null;
+ this.mediaRequest_ = null;
+ this.minimumUpdatePeriodTimeout_ = null;
+
+ if (this.masterPlaylistLoader_.createMupOnMedia_) {
+ this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
+ this.masterPlaylistLoader_.createMupOnMedia_ = null;
+ }
+
+ this.off();
+ };
+
+ _proto.hasPendingRequest = function hasPendingRequest() {
+ return this.request || this.mediaRequest_;
+ };
+
+ _proto.stopRequest = function stopRequest() {
+ if (this.request) {
+ var oldRequest = this.request;
+ this.request = null;
+ oldRequest.onreadystatechange = null;
+ oldRequest.abort();
+ }
+ };
+
+ _proto.media = function media(playlist) {
+ var _this3 = this; // getter
+
+
+ if (!playlist) {
+ return this.media_;
+ } // setter
+
+
+ if (this.state === 'HAVE_NOTHING') {
+ throw new Error('Cannot switch media playlist from ' + this.state);
+ }
+
+ var startingState = this.state; // find the playlist object if the target playlist has been specified by URI
+
+ if (typeof playlist === 'string') {
+ if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
+ throw new Error('Unknown playlist URI: ' + playlist);
+ }
+
+ playlist = this.masterPlaylistLoader_.master.playlists[playlist];
+ }
+
+ var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately
+
+ if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
+ this.state = 'HAVE_METADATA';
+ this.media_ = playlist; // trigger media change if the active media has been updated
+
+ if (mediaChange) {
+ this.trigger('mediachanging');
+ this.trigger('mediachange');
+ }
+
+ return;
+ } // switching to the active playlist is a no-op
+
+
+ if (!mediaChange) {
+ return;
+ } // switching from an already loaded playlist
+
+
+ if (this.media_) {
+ this.trigger('mediachanging');
+ }
+
+ this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
+ // everything is ready just continue to haveMetadata
+ _this3.haveMetadata({
+ startingState: startingState,
+ playlist: playlist
+ });
+ });
+ };
+
+ _proto.haveMetadata = function haveMetadata(_ref2) {
+ var startingState = _ref2.startingState,
+ playlist = _ref2.playlist;
+ this.state = 'HAVE_METADATA';
+ this.loadedPlaylists_[playlist.id] = playlist;
+ this.mediaRequest_ = null; // This will trigger loadedplaylist
+
+ this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
+ // to resolve setup of media groups
+
+ if (startingState === 'HAVE_MASTER') {
+ this.trigger('loadedmetadata');
+ } else {
+ // trigger media change if the active media has been updated
+ this.trigger('mediachange');
+ }
+ };
+
+ _proto.pause = function pause() {
+ if (this.masterPlaylistLoader_.createMupOnMedia_) {
+ this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
+ this.masterPlaylistLoader_.createMupOnMedia_ = null;
+ }
+
+ this.stopRequest();
+ window$1.clearTimeout(this.mediaUpdateTimeout);
+ this.mediaUpdateTimeout = null;
+
+ if (this.isMaster_) {
+ window$1.clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
+ this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
+ }
+
+ if (this.state === 'HAVE_NOTHING') {
+      // If we pause the loader before any data has been retrieved, it's as if we never
+ // started, so reset to an unstarted state.
+ this.started = false;
+ }
+ };
+
+ _proto.load = function load(isFinalRendition) {
+ var _this4 = this;
+
+ window$1.clearTimeout(this.mediaUpdateTimeout);
+ this.mediaUpdateTimeout = null;
+ var media = this.media();
+
+ if (isFinalRendition) {
+ var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
+ this.mediaUpdateTimeout = window$1.setTimeout(function () {
+ return _this4.load();
+ }, delay);
+ return;
+ } // because the playlists are internal to the manifest, load should either load the
+ // main manifest, or do nothing but trigger an event
+
+
+ if (!this.started) {
+ this.start();
+ return;
+ }
+
+ if (media && !media.endList) {
+ // Check to see if this is the master loader and the MUP was cleared (this happens
+ // when the loader was paused). `media` should be set at this point since one is always
+ // set during `start()`.
+ if (this.isMaster_ && !this.minimumUpdatePeriodTimeout_) {
+ // Trigger minimumUpdatePeriod to refresh the master manifest
+ this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreated
+
+ this.updateMinimumUpdatePeriodTimeout_();
+ }
+
+ this.trigger('mediaupdatetimeout');
+ } else {
+ this.trigger('loadedplaylist');
+ }
+ };
+
+ _proto.start = function start() {
+ var _this5 = this;
+
+ this.started = true; // We don't need to request the master manifest again
+ // Call this asynchronously to match the xhr request behavior below
+
+ if (!this.isMaster_) {
+ this.mediaRequest_ = window$1.setTimeout(function () {
+ return _this5.haveMaster_();
+ }, 0);
+ return;
+ }
+
+ this.requestMaster_(function (req, masterChanged) {
+ _this5.haveMaster_();
+
+ if (!_this5.hasPendingRequest() && !_this5.media_) {
+ _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
+ }
+ });
+ };
+
+ _proto.requestMaster_ = function requestMaster_(cb) {
+ var _this6 = this;
+
+ this.request = this.vhs_.xhr({
+ uri: this.masterPlaylistLoader_.srcUrl,
+ withCredentials: this.withCredentials
+ }, function (error, req) {
+ if (_this6.requestErrored_(error, req)) {
+ if (_this6.state === 'HAVE_NOTHING') {
+ _this6.started = false;
+ }
+
+ return;
+ }
+
+ var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
+ _this6.masterPlaylistLoader_.masterXml_ = req.responseText;
+
+ if (req.responseHeaders && req.responseHeaders.date) {
+ _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
+ } else {
+ _this6.masterLoaded_ = Date.now();
+ }
+
+ _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);
+
+ if (masterChanged) {
+ _this6.handleMaster_();
+
+ _this6.syncClientServerClock_(function () {
+ return cb(req, masterChanged);
+ });
+
+ return;
+ }
+
+ return cb(req, masterChanged);
+ });
+ }
+ /**
+ * Parses the master xml for UTCTiming node to sync the client clock to the server
+ * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
+ *
+ * @param {Function} done
+ * Function to call when clock sync has completed
+ */
+ ;
+
+ _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
+ var _this7 = this;
+
+ var utcTiming = parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
+ // server clock
+
+ if (utcTiming === null) {
+ this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
+ return done();
+ }
+
+ if (utcTiming.method === 'DIRECT') {
+ this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
+ return done();
+ }
+
+ this.request = this.vhs_.xhr({
+ uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
+ method: utcTiming.method,
+ withCredentials: this.withCredentials
+ }, function (error, req) {
+ // disposed
+ if (!_this7.request) {
+ return;
+ }
+
+ if (error) {
+ // sync request failed, fall back to using date header from mpd
+ // TODO: log warning
+ _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
+ return done();
+ }
+
+ var serverTime;
+
+ if (utcTiming.method === 'HEAD') {
+ if (!req.responseHeaders || !req.responseHeaders.date) {
+ // expected date header not preset, fall back to using date header from mpd
+ // TODO: log warning
+ serverTime = _this7.masterLoaded_;
+ } else {
+ serverTime = Date.parse(req.responseHeaders.date);
+ }
+ } else {
+ serverTime = Date.parse(req.responseText);
+ }
+
+ _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
+ done();
+ });
+ };
+
+ _proto.haveMaster_ = function haveMaster_() {
+ this.state = 'HAVE_MASTER';
+
+ if (this.isMaster_) {
+ // We have the master playlist at this point, so
+ // trigger this to allow MasterPlaylistController
+ // to make an initial playlist selection
+ this.trigger('loadedplaylist');
+ } else if (!this.media_) {
+ // no media playlist was specifically selected so select
+ // the one the child playlist loader was created with
+ this.media(this.childPlaylist_);
+ }
+ };
+
+ _proto.handleMaster_ = function handleMaster_() {
+ // clear media request
+ this.mediaRequest_ = null;
+ var oldMaster = this.masterPlaylistLoader_.master;
+ var newMaster = parseMasterXml({
+ masterXml: this.masterPlaylistLoader_.masterXml_,
+ srcUrl: this.masterPlaylistLoader_.srcUrl,
+ clientOffset: this.masterPlaylistLoader_.clientOffset_,
+ sidxMapping: this.masterPlaylistLoader_.sidxMapping_,
+ previousManifest: oldMaster
+ }); // if we have an old master to compare the new master against
+
+ if (oldMaster) {
+ newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
+ } // only update master if we have a new master
+
+
+ this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
+ var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];
+
+ if (location && location !== this.masterPlaylistLoader_.srcUrl) {
+ this.masterPlaylistLoader_.srcUrl = location;
+ }
+
+ if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
+ this.updateMinimumUpdatePeriodTimeout_();
+ }
+
+ return Boolean(newMaster);
+ };
+
+ _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
+ var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
+ // a new one will be added if needed.
+
+ if (mpl.createMupOnMedia_) {
+ mpl.off('loadedmetadata', mpl.createMupOnMedia_);
+ mpl.createMupOnMedia_ = null;
+ } // clear any pending timeouts
+
+
+ if (mpl.minimumUpdatePeriodTimeout_) {
+ window$1.clearTimeout(mpl.minimumUpdatePeriodTimeout_);
+ mpl.minimumUpdatePeriodTimeout_ = null;
+ }
+
+ var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
+ // MPD has no future validity, so a new one will need to be acquired when new
+ // media segments are to be made available. Thus, we use the target duration
+ // in this case
+
+ if (mup === 0) {
+ if (mpl.media()) {
+ mup = mpl.media().targetDuration * 1000;
+ } else {
+ mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
+ mpl.one('loadedmetadata', mpl.createMupOnMedia_);
+ }
+ } // if minimumUpdatePeriod is invalid or <= zero, which
+    // can happen when a live video becomes VOD, skip timeout
+ // creation.
+
+
+ if (typeof mup !== 'number' || mup <= 0) {
+ if (mup < 0) {
+ this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
+ }
+
+ return;
+ }
+
+ this.createMUPTimeout_(mup);
+ };
+
+ _proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
+ var mpl = this.masterPlaylistLoader_;
+ mpl.minimumUpdatePeriodTimeout_ = window$1.setTimeout(function () {
+ mpl.minimumUpdatePeriodTimeout_ = null;
+ mpl.trigger('minimumUpdatePeriod');
+ mpl.createMUPTimeout_(mup);
+ }, mup);
+ }
+ /**
+ * Sends request to refresh the master xml and updates the parsed master manifest
+ */
+ ;
+
+ _proto.refreshXml_ = function refreshXml_() {
+ var _this8 = this;
+
+ this.requestMaster_(function (req, masterChanged) {
+ if (!masterChanged) {
+ return;
+ }
+
+ if (_this8.media_) {
+ _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
+ } // This will filter out updated sidx info from the mapping
+
+
+ _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);
+
+ _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
+ // TODO: do we need to reload the current playlist?
+ _this8.refreshMedia_(_this8.media().id);
+ });
+ });
+ }
+ /**
+ * Refreshes the media playlist by re-parsing the master xml and updating playlist
+ * references. If this is an alternate loader, the updated parsed manifest is retrieved
+ * from the master loader.
+ */
+ ;
+
+ _proto.refreshMedia_ = function refreshMedia_(mediaID) {
+ var _this9 = this;
+
+ if (!mediaID) {
+ throw new Error('refreshMedia_ must take a media id');
+ } // for master we have to reparse the master xml
+ // to re-create segments based on current timing values
+ // which may change media. We only skip updating master
+ // if this is the first time this.media_ is being set.
+ // as master was just parsed in that case.
+
+
+ if (this.media_ && this.isMaster_) {
+ this.handleMaster_();
+ }
+
+ var playlists = this.masterPlaylistLoader_.master.playlists;
+ var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
+
+ if (mediaChanged) {
+ this.media_ = playlists[mediaID];
+ } else {
+ this.trigger('playlistunchanged');
+ }
+
+ if (!this.mediaUpdateTimeout) {
+ var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
+ if (_this9.media().endList) {
+ return;
+ }
+
+ _this9.mediaUpdateTimeout = window$1.setTimeout(function () {
+ _this9.trigger('mediaupdatetimeout');
+
+ createMediaUpdateTimeout();
+ }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
+ };
+
+ createMediaUpdateTimeout();
+ }
+
+ this.trigger('loadedplaylist');
+ };
+
+ return DashPlaylistLoader;
+}(EventTarget);
+
+var Config = {
+ GOAL_BUFFER_LENGTH: 30,
+ MAX_GOAL_BUFFER_LENGTH: 60,
+ BACK_BUFFER_LENGTH: 30,
+ GOAL_BUFFER_LENGTH_RATE: 1,
+ // 0.5 MB/s
+ INITIAL_BANDWIDTH: 4194304,
+ // A fudge factor to apply to advertised playlist bitrates to account for
+  // temporary fluctuations in client bandwidth
+ BANDWIDTH_VARIANCE: 1.2,
+ // How much of the buffer must be filled before we consider upswitching
+ BUFFER_LOW_WATER_LINE: 0,
+ MAX_BUFFER_LOW_WATER_LINE: 30,
+ // TODO: Remove this when experimentalBufferBasedABR is removed
+ EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
+ BUFFER_LOW_WATER_LINE_RATE: 1,
+ // If the buffer is greater than the high water line, we won't switch down
+ BUFFER_HIGH_WATER_LINE: 30
+};
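+// A quick sanity check on INITIAL_BANDWIDTH: 4194304 bits per second divided
+// by 8 is 524288 bytes per second, i.e. the 0.5 MB/s noted above, which serves
+// as the default starting estimate before any measured values are available.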
+
+var stringToArrayBuffer = function stringToArrayBuffer(string) {
+ var view = new Uint8Array(new ArrayBuffer(string.length));
+
+ for (var i = 0; i < string.length; i++) {
+ view[i] = string.charCodeAt(i);
+ }
+
+ return view.buffer;
+};
+/* global Blob, BlobBuilder, Worker */
+// unify worker interface
+
+
+var browserWorkerPolyFill = function browserWorkerPolyFill(workerObj) {
+ // node only supports on/off
+ workerObj.on = workerObj.addEventListener;
+ workerObj.off = workerObj.removeEventListener;
+ return workerObj;
+};
+
+var createObjectURL = function createObjectURL(str) {
+ try {
+ return URL.createObjectURL(new Blob([str], {
+ type: 'application/javascript'
+ }));
+ } catch (e) {
+ var blob = new BlobBuilder();
+ blob.append(str);
+ return URL.createObjectURL(blob.getBlob());
+ }
+};
+
+var factory = function factory(code) {
+ return function () {
+ var objectUrl = createObjectURL(code);
+ var worker = browserWorkerPolyFill(new Worker(objectUrl));
+ worker.objURL = objectUrl;
+ var terminate = worker.terminate;
+ worker.on = worker.addEventListener;
+ worker.off = worker.removeEventListener;
+
+ worker.terminate = function () {
+ URL.revokeObjectURL(objectUrl);
+ return terminate.call(this);
+ };
+
+ return worker;
+ };
+};
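+// Illustrative use of the factory above (a sketch, not executed here): given a
+// string of worker code, it returns a function that spins up a Worker from a
+// Blob object URL and revokes that URL when the worker is terminated.
+//
+//   var createWorker = factory('self.onmessage = function (e) { /* ... */ };');
+//   var worker = createWorker();
+//   worker.terminate(); // also revokes the underlying object URL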
+
+var transform = function transform(code) {
+ return "var browserWorkerPolyFill = " + browserWorkerPolyFill.toString() + ";\n" + 'browserWorkerPolyFill(self);\n' + code;
+};
+
+var getWorkerString = function getWorkerString(fn) {
+ return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
+};
+/* rollup-plugin-worker-factory start for worker!/Users/bclifford/Code/vhs-release-test/src/transmuxer-worker.js */
+
+
+var workerCode$1 = transform(getWorkerString(function () {
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ *
+   * A lightweight readable stream implementation that handles event dispatching.
+ * Objects that inherit from streams should call init in their constructors.
+ */
+ var Stream = function Stream() {
+ this.init = function () {
+ var listeners = {};
+ /**
+ * Add a listener for a specified event type.
+ * @param type {string} the event name
+ * @param listener {function} the callback to be invoked when an event of
+ * the specified type occurs
+ */
+
+ this.on = function (type, listener) {
+ if (!listeners[type]) {
+ listeners[type] = [];
+ }
+
+ listeners[type] = listeners[type].concat(listener);
+ };
+ /**
+ * Remove a listener for a specified event type.
+ * @param type {string} the event name
+ * @param listener {function} a function previously registered for this
+ * type of event through `on`
+ */
+
+
+ this.off = function (type, listener) {
+ var index;
+
+ if (!listeners[type]) {
+ return false;
+ }
+
+ index = listeners[type].indexOf(listener);
+ listeners[type] = listeners[type].slice();
+ listeners[type].splice(index, 1);
+ return index > -1;
+ };
+ /**
+ * Trigger an event of the specified type on this stream. Any additional
+ * arguments to this function are passed as parameters to event listeners.
+ * @param type {string} the event name
+ */
+
+
+ this.trigger = function (type) {
+ var callbacks, i, length, args;
+ callbacks = listeners[type];
+
+ if (!callbacks) {
+ return;
+ } // Slicing the arguments on every invocation of this method
+ // can add a significant amount of overhead. Avoid the
+ // intermediate object creation for the common case of a
+ // single callback argument
+
+
+ if (arguments.length === 2) {
+ length = callbacks.length;
+
+ for (i = 0; i < length; ++i) {
+ callbacks[i].call(this, arguments[1]);
+ }
+ } else {
+ args = [];
+ i = arguments.length;
+
+ for (i = 1; i < arguments.length; ++i) {
+ args.push(arguments[i]);
+ }
+
+ length = callbacks.length;
+
+ for (i = 0; i < length; ++i) {
+ callbacks[i].apply(this, args);
+ }
+ }
+ };
+ /**
+ * Destroys the stream and cleans up.
+ */
+
+
+ this.dispose = function () {
+ listeners = {};
+ };
+ };
+ };
+ /**
+ * Forwards all `data` events on this stream to the destination stream. The
+ * destination stream should provide a method `push` to receive the data
+ * events as they arrive.
+ * @param destination {stream} the stream that will receive all `data` events
+ * @param autoFlush {boolean} if false, we will not call `flush` on the destination
+ * when the current stream emits a 'done' event
+ * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
+ */
+
+
+ Stream.prototype.pipe = function (destination) {
+ this.on('data', function (data) {
+ destination.push(data);
+ });
+ this.on('done', function (flushSource) {
+ destination.flush(flushSource);
+ });
+ this.on('partialdone', function (flushSource) {
+ destination.partialFlush(flushSource);
+ });
+ this.on('endedtimeline', function (flushSource) {
+ destination.endTimeline(flushSource);
+ });
+ this.on('reset', function (flushSource) {
+ destination.reset(flushSource);
+ });
+ return destination;
+ }; // Default stream functions that are expected to be overridden to perform
+ // actual work. These are provided by the prototype as a sort of no-op
+ // implementation so that we don't have to check for their existence in the
+ // `pipe` function above.
+
+
+ Stream.prototype.push = function (data) {
+ this.trigger('data', data);
+ };
+
+ Stream.prototype.flush = function (flushSource) {
+ this.trigger('done', flushSource);
+ };
+
+ Stream.prototype.partialFlush = function (flushSource) {
+ this.trigger('partialdone', flushSource);
+ };
+
+ Stream.prototype.endTimeline = function (flushSource) {
+ this.trigger('endedtimeline', flushSource);
+ };
+
+ Stream.prototype.reset = function (flushSource) {
+ this.trigger('reset', flushSource);
+ };
+
+ var stream = Stream;
+ var MAX_UINT32$1 = Math.pow(2, 32);
+
+ var getUint64$2 = function getUint64(uint8) {
+ var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
+ var value;
+
+ if (dv.getBigUint64) {
+ value = dv.getBigUint64(0);
+
+ if (value < Number.MAX_SAFE_INTEGER) {
+ return Number(value);
+ }
+
+ return value;
+ }
+
+ return dv.getUint32(0) * MAX_UINT32$1 + dv.getUint32(4);
+ };
+
+ var numbers = {
+ getUint64: getUint64$2,
+ MAX_UINT32: MAX_UINT32$1
+ };
+ var MAX_UINT32 = numbers.MAX_UINT32;
+ var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
+
+ (function () {
+ var i;
+ types = {
+ avc1: [],
+ // codingname
+ avcC: [],
+ btrt: [],
+ dinf: [],
+ dref: [],
+ esds: [],
+ ftyp: [],
+ hdlr: [],
+ mdat: [],
+ mdhd: [],
+ mdia: [],
+ mfhd: [],
+ minf: [],
+ moof: [],
+ moov: [],
+ mp4a: [],
+ // codingname
+ mvex: [],
+ mvhd: [],
+ pasp: [],
+ sdtp: [],
+ smhd: [],
+ stbl: [],
+ stco: [],
+ stsc: [],
+ stsd: [],
+ stsz: [],
+ stts: [],
+ styp: [],
+ tfdt: [],
+ tfhd: [],
+ traf: [],
+ trak: [],
+ trun: [],
+ trex: [],
+ tkhd: [],
+ vmhd: []
+ }; // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
+ // don't throw an error
+
+ if (typeof Uint8Array === 'undefined') {
+ return;
+ }
+
+ for (i in types) {
+ if (types.hasOwnProperty(i)) {
+ types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
+ }
+ }
+
+ MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
+ AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
+ MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
+ VIDEO_HDLR = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x00, // pre_defined
+ 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
+ ]);
+ AUDIO_HDLR = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x00, // pre_defined
+ 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
+ ]);
+ HDLR_TYPES = {
+ video: VIDEO_HDLR,
+ audio: AUDIO_HDLR
+ };
+ DREF = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x01, // entry_count
+ 0x00, 0x00, 0x00, 0x0c, // entry_size
+ 0x75, 0x72, 0x6c, 0x20, // 'url' type
+ 0x00, // version 0
+ 0x00, 0x00, 0x01 // entry_flags
+ ]);
+ SMHD = new Uint8Array([0x00, // version
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, // balance, 0 means centered
+ 0x00, 0x00 // reserved
+ ]);
+ STCO = new Uint8Array([0x00, // version
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x00 // entry_count
+ ]);
+ STSC = STCO;
+ STSZ = new Uint8Array([0x00, // version
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x00, // sample_size
+ 0x00, 0x00, 0x00, 0x00 // sample_count
+ ]);
+ STTS = STCO;
+ VMHD = new Uint8Array([0x00, // version
+ 0x00, 0x00, 0x01, // flags
+ 0x00, 0x00, // graphicsmode
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
+ ]);
+ })();
+
+ box = function box(type) {
+ var payload = [],
+ size = 0,
+ i,
+ result,
+ view;
+
+ for (i = 1; i < arguments.length; i++) {
+ payload.push(arguments[i]);
+ }
+
+ i = payload.length; // calculate the total size we need to allocate
+
+ while (i--) {
+ size += payload[i].byteLength;
+ }
+
+ result = new Uint8Array(size + 8);
+ view = new DataView(result.buffer, result.byteOffset, result.byteLength);
+ view.setUint32(0, result.byteLength);
+ result.set(type, 4); // copy the payload into the result
+
+ for (i = 0, size = 8; i < payload.length; i++) {
+ result.set(payload[i], size);
+ size += payload[i].byteLength;
+ }
+
+ return result;
+ };
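+ // The helper above emits the generic ISO BMFF box layout: a 4-byte big-endian
+ // size that includes the 8-byte header, the 4-byte type code, then the payloads
+ // in order. For example, ftyp() below wraps 16 bytes of brand data, so the
+ // resulting box is 24 bytes and begins 0x00 0x00 0x00 0x18 'f' 't' 'y' 'p'.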
+
+ dinf = function dinf() {
+ return box(types.dinf, box(types.dref, DREF));
+ };
+
+ esds = function esds(track) {
+ return box(types.esds, new Uint8Array([0x00, // version
+ 0x00, 0x00, 0x00, // flags
+ // ES_Descriptor
+ 0x03, // tag, ES_DescrTag
+ 0x19, // length
+ 0x00, 0x00, // ES_ID
+ 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
+ // DecoderConfigDescriptor
+ 0x04, // tag, DecoderConfigDescrTag
+ 0x11, // length
+ 0x40, // object type
+ 0x15, // streamType
+ 0x00, 0x06, 0x00, // bufferSizeDB
+ 0x00, 0x00, 0xda, 0xc0, // maxBitrate
+ 0x00, 0x00, 0xda, 0xc0, // avgBitrate
+ // DecoderSpecificInfo
+ 0x05, // tag, DecoderSpecificInfoTag
+ 0x02, // length
+ // ISO/IEC 14496-3, AudioSpecificConfig
+ // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
+ track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
+ ]));
+ };
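+ // Worked example for the two AudioSpecificConfig bytes above (illustrative):
+ // with AAC-LC (audioobjecttype 2), 44.1 kHz (samplingfrequencyindex 4) and
+ // stereo (channelcount 2) they evaluate to 2 << 3 | 4 >>> 1 = 0x12 and
+ // (4 << 7 | 2 << 3) & 0xff = 0x10, i.e. the familiar 0x12 0x10 config pair.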
+
+ ftyp = function ftyp() {
+ return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
+ };
+
+ hdlr = function hdlr(type) {
+ return box(types.hdlr, HDLR_TYPES[type]);
+ };
+
+ mdat = function mdat(data) {
+ return box(types.mdat, data);
+ };
+
+ mdhd = function mdhd(track) {
+ var result = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x02, // creation_time
+ 0x00, 0x00, 0x00, 0x03, // modification_time
+ 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
+ track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
+ 0x55, 0xc4, // 'und' language (undetermined)
+ 0x00, 0x00]); // Use the sample rate from the track metadata, when it is
+ // defined. The sample rate can be parsed out of an ADTS header, for
+ // instance.
+
+ if (track.samplerate) {
+ result[12] = track.samplerate >>> 24 & 0xFF;
+ result[13] = track.samplerate >>> 16 & 0xFF;
+ result[14] = track.samplerate >>> 8 & 0xFF;
+ result[15] = track.samplerate & 0xFF;
+ }
+
+ return box(types.mdhd, result);
+ };
+
+ mdia = function mdia(track) {
+ return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
+ };
+
+ mfhd = function mfhd(sequenceNumber) {
+ return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
+ (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
+ ]));
+ };
+
+ minf = function minf(track) {
+ return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
+ };
+
+ moof = function moof(sequenceNumber, tracks) {
+ var trackFragments = [],
+ i = tracks.length; // build traf boxes for each track fragment
+
+ while (i--) {
+ trackFragments[i] = traf(tracks[i]);
+ }
+
+ return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
+ };
+ /**
+ * Returns a movie box.
+ * @param tracks {array} the tracks associated with this movie
+ * @see ISO/IEC 14496-12:2012(E), section 8.2.1
+ */
+
+
+ moov = function moov(tracks) {
+ var i = tracks.length,
+ boxes = [];
+
+ while (i--) {
+ boxes[i] = trak(tracks[i]);
+ }
+
+ return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
+ };
+
+ mvex = function mvex(tracks) {
+ var i = tracks.length,
+ boxes = [];
+
+ while (i--) {
+ boxes[i] = trex(tracks[i]);
+ }
+
+ return box.apply(null, [types.mvex].concat(boxes));
+ };
+
+ mvhd = function mvhd(duration) {
+ var bytes = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x01, // creation_time
+ 0x00, 0x00, 0x00, 0x02, // modification_time
+ 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
+ (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
+ 0x00, 0x01, 0x00, 0x00, // 1.0 rate
+ 0x01, 0x00, // 1.0 volume
+ 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
+ 0xff, 0xff, 0xff, 0xff // next_track_ID
+ ]);
+ return box(types.mvhd, bytes);
+ };
+
+ sdtp = function sdtp(track) {
+ var samples = track.samples || [],
+ bytes = new Uint8Array(4 + samples.length),
+ flags,
+ i; // leave the full box header (4 bytes) all zero
+ // write the sample table
+
+ for (i = 0; i < samples.length; i++) {
+ flags = samples[i].flags;
+ bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
+ }
+
+ return box(types.sdtp, bytes);
+ };
+
+ stbl = function stbl(track) {
+ return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
+ };
+
+ (function () {
+ var videoSample, audioSample;
+
+ stsd = function stsd(track) {
+ return box(types.stsd, new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ 0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
+ };
+
+ videoSample = function videoSample(track) {
+ var sps = track.sps || [],
+ pps = track.pps || [],
+ sequenceParameterSets = [],
+ pictureParameterSets = [],
+ i,
+ avc1Box; // assemble the SPSs
+
+ for (i = 0; i < sps.length; i++) {
+ sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
+ sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
+
+ sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
+ } // assemble the PPSs
+
+
+ for (i = 0; i < pps.length; i++) {
+ pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
+ pictureParameterSets.push(pps[i].byteLength & 0xFF);
+ pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
+ }
+
+ avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x01, // data_reference_index
+ 0x00, 0x00, // pre_defined
+ 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
+ (track.width & 0xff00) >> 8, track.width & 0xff, // width
+ (track.height & 0xff00) >> 8, track.height & 0xff, // height
+ 0x00, 0x48, 0x00, 0x00, // horizresolution
+ 0x00, 0x48, 0x00, 0x00, // vertresolution
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x01, // frame_count
+ 0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
+ 0x00, 0x18, // depth = 24
+ 0x11, 0x11 // pre_defined = -1
+ ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
+ track.profileIdc, // AVCProfileIndication
+ track.profileCompatibility, // profile_compatibility
+ track.levelIdc, // AVCLevelIndication
+ 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
+ ].concat([sps.length], // numOfSequenceParameterSets
+ sequenceParameterSets, // "SPS"
+ [pps.length], // numOfPictureParameterSets
+ pictureParameterSets // "PPS"
+ ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
+ 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
+ 0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
+ ]))];
+
+ if (track.sarRatio) {
+ var hSpacing = track.sarRatio[0],
+ vSpacing = track.sarRatio[1];
+ avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
+ }
+
+ return box.apply(null, avc1Box);
+ };
+
+ audioSample = function audioSample(track) {
+ return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x01, // data_reference_index
+ // AudioSampleEntry, ISO/IEC 14496-12
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
+ (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
+ 0x00, 0x00, // pre_defined
+ 0x00, 0x00, // reserved
+ (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
+ // MP4AudioSampleEntry, ISO/IEC 14496-14
+ ]), esds(track));
+ };
+ })();
+
+ tkhd = function tkhd(track) {
+ var result = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x07, // flags
+ 0x00, 0x00, 0x00, 0x00, // creation_time
+ 0x00, 0x00, 0x00, 0x00, // modification_time
+ (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
+ 0x00, 0x00, 0x00, 0x00, // reserved
+ (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
+ 0x00, 0x00, // layer
+ 0x00, 0x00, // alternate_group
+ 0x01, 0x00, // non-audio track volume
+ 0x00, 0x00, // reserved
+ 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
+ (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
+ (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
+ ]);
+ return box(types.tkhd, result);
+ };
+ /**
+ * Generate a track fragment (traf) box. A traf box collects metadata
+ * about tracks in a movie fragment (moof) box.
+ */
+
+
+ traf = function traf(track) {
+ var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
+ trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x3a, // flags
+ (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
+ 0x00, 0x00, 0x00, 0x01, // sample_description_index
+ 0x00, 0x00, 0x00, 0x00, // default_sample_duration
+ 0x00, 0x00, 0x00, 0x00, // default_sample_size
+ 0x00, 0x00, 0x00, 0x00 // default_sample_flags
+ ]));
+ upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / MAX_UINT32);
+ lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % MAX_UINT32);
+ trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
+ 0x00, 0x00, 0x00, // flags
+ // baseMediaDecodeTime
+ upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
+ // the containing moof to the first payload byte of the associated
+ // mdat
+
+ dataOffset = 32 + // tfhd
+ 20 + // tfdt
+ 8 + // traf header
+ 16 + // mfhd
+ 8 + // moof header
+ 8; // mdat header
+ // audio tracks require less metadata
+
+ if (track.type === 'audio') {
+ trackFragmentRun = trun$1(track, dataOffset);
+ return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
+ } // video tracks should contain an independent and disposable samples
+ // box (sdtp)
+ // generate one and adjust offsets to match
+
+
+ sampleDependencyTable = sdtp(track);
+ trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
+ return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
+ };
+ /**
+ * Generate a track box.
+ * @param track {object} a track definition
+ * @return {Uint8Array} the track box
+ */
+
+
+ trak = function trak(track) {
+ track.duration = track.duration || 0xffffffff;
+ return box(types.trak, tkhd(track), mdia(track));
+ };
+
+ trex = function trex(track) {
+ var result = new Uint8Array([0x00, // version 0
+ 0x00, 0x00, 0x00, // flags
+ (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
+ 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
+ 0x00, 0x00, 0x00, 0x00, // default_sample_duration
+ 0x00, 0x00, 0x00, 0x00, // default_sample_size
+ 0x00, 0x01, 0x00, 0x01 // default_sample_flags
+ ]); // the last two bytes of default_sample_flags is the sample
+ // degradation priority, a hint about the importance of this sample
+ // relative to others. Lower the degradation priority for all sample
+ // types other than video.
+
+ if (track.type !== 'video') {
+ result[result.length - 1] = 0x00;
+ }
+
+ return box(types.trex, result);
+ };
+
+ (function () {
+ var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
+ // duration is present for the first sample, it will be present for
+ // all subsequent samples.
+ // see ISO/IEC 14496-12:2012, Section 8.8.8.1
+
+ trunHeader = function trunHeader(samples, offset) {
+ var durationPresent = 0,
+ sizePresent = 0,
+ flagsPresent = 0,
+ compositionTimeOffset = 0; // trun flag constants
+
+ if (samples.length) {
+ if (samples[0].duration !== undefined) {
+ durationPresent = 0x1;
+ }
+
+ if (samples[0].size !== undefined) {
+ sizePresent = 0x2;
+ }
+
+ if (samples[0].flags !== undefined) {
+ flagsPresent = 0x4;
+ }
+
+ if (samples[0].compositionTimeOffset !== undefined) {
+ compositionTimeOffset = 0x8;
+ }
+ }
+
+ return [0x00, // version 0
+ 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
+ (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
+ (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
+ ];
+ };
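+ // The flags byte assembled above maps onto the trun tr_flags of
+ // ISO/IEC 14496-12, section 8.8.8: 0x000001 data-offset-present (always set via
+ // the trailing 0x01), 0x000100 sample-duration-present, 0x000200
+ // sample-size-present, 0x000400 sample-flags-present and 0x000800
+ // sample-composition-time-offsets-present.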
+
+ videoTrun = function videoTrun(track, offset) {
+ var bytesOffest, bytes, header, samples, sample, i;
+ samples = track.samples || [];
+ offset += 8 + 12 + 16 * samples.length;
+ header = trunHeader(samples, offset);
+ bytes = new Uint8Array(header.length + samples.length * 16);
+ bytes.set(header);
+ bytesOffest = header.length;
+
+ for (i = 0; i < samples.length; i++) {
+ sample = samples[i];
+ bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
+ bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
+ bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
+ bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration
+
+ bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
+ bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
+ bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
+ bytes[bytesOffest++] = sample.size & 0xFF; // sample_size
+
+ bytes[bytesOffest++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
+ bytes[bytesOffest++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
+ bytes[bytesOffest++] = sample.flags.degradationPriority & 0xF0 << 8;
+ bytes[bytesOffest++] = sample.flags.degradationPriority & 0x0F; // sample_flags
+
+ bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
+ bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
+ bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
+ bytes[bytesOffest++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
+ }
+
+ return box(types.trun, bytes);
+ };
+
+ audioTrun = function audioTrun(track, offset) {
+ var bytes, bytesOffest, header, samples, sample, i;
+ samples = track.samples || [];
+ offset += 8 + 12 + 8 * samples.length;
+ header = trunHeader(samples, offset);
+ bytes = new Uint8Array(header.length + samples.length * 8);
+ bytes.set(header);
+ bytesOffest = header.length;
+
+ for (i = 0; i < samples.length; i++) {
+ sample = samples[i];
+ bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
+ bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
+ bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
+ bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration
+
+ bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
+ bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
+ bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
+ bytes[bytesOffest++] = sample.size & 0xFF; // sample_size
+ }
+
+ return box(types.trun, bytes);
+ };
+
+ trun$1 = function trun(track, offset) {
+ if (track.type === 'audio') {
+ return audioTrun(track, offset);
+ }
+
+ return videoTrun(track, offset);
+ };
+ })();
+
+ var mp4Generator = {
+ ftyp: ftyp,
+ mdat: mdat,
+ moof: moof,
+ moov: moov,
+ initSegment: function initSegment(tracks) {
+ var fileType = ftyp(),
+ movie = moov(tracks),
+ result;
+ result = new Uint8Array(fileType.byteLength + movie.byteLength);
+ result.set(fileType);
+ result.set(movie, fileType.byteLength);
+ return result;
+ }
+ };
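+ // Illustrative usage (not part of the library): a fragmented MP4 stream is one
+ // init segment followed by media segments, roughly
+ //   var init = mp4Generator.initSegment(tracks);              // ftyp + moov
+ //   var media = [mp4Generator.moof(seq, tracks), mp4Generator.mdat(bytes)];
+ // where `seq` and `bytes` come from the surrounding transmuxer code.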
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+ // Convert an array of nal units into an array of frames with each frame being
+ // composed of the nal units that make up that frame
+ // Also keep track of cumulative data about the frame from the nal units such
+ // as the frame duration, starting pts, etc.
+
+ var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
+ var i,
+ currentNal,
+ currentFrame = [],
+ frames = []; // TODO added for LHLS, make sure this is OK
+
+ frames.byteLength = 0;
+ frames.nalCount = 0;
+ frames.duration = 0;
+ currentFrame.byteLength = 0;
+
+ for (i = 0; i < nalUnits.length; i++) {
+ currentNal = nalUnits[i]; // Split on 'aud'-type nal units
+
+ if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
+ // Since the very first nal unit is expected to be an AUD
+ // only push to the frames array when currentFrame is not empty
+ if (currentFrame.length) {
+ currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
+
+ frames.byteLength += currentFrame.byteLength;
+ frames.nalCount += currentFrame.length;
+ frames.duration += currentFrame.duration;
+ frames.push(currentFrame);
+ }
+
+ currentFrame = [currentNal];
+ currentFrame.byteLength = currentNal.data.byteLength;
+ currentFrame.pts = currentNal.pts;
+ currentFrame.dts = currentNal.dts;
+ } else {
+ // Specifically flag key frames for ease of use later
+ if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
+ currentFrame.keyFrame = true;
+ }
+
+ currentFrame.duration = currentNal.dts - currentFrame.dts;
+ currentFrame.byteLength += currentNal.data.byteLength;
+ currentFrame.push(currentNal);
+ }
+ } // For the last frame, use the duration of the previous frame if we
+ // have nothing better to go on
+
+
+ if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
+ currentFrame.duration = frames[frames.length - 1].duration;
+ } // Push the final frame
+ // TODO added for LHLS, make sure this is OK
+
+
+ frames.byteLength += currentFrame.byteLength;
+ frames.nalCount += currentFrame.length;
+ frames.duration += currentFrame.duration;
+ frames.push(currentFrame);
+ return frames;
+ }; // Convert an array of frames into an array of Gop with each Gop being composed
+ // of the frames that make up that Gop
+ // Also keep track of cumulative data about the Gop from the frames such as the
+ // Gop duration, starting pts, etc.
+
+
+ var groupFramesIntoGops = function groupFramesIntoGops(frames) {
+ var i,
+ currentFrame,
+ currentGop = [],
+ gops = []; // We must pre-set some of the values on the Gop since we
+ // keep running totals of these values
+
+ currentGop.byteLength = 0;
+ currentGop.nalCount = 0;
+ currentGop.duration = 0;
+ currentGop.pts = frames[0].pts;
+ currentGop.dts = frames[0].dts; // store some metadata about all the Gops
+
+ gops.byteLength = 0;
+ gops.nalCount = 0;
+ gops.duration = 0;
+ gops.pts = frames[0].pts;
+ gops.dts = frames[0].dts;
+
+ for (i = 0; i < frames.length; i++) {
+ currentFrame = frames[i];
+
+ if (currentFrame.keyFrame) {
+ // Since the very first frame is expected to be a keyframe
+ // only push to the gops array when currentGop is not empty
+ if (currentGop.length) {
+ gops.push(currentGop);
+ gops.byteLength += currentGop.byteLength;
+ gops.nalCount += currentGop.nalCount;
+ gops.duration += currentGop.duration;
+ }
+
+ currentGop = [currentFrame];
+ currentGop.nalCount = currentFrame.length;
+ currentGop.byteLength = currentFrame.byteLength;
+ currentGop.pts = currentFrame.pts;
+ currentGop.dts = currentFrame.dts;
+ currentGop.duration = currentFrame.duration;
+ } else {
+ currentGop.duration += currentFrame.duration;
+ currentGop.nalCount += currentFrame.length;
+ currentGop.byteLength += currentFrame.byteLength;
+ currentGop.push(currentFrame);
+ }
+ }
+
+ if (gops.length && currentGop.duration <= 0) {
+ currentGop.duration = gops[gops.length - 1].duration;
+ }
+
+ gops.byteLength += currentGop.byteLength;
+ gops.nalCount += currentGop.nalCount;
+ gops.duration += currentGop.duration; // push the final Gop
+
+ gops.push(currentGop);
+ return gops;
+ };
+ /*
+ * Search for the first keyframe in the GOPs and throw away all frames
+ * until that keyframe. Then extend the duration of the pulled keyframe
+ * and pull the PTS and DTS of the keyframe so that it covers the time
+ * range of the frames that were disposed.
+ *
+ * @param {Array} gops video GOPs
+ * @returns {Array} modified video GOPs
+ */
+
+
+ var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
+ var currentGop;
+
+ if (!gops[0][0].keyFrame && gops.length > 1) {
+ // Remove the first GOP
+ currentGop = gops.shift();
+ gops.byteLength -= currentGop.byteLength;
+ gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
+ // first gop to cover the time period of the
+ // frames we just removed
+
+ gops[0][0].dts = currentGop.dts;
+ gops[0][0].pts = currentGop.pts;
+ gops[0][0].duration += currentGop.duration;
+ }
+
+ return gops;
+ };
+ /**
+ * Default sample object
+ * see ISO/IEC 14496-12:2012, section 8.6.4.3
+ */
+
+
+ var createDefaultSample = function createDefaultSample() {
+ return {
+ size: 0,
+ flags: {
+ isLeading: 0,
+ dependsOn: 1,
+ isDependedOn: 0,
+ hasRedundancy: 0,
+ degradationPriority: 0,
+ isNonSyncSample: 1
+ }
+ };
+ };
+ /*
+ * Collates information from a video frame into an object for eventual
+ * entry into an MP4 sample table.
+ *
+ * @param {Object} frame the video frame
+ * @param {Number} dataOffset the byte offset to position the sample
+ * @return {Object} object containing sample table info for a frame
+ */
+
+
+ var sampleForFrame = function sampleForFrame(frame, dataOffset) {
+ var sample = createDefaultSample();
+ sample.dataOffset = dataOffset;
+ sample.compositionTimeOffset = frame.pts - frame.dts;
+ sample.duration = frame.duration;
+ sample.size = 4 * frame.length; // Space for nal unit size
+
+ sample.size += frame.byteLength;
+
+ if (frame.keyFrame) {
+ sample.flags.dependsOn = 2;
+ sample.flags.isNonSyncSample = 0;
+ }
+
+ return sample;
+ }; // generate the track's sample table from an array of gops
+
+
+ var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
+ var h,
+ i,
+ sample,
+ currentGop,
+ currentFrame,
+ dataOffset = baseDataOffset || 0,
+ samples = [];
+
+ for (h = 0; h < gops.length; h++) {
+ currentGop = gops[h];
+
+ for (i = 0; i < currentGop.length; i++) {
+ currentFrame = currentGop[i];
+ sample = sampleForFrame(currentFrame, dataOffset);
+ dataOffset += sample.size;
+ samples.push(sample);
+ }
+ }
+
+ return samples;
+ }; // generate the track's raw mdat data from an array of gops
+
+
+ var concatenateNalData = function concatenateNalData(gops) {
+ var h,
+ i,
+ j,
+ currentGop,
+ currentFrame,
+ currentNal,
+ dataOffset = 0,
+ nalsByteLength = gops.byteLength,
+ numberOfNals = gops.nalCount,
+ totalByteLength = nalsByteLength + 4 * numberOfNals,
+ data = new Uint8Array(totalByteLength),
+ view = new DataView(data.buffer); // For each Gop..
+
+ for (h = 0; h < gops.length; h++) {
+ currentGop = gops[h]; // For each Frame..
+
+ for (i = 0; i < currentGop.length; i++) {
+ currentFrame = currentGop[i]; // For each NAL..
+
+ for (j = 0; j < currentFrame.length; j++) {
+ currentNal = currentFrame[j];
+ view.setUint32(dataOffset, currentNal.data.byteLength);
+ dataOffset += 4;
+ data.set(currentNal.data, dataOffset);
+ dataOffset += currentNal.data.byteLength;
+ }
+ }
+ }
+
+ return data;
+ }; // generate the track's sample table from a frame
+
+
+ var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
+ var sample,
+ dataOffset = baseDataOffset || 0,
+ samples = [];
+ sample = sampleForFrame(frame, dataOffset);
+ samples.push(sample);
+ return samples;
+ }; // generate the track's raw mdat data from a frame
+
+
+ var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
+ var i,
+ currentNal,
+ dataOffset = 0,
+ nalsByteLength = frame.byteLength,
+ numberOfNals = frame.length,
+ totalByteLength = nalsByteLength + 4 * numberOfNals,
+ data = new Uint8Array(totalByteLength),
+ view = new DataView(data.buffer); // For each NAL..
+
+ for (i = 0; i < frame.length; i++) {
+ currentNal = frame[i];
+ view.setUint32(dataOffset, currentNal.data.byteLength);
+ dataOffset += 4;
+ data.set(currentNal.data, dataOffset);
+ dataOffset += currentNal.data.byteLength;
+ }
+
+ return data;
+ };
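+ // Both concatenate helpers above write every NAL unit with a 4-byte big-endian
+ // length prefix (AVCC-style framing), which matches the lengthSizeMinusOne
+ // value hard-coded to 4 bytes in the avcC box generated earlier.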
+
+ var frameUtils = {
+ groupNalsIntoFrames: groupNalsIntoFrames,
+ groupFramesIntoGops: groupFramesIntoGops,
+ extendFirstKeyFrame: extendFirstKeyFrame,
+ generateSampleTable: generateSampleTable$1,
+ concatenateNalData: concatenateNalData,
+ generateSampleTableForFrame: generateSampleTableForFrame,
+ concatenateNalDataForFrame: concatenateNalDataForFrame
+ };
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+ var highPrefix = [33, 16, 5, 32, 164, 27];
+ var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
+
+ var zeroFill = function zeroFill(count) {
+ var a = [];
+
+ while (count--) {
+ a.push(0);
+ }
+
+ return a;
+ };
+
+ var makeTable = function makeTable(metaTable) {
+ return Object.keys(metaTable).reduce(function (obj, key) {
+ obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
+ return arr.concat(part);
+ }, []));
+ return obj;
+ }, {});
+ };
+
+ var silence;
+
+ var silence_1 = function silence_1() {
+ if (!silence) {
+ // Frames-of-silence to use for filling in missing AAC frames
+ var coneOfSilence = {
+ 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
+ 88200: [highPrefix, [231], zeroFill(170), [56]],
+ 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
+ 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
+ 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
+ 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
+ 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
+ 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
+ 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
+ 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
+ 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
+ };
+ silence = makeTable(coneOfSilence);
+ }
+
+ return silence;
+ };
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+
+ var ONE_SECOND_IN_TS$4 = 90000,
+ // 90kHz clock
+ secondsToVideoTs,
+ secondsToAudioTs,
+ videoTsToSeconds,
+ audioTsToSeconds,
+ audioTsToVideoTs,
+ videoTsToAudioTs,
+ metadataTsToSeconds;
+
+ secondsToVideoTs = function secondsToVideoTs(seconds) {
+ return seconds * ONE_SECOND_IN_TS$4;
+ };
+
+ secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
+ return seconds * sampleRate;
+ };
+
+ videoTsToSeconds = function videoTsToSeconds(timestamp) {
+ return timestamp / ONE_SECOND_IN_TS$4;
+ };
+
+ audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
+ return timestamp / sampleRate;
+ };
+
+ audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
+ return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
+ };
+
+ videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
+ return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
+ };
+ /**
+ * Adjust ID3 tag or caption timing information by the timeline pts values
+ * (if keepOriginalTimestamps is false) and convert to seconds
+ */
+
+
+ metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
+ return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
+ };
+
+ var clock = {
+ ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
+ secondsToVideoTs: secondsToVideoTs,
+ secondsToAudioTs: secondsToAudioTs,
+ videoTsToSeconds: videoTsToSeconds,
+ audioTsToSeconds: audioTsToSeconds,
+ audioTsToVideoTs: audioTsToVideoTs,
+ videoTsToAudioTs: videoTsToAudioTs,
+ metadataTsToSeconds: metadataTsToSeconds
+ };
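+ // Quick sanity examples for the clock helpers above (illustrative):
+ //   clock.secondsToVideoTs(1.5) === 135000          // 90kHz ticks
+ //   clock.audioTsToVideoTs(44100, 44100) === 90000  // 1 second of 44.1kHz audio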
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+ /**
+ * Sum the `byteLength` properties of the data in each AAC frame
+ */
+
+ var sumFrameByteLengths = function sumFrameByteLengths(array) {
+ var i,
+ currentObj,
+ sum = 0; // sum the byteLength of the data in each frame
+
+ for (i = 0; i < array.length; i++) {
+ currentObj = array[i];
+ sum += currentObj.data.byteLength;
+ }
+
+ return sum;
+ }; // Possibly pad (prefix) the audio track with silence if appending this track
+ // would lead to the introduction of a gap in the audio buffer
+
+
+ var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
+ var baseMediaDecodeTimeTs,
+ frameDuration = 0,
+ audioGapDuration = 0,
+ audioFillFrameCount = 0,
+ audioFillDuration = 0,
+ silentFrame,
+ i,
+ firstFrame;
+
+ if (!frames.length) {
+ return;
+ }
+
+ baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills
+
+ frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));
+
+ if (audioAppendStartTs && videoBaseMediaDecodeTime) {
+ // insert the shortest possible amount (audio gap or audio to video gap)
+ audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap
+
+ audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
+ audioFillDuration = audioFillFrameCount * frameDuration;
+ } // don't attempt to fill gaps smaller than a single frame or larger
+ // than a half second
+
+
+ if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
+ return;
+ }
+
+ silentFrame = silence_1()[track.samplerate];
+
+ if (!silentFrame) {
+ // we don't have a silent frame pregenerated for the sample rate, so use a frame
+ // from the content instead
+ silentFrame = frames[0].data;
+ }
+
+ for (i = 0; i < audioFillFrameCount; i++) {
+ firstFrame = frames[0];
+ frames.splice(0, 0, {
+ data: silentFrame,
+ dts: firstFrame.dts - frameDuration,
+ pts: firstFrame.pts - frameDuration
+ });
+ }
+
+ track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
+ return audioFillDuration;
+ }; // If the audio segment extends before the earliest allowed dts
+ // value, remove AAC frames until it starts at or after the earliest
+ // allowed DTS so that we don't end up with a negative baseMedia-
+ // DecodeTime for the audio track
+
+
+ var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
+ if (track.minSegmentDts >= earliestAllowedDts) {
+ return adtsFrames;
+ } // We will need to recalculate the earliest segment Dts
+
+
+ track.minSegmentDts = Infinity;
+ return adtsFrames.filter(function (currentFrame) {
+ // If this is an allowed frame, keep it and record its Dts
+ if (currentFrame.dts >= earliestAllowedDts) {
+ track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
+ track.minSegmentPts = track.minSegmentDts;
+ return true;
+ } // Otherwise, discard it
+
+
+ return false;
+ });
+ }; // generate the track's sample table from an array of frames
+
+
+ var generateSampleTable = function generateSampleTable(frames) {
+ var i,
+ currentFrame,
+ samples = [];
+
+ for (i = 0; i < frames.length; i++) {
+ currentFrame = frames[i];
+ samples.push({
+ size: currentFrame.data.byteLength,
+ duration: 1024 // For AAC audio, all samples contain 1024 samples
+
+ });
+ }
+
+ return samples;
+ }; // generate the track's raw mdat data from an array of frames
+
+
+ var concatenateFrameData = function concatenateFrameData(frames) {
+ var i,
+ currentFrame,
+ dataOffset = 0,
+ data = new Uint8Array(sumFrameByteLengths(frames));
+
+ for (i = 0; i < frames.length; i++) {
+ currentFrame = frames[i];
+ data.set(currentFrame.data, dataOffset);
+ dataOffset += currentFrame.data.byteLength;
+ }
+
+ return data;
+ };
+
+ var audioFrameUtils = {
+ prefixWithSilence: prefixWithSilence,
+ trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
+ generateSampleTable: generateSampleTable,
+ concatenateFrameData: concatenateFrameData
+ };
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+ var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;
+ /**
+ * Store information about the start and end of the track and the
+ * duration for each frame/sample we process in order to calculate
+ * the baseMediaDecodeTime
+ */
+
+ var collectDtsInfo = function collectDtsInfo(track, data) {
+ if (typeof data.pts === 'number') {
+ if (track.timelineStartInfo.pts === undefined) {
+ track.timelineStartInfo.pts = data.pts;
+ }
+
+ if (track.minSegmentPts === undefined) {
+ track.minSegmentPts = data.pts;
+ } else {
+ track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
+ }
+
+ if (track.maxSegmentPts === undefined) {
+ track.maxSegmentPts = data.pts;
+ } else {
+ track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
+ }
+ }
+
+ if (typeof data.dts === 'number') {
+ if (track.timelineStartInfo.dts === undefined) {
+ track.timelineStartInfo.dts = data.dts;
+ }
+
+ if (track.minSegmentDts === undefined) {
+ track.minSegmentDts = data.dts;
+ } else {
+ track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
+ }
+
+ if (track.maxSegmentDts === undefined) {
+ track.maxSegmentDts = data.dts;
+ } else {
+ track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
+ }
+ }
+ };
+ /**
+ * Clear values used to calculate the baseMediaDecodeTime between
+ * tracks
+ */
+
+
+ var clearDtsInfo = function clearDtsInfo(track) {
+ delete track.minSegmentDts;
+ delete track.maxSegmentDts;
+ delete track.minSegmentPts;
+ delete track.maxSegmentPts;
+ };
+ /**
+ * Calculate the track's baseMediaDecodeTime based on the earliest
+ * DTS the transmuxer has ever seen and the minimum DTS for the
+ * current track
+ * @param track {object} track metadata configuration
+ * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
+ * in the source; false to adjust the first segment to start at 0.
+ */
+
+
+ var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
+ var baseMediaDecodeTime,
+ scale,
+ minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
+
+ if (!keepOriginalTimestamps) {
+ minSegmentDts -= track.timelineStartInfo.dts;
+ } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
+ // we want the start of the first segment to be placed
+
+
+ baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
+
+ baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
+
+ baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
+
+ if (track.type === 'audio') {
+ // Audio has a different clock equal to the sampling_rate so we need to
+ // scale the PTS values into the clock rate of the track
+ scale = track.samplerate / ONE_SECOND_IN_TS$3;
+ baseMediaDecodeTime *= scale;
+ baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
+ }
+
+ return baseMediaDecodeTime;
+ };
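+ // Example (illustrative): with keepOriginalTimestamps falsy and a
+ // timelineStartInfo.baseMediaDecodeTime of 0, a minSegmentDts 90000 ticks after
+ // timelineStartInfo.dts on a 44.1 kHz audio track yields
+ // Math.floor(90000 * (44100 / 90000)) = 44100, i.e. one second in the audio timescale.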
+
+ var trackDecodeInfo = {
+ clearDtsInfo: clearDtsInfo,
+ calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
+ collectDtsInfo: collectDtsInfo
+ };
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ *
+ * Reads in-band caption information from a video elementary
+ * stream. Captions must follow the CEA-708 standard for injection
+ * into an MPEG-2 transport streams.
+ * @see https://en.wikipedia.org/wiki/CEA-708
+ * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
+ */
+ // Supplemental enhancement information (SEI) NAL units have a
+ // payload type field to indicate how they are to be
+ // interpreted. CEA-708 caption content is always transmitted with
+ // payload type 0x04.
+
+ var USER_DATA_REGISTERED_ITU_T_T35 = 4,
+ RBSP_TRAILING_BITS = 128;
+ /**
+ * Parse a supplemental enhancement information (SEI) NAL unit.
+ * Stops parsing once a message of type ITU T T35 has been found.
+ *
+ * @param bytes {Uint8Array} the bytes of a SEI NAL unit
+ * @return {object} the parsed SEI payload
+ * @see Rec. ITU-T H.264, 7.3.2.3.1
+ */
+
+ var parseSei = function parseSei(bytes) {
+ var i = 0,
+ result = {
+ payloadType: -1,
+ payloadSize: 0
+ },
+ payloadType = 0,
+ payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
+
+ while (i < bytes.byteLength) {
+ // stop once we have hit the end of the sei_rbsp
+ if (bytes[i] === RBSP_TRAILING_BITS) {
+ break;
+ } // Parse payload type
+
+
+ while (bytes[i] === 0xFF) {
+ payloadType += 255;
+ i++;
+ }
+
+ payloadType += bytes[i++]; // Parse payload size
+
+ while (bytes[i] === 0xFF) {
+ payloadSize += 255;
+ i++;
+ }
+
+ payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
+ // there can only ever be one caption message in a frame's sei
+
+ if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
+ var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
+
+ if (userIdentifier === 'GA94') {
+ result.payloadType = payloadType;
+ result.payloadSize = payloadSize;
+ result.payload = bytes.subarray(i, i + payloadSize);
+ break;
+ } else {
+ result.payload = void 0;
+ }
+ } // skip the payload and parse the next message
+
+
+ i += payloadSize;
+ payloadType = 0;
+ payloadSize = 0;
+ }
+
+ return result;
+ }; // see ANSI/SCTE 128-1 (2013), section 8.1
+
+
+ var parseUserData = function parseUserData(sei) {
+ // itu_t_t35_country_code must be 181 (United States) for
+ // captions
+ if (sei.payload[0] !== 181) {
+ return null;
+ } // itu_t_t35_provider_code should be 49 (ATSC) for captions
+
+
+ if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
+ return null;
+ } // the user_identifier should be "GA94" to indicate ATSC1 data
+
+
+ if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
+ return null;
+ } // finally, user_data_type_code should be 0x03 for caption data
+
+
+ if (sei.payload[7] !== 0x03) {
+ return null;
+ } // return the user_data_type_structure and strip the trailing
+ // marker bits
+
+
+ return sei.payload.subarray(8, sei.payload.length - 1);
+ }; // see CEA-708-D, section 4.4
+
+
+ var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
+ var results = [],
+ i,
+ count,
+ offset,
+ data; // if this is just filler, return immediately
+
+ if (!(userData[0] & 0x40)) {
+ return results;
+ } // parse out the cc_data_1 and cc_data_2 fields
+
+
+ count = userData[0] & 0x1f;
+
+ for (i = 0; i < count; i++) {
+ offset = i * 3;
+ data = {
+ type: userData[offset + 2] & 0x03,
+ pts: pts
+ }; // capture cc data when cc_valid is 1
+
+ if (userData[offset + 2] & 0x04) {
+ data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
+ results.push(data);
+ }
+ }
+
+ return results;
+ };
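+ // cc_data layout handled above: userData[0] carries the process_cc_data flag
+ // (0x40) and cc_count in its low five bits, userData[1] is em_data, and each
+ // construct is a three-byte triple of a marker/cc_valid/cc_type byte followed
+ // by cc_data_1 and cc_data_2, which are combined into the 16-bit ccData value.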
+
+ var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
+ var length = data.byteLength,
+ emulationPreventionBytesPositions = [],
+ i = 1,
+ newLength,
+ newData; // Find all `Emulation Prevention Bytes`
+
+ while (i < length - 2) {
+ if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
+ emulationPreventionBytesPositions.push(i + 2);
+ i += 2;
+ } else {
+ i++;
+ }
+ } // If no Emulation Prevention Bytes were found just return the original
+ // array
+
+
+ if (emulationPreventionBytesPositions.length === 0) {
+ return data;
+ } // Create a new array to hold the NAL unit data
+
+
+ newLength = length - emulationPreventionBytesPositions.length;
+ newData = new Uint8Array(newLength);
+ var sourceIndex = 0;
+
+ for (i = 0; i < newLength; sourceIndex++, i++) {
+ if (sourceIndex === emulationPreventionBytesPositions[0]) {
+ // Skip this byte
+ sourceIndex++; // Remove this position index
+
+ emulationPreventionBytesPositions.shift();
+ }
+
+ newData[i] = data[sourceIndex];
+ }
+
+ return newData;
+ }; // exports
+
+
+ var captionPacketParser = {
+ parseSei: parseSei,
+ parseUserData: parseUserData,
+ parseCaptionPackets: parseCaptionPackets,
+ discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
+ USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
+ }; // Link To Transport
+ // -----------------
+
+ var CaptionStream$1 = function CaptionStream(options) {
+ options = options || {};
+ CaptionStream.prototype.init.call(this); // parse708captions flag, default to true
+
+ this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
+ this.captionPackets_ = [];
+ this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
+ new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
+ new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
+ new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
+ ];
+
+ if (this.parse708captions_) {
+ this.cc708Stream_ = new Cea708Stream({
+ captionServices: options.captionServices
+ }); // eslint-disable-line no-use-before-define
+ }
+
+ this.reset(); // forward data and done events from CCs to this CaptionStream
+
+ this.ccStreams_.forEach(function (cc) {
+ cc.on('data', this.trigger.bind(this, 'data'));
+ cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
+ cc.on('done', this.trigger.bind(this, 'done'));
+ }, this);
+
+ if (this.parse708captions_) {
+ this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
+ this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
+ this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
+ }
+ };
+
+ CaptionStream$1.prototype = new stream();
+
+ CaptionStream$1.prototype.push = function (event) {
+ var sei, userData, newCaptionPackets; // only examine SEI NALs
+
+ if (event.nalUnitType !== 'sei_rbsp') {
+ return;
+ } // parse the sei
+
+
+ sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip
+
+ if (!sei.payload) {
+ return;
+ } // ignore everything but user_data_registered_itu_t_t35
+
+
+ if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
+ return;
+ } // parse out the user data payload
+
+
+ userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData
+
+ if (!userData) {
+ return;
+ } // Sometimes, the same segment # will be downloaded twice. To stop the
+ // caption data from being processed twice, we track the latest dts we've
+ // received and ignore everything with a dts before that. However, since
+ // data for a specific dts can be split across packets on either side of
+ // a segment boundary, we need to make sure we *don't* ignore the packets
+ // from the *next* segment that have dts === this.latestDts_. By constantly
+ // tracking the number of packets received with dts === this.latestDts_, we
+ // know how many should be ignored once we start receiving duplicates.
+
+
+ if (event.dts < this.latestDts_) {
+ // We've started getting older data, so set the flag.
+ this.ignoreNextEqualDts_ = true;
+ return;
+ } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
+ this.numSameDts_--;
+
+ if (!this.numSameDts_) {
+ // We've received the last duplicate packet, time to start processing again
+ this.ignoreNextEqualDts_ = false;
+ }
+
+ return;
+ } // parse out CC data packets and save them for later
+
+
+ newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
+ this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
+
+ if (this.latestDts_ !== event.dts) {
+ this.numSameDts_ = 0;
+ }
+
+ this.numSameDts_++;
+ this.latestDts_ = event.dts;
+ };
+
+ CaptionStream$1.prototype.flushCCStreams = function (flushType) {
+ this.ccStreams_.forEach(function (cc) {
+ return flushType === 'flush' ? cc.flush() : cc.partialFlush();
+ }, this);
+ };
+
+ CaptionStream$1.prototype.flushStream = function (flushType) {
+ // make sure we actually parsed captions before proceeding
+ if (!this.captionPackets_.length) {
+ this.flushCCStreams(flushType);
+ return;
+ } // In Chrome, the Array#sort function is not stable so add a
+ // presortIndex that we can use to ensure we get a stable-sort
+
+
+ this.captionPackets_.forEach(function (elem, idx) {
+ elem.presortIndex = idx;
+ }); // sort caption byte-pairs based on their PTS values
+
+ this.captionPackets_.sort(function (a, b) {
+ if (a.pts === b.pts) {
+ return a.presortIndex - b.presortIndex;
+ }
+
+ return a.pts - b.pts;
+ });
+ this.captionPackets_.forEach(function (packet) {
+ if (packet.type < 2) {
+ // Dispatch packet to the right Cea608Stream
+ this.dispatchCea608Packet(packet);
+ } else {
+ // Dispatch packet to the Cea708Stream
+ this.dispatchCea708Packet(packet);
+ }
+ }, this);
+ this.captionPackets_.length = 0;
+ this.flushCCStreams(flushType);
+ };
+
+ CaptionStream$1.prototype.flush = function () {
+ return this.flushStream('flush');
+ }; // Only called if handling partial data
+
+
+ CaptionStream$1.prototype.partialFlush = function () {
+ return this.flushStream('partialFlush');
+ };
+
+ CaptionStream$1.prototype.reset = function () {
+ this.latestDts_ = null;
+ this.ignoreNextEqualDts_ = false;
+ this.numSameDts_ = 0;
+ this.activeCea608Channel_ = [null, null];
+ this.ccStreams_.forEach(function (ccStream) {
+ ccStream.reset();
+ });
+ }; // From the CEA-608 spec:
+
+ /*
+ * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
+ * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
+ * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
+ * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
+ * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
+ * to switch to captioning or Text.
+ */
+ // With that in mind, we ignore any data between an XDS control code and a
+ // subsequent closed-captioning control code.
+
+
+ CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
+ // NOTE: packet.type is the CEA608 field
+ if (this.setsTextOrXDSActive(packet)) {
+ this.activeCea608Channel_[packet.type] = null;
+ } else if (this.setsChannel1Active(packet)) {
+ this.activeCea608Channel_[packet.type] = 0;
+ } else if (this.setsChannel2Active(packet)) {
+ this.activeCea608Channel_[packet.type] = 1;
+ }
+
+ if (this.activeCea608Channel_[packet.type] === null) {
+ // If we haven't received anything to set the active channel, or the
+ // packets are Text/XDS data, discard the data; we don't want jumbled
+ // captions
+ return;
+ }
+
+ this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
+ };
+
+ CaptionStream$1.prototype.setsChannel1Active = function (packet) {
+ return (packet.ccData & 0x7800) === 0x1000;
+ };
+
+ CaptionStream$1.prototype.setsChannel2Active = function (packet) {
+ return (packet.ccData & 0x7800) === 0x1800;
+ };
+
+ CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
+ return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
+ };
+
+ CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
+ if (this.parse708captions_) {
+ this.cc708Stream_.push(packet);
+ }
+ }; // ----------------------
+ // Session to Application
+ // ----------------------
+ // This hash maps special and extended character codes to their
+ // proper Unicode equivalent. The first one-byte key is just a
+ // non-standard character code. The two-byte keys that follow are
+ // the extended CEA708 character codes, along with the preceding
+ // 0x10 extended character byte to distinguish these codes from
+ // non-extended character codes. Every CEA708 character code that
+ // is not in this object maps directly to a standard unicode
+ // character code.
+ // The transparent space and non-breaking transparent space are
+ // technically not fully supported since there is no code to
+ // make them transparent, so they have normal non-transparent
+ // stand-ins.
+ // The special closed caption (CC) character isn't a standard
+ // unicode character, so a fairly similar unicode character was
+ // chosen in its place.
+
+
+ var CHARACTER_TRANSLATION_708 = {
+ 0x7f: 0x266a,
+ // ♪
+ 0x1020: 0x20,
+ // Transparent Space
+ 0x1021: 0xa0,
+ // Non-breaking Transparent Space
+ 0x1025: 0x2026,
+ // …
+ 0x102a: 0x0160,
+ // Š
+ 0x102c: 0x0152,
+ // Œ
+ 0x1030: 0x2588,
+ // █
+ 0x1031: 0x2018,
+ // ‘
+ 0x1032: 0x2019,
+ // ’
+ 0x1033: 0x201c,
+ // “
+ 0x1034: 0x201d,
+ // ”
+ 0x1035: 0x2022,
+ // •
+ 0x1039: 0x2122,
+ // ™
+ 0x103a: 0x0161,
+ // š
+ 0x103c: 0x0153,
+ // œ
+ 0x103d: 0x2120,
+ // ℠
+ 0x103f: 0x0178,
+ // Ÿ
+ 0x1076: 0x215b,
+ // ⅛
+ 0x1077: 0x215c,
+ // ⅜
+ 0x1078: 0x215d,
+ // ⅝
+ 0x1079: 0x215e,
+ // ⅞
+ 0x107a: 0x23d0,
+ // ⏐
+ 0x107b: 0x23a4,
+ // ⎤
+ 0x107c: 0x23a3,
+ // ⎣
+ 0x107d: 0x23af,
+ // ⎯
+ 0x107e: 0x23a6,
+ // ⎦
+ 0x107f: 0x23a1,
+ // ⎡
+ 0x10a0: 0x3138 // ㄸ (CC char)
+
+ };
+
+ var get708CharFromCode = function get708CharFromCode(code) {
+ var newCode = CHARACTER_TRANSLATION_708[code] || code;
+
+ if (code & 0x1000 && code === newCode) {
+ // Invalid extended code
+ return '';
+ }
+
+ return String.fromCharCode(newCode);
+ };
+
+ var within708TextBlock = function within708TextBlock(b) {
+ return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
+ };
+
+ var Cea708Window = function Cea708Window(windowNum) {
+ this.windowNum = windowNum;
+ this.reset();
+ };
+
+ Cea708Window.prototype.reset = function () {
+ this.clearText();
+ this.pendingNewLine = false;
+ this.winAttr = {};
+ this.penAttr = {};
+ this.penLoc = {};
+ this.penColor = {}; // These default values are arbitrary,
+ // defineWindow will usually override them
+
+ this.visible = 0;
+ this.rowLock = 0;
+ this.columnLock = 0;
+ this.priority = 0;
+ this.relativePositioning = 0;
+ this.anchorVertical = 0;
+ this.anchorHorizontal = 0;
+ this.anchorPoint = 0;
+ this.rowCount = 1;
+ this.virtualRowCount = this.rowCount + 1;
+ this.columnCount = 41;
+ this.windowStyle = 0;
+ this.penStyle = 0;
+ };
+
+ Cea708Window.prototype.getText = function () {
+ return this.rows.join('\n');
+ };
+
+ Cea708Window.prototype.clearText = function () {
+ this.rows = [''];
+ this.rowIdx = 0;
+ };
+
+ Cea708Window.prototype.newLine = function (pts) {
+ if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
+ this.beforeRowOverflow(pts);
+ }
+
+ if (this.rows.length > 0) {
+ this.rows.push('');
+ this.rowIdx++;
+ } // Show all virtual rows since there's no visible scrolling
+
+
+ while (this.rows.length > this.virtualRowCount) {
+ this.rows.shift();
+ this.rowIdx--;
+ }
+ };
+
+ Cea708Window.prototype.isEmpty = function () {
+ if (this.rows.length === 0) {
+ return true;
+ } else if (this.rows.length === 1) {
+ return this.rows[0] === '';
+ }
+
+ return false;
+ };
+
+ Cea708Window.prototype.addText = function (text) {
+ this.rows[this.rowIdx] += text;
+ };
+
+ Cea708Window.prototype.backspace = function () {
+ if (!this.isEmpty()) {
+ var row = this.rows[this.rowIdx];
+ this.rows[this.rowIdx] = row.substr(0, row.length - 1);
+ }
+ };
+
+ var Cea708Service = function Cea708Service(serviceNum, encoding, stream) {
+ this.serviceNum = serviceNum;
+ this.text = '';
+ this.currentWindow = new Cea708Window(-1);
+ this.windows = [];
+ this.stream = stream; // Try to set up a TextDecoder if an `encoding` value was provided
+
+ if (typeof encoding === 'string') {
+ this.createTextDecoder(encoding);
+ }
+ };
+ /**
+ * Initialize service windows
+ * Must be run before service use
+ *
+ * @param {Integer} pts PTS value
+ * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
+ */
+
+
+ Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
+ this.startPts = pts;
+
+ for (var win = 0; win < 8; win++) {
+ this.windows[win] = new Cea708Window(win);
+
+ if (typeof beforeRowOverflow === 'function') {
+ this.windows[win].beforeRowOverflow = beforeRowOverflow;
+ }
+ }
+ };
+ /**
+ * Set current window of service to be affected by commands
+ *
+ * @param {Integer} windowNum Window number
+ */
+
+
+ Cea708Service.prototype.setCurrentWindow = function (windowNum) {
+ this.currentWindow = this.windows[windowNum];
+ };
+ /**
+ * Try to create a TextDecoder if it is natively supported
+ */
+
+
+ Cea708Service.prototype.createTextDecoder = function (encoding) {
+ if (typeof TextDecoder === 'undefined') {
+ this.stream.trigger('log', {
+ level: 'warn',
+ message: 'The `encoding` option is unsupported without TextDecoder support'
+ });
+ } else {
+ try {
+ this.textDecoder_ = new TextDecoder(encoding);
+ } catch (error) {
+ this.stream.trigger('log', {
+ level: 'warn',
+ message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
+ });
+ }
+ }
+ };
+
+ var Cea708Stream = function Cea708Stream(options) {
+ options = options || {};
+ Cea708Stream.prototype.init.call(this);
+ var self = this;
+ var captionServices = options.captionServices || {};
+ var captionServiceEncodings = {};
+ var serviceProps; // Get service encodings from captionServices option block
+
+ Object.keys(captionServices).forEach(function (serviceName) {
+ serviceProps = captionServices[serviceName];
+
+ if (/^SERVICE/.test(serviceName)) {
+ captionServiceEncodings[serviceName] = serviceProps.encoding;
+ }
+ });
+ this.serviceEncodings = captionServiceEncodings;
+ this.current708Packet = null;
+ this.services = {};
+
+ this.push = function (packet) {
+ if (packet.type === 3) {
+ // 708 packet start
+ self.new708Packet();
+ self.add708Bytes(packet);
+ } else {
+ if (self.current708Packet === null) {
+ // This should only happen at the start of a file if there's no packet start.
+ self.new708Packet();
+ }
+
+ self.add708Bytes(packet);
+ }
+ };
+ };
+
+ Cea708Stream.prototype = new stream();
+ /**
+ * Push current 708 packet, create new 708 packet.
+ */
+
+ Cea708Stream.prototype.new708Packet = function () {
+ if (this.current708Packet !== null) {
+ this.push708Packet();
+ }
+
+ this.current708Packet = {
+ data: [],
+ ptsVals: []
+ };
+ };
+ /**
+ * Add pts and both bytes from packet into current 708 packet.
+ */
+
+
+ Cea708Stream.prototype.add708Bytes = function (packet) {
+ var data = packet.ccData;
+ var byte0 = data >>> 8;
+ var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
+ // that service blocks will always line up with byte pairs.
+
+ this.current708Packet.ptsVals.push(packet.pts);
+ this.current708Packet.data.push(byte0);
+ this.current708Packet.data.push(byte1);
+ };
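+
+ // Illustrative sketch, not from the upstream mux.js sources: the byte-pair split
+ // performed by add708Bytes above. Each caption packet carries one 16-bit ccData
+ // word plus one pts; the word is split into its high and low bytes and the shared
+ // pts is stored once, which is why getPts() below divides the byte index by 2.
+ var exampleCcData = 0x1234;
+ var exampleByte0 = exampleCcData >>> 8; // 0x12
+ var exampleByte1 = exampleCcData & 0xff; // 0x34
+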
+ /**
+ * Parse completed 708 packet into service blocks and push each service block.
+ */
+
+
+ Cea708Stream.prototype.push708Packet = function () {
+ var packet708 = this.current708Packet;
+ var packetData = packet708.data;
+ var serviceNum = null;
+ var blockSize = null;
+ var i = 0;
+ var b = packetData[i++];
+ packet708.seq = b >> 6;
+ packet708.sizeCode = b & 0x3f; // 0b00111111;
+
+ for (; i < packetData.length; i++) {
+ b = packetData[i++];
+ serviceNum = b >> 5;
+ blockSize = b & 0x1f; // 0b00011111
+
+ if (serviceNum === 7 && blockSize > 0) {
+ // Extended service num
+ b = packetData[i++];
+ serviceNum = b;
+ }
+
+ this.pushServiceBlock(serviceNum, i, blockSize);
+
+ if (blockSize > 0) {
+ i += blockSize - 1;
+ }
+ }
+ };
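+
+ // Illustrative sketch, not from the upstream mux.js sources: decoding one service
+ // block header byte the way the loop above does. The top 3 bits carry the service
+ // number and the low 5 bits the block size; a service number of 7 means the real
+ // (extended) service number follows in the next byte.
+ var exampleBlockHeader = 0x24; // 0b00100100
+ var exampleServiceNum = exampleBlockHeader >> 5; // 1
+ var exampleBlockSize = exampleBlockHeader & 0x1f; // 4 bytes of block data follow
+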
+ /**
+ * Parse service block, execute commands, read text.
+ *
+ * Note: While many of these commands serve important purposes,
+ * many others just parse out the parameters or attributes, but
+ * nothing is done with them because this is not a full and complete
+ * implementation of the entire 708 spec.
+ *
+ * @param {Integer} serviceNum Service number
+ * @param {Integer} start Start index of the 708 packet data
+ * @param {Integer} size Block size
+ */
+
+
+ Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
+ var b;
+ var i = start;
+ var packetData = this.current708Packet.data;
+ var service = this.services[serviceNum];
+
+ if (!service) {
+ service = this.initService(serviceNum, i);
+ }
+
+ for (; i < start + size && i < packetData.length; i++) {
+ b = packetData[i];
+
+ if (within708TextBlock(b)) {
+ i = this.handleText(i, service);
+ } else if (b === 0x18) {
+ i = this.multiByteCharacter(i, service);
+ } else if (b === 0x10) {
+ i = this.extendedCommands(i, service);
+ } else if (0x80 <= b && b <= 0x87) {
+ i = this.setCurrentWindow(i, service);
+ } else if (0x98 <= b && b <= 0x9f) {
+ i = this.defineWindow(i, service);
+ } else if (b === 0x88) {
+ i = this.clearWindows(i, service);
+ } else if (b === 0x8c) {
+ i = this.deleteWindows(i, service);
+ } else if (b === 0x89) {
+ i = this.displayWindows(i, service);
+ } else if (b === 0x8a) {
+ i = this.hideWindows(i, service);
+ } else if (b === 0x8b) {
+ i = this.toggleWindows(i, service);
+ } else if (b === 0x97) {
+ i = this.setWindowAttributes(i, service);
+ } else if (b === 0x90) {
+ i = this.setPenAttributes(i, service);
+ } else if (b === 0x91) {
+ i = this.setPenColor(i, service);
+ } else if (b === 0x92) {
+ i = this.setPenLocation(i, service);
+ } else if (b === 0x8f) {
+ service = this.reset(i, service);
+ } else if (b === 0x08) {
+ // BS: Backspace
+ service.currentWindow.backspace();
+ } else if (b === 0x0c) {
+ // FF: Form feed
+ service.currentWindow.clearText();
+ } else if (b === 0x0d) {
+ // CR: Carriage return
+ service.currentWindow.pendingNewLine = true;
+ } else if (b === 0x0e) {
+ // HCR: Horizontal carriage return
+ service.currentWindow.clearText();
+ } else if (b === 0x8d) {
+ // DLY: Delay, nothing to do
+ i++;
+ } else ; // unrecognized or unsupported command byte: nothing to do
+ }
+ };
+ /**
+ * Execute an extended command
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.extendedCommands = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[++i];
+
+ if (within708TextBlock(b)) {
+ i = this.handleText(i, service, {
+ isExtended: true
+ });
+ }
+
+ return i;
+ };
+ /**
+ * Get PTS value of a given byte index
+ *
+ * @param {Integer} byteIndex Index of the byte
+ * @return {Integer} PTS
+ */
+
+
+ Cea708Stream.prototype.getPts = function (byteIndex) {
+ // There's 1 pts value per 2 bytes
+ return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
+ };
+ /**
+ * Initializes a service
+ *
+ * @param {Integer} serviceNum Service number
+ * @return {Service} Initialized service object
+ */
+
+
+ Cea708Stream.prototype.initService = function (serviceNum, i) {
+ var serviceName = 'SERVICE' + serviceNum;
+ var self = this;
+ var encoding;
+
+ if (serviceName in this.serviceEncodings) {
+ encoding = this.serviceEncodings[serviceName];
+ }
+
+ this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);
+ this.services[serviceNum].init(this.getPts(i), function (pts) {
+ self.flushDisplayed(pts, self.services[serviceNum]);
+ });
+ return this.services[serviceNum];
+ };
+ /**
+ * Execute text writing to current window
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.handleText = function (i, service, options) {
+ var isExtended = options && options.isExtended;
+ var isMultiByte = options && options.isMultiByte;
+ var packetData = this.current708Packet.data;
+ var extended = isExtended ? 0x1000 : 0x0000;
+ var currentByte = packetData[i];
+ var nextByte = packetData[i + 1];
+ var win = service.currentWindow;
+
+ var _char;
+
+ var charCodeArray; // Use the TextDecoder if one was created for this service
+
+ if (service.textDecoder_ && !isExtended) {
+ if (isMultiByte) {
+ charCodeArray = [currentByte, nextByte];
+ i++;
+ } else {
+ charCodeArray = [currentByte];
+ }
+
+ _char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
+ } else {
+ _char = get708CharFromCode(extended | currentByte);
+ }
+
+ if (win.pendingNewLine && !win.isEmpty()) {
+ win.newLine(this.getPts(i));
+ }
+
+ win.pendingNewLine = false;
+ win.addText(_char);
+ return i;
+ };
+ /**
+ * Handle decoding of multibyte character
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.multiByteCharacter = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var firstByte = packetData[i + 1];
+ var secondByte = packetData[i + 2];
+
+ if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {
+ i = this.handleText(++i, service, {
+ isMultiByte: true
+ });
+ }
+
+ return i;
+ };
+ /**
+ * Parse and execute the CW# command.
+ *
+ * Set the current window.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.setCurrentWindow = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[i];
+ var windowNum = b & 0x07;
+ service.setCurrentWindow(windowNum);
+ return i;
+ };
+ /**
+ * Parse and execute the DF# command.
+ *
+ * Define a window and set it as the current window.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.defineWindow = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[i];
+ var windowNum = b & 0x07;
+ service.setCurrentWindow(windowNum);
+ var win = service.currentWindow;
+ b = packetData[++i];
+ win.visible = (b & 0x20) >> 5; // v
+
+ win.rowLock = (b & 0x10) >> 4; // rl
+
+ win.columnLock = (b & 0x08) >> 3; // cl
+
+ win.priority = b & 0x07; // p
+
+ b = packetData[++i];
+ win.relativePositioning = (b & 0x80) >> 7; // rp
+
+ win.anchorVertical = b & 0x7f; // av
+
+ b = packetData[++i];
+ win.anchorHorizontal = b; // ah
+
+ b = packetData[++i];
+ win.anchorPoint = (b & 0xf0) >> 4; // ap
+
+ win.rowCount = b & 0x0f; // rc
+
+ b = packetData[++i];
+ win.columnCount = b & 0x3f; // cc
+
+ b = packetData[++i];
+ win.windowStyle = (b & 0x38) >> 3; // ws
+
+ win.penStyle = b & 0x07; // ps
+ // The spec says there are (rowCount+1) "virtual rows"
+
+ win.virtualRowCount = win.rowCount + 1;
+ return i;
+ };
+ /**
+ * Parse and execute the SWA command.
+ *
+ * Set attributes of the current window.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.setWindowAttributes = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[i];
+ var winAttr = service.currentWindow.winAttr;
+ b = packetData[++i];
+ winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
+
+ winAttr.fillRed = (b & 0x30) >> 4; // fr
+
+ winAttr.fillGreen = (b & 0x0c) >> 2; // fg
+
+ winAttr.fillBlue = b & 0x03; // fb
+
+ b = packetData[++i];
+ winAttr.borderType = (b & 0xc0) >> 6; // bt
+
+ winAttr.borderRed = (b & 0x30) >> 4; // br
+
+ winAttr.borderGreen = (b & 0x0c) >> 2; // bg
+
+ winAttr.borderBlue = b & 0x03; // bb
+
+ b = packetData[++i];
+ winAttr.borderType += (b & 0x80) >> 5; // bt
+
+ winAttr.wordWrap = (b & 0x40) >> 6; // ww
+
+ winAttr.printDirection = (b & 0x30) >> 4; // pd
+
+ winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
+
+ winAttr.justify = b & 0x03; // j
+
+ b = packetData[++i];
+ winAttr.effectSpeed = (b & 0xf0) >> 4; // es
+
+ winAttr.effectDirection = (b & 0x0c) >> 2; // ed
+
+ winAttr.displayEffect = b & 0x03; // de
+
+ return i;
+ };
+ /**
+ * Gather text from all displayed windows and push a caption to output.
+ *
+ * @param {Integer} pts Current PTS value
+ * @param {Service} service The service object to be affected
+ */
+
+
+ Cea708Stream.prototype.flushDisplayed = function (pts, service) {
+ var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily
+ // display text in the correct order, but sample files so far have not shown any issue.
+
+ for (var winId = 0; winId < 8; winId++) {
+ if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
+ displayedText.push(service.windows[winId].getText());
+ }
+ }
+
+ service.endPts = pts;
+ service.text = displayedText.join('\n\n');
+ this.pushCaption(service);
+ service.startPts = pts;
+ };
+ /**
+ * Push a caption to output if the caption contains text.
+ *
+ * @param {Service} service The service object to be affected
+ */
+
+
+ Cea708Stream.prototype.pushCaption = function (service) {
+ if (service.text !== '') {
+ this.trigger('data', {
+ startPts: service.startPts,
+ endPts: service.endPts,
+ text: service.text,
+ stream: 'cc708_' + service.serviceNum
+ });
+ service.text = '';
+ service.startPts = service.endPts;
+ }
+ };
+ /**
+ * Parse and execute the DSW command.
+ *
+ * Set visible property of windows based on the parsed bitmask.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.displayWindows = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[++i];
+ var pts = this.getPts(i);
+ this.flushDisplayed(pts, service);
+
+ for (var winId = 0; winId < 8; winId++) {
+ if (b & 0x01 << winId) {
+ service.windows[winId].visible = 1;
+ }
+ }
+
+ return i;
+ };
+ /**
+ * Parse and execute the HDW command.
+ *
+ * Set visible property of windows based on the parsed bitmask.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.hideWindows = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[++i];
+ var pts = this.getPts(i);
+ this.flushDisplayed(pts, service);
+
+ for (var winId = 0; winId < 8; winId++) {
+ if (b & 0x01 << winId) {
+ service.windows[winId].visible = 0;
+ }
+ }
+
+ return i;
+ };
+ /**
+ * Parse and execute the TGW command.
+ *
+ * Set visible property of windows based on the parsed bitmask.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.toggleWindows = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[++i];
+ var pts = this.getPts(i);
+ this.flushDisplayed(pts, service);
+
+ for (var winId = 0; winId < 8; winId++) {
+ if (b & 0x01 << winId) {
+ service.windows[winId].visible ^= 1;
+ }
+ }
+
+ return i;
+ };
+ /**
+ * Parse and execute the CLW command.
+ *
+ * Clear text of windows based on the parsed bitmask.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.clearWindows = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[++i];
+ var pts = this.getPts(i);
+ this.flushDisplayed(pts, service);
+
+ for (var winId = 0; winId < 8; winId++) {
+ if (b & 0x01 << winId) {
+ service.windows[winId].clearText();
+ }
+ }
+
+ return i;
+ };
+ /**
+ * Parse and execute the DLW command.
+ *
+ * Re-initialize windows based on the parsed bitmask.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.deleteWindows = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[++i];
+ var pts = this.getPts(i);
+ this.flushDisplayed(pts, service);
+
+ for (var winId = 0; winId < 8; winId++) {
+ if (b & 0x01 << winId) {
+ service.windows[winId].reset();
+ }
+ }
+
+ return i;
+ };
+ /**
+ * Parse and execute the SPA command.
+ *
+ * Set pen attributes of the current window.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.setPenAttributes = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[i];
+ var penAttr = service.currentWindow.penAttr;
+ b = packetData[++i];
+ penAttr.textTag = (b & 0xf0) >> 4; // tt
+
+ penAttr.offset = (b & 0x0c) >> 2; // o
+
+ penAttr.penSize = b & 0x03; // s
+
+ b = packetData[++i];
+ penAttr.italics = (b & 0x80) >> 7; // i
+
+ penAttr.underline = (b & 0x40) >> 6; // u
+
+ penAttr.edgeType = (b & 0x38) >> 3; // et
+
+ penAttr.fontStyle = b & 0x07; // fs
+
+ return i;
+ };
+ /**
+ * Parse and execute the SPC command.
+ *
+ * Set pen color of the current window.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.setPenColor = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[i];
+ var penColor = service.currentWindow.penColor;
+ b = packetData[++i];
+ penColor.fgOpacity = (b & 0xc0) >> 6; // fo
+
+ penColor.fgRed = (b & 0x30) >> 4; // fr
+
+ penColor.fgGreen = (b & 0x0c) >> 2; // fg
+
+ penColor.fgBlue = b & 0x03; // fb
+
+ b = packetData[++i];
+ penColor.bgOpacity = (b & 0xc0) >> 6; // bo
+
+ penColor.bgRed = (b & 0x30) >> 4; // br
+
+ penColor.bgGreen = (b & 0x0c) >> 2; // bg
+
+ penColor.bgBlue = b & 0x03; // bb
+
+ b = packetData[++i];
+ penColor.edgeRed = (b & 0x30) >> 4; // er
+
+ penColor.edgeGreen = (b & 0x0c) >> 2; // eg
+
+ penColor.edgeBlue = b & 0x03; // eb
+
+ return i;
+ };
+ /**
+ * Parse and execute the SPL command.
+ *
+ * Set pen location of the current window.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Integer} New index after parsing
+ */
+
+
+ Cea708Stream.prototype.setPenLocation = function (i, service) {
+ var packetData = this.current708Packet.data;
+ var b = packetData[i];
+ var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak
+
+ service.currentWindow.pendingNewLine = true;
+ b = packetData[++i];
+ penLoc.row = b & 0x0f; // r
+
+ b = packetData[++i];
+ penLoc.column = b & 0x3f; // c
+
+ return i;
+ };
+ /**
+ * Execute the RST command.
+ *
+ * Reset service to a clean slate. Re-initialize.
+ *
+ * @param {Integer} i Current index in the 708 packet
+ * @param {Service} service The service object to be affected
+ * @return {Service} Re-initialized service
+ */
+
+
+ Cea708Stream.prototype.reset = function (i, service) {
+ var pts = this.getPts(i);
+ this.flushDisplayed(pts, service);
+ return this.initService(service.serviceNum, i);
+ }; // This hash maps non-ASCII, special, and extended character codes to their
+ // proper Unicode equivalent. The first keys that are only a single byte
+ // are the non-standard ASCII characters, which simply map the CEA608 byte
+ // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
+ // character codes, but have their MSB bitmasked with 0x03 so that a lookup
+ // can be performed regardless of the field and data channel on which the
+ // character code was received.
+
+
+ var CHARACTER_TRANSLATION = {
+ 0x2a: 0xe1,
+ // á
+ 0x5c: 0xe9,
+ // é
+ 0x5e: 0xed,
+ // í
+ 0x5f: 0xf3,
+ // ó
+ 0x60: 0xfa,
+ // ú
+ 0x7b: 0xe7,
+ // ç
+ 0x7c: 0xf7,
+ // ÷
+ 0x7d: 0xd1,
+ // Ñ
+ 0x7e: 0xf1,
+ // ñ
+ 0x7f: 0x2588,
+ // █
+ 0x0130: 0xae,
+ // ®
+ 0x0131: 0xb0,
+ // °
+ 0x0132: 0xbd,
+ // ½
+ 0x0133: 0xbf,
+ // ¿
+ 0x0134: 0x2122,
+ // ™
+ 0x0135: 0xa2,
+ // ¢
+ 0x0136: 0xa3,
+ // £
+ 0x0137: 0x266a,
+ // ♪
+ 0x0138: 0xe0,
+ // à
+ 0x0139: 0xa0,
+ //
+ 0x013a: 0xe8,
+ // è
+ 0x013b: 0xe2,
+ // â
+ 0x013c: 0xea,
+ // ê
+ 0x013d: 0xee,
+ // î
+ 0x013e: 0xf4,
+ // ô
+ 0x013f: 0xfb,
+ // û
+ 0x0220: 0xc1,
+ // Á
+ 0x0221: 0xc9,
+ // É
+ 0x0222: 0xd3,
+ // Ó
+ 0x0223: 0xda,
+ // Ú
+ 0x0224: 0xdc,
+ // Ü
+ 0x0225: 0xfc,
+ // ü
+ 0x0226: 0x2018,
+ // ‘
+ 0x0227: 0xa1,
+ // ¡
+ 0x0228: 0x2a,
+ // *
+ 0x0229: 0x27,
+ // '
+ 0x022a: 0x2014,
+ // —
+ 0x022b: 0xa9,
+ // ©
+ 0x022c: 0x2120,
+ // ℠
+ 0x022d: 0x2022,
+ // •
+ 0x022e: 0x201c,
+ // “
+ 0x022f: 0x201d,
+ // ”
+ 0x0230: 0xc0,
+ // À
+ 0x0231: 0xc2,
+ // Â
+ 0x0232: 0xc7,
+ // Ç
+ 0x0233: 0xc8,
+ // È
+ 0x0234: 0xca,
+ // Ê
+ 0x0235: 0xcb,
+ // Ë
+ 0x0236: 0xeb,
+ // ë
+ 0x0237: 0xce,
+ // Î
+ 0x0238: 0xcf,
+ // Ï
+ 0x0239: 0xef,
+ // ï
+ 0x023a: 0xd4,
+ // Ô
+ 0x023b: 0xd9,
+ // Ù
+ 0x023c: 0xf9,
+ // ù
+ 0x023d: 0xdb,
+ // Û
+ 0x023e: 0xab,
+ // «
+ 0x023f: 0xbb,
+ // »
+ 0x0320: 0xc3,
+ // Ã
+ 0x0321: 0xe3,
+ // ã
+ 0x0322: 0xcd,
+ // Í
+ 0x0323: 0xcc,
+ // Ì
+ 0x0324: 0xec,
+ // ì
+ 0x0325: 0xd2,
+ // Ò
+ 0x0326: 0xf2,
+ // ò
+ 0x0327: 0xd5,
+ // Õ
+ 0x0328: 0xf5,
+ // õ
+ 0x0329: 0x7b,
+ // {
+ 0x032a: 0x7d,
+ // }
+ 0x032b: 0x5c,
+ // \
+ 0x032c: 0x5e,
+ // ^
+ 0x032d: 0x5f,
+ // _
+ 0x032e: 0x7c,
+ // |
+ 0x032f: 0x7e,
+ // ~
+ 0x0330: 0xc4,
+ // Ä
+ 0x0331: 0xe4,
+ // ä
+ 0x0332: 0xd6,
+ // Ö
+ 0x0333: 0xf6,
+ // ö
+ 0x0334: 0xdf,
+ // ß
+ 0x0335: 0xa5,
+ // ¥
+ 0x0336: 0xa4,
+ // ¤
+ 0x0337: 0x2502,
+ // │
+ 0x0338: 0xc5,
+ // Å
+ 0x0339: 0xe5,
+ // å
+ 0x033a: 0xd8,
+ // Ø
+ 0x033b: 0xf8,
+ // ø
+ 0x033c: 0x250c,
+ // ┌
+ 0x033d: 0x2510,
+ // ┐
+ 0x033e: 0x2514,
+ // └
+ 0x033f: 0x2518 // ┘
+
+ };
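+
+ // Illustrative sketch, not from the upstream mux.js sources: how a two-byte 608
+ // extended character code becomes a key into the table above. The first byte is
+ // masked with 0x03 and shifted up, so the pair (0x12, 0x37), regardless of field
+ // or data channel, becomes the key 0x0237.
+ var exampleChar0 = (0x12 & 0x03) << 8; // 0x0200
+ var exampleCharCode = CHARACTER_TRANSLATION[exampleChar0 | 0x37]; // 0xce
+ // String.fromCharCode(exampleCharCode) === 'Î'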
+
+ var getCharFromCode = function getCharFromCode(code) {
+ if (code === null) {
+ return '';
+ }
+
+ code = CHARACTER_TRANSLATION[code] || code;
+ return String.fromCharCode(code);
+ }; // the index of the last row in a CEA-608 display buffer
+
+
+ var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
+ // getting it through bit logic.
+
+ var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 34x15 matrix of character
+ // cells. The "bottom" row is the last element in the outer array.
+
+ var createDisplayBuffer = function createDisplayBuffer() {
+ var result = [],
+ i = BOTTOM_ROW + 1;
+
+ while (i--) {
+ result.push('');
+ }
+
+ return result;
+ };
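+
+ // Illustrative sketch, not from the upstream mux.js sources: the PAC-to-row lookup
+ // used further below. A PAC's row cannot be derived with bit math alone, so the
+ // parity-stripped data word is masked with 0x1f20 and located in ROWS; the index
+ // at which it is found is the row.
+ var examplePac = 0x1170; // char0 0x11, char1 0x70
+ var exampleRowIndex = ROWS.indexOf(examplePac & 0x1f20); // 0x1120 -> row 1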
+
+ var Cea608Stream = function Cea608Stream(field, dataChannel) {
+ Cea608Stream.prototype.init.call(this);
+ this.field_ = field || 0;
+ this.dataChannel_ = dataChannel || 0;
+ this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
+ this.setConstants();
+ this.reset();
+
+ this.push = function (packet) {
+ var data, swap, char0, char1, text; // remove the parity bits
+
+ data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice
+
+ if (data === this.lastControlCode_) {
+ this.lastControlCode_ = null;
+ return;
+ } // Store control codes
+
+
+ if ((data & 0xf000) === 0x1000) {
+ this.lastControlCode_ = data;
+ } else if (data !== this.PADDING_) {
+ this.lastControlCode_ = null;
+ }
+
+ char0 = data >>> 8;
+ char1 = data & 0xff;
+
+ if (data === this.PADDING_) {
+ return;
+ } else if (data === this.RESUME_CAPTION_LOADING_) {
+ this.mode_ = 'popOn';
+ } else if (data === this.END_OF_CAPTION_) {
+ // If an EOC is received while in paint-on mode, the displayed caption
+ // text should be swapped to non-displayed memory as if it was a pop-on
+ // caption. Because of that, we should explicitly switch back to pop-on
+ // mode
+ this.mode_ = 'popOn';
+ this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now
+
+ this.flushDisplayed(packet.pts); // flip memory
+
+ swap = this.displayed_;
+ this.displayed_ = this.nonDisplayed_;
+ this.nonDisplayed_ = swap; // start measuring the time to display the caption
+
+ this.startPts_ = packet.pts;
+ } else if (data === this.ROLL_UP_2_ROWS_) {
+ this.rollUpRows_ = 2;
+ this.setRollUp(packet.pts);
+ } else if (data === this.ROLL_UP_3_ROWS_) {
+ this.rollUpRows_ = 3;
+ this.setRollUp(packet.pts);
+ } else if (data === this.ROLL_UP_4_ROWS_) {
+ this.rollUpRows_ = 4;
+ this.setRollUp(packet.pts);
+ } else if (data === this.CARRIAGE_RETURN_) {
+ this.clearFormatting(packet.pts);
+ this.flushDisplayed(packet.pts);
+ this.shiftRowsUp_();
+ this.startPts_ = packet.pts;
+ } else if (data === this.BACKSPACE_) {
+ if (this.mode_ === 'popOn') {
+ this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
+ } else {
+ this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
+ }
+ } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
+ this.flushDisplayed(packet.pts);
+ this.displayed_ = createDisplayBuffer();
+ } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
+ this.nonDisplayed_ = createDisplayBuffer();
+ } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
+ if (this.mode_ !== 'paintOn') {
+ // NOTE: This should be removed when proper caption positioning is
+ // implemented
+ this.flushDisplayed(packet.pts);
+ this.displayed_ = createDisplayBuffer();
+ }
+
+ this.mode_ = 'paintOn';
+ this.startPts_ = packet.pts; // Append special characters to caption text
+ } else if (this.isSpecialCharacter(char0, char1)) {
+ // Bitmask char0 so that we can apply character transformations
+ // regardless of field and data channel.
+ // Then byte-shift to the left and OR with char1 so we can pass the
+ // entire character code to `getCharFromCode`.
+ char0 = (char0 & 0x03) << 8;
+ text = getCharFromCode(char0 | char1);
+ this[this.mode_](packet.pts, text);
+ this.column_++; // Append extended characters to caption text
+ } else if (this.isExtCharacter(char0, char1)) {
+ // Extended characters always follow their "non-extended" equivalents.
+ // i.e. if an "è" is desired, you'll always receive "eè"; non-compliant
+ // decoders are supposed to drop the "è", while compliant decoders
+ // backspace the "e" and insert "è".
+ // Delete the previous character
+ if (this.mode_ === 'popOn') {
+ this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
+ } else {
+ this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
+ } // Bitmask char0 so that we can apply character transformations
+ // regardless of field and data channel.
+ // Then byte-shift to the left and OR with char1 so we can pass the
+ // entire character code to `getCharFromCode`.
+
+
+ char0 = (char0 & 0x03) << 8;
+ text = getCharFromCode(char0 | char1);
+ this[this.mode_](packet.pts, text);
+ this.column_++; // Process mid-row codes
+ } else if (this.isMidRowCode(char0, char1)) {
+ // Attributes are not additive, so clear all formatting
+ this.clearFormatting(packet.pts); // According to the standard, mid-row codes
+ // should be replaced with spaces, so add one now
+
+ this[this.mode_](packet.pts, ' ');
+ this.column_++;
+
+ if ((char1 & 0xe) === 0xe) {
+ this.addFormatting(packet.pts, ['i']);
+ }
+
+ if ((char1 & 0x1) === 0x1) {
+ this.addFormatting(packet.pts, ['u']);
+ } // Detect offset control codes and adjust cursor
+
+ } else if (this.isOffsetControlCode(char0, char1)) {
+ // Cursor position is set by indent PAC (see below) in 4-column
+ // increments, with an additional offset code of 1-3 to reach any
+ // of the 32 columns specified by CEA-608. So all we need to do
+ // here is increment the column cursor by the given offset.
+ this.column_ += char1 & 0x03; // Detect PACs (Preamble Address Codes)
+ } else if (this.isPAC(char0, char1)) {
+ // There's no logic for PAC -> row mapping, so we have to just
+ // find the row code in an array and use its index :(
+ var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode
+
+ if (this.mode_ === 'rollUp') {
+ // This implies that the base row is incorrectly set.
+ // As per the recommendation in CEA-608(Base Row Implementation), defer to the number
+ // of roll-up rows set.
+ if (row - this.rollUpRows_ + 1 < 0) {
+ row = this.rollUpRows_ - 1;
+ }
+
+ this.setRollUp(packet.pts, row);
+ }
+
+ if (row !== this.row_) {
+ // formatting is only persistent for current row
+ this.clearFormatting(packet.pts);
+ this.row_ = row;
+ } // All PACs can apply underline, so detect and apply
+ // (All odd-numbered second bytes set underline)
+
+
+ if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
+ this.addFormatting(packet.pts, ['u']);
+ }
+
+ if ((data & 0x10) === 0x10) {
+ // We've got an indent level code. Each successive even number
+ // increments the column cursor by 4, so we can get the desired
+ // column position by bit-shifting to the right (to get n/2)
+ // and multiplying by 4.
+ this.column_ = ((data & 0xe) >> 1) * 4;
+ }
+
+ if (this.isColorPAC(char1)) {
+ // it's a color code, though we only support white, which
+ // can be either normal or italicized. white italics can be
+ // either 0x4e or 0x6e depending on the row, so we just
+ // bitwise-and with 0xe to see if italics should be turned on
+ if ((char1 & 0xe) === 0xe) {
+ this.addFormatting(packet.pts, ['i']);
+ }
+ } // We have a normal character in char0, and possibly one in char1
+
+ } else if (this.isNormalChar(char0)) {
+ if (char1 === 0x00) {
+ char1 = null;
+ }
+
+ text = getCharFromCode(char0);
+ text += getCharFromCode(char1);
+ this[this.mode_](packet.pts, text);
+ this.column_ += text.length;
+ } // finish data processing
+
+ };
+ };
+
+ Cea608Stream.prototype = new stream(); // Trigger a cue point that captures the current state of the
+ // display buffer
+
+ Cea608Stream.prototype.flushDisplayed = function (pts) {
+ var content = this.displayed_ // remove spaces from the start and end of the string
+ .map(function (row, index) {
+ try {
+ return row.trim();
+ } catch (e) {
+ // Ordinarily, this shouldn't happen. However, caption
+ // parsing errors should not throw exceptions and
+ // break playback.
+ this.trigger('log', {
+ level: 'warn',
+ message: 'Skipping a malformed 608 caption at index ' + index + '.'
+ });
+ return '';
+ }
+ }, this) // combine all text rows to display in one cue
+ .join('\n') // and remove blank rows from the start and end, but not the middle
+ .replace(/^\n+|\n+$/g, '');
+
+ if (content.length) {
+ this.trigger('data', {
+ startPts: this.startPts_,
+ endPts: pts,
+ text: content,
+ stream: this.name_
+ });
+ }
+ };
+ /**
+ * Zero out the data, used for startup and on seek
+ */
+
+
+ Cea608Stream.prototype.reset = function () {
+ this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will
+ // actually display captions. If a caption is shifted to a row
+ // with a lower index than this, it is cleared from the display
+ // buffer
+
+ this.topRow_ = 0;
+ this.startPts_ = 0;
+ this.displayed_ = createDisplayBuffer();
+ this.nonDisplayed_ = createDisplayBuffer();
+ this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacing
+
+ this.column_ = 0;
+ this.row_ = BOTTOM_ROW;
+ this.rollUpRows_ = 2; // This variable holds currently-applied formatting
+
+ this.formatting_ = [];
+ };
+ /**
+ * Sets up control code and related constants for this instance
+ */
+
+
+ Cea608Stream.prototype.setConstants = function () {
+ // The following attributes have these uses:
+ // ext_ : char0 for mid-row codes, and the base for extended
+ // chars (ext_+0, ext_+1, and ext_+2 are char0s for
+ // extended codes)
+ // control_: char0 for control codes, except byte-shifted to the
+ // left so that we can do this.control_ | CONTROL_CODE
+ // offset_: char0 for tab offset codes
+ //
+ // It's also worth noting that control codes, and _only_ control codes,
+ // differ between field 1 and field 2. Field 2 control codes are always
+ // their field 1 value plus 1. That's why there's the "| field" on the
+ // control value.
+ if (this.dataChannel_ === 0) {
+ this.BASE_ = 0x10;
+ this.EXT_ = 0x11;
+ this.CONTROL_ = (0x14 | this.field_) << 8;
+ this.OFFSET_ = 0x17;
+ } else if (this.dataChannel_ === 1) {
+ this.BASE_ = 0x18;
+ this.EXT_ = 0x19;
+ this.CONTROL_ = (0x1c | this.field_) << 8;
+ this.OFFSET_ = 0x1f;
+ } // Constants for the LSByte command codes recognized by Cea608Stream. This
+ // list is not exhaustive. For a more comprehensive listing and semantics see
+ // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
+ // Padding
+
+
+ this.PADDING_ = 0x0000; // Pop-on Mode
+
+ this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
+ this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode
+
+ this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
+ this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
+ this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
+ this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode
+
+ this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure
+
+ this.BACKSPACE_ = this.CONTROL_ | 0x21;
+ this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
+ this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
+ };
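+
+ // Illustrative sketch, not from the upstream mux.js sources: the constants the
+ // code above produces for CC1 (field 0, data channel 0). The control prefix is
+ // 0x1400, so Resume Caption Loading is 0x1420 and End Of Caption is 0x142f; for
+ // field 2 the prefix becomes 0x1500, shifting every code up by one in its first byte.
+ var exampleCc1Control = (0x14 | 0) << 8; // 0x1400
+ var exampleResumeCaptionLoading = exampleCc1Control | 0x20; // 0x1420
+ var exampleEndOfCaption = exampleCc1Control | 0x2f; // 0x142f
+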
+ /**
+ * Detects if the 2-byte packet data is a special character
+ *
+ * Special characters have a second byte in the range 0x30 to 0x3f,
+ * with the first byte being 0x11 (for data channel 1) or 0x19 (for
+ * data channel 2).
+ *
+ * @param {Integer} char0 The first byte
+ * @param {Integer} char1 The second byte
+ * @return {Boolean} Whether the 2 bytes are a special character
+ */
+
+
+ Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
+ return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
+ };
+ /**
+ * Detects if the 2-byte packet data is an extended character
+ *
+ * Extended characters have a second byte in the range 0x20 to 0x3f,
+ * with the first byte being 0x12 or 0x13 (for data channel 1) or
+ * 0x1a or 0x1b (for data channel 2).
+ *
+ * @param {Integer} char0 The first byte
+ * @param {Integer} char1 The second byte
+ * @return {Boolean} Whether the 2 bytes are an extended character
+ */
+
+
+ Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
+ return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
+ };
+ /**
+ * Detects if the 2-byte packet is a mid-row code
+ *
+ * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
+ * the first byte being 0x11 (for data channel 1) or 0x19 (for data
+ * channel 2).
+ *
+ * @param {Integer} char0 The first byte
+ * @param {Integer} char1 The second byte
+ * @return {Boolean} Whether the 2 bytes are a mid-row code
+ */
+
+
+ Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
+ return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
+ };
+ /**
+ * Detects if the 2-byte packet is an offset control code
+ *
+ * Offset control codes have a second byte in the range 0x21 to 0x23,
+ * with the first byte being 0x17 (for data channel 1) or 0x1f (for
+ * data channel 2).
+ *
+ * @param {Integer} char0 The first byte
+ * @param {Integer} char1 The second byte
+ * @return {Boolean} Whether the 2 bytes are an offset control code
+ */
+
+
+ Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
+ return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
+ };
+ /**
+ * Detects if the 2-byte packet is a Preamble Address Code
+ *
+ * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
+ * or 0x18 to 0x1f (for data channel 2), with the second byte in the
+ * range 0x40 to 0x7f.
+ *
+ * @param {Integer} char0 The first byte
+ * @param {Integer} char1 The second byte
+ * @return {Boolean} Whether the 2 bytes are a PAC
+ */
+
+
+ Cea608Stream.prototype.isPAC = function (char0, char1) {
+ return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
+ };
+ /**
+ * Detects if a packet's second byte is in the range of a PAC color code
+ *
+ * PAC color codes have the second byte be in the range 0x40 to 0x4f, or
+ * 0x60 to 0x6f.
+ *
+ * @param {Integer} char1 The second byte
+ * @return {Boolean} Whether the byte is a color PAC
+ */
+
+
+ Cea608Stream.prototype.isColorPAC = function (char1) {
+ return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
+ };
+ /**
+ * Detects if a single byte is in the range of a normal character
+ *
+ * Normal text bytes are in the range 0x20 to 0x7f.
+ *
+ * @param {Integer} char The byte
+ * @return {Boolean} Whether the byte is a normal character
+ */
+
+
+ Cea608Stream.prototype.isNormalChar = function (_char2) {
+ return _char2 >= 0x20 && _char2 <= 0x7f;
+ };
+ /**
+ * Configures roll-up
+ *
+ * @param {Integer} pts Current PTS
+ * @param {Integer} newBaseRow Used by PACs to slide the current window to
+ * a new position
+ */
+
+
+ Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
+ // Reset the base row to the bottom row when switching modes
+ if (this.mode_ !== 'rollUp') {
+ this.row_ = BOTTOM_ROW;
+ this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up
+
+ this.flushDisplayed(pts);
+ this.nonDisplayed_ = createDisplayBuffer();
+ this.displayed_ = createDisplayBuffer();
+ }
+
+ if (newBaseRow !== undefined && newBaseRow !== this.row_) {
+ // move currently displayed captions (up or down) to the new base row
+ for (var i = 0; i < this.rollUpRows_; i++) {
+ this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
+ this.displayed_[this.row_ - i] = '';
+ }
+ }
+
+ if (newBaseRow === undefined) {
+ newBaseRow = this.row_;
+ }
+
+ this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
+ }; // Adds the opening HTML tag for the passed character to the caption text,
+ // and keeps track of it for later closing
+
+
+ Cea608Stream.prototype.addFormatting = function (pts, format) {
+ this.formatting_ = this.formatting_.concat(format);
+ var text = format.reduce(function (text, format) {
+ return text + '<' + format + '>';
+ }, '');
+ this[this.mode_](pts, text);
+ }; // Adds HTML closing tags for current formatting to caption text and
+ // clears remembered formatting
+
+
+ Cea608Stream.prototype.clearFormatting = function (pts) {
+ if (!this.formatting_.length) {
+ return;
+ }
+
+ var text = this.formatting_.reverse().reduce(function (text, format) {
+ return text + '</' + format + '>';
+ }, '');
+ this.formatting_ = [];
+ this[this.mode_](pts, text);
+ }; // Mode Implementations
+
+
+ Cea608Stream.prototype.popOn = function (pts, text) {
+ var baseRow = this.nonDisplayed_[this.row_]; // buffer characters
+
+ baseRow += text;
+ this.nonDisplayed_[this.row_] = baseRow;
+ };
+
+ Cea608Stream.prototype.rollUp = function (pts, text) {
+ var baseRow = this.displayed_[this.row_];
+ baseRow += text;
+ this.displayed_[this.row_] = baseRow;
+ };
+
+ Cea608Stream.prototype.shiftRowsUp_ = function () {
+ var i; // clear out inactive rows
+
+ for (i = 0; i < this.topRow_; i++) {
+ this.displayed_[i] = '';
+ }
+
+ for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
+ this.displayed_[i] = '';
+ } // shift displayed rows up
+
+
+ for (i = this.topRow_; i < this.row_; i++) {
+ this.displayed_[i] = this.displayed_[i + 1];
+ } // clear out the bottom row
+
+
+ this.displayed_[this.row_] = '';
+ };
+
+ Cea608Stream.prototype.paintOn = function (pts, text) {
+ var baseRow = this.displayed_[this.row_];
+ baseRow += text;
+ this.displayed_[this.row_] = baseRow;
+ }; // exports
+
+
+ var captionStream = {
+ CaptionStream: CaptionStream$1,
+ Cea608Stream: Cea608Stream,
+ Cea708Stream: Cea708Stream
+ };
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+ var streamTypes = {
+ H264_STREAM_TYPE: 0x1B,
+ ADTS_STREAM_TYPE: 0x0F,
+ METADATA_STREAM_TYPE: 0x15
+ };
+ var MAX_TS = 8589934592;
+ var RO_THRESH = 4294967296;
+ var TYPE_SHARED = 'shared';
+
+ var handleRollover$1 = function handleRollover(value, reference) {
+ var direction = 1;
+
+ if (value > reference) {
+ // If the current timestamp value is greater than our reference timestamp and we detect a
+ // timestamp rollover, this means the roll over is happening in the opposite direction.
+ // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
+ // point will be set to a small number, e.g. 1. The user then seeks backwards over the
+ // rollover point. In loading this segment, the timestamp values will be very large,
+ // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
+ // the time stamp to be `value - 2^33`.
+ direction = -1;
+ } // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
+ // cause an incorrect adjustment.
+
+
+ while (Math.abs(reference - value) > RO_THRESH) {
+ value += direction * MAX_TS;
+ }
+
+ return value;
+ };
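+
+ // Illustrative sketch, not from the upstream mux.js sources: the adjustment made
+ // by handleRollover above. With a reference timestamp taken just after a 2^33
+ // rollover, a sample from just before the rollover arrives with a huge raw value
+ // and is pulled back by 2^33 so it sorts correctly relative to the reference.
+ var exampleReference = 500; // reference dts taken post-rollover
+ var exampleRawValue = 8589934492; // 2^33 - 100, a pre-rollover sample
+ var exampleAdjusted = handleRollover$1(exampleRawValue, exampleReference); // -100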
+
+ var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
+ var lastDTS, referenceDTS;
+ TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
+ // video and audio. We could use `undefined` here, but having a string
+ // makes debugging a little clearer.
+
+ this.type_ = type || TYPE_SHARED;
+
+ this.push = function (data) {
+ // Any "shared" rollover streams will accept _all_ data. Otherwise,
+ // streams will only accept data that matches their type.
+ if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
+ return;
+ }
+
+ if (referenceDTS === undefined) {
+ referenceDTS = data.dts;
+ }
+
+ data.dts = handleRollover$1(data.dts, referenceDTS);
+ data.pts = handleRollover$1(data.pts, referenceDTS);
+ lastDTS = data.dts;
+ this.trigger('data', data);
+ };
+
+ this.flush = function () {
+ referenceDTS = lastDTS;
+ this.trigger('done');
+ };
+
+ this.endTimeline = function () {
+ this.flush();
+ this.trigger('endedtimeline');
+ };
+
+ this.discontinuity = function () {
+ referenceDTS = void 0;
+ lastDTS = void 0;
+ };
+
+ this.reset = function () {
+ this.discontinuity();
+ this.trigger('reset');
+ };
+ };
+
+ TimestampRolloverStream$1.prototype = new stream();
+ var timestampRolloverStream = {
+ TimestampRolloverStream: TimestampRolloverStream$1,
+ handleRollover: handleRollover$1
+ };
+
+ var percentEncode$1 = function percentEncode(bytes, start, end) {
+ var i,
+ result = '';
+
+ for (i = start; i < end; i++) {
+ result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
+ }
+
+ return result;
+ },
+ // return the string representation of the specified byte range,
+ // interpreted as UTF-8.
+ parseUtf8 = function parseUtf8(bytes, start, end) {
+ return decodeURIComponent(percentEncode$1(bytes, start, end));
+ },
+ // return the string representation of the specified byte range,
+ // interpreted as ISO-8859-1.
+ parseIso88591$1 = function parseIso88591(bytes, start, end) {
+ return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
+ },
+ parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
+ return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
+ },
+ tagParsers = {
+ TXXX: function TXXX(tag) {
+ var i;
+
+ if (tag.data[0] !== 3) {
+ // ignore frames with unrecognized character encodings
+ return;
+ }
+
+ for (i = 1; i < tag.data.length; i++) {
+ if (tag.data[i] === 0) {
+ // parse the text fields
+ tag.description = parseUtf8(tag.data, 1, i); // do not include the null terminator in the tag value
+
+ tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
+ break;
+ }
+ }
+
+ tag.data = tag.value;
+ },
+ WXXX: function WXXX(tag) {
+ var i;
+
+ if (tag.data[0] !== 3) {
+ // ignore frames with unrecognized character encodings
+ return;
+ }
+
+ for (i = 1; i < tag.data.length; i++) {
+ if (tag.data[i] === 0) {
+ // parse the description and URL fields
+ tag.description = parseUtf8(tag.data, 1, i);
+ tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
+ break;
+ }
+ }
+ },
+ PRIV: function PRIV(tag) {
+ var i;
+
+ for (i = 0; i < tag.data.length; i++) {
+ if (tag.data[i] === 0) {
+ // parse the description and URL fields
+ tag.owner = parseIso88591$1(tag.data, 0, i);
+ break;
+ }
+ }
+
+ tag.privateData = tag.data.subarray(i + 1);
+ tag.data = tag.privateData;
+ }
+ },
+ _MetadataStream;
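+
+ // Illustrative sketch, not from the upstream mux.js sources: decoding one of the
+ // sync-safe integers used by ID3 with parseSyncSafeInteger$1 above. Each byte
+ // contributes only 7 bits, so the four bytes 0x00 0x00 0x02 0x01 decode to
+ // (2 << 7) | 1 = 257 rather than the 513 a plain big-endian read would give.
+ var exampleTagSizeBytes = new Uint8Array([0x00, 0x00, 0x02, 0x01]);
+ var exampleTagSize = parseSyncSafeInteger$1(exampleTagSizeBytes); // 257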
+
+ _MetadataStream = function MetadataStream(options) {
+ var settings = {
+ // the bytes of the program-level descriptor field in MP2T
+ // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
+ // program element descriptors"
+ descriptor: options && options.descriptor
+ },
+ // the total size in bytes of the ID3 tag being parsed
+ tagSize = 0,
+ // tag data that is not complete enough to be parsed
+ buffer = [],
+ // the total number of bytes currently in the buffer
+ bufferSize = 0,
+ i;
+
+ _MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
+ // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
+
+
+ this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
+
+ if (settings.descriptor) {
+ for (i = 0; i < settings.descriptor.length; i++) {
+ this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
+ }
+ }
+
+ this.push = function (chunk) {
+ var tag, frameStart, frameSize, frame, i, frameHeader;
+
+ if (chunk.type !== 'timed-metadata') {
+ return;
+ } // if data_alignment_indicator is set in the PES header,
+ // we must have the start of a new ID3 tag. Assume anything
+ // remaining in the buffer was malformed and throw it out
+
+
+ if (chunk.dataAlignmentIndicator) {
+ bufferSize = 0;
+ buffer.length = 0;
+ } // ignore events that don't look like ID3 data
+
+
+ if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
+ this.trigger('log', {
+ level: 'warn',
+ message: 'Skipping unrecognized metadata packet'
+ });
+ return;
+ } // add this chunk to the data we've collected so far
+
+
+ buffer.push(chunk);
+ bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header
+
+ if (buffer.length === 1) {
+ // the frame size is transmitted as a 28-bit integer in the
+ // last four bytes of the ID3 header.
+ // The most significant bit of each byte is dropped and the
+ // results concatenated to recover the actual value.
+ tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
+ // convenient for our comparisons to include it
+
+ tagSize += 10;
+ } // if the entire frame has not arrived, wait for more data
+
+
+ if (bufferSize < tagSize) {
+ return;
+ } // collect the entire frame so it can be parsed
+
+
+ tag = {
+ data: new Uint8Array(tagSize),
+ frames: [],
+ pts: buffer[0].pts,
+ dts: buffer[0].dts
+ };
+
+ for (i = 0; i < tagSize;) {
+ tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
+ i += buffer[0].data.byteLength;
+ bufferSize -= buffer[0].data.byteLength;
+ buffer.shift();
+ } // find the start of the first frame and the end of the tag
+
+
+ frameStart = 10;
+
+ if (tag.data[5] & 0x40) {
+ // advance the frame start past the extended header
+ frameStart += 4; // header size field
+
+ frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14)); // clip any padding off the end
+
+ tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
+ } // parse one or more ID3 frames
+ // http://id3.org/id3v2.3.0#ID3v2_frame_overview
+
+
+ do {
+ // determine the number of bytes in this frame
+ frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));
+
+ if (frameSize < 1) {
+ this.trigger('log', {
+ level: 'warn',
+ message: 'Malformed ID3 frame encountered. Skipping metadata parsing.'
+ });
+ return;
+ }
+
+ frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
+ frame = {
+ id: frameHeader,
+ data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
+ };
+ frame.key = frame.id;
+
+ if (tagParsers[frame.id]) {
+ tagParsers[frame.id](frame); // handle the special PRIV frame used to indicate the start
+ // time for raw AAC data
+
+ if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
+ var d = frame.data,
+ size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
+ size *= 4;
+ size += d[7] & 0x03;
+ frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
+ // on the value of this frame
+ // we couldn't have known the appropriate pts and dts before
+ // parsing this ID3 tag so set those values now
+
+ if (tag.pts === undefined && tag.dts === undefined) {
+ tag.pts = frame.timeStamp;
+ tag.dts = frame.timeStamp;
+ }
+
+ this.trigger('timestamp', frame);
+ }
+ }
+
+ tag.frames.push(frame);
+ frameStart += 10; // advance past the frame header
+
+ frameStart += frameSize; // advance past the frame body
+ } while (frameStart < tagSize);
+
+ this.trigger('data', tag);
+ };
+ };
+
+ _MetadataStream.prototype = new stream();
+ var metadataStream = _MetadataStream;
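+
+ // Illustrative sketch, not from the upstream mux.js sources: the dispatchType
+ // string built by the constructor above. It starts with the metadata stream type
+ // (0x15 -> '15') and appends each descriptor byte as two hex digits. The
+ // descriptor bytes here are hypothetical, chosen only to show the formatting.
+ var exampleMetadata = new metadataStream({
+   descriptor: new Uint8Array([0x49, 0x44, 0x33])
+ });
+ // exampleMetadata.dispatchType === '15494433'
+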
+ var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types
+
+ var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants
+
+
+ var MP2T_PACKET_LENGTH$1 = 188,
+ // bytes
+ SYNC_BYTE$1 = 0x47;
+ /**
+ * Splits an incoming stream of binary data into MPEG-2 Transport
+ * Stream packets.
+ */
+
+ _TransportPacketStream = function TransportPacketStream() {
+ var buffer = new Uint8Array(MP2T_PACKET_LENGTH$1),
+ bytesInBuffer = 0;
+
+ _TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.
+
+ /**
+ * Split a stream of data into M2TS packets
+ **/
+
+
+ this.push = function (bytes) {
+ var startIndex = 0,
+ endIndex = MP2T_PACKET_LENGTH$1,
+ everything; // If there are bytes remaining from the last segment, prepend them to the
+ // bytes that were pushed in
+
+ if (bytesInBuffer) {
+ everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
+ everything.set(buffer.subarray(0, bytesInBuffer));
+ everything.set(bytes, bytesInBuffer);
+ bytesInBuffer = 0;
+ } else {
+ everything = bytes;
+ } // While we have enough data for a packet
+
+
+ while (endIndex < everything.byteLength) {
+ // Look for a pair of start and end sync bytes in the data..
+ if (everything[startIndex] === SYNC_BYTE$1 && everything[endIndex] === SYNC_BYTE$1) {
+ // We found a packet so emit it and jump one whole packet forward in
+ // the stream
+ this.trigger('data', everything.subarray(startIndex, endIndex));
+ startIndex += MP2T_PACKET_LENGTH$1;
+ endIndex += MP2T_PACKET_LENGTH$1;
+ continue;
+ } // If we get here, we have somehow become de-synchronized and we need to step
+ // forward one byte at a time until we find a pair of sync bytes that denote
+ // a packet
+
+
+ startIndex++;
+ endIndex++;
+ } // If there was some data left over at the end of the segment that couldn't
+ // possibly be a whole packet, keep it because it might be the start of a packet
+ // that continues in the next segment
+
+
+ if (startIndex < everything.byteLength) {
+ buffer.set(everything.subarray(startIndex), 0);
+ bytesInBuffer = everything.byteLength - startIndex;
+ }
+ };
+ /**
+ * Passes identified M2TS packets to the TransportParseStream to be parsed
+ **/
+
+
+ this.flush = function () {
+ // If the buffer contains a whole packet when we are being flushed, emit it
+ // and empty the buffer. Otherwise hold onto the data because it may be
+ // important for decoding the next segment
+ if (bytesInBuffer === MP2T_PACKET_LENGTH$1 && buffer[0] === SYNC_BYTE$1) {
+ this.trigger('data', buffer);
+ bytesInBuffer = 0;
+ }
+
+ this.trigger('done');
+ };
+
+ this.endTimeline = function () {
+ this.flush();
+ this.trigger('endedtimeline');
+ };
+
+ this.reset = function () {
+ bytesInBuffer = 0;
+ this.trigger('reset');
+ };
+ };
+
+ _TransportPacketStream.prototype = new stream();
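+
+ // Illustrative sketch, not from the upstream mux.js sources: feeding raw bytes to
+ // the splitter above. The on()/trigger() listener API is assumed to come from the
+ // `stream` base class used throughout this bundle, as it does in upstream mux.js.
+ var examplePacketStream = new _TransportPacketStream();
+ var exampleTsPackets = [];
+ examplePacketStream.on('data', function (packet) {
+   exampleTsPackets.push(packet); // each packet is a 188-byte Uint8Array
+ });
+ var exampleTsBytes = new Uint8Array(2 * MP2T_PACKET_LENGTH$1);
+ exampleTsBytes[0] = SYNC_BYTE$1; // sync byte of the first packet
+ exampleTsBytes[MP2T_PACKET_LENGTH$1] = SYNC_BYTE$1; // sync byte of the second packet
+ examplePacketStream.push(exampleTsBytes); // emits packet 1, buffers packet 2
+ examplePacketStream.flush(); // emits the buffered packet 2, then triggers 'done'
+ // exampleTsPackets.length === 2
+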
+ /**
+ * Accepts an MP2T TransportPacketStream and emits data events with parsed
+ * forms of the individual transport stream packets.
+ */
+
+ _TransportParseStream = function TransportParseStream() {
+ var parsePsi, parsePat, parsePmt, self;
+
+ _TransportParseStream.prototype.init.call(this);
+
+ self = this;
+ this.packetsWaitingForPmt = [];
+ this.programMapTable = undefined;
+
+ parsePsi = function parsePsi(payload, psi) {
+ var offset = 0; // PSI packets may be split into multiple sections and those
+ // sections may be split into multiple packets. If a PSI
+ // section starts in this packet, the payload_unit_start_indicator
+ // will be true and the first byte of the payload will indicate
+ // the offset from the current position to the start of the
+ // section.
+
+ if (psi.payloadUnitStartIndicator) {
+ offset += payload[offset] + 1;
+ }
+
+ if (psi.type === 'pat') {
+ parsePat(payload.subarray(offset), psi);
+ } else {
+ parsePmt(payload.subarray(offset), psi);
+ }
+ };
+
+ parsePat = function parsePat(payload, pat) {
+ pat.section_number = payload[7]; // eslint-disable-line camelcase
+
+ pat.last_section_number = payload[8]; // eslint-disable-line camelcase
+ // skip the PSI header and parse the first PMT entry
+
+ self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
+ pat.pmtPid = self.pmtPid;
+ };
+ /**
+ * Parse out the relevant fields of a Program Map Table (PMT).
+ * @param payload {Uint8Array} the PMT-specific portion of an MP2T
+ * packet. The first byte in this array should be the table_id
+ * field.
+ * @param pmt {object} the object that should be decorated with
+ * fields parsed from the PMT.
+ */
+
+
+ parsePmt = function parsePmt(payload, pmt) {
+ var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
+ // take effect. We don't believe this should ever be the case
+ // for HLS but we'll ignore "forward" PMT declarations if we see
+ // them. Future PMT declarations have the current_next_indicator
+ // set to zero.
+
+ if (!(payload[5] & 0x01)) {
+ return;
+ } // overwrite any existing program map table
+
+
+ self.programMapTable = {
+ video: null,
+ audio: null,
+ 'timed-metadata': {}
+ }; // the mapping table ends at the end of the current section
+
+ sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
+ tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
+ // long the program info descriptors are
+
+ programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table
+
+ offset = 12 + programInfoLength;
+
+ while (offset < tableEnd) {
+ var streamType = payload[offset];
+ var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
+ // TODO: should this be done for metadata too? for now maintain behavior of
+ // multiple metadata streams
+
+ if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
+ self.programMapTable.video = pid;
+ } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
+ self.programMapTable.audio = pid;
+ } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
+ // map pid to stream type for metadata streams
+ self.programMapTable['timed-metadata'][pid] = streamType;
+ } // move to the next table entry
+ // skip past the elementary stream descriptors, if present
+
+
+ offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
+ } // record the map on the packet as well
+
+
+ pmt.programMapTable = self.programMapTable;
+ };
+ /**
+ * Deliver a new MP2T packet to the next stream in the pipeline.
+ */
+
+
+ this.push = function (packet) {
+ var result = {},
+ offset = 4;
+ result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]
+
+ result.pid = packet[1] & 0x1f;
+ result.pid <<= 8;
+ result.pid |= packet[2]; // if an adaptation field is present, its length is specified by the
+ // fifth byte of the TS packet header. The adaptation field is
+ // used to add stuffing to PES packets that don't fill a complete
+ // TS packet, and to specify some forms of timing and control data
+ // that we do not currently use.
+
+ if ((packet[3] & 0x30) >>> 4 > 0x01) {
+ offset += packet[offset] + 1;
+ } // parse the rest of the packet based on the type
+
+
+ if (result.pid === 0) {
+ result.type = 'pat';
+ parsePsi(packet.subarray(offset), result);
+ this.trigger('data', result);
+ } else if (result.pid === this.pmtPid) {
+ result.type = 'pmt';
+ parsePsi(packet.subarray(offset), result);
+ this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now
+
+ while (this.packetsWaitingForPmt.length) {
+ this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
+ }
+ } else if (this.programMapTable === undefined) {
+ // When we have not seen a PMT yet, defer further processing of
+ // PES packets until one has been parsed
+ this.packetsWaitingForPmt.push([packet, offset, result]);
+ } else {
+ this.processPes_(packet, offset, result);
+ }
+ };
+
+ this.processPes_ = function (packet, offset, result) {
+ // set the appropriate stream type
+ if (result.pid === this.programMapTable.video) {
+ result.streamType = streamTypes.H264_STREAM_TYPE;
+ } else if (result.pid === this.programMapTable.audio) {
+ result.streamType = streamTypes.ADTS_STREAM_TYPE;
+ } else {
+ // if not video or audio, it is timed-metadata or unknown
+ // if unknown, streamType will be undefined
+ result.streamType = this.programMapTable['timed-metadata'][result.pid];
+ }
+
+ result.type = 'pes';
+ result.data = packet.subarray(offset);
+ this.trigger('data', result);
+ };
+ };
+
+ _TransportParseStream.prototype = new stream();
+ _TransportParseStream.STREAM_TYPES = {
+ h264: 0x1b,
+ adts: 0x0f
+ };
+ /**
+ * Reconstitutes program elementary stream (PES) packets from parsed
+ * transport stream packets. That is, if you pipe an
+ * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
+ * events will be events which capture the bytes for individual PES
+ * packets plus relevant metadata that has been extracted from the
+ * container.
+ */
+
+ _ElementaryStream = function ElementaryStream() {
+ var self = this,
+ segmentHadPmt = false,
+ // PES packet fragments
+ video = {
+ data: [],
+ size: 0
+ },
+ audio = {
+ data: [],
+ size: 0
+ },
+ timedMetadata = {
+ data: [],
+ size: 0
+ },
+ programMapTable,
+ parsePes = function parsePes(payload, pes) {
+ var ptsDtsFlags;
+ var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array
+
+ pes.data = new Uint8Array(); // In certain live streams, the first TS packets of a fragment carry
+ // frame data continuing from the previous fragment. This check verifies
+ // that the payload begins a new PES packet (start code prefix 0x000001)
+
+ if (startPrefix !== 1) {
+ return;
+ } // get the packet length, this will be 0 for video
+
+
+ pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packet starts a new keyframe
+
+ pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
+ // and a DTS value. Determine what combination of values is
+ // available to work with.
+
+ ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
+ // performs all bitwise operations on 32-bit integers, but it supports a
+ // much greater integer range (exact up to 2^53) using standard
+ // mathematical operations.
+ // We construct a 31-bit value using bitwise operators over the 31
+ // most significant bits and then multiply by 4 (equal to a left-shift
+ // of 2) before we add the final 2 least significant bits of the
+ // timestamp (equal to an OR.)
+
+ if (ptsDtsFlags & 0xC0) {
+ // the PTS and DTS are not written out directly. For information
+ // on how they are encoded, see
+ // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
+ pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
+ pes.pts *= 4; // Left shift by 2
+
+ pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
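+ // Worked example: for a 33-bit PTS of 2^32 + 2, the 31 most significant
+ // bits decode to 2^30; multiplying by 4 yields 2^32 (a value no 32-bit
+ // shift could produce) and adding the two low bits (binary 10) restores
+ // the full timestamp, 4294967298.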
+
+ pes.dts = pes.pts;
+
+ if (ptsDtsFlags & 0x40) {
+ pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
+ pes.dts *= 4; // Left shift by 2
+
+ pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
+ }
+ } // the data section starts immediately after the PES header.
+ // pes_header_data_length specifies the number of header bytes
+ // that follow the last byte of the field.
+
+
+ pes.data = payload.subarray(9 + payload[8]);
+ },
+
+ /**
+ * Pass completely parsed PES packets to the next stream in the pipeline
+ **/
+ flushStream = function flushStream(stream, type, forceFlush) {
+ var packetData = new Uint8Array(stream.size),
+ event = {
+ type: type
+ },
+ i = 0,
+ offset = 0,
+ packetFlushable = false,
+ fragment; // do nothing if there is not enough buffered data for a complete
+ // PES header
+
+ if (!stream.data.length || stream.size < 9) {
+ return;
+ }
+
+ event.trackId = stream.data[0].pid; // reassemble the packet
+
+ for (i = 0; i < stream.data.length; i++) {
+ fragment = stream.data[i];
+ packetData.set(fragment.data, offset);
+ offset += fragment.data.byteLength;
+ } // parse assembled packet's PES header
+
+
+ parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
+ // check that there is enough stream data to fill the packet
+
+ packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right
+
+ if (forceFlush || packetFlushable) {
+ stream.size = 0;
+ stream.data.length = 0;
+ } // only emit packets that are complete. this is to avoid assembling
+ // incomplete PES packets due to poor segmentation
+
+
+ if (packetFlushable) {
+ self.trigger('data', event);
+ }
+ };
+
+ _ElementaryStream.prototype.init.call(this);
+ /**
+ * Identifies M2TS packet types and parses PES packets using metadata
+ * parsed from the PMT
+ **/
+
+
+ this.push = function (data) {
+ ({
+ pat: function pat() {
+ // we have to wait for the PMT to arrive as well before we
+ // have any meaningful metadata
+ },
+ pes: function pes() {
+ var stream, streamType;
+
+ switch (data.streamType) {
+ case streamTypes.H264_STREAM_TYPE:
+ stream = video;
+ streamType = 'video';
+ break;
+
+ case streamTypes.ADTS_STREAM_TYPE:
+ stream = audio;
+ streamType = 'audio';
+ break;
+
+ case streamTypes.METADATA_STREAM_TYPE:
+ stream = timedMetadata;
+ streamType = 'timed-metadata';
+ break;
+
+ default:
+ // ignore unknown stream types
+ return;
+ } // if a new packet is starting, we can flush the completed
+ // packet
+
+
+ if (data.payloadUnitStartIndicator) {
+ flushStream(stream, streamType, true);
+ } // buffer this fragment until we are sure we've received the
+ // complete payload
+
+
+ stream.data.push(data);
+ stream.size += data.data.byteLength;
+ },
+ pmt: function pmt() {
+ var event = {
+ type: 'metadata',
+ tracks: []
+ };
+ programMapTable = data.programMapTable; // translate audio and video streams to tracks
+
+ if (programMapTable.video !== null) {
+ event.tracks.push({
+ timelineStartInfo: {
+ baseMediaDecodeTime: 0
+ },
+ id: +programMapTable.video,
+ codec: 'avc',
+ type: 'video'
+ });
+ }
+
+ if (programMapTable.audio !== null) {
+ event.tracks.push({
+ timelineStartInfo: {
+ baseMediaDecodeTime: 0
+ },
+ id: +programMapTable.audio,
+ codec: 'adts',
+ type: 'audio'
+ });
+ }
+
+ segmentHadPmt = true;
+ self.trigger('data', event);
+ }
+ })[data.type]();
+ };
+
+ this.reset = function () {
+ video.size = 0;
+ video.data.length = 0;
+ audio.size = 0;
+ audio.data.length = 0;
+ this.trigger('reset');
+ };
+ /**
+ * Flush any remaining input. Video PES packets may be of variable
+ * length. Normally, the start of a new video packet can trigger the
+ * finalization of the previous packet. That is not possible if no
+ * more video is forthcoming, however. In that case, some other
+ * mechanism (like the end of the file) has to be employed. When it is
+ * clear that no additional data is forthcoming, calling this method
+ * will flush the buffered packets.
+ */
+
+
+ this.flushStreams_ = function () {
+ // !!THIS ORDER IS IMPORTANT!!
+ // video first then audio
+ flushStream(video, 'video');
+ flushStream(audio, 'audio');
+ flushStream(timedMetadata, 'timed-metadata');
+ };
+
+ this.flush = function () {
+ // if on flush we haven't had a pmt emitted
+ // and we have a pmt to emit. emit the pmt
+ // so that we trigger a trackinfo downstream.
+ if (!segmentHadPmt && programMapTable) {
+ var pmt = {
+ type: 'metadata',
+ tracks: []
+ }; // translate audio and video streams to tracks
+
+ if (programMapTable.video !== null) {
+ pmt.tracks.push({
+ timelineStartInfo: {
+ baseMediaDecodeTime: 0
+ },
+ id: +programMapTable.video,
+ codec: 'avc',
+ type: 'video'
+ });
+ }
+
+ if (programMapTable.audio !== null) {
+ pmt.tracks.push({
+ timelineStartInfo: {
+ baseMediaDecodeTime: 0
+ },
+ id: +programMapTable.audio,
+ codec: 'adts',
+ type: 'audio'
+ });
+ }
+
+ self.trigger('data', pmt);
+ }
+
+ segmentHadPmt = false;
+ this.flushStreams_();
+ this.trigger('done');
+ };
+ };
+
+ _ElementaryStream.prototype = new stream();
+ var m2ts = {
+ PAT_PID: 0x0000,
+ MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
+ TransportPacketStream: _TransportPacketStream,
+ TransportParseStream: _TransportParseStream,
+ ElementaryStream: _ElementaryStream,
+ TimestampRolloverStream: TimestampRolloverStream,
+ CaptionStream: captionStream.CaptionStream,
+ Cea608Stream: captionStream.Cea608Stream,
+ Cea708Stream: captionStream.Cea708Stream,
+ MetadataStream: metadataStream
+ };
+
+ for (var type in streamTypes) {
+ if (streamTypes.hasOwnProperty(type)) {
+ m2ts[type] = streamTypes[type];
+ }
+ }
+
+ var m2ts_1 = m2ts;
+ var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;
+
+ var _AdtsStream;
+
+ var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
+ /*
+ * Accepts an ElementaryStream and emits data events with parsed
+ * AAC Audio Frames of the individual packets. Input audio in ADTS
+ * format is unpacked and re-emitted as AAC frames.
+ *
+ * @see http://wiki.multimedia.cx/index.php?title=ADTS
+ * @see http://wiki.multimedia.cx/?title=Understanding_AAC
+ */
+
+ _AdtsStream = function AdtsStream(handlePartialSegments) {
+ var buffer,
+ frameNum = 0;
+
+ _AdtsStream.prototype.init.call(this);
+
+ this.skipWarn_ = function (start, end) {
+ this.trigger('log', {
+ level: 'warn',
+ message: "adts skipping bytes " + start + " to " + end + " in frame " + frameNum + " outside syncword"
+ });
+ };
+
+ this.push = function (packet) {
+ var i = 0,
+ frameLength,
+ protectionSkipBytes,
+ oldBuffer,
+ sampleCount,
+ adtsFrameDuration;
+
+ if (!handlePartialSegments) {
+ frameNum = 0;
+ }
+
+ if (packet.type !== 'audio') {
+ // ignore non-audio data
+ return;
+ } // Prepend any data in the buffer to the input data so that we can parse
+ // aac frames that cross a PES packet boundary
+
+
+ if (buffer && buffer.length) {
+ oldBuffer = buffer;
+ buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
+ buffer.set(oldBuffer);
+ buffer.set(packet.data, oldBuffer.byteLength);
+ } else {
+ buffer = packet.data;
+ } // unpack any ADTS frames which have been fully received
+ // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
+
+
+ var skip; // We use i + 7 here because we want to be able to parse the entire header.
+ // If we don't have enough bytes to do that, then we definitely won't have a full frame.
+
+ while (i + 7 < buffer.length) {
+ // Look for the start of an ADTS header..
+ if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
+ if (typeof skip !== 'number') {
+ skip = i;
+ } // If a valid header was not found, jump one forward and attempt to
+ // find a valid ADTS header starting at the next byte
+
+
+ i++;
+ continue;
+ }
+
+ if (typeof skip === 'number') {
+ this.skipWarn_(skip, i);
+ skip = null;
+ } // The protection skip bit tells us if we have 2 bytes of CRC data at the
+ // end of the ADTS header
+
+
+ protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
+ // end of the sync sequence
+ // NOTE: frame length includes the size of the header
+
+ frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
+ sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
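+ // For example, with sampling_frequency_index 4 (44100 Hz) and the usual
+ // single raw data block per frame, sampleCount is 1024 and the frame
+ // duration computed below is 1024 * 90000 / 44100, roughly 2090 ticks of
+ // the 90kHz clock (about 23.2 ms of audio per ADTS frame).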
+ adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,
+ // then we have to wait for more data
+
+ if (buffer.byteLength - i < frameLength) {
+ break;
+ } // Otherwise, deliver the complete AAC frame
+
+
+ this.trigger('data', {
+ pts: packet.pts + frameNum * adtsFrameDuration,
+ dts: packet.dts + frameNum * adtsFrameDuration,
+ sampleCount: sampleCount,
+ audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
+ channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
+ samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
+ samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
+ // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
+ samplesize: 16,
+ // data is the frame without its header
+ data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
+ });
+ frameNum++;
+ i += frameLength;
+ }
+
+ if (typeof skip === 'number') {
+ this.skipWarn_(skip, i);
+ skip = null;
+ } // remove processed bytes from the buffer.
+
+
+ buffer = buffer.subarray(i);
+ };
+
+ this.flush = function () {
+ frameNum = 0;
+ this.trigger('done');
+ };
+
+ this.reset = function () {
+ buffer = void 0;
+ this.trigger('reset');
+ };
+
+ this.endTimeline = function () {
+ buffer = void 0;
+ this.trigger('endedtimeline');
+ };
+ };
+
+ _AdtsStream.prototype = new stream();
+ var adts = _AdtsStream;
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+ var ExpGolomb;
+ /**
+ * Parser for exponential Golomb codes, a variable-bitwidth number encoding
+ * scheme used by h264.
+ */
+
+ ExpGolomb = function ExpGolomb(workingData) {
+ var // the number of bytes left to examine in workingData
+ workingBytesAvailable = workingData.byteLength,
+ // the current word being examined
+ workingWord = 0,
+ // :uint
+ // the number of bits left to examine in the current word
+ workingBitsAvailable = 0; // :uint;
+ // ():uint
+
+ this.length = function () {
+ return 8 * workingBytesAvailable;
+ }; // ():uint
+
+
+ this.bitsAvailable = function () {
+ return 8 * workingBytesAvailable + workingBitsAvailable;
+ }; // ():void
+
+
+ this.loadWord = function () {
+ var position = workingData.byteLength - workingBytesAvailable,
+ workingBytes = new Uint8Array(4),
+ availableBytes = Math.min(4, workingBytesAvailable);
+
+ if (availableBytes === 0) {
+ throw new Error('no bytes available');
+ }
+
+ workingBytes.set(workingData.subarray(position, position + availableBytes));
+ workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
+
+ workingBitsAvailable = availableBytes * 8;
+ workingBytesAvailable -= availableBytes;
+ }; // (count:int):void
+
+
+ this.skipBits = function (count) {
+ var skipBytes; // :int
+
+ if (workingBitsAvailable > count) {
+ workingWord <<= count;
+ workingBitsAvailable -= count;
+ } else {
+ count -= workingBitsAvailable;
+ skipBytes = Math.floor(count / 8);
+ count -= skipBytes * 8;
+ workingBytesAvailable -= skipBytes;
+ this.loadWord();
+ workingWord <<= count;
+ workingBitsAvailable -= count;
+ }
+ }; // (size:int):uint
+
+
+ this.readBits = function (size) {
+ var bits = Math.min(workingBitsAvailable, size),
+ // :uint
+ valu = workingWord >>> 32 - bits; // :uint
+ // if size > 31, handle error
+
+ workingBitsAvailable -= bits;
+
+ if (workingBitsAvailable > 0) {
+ workingWord <<= bits;
+ } else if (workingBytesAvailable > 0) {
+ this.loadWord();
+ }
+
+ bits = size - bits;
+
+ if (bits > 0) {
+ return valu << bits | this.readBits(bits);
+ }
+
+ return valu;
+ }; // ():uint
+
+
+ this.skipLeadingZeros = function () {
+ var leadingZeroCount; // :uint
+
+ for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
+ if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
+ // the first bit of working word is 1
+ workingWord <<= leadingZeroCount;
+ workingBitsAvailable -= leadingZeroCount;
+ return leadingZeroCount;
+ }
+ } // we exhausted workingWord and still have not found a 1
+
+
+ this.loadWord();
+ return leadingZeroCount + this.skipLeadingZeros();
+ }; // ():void
+
+
+ this.skipUnsignedExpGolomb = function () {
+ this.skipBits(1 + this.skipLeadingZeros());
+ }; // ():void
+
+
+ this.skipExpGolomb = function () {
+ this.skipBits(1 + this.skipLeadingZeros());
+ }; // ():uint
+
+
+ this.readUnsignedExpGolomb = function () {
+ var clz = this.skipLeadingZeros(); // :uint
+
+ return this.readBits(clz + 1) - 1;
+ }; // ():int
+
+
+ this.readExpGolomb = function () {
+ var valu = this.readUnsignedExpGolomb(); // :int
+
+ if (0x01 & valu) {
+ // the number is odd if the low order bit is set
+ return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
+ }
+
+ return -1 * (valu >>> 1); // divide by two then make it negative
+ }; // Some convenience functions
+ // :Boolean
+
+
+ this.readBoolean = function () {
+ return this.readBits(1) === 1;
+ }; // ():int
+
+
+ this.readUnsignedByte = function () {
+ return this.readBits(8);
+ };
+
+ this.loadWord();
+ };
+
+ var expGolomb = ExpGolomb;
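+ // Worked example of the Exp-Golomb decoding above: the bit string 00111
+ // has two leading zeros, so readUnsignedExpGolomb reads the next 3 bits
+ // (binary 111 = 7) and returns 7 - 1 = 6. readExpGolomb then maps the
+ // unsigned code to a signed value: 6 (even) becomes -3, while 5 (odd)
+ // would become (5 + 1) >>> 1 = 3.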
+
+ var _H264Stream, _NalByteStream;
+
+ var PROFILES_WITH_OPTIONAL_SPS_DATA;
+ /**
+ * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
+ */
+
+ _NalByteStream = function NalByteStream() {
+ var syncPoint = 0,
+ i,
+ buffer;
+
+ _NalByteStream.prototype.init.call(this);
+ /*
+ * Scans a byte stream and triggers a data event with the NAL units found.
+ * @param {Object} data Event received from H264Stream
+ * @param {Uint8Array} data.data The h264 byte stream to be scanned
+ *
+ * @see H264Stream.push
+ */
+
+
+ this.push = function (data) {
+ var swapBuffer;
+
+ if (!buffer) {
+ buffer = data.data;
+ } else {
+ swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
+ swapBuffer.set(buffer);
+ swapBuffer.set(data.data, buffer.byteLength);
+ buffer = swapBuffer;
+ }
+
+ var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
+ // scan for NAL unit boundaries
+ // a match looks like this:
+ // 0 0 1 .. NAL .. 0 0 1
+ // ^ sync point ^ i
+ // or this:
+ // 0 0 1 .. NAL .. 0 0 0
+ // ^ sync point ^ i
+ // advance the sync point to a NAL start, if necessary
+
+ for (; syncPoint < len - 3; syncPoint++) {
+ if (buffer[syncPoint + 2] === 1) {
+ // the sync point is properly aligned
+ i = syncPoint + 5;
+ break;
+ }
+ }
+
+ while (i < len) {
+ // look at the current byte to determine if we've hit the end of
+ // a NAL unit boundary
+ switch (buffer[i]) {
+ case 0:
+ // skip past non-sync sequences
+ if (buffer[i - 1] !== 0) {
+ i += 2;
+ break;
+ } else if (buffer[i - 2] !== 0) {
+ i++;
+ break;
+ } // deliver the NAL unit if it isn't empty
+
+
+ if (syncPoint + 3 !== i - 2) {
+ this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
+ } // drop trailing zeroes
+
+
+ do {
+ i++;
+ } while (buffer[i] !== 1 && i < len);
+
+ syncPoint = i - 2;
+ i += 3;
+ break;
+
+ case 1:
+ // skip past non-sync sequences
+ if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
+ i += 3;
+ break;
+ } // deliver the NAL unit
+
+
+ this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
+ syncPoint = i - 2;
+ i += 3;
+ break;
+
+ default:
+ // the current byte isn't a one or zero, so it cannot be part
+ // of a sync sequence
+ i += 3;
+ break;
+ }
+ } // filter out the NAL units that were delivered
+
+
+ buffer = buffer.subarray(syncPoint);
+ i -= syncPoint;
+ syncPoint = 0;
+ };
+
+ this.reset = function () {
+ buffer = null;
+ syncPoint = 0;
+ this.trigger('reset');
+ };
+
+ this.flush = function () {
+ // deliver the last buffered NAL unit
+ if (buffer && buffer.byteLength > 3) {
+ this.trigger('data', buffer.subarray(syncPoint + 3));
+ } // reset the stream state
+
+
+ buffer = null;
+ syncPoint = 0;
+ this.trigger('done');
+ };
+
+ this.endTimeline = function () {
+ this.flush();
+ this.trigger('endedtimeline');
+ };
+ };
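+ // For example, given the Annex B bytes 00 00 01 67 ... 00 00 01 68 ...,
+ // the stream above emits a 'data' event with the payload of the first NAL
+ // unit (beginning at the 0x67 byte) once the second start code is seen;
+ // a trailing NAL unit with no start code after it is only delivered by
+ // flush().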
+
+ _NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
+ // see Recommendation ITU-T H.264 (4/2013),
+ // 7.3.2.1.1 Sequence parameter set data syntax
+
+ PROFILES_WITH_OPTIONAL_SPS_DATA = {
+ 100: true,
+ 110: true,
+ 122: true,
+ 244: true,
+ 44: true,
+ 83: true,
+ 86: true,
+ 118: true,
+ 128: true,
+ // TODO: the three profiles below don't
+ // appear to have sps data in the specification anymore?
+ 138: true,
+ 139: true,
+ 134: true
+ };
+ /**
+ * Accepts input from an ElementaryStream and produces H.264 NAL unit data
+ * events.
+ */
+
+ _H264Stream = function H264Stream() {
+ var nalByteStream = new _NalByteStream(),
+ self,
+ trackId,
+ currentPts,
+ currentDts,
+ discardEmulationPreventionBytes,
+ readSequenceParameterSet,
+ skipScalingList;
+
+ _H264Stream.prototype.init.call(this);
+
+ self = this;
+ /*
+ * Pushes a packet from a stream onto the NalByteStream
+ *
+ * @param {Object} packet - A packet received from a stream
+ * @param {Uint8Array} packet.data - The raw bytes of the packet
+ * @param {Number} packet.dts - Decode timestamp of the packet
+ * @param {Number} packet.pts - Presentation timestamp of the packet
+ * @param {Number} packet.trackId - The id of the h264 track this packet came from
+ * @param {('video'|'audio')} packet.type - The type of packet
+ *
+ */
+
+ this.push = function (packet) {
+ if (packet.type !== 'video') {
+ return;
+ }
+
+ trackId = packet.trackId;
+ currentPts = packet.pts;
+ currentDts = packet.dts;
+ nalByteStream.push(packet);
+ };
+ /*
+ * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
+ * for the NALUs to the next stream component.
+ * Also, preprocess caption and sequence parameter NALUs.
+ *
+ * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
+ * @see NalByteStream.push
+ */
+
+
+ nalByteStream.on('data', function (data) {
+ var event = {
+ trackId: trackId,
+ pts: currentPts,
+ dts: currentDts,
+ data: data,
+ nalUnitTypeCode: data[0] & 0x1f
+ };
+
+ switch (event.nalUnitTypeCode) {
+ case 0x05:
+ event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
+ break;
+
+ case 0x06:
+ event.nalUnitType = 'sei_rbsp';
+ event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
+ break;
+
+ case 0x07:
+ event.nalUnitType = 'seq_parameter_set_rbsp';
+ event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
+ event.config = readSequenceParameterSet(event.escapedRBSP);
+ break;
+
+ case 0x08:
+ event.nalUnitType = 'pic_parameter_set_rbsp';
+ break;
+
+ case 0x09:
+ event.nalUnitType = 'access_unit_delimiter_rbsp';
+ break;
+ } // This triggers data on the H264Stream
+
+
+ self.trigger('data', event);
+ });
+ nalByteStream.on('done', function () {
+ self.trigger('done');
+ });
+ nalByteStream.on('partialdone', function () {
+ self.trigger('partialdone');
+ });
+ nalByteStream.on('reset', function () {
+ self.trigger('reset');
+ });
+ nalByteStream.on('endedtimeline', function () {
+ self.trigger('endedtimeline');
+ });
+
+ this.flush = function () {
+ nalByteStream.flush();
+ };
+
+ this.partialFlush = function () {
+ nalByteStream.partialFlush();
+ };
+
+ this.reset = function () {
+ nalByteStream.reset();
+ };
+
+ this.endTimeline = function () {
+ nalByteStream.endTimeline();
+ };
+ /**
+ * Advance the ExpGolomb decoder past a scaling list. The scaling
+ * list is optionally transmitted as part of a sequence parameter
+ * set and is not relevant to transmuxing.
+ * @param count {number} the number of entries in this scaling list
+ * @param expGolombDecoder {object} an ExpGolomb pointed to the
+ * start of a scaling list
+ * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
+ */
+
+
+ skipScalingList = function skipScalingList(count, expGolombDecoder) {
+ var lastScale = 8,
+ nextScale = 8,
+ j,
+ deltaScale;
+
+ for (j = 0; j < count; j++) {
+ if (nextScale !== 0) {
+ deltaScale = expGolombDecoder.readExpGolomb();
+ nextScale = (lastScale + deltaScale + 256) % 256;
+ }
+
+ lastScale = nextScale === 0 ? lastScale : nextScale;
+ }
+ };
+ /**
+ * Expunge any "Emulation Prevention" bytes from a "Raw Byte
+ * Sequence Payload"
+ * @param data {Uint8Array} the bytes of a RBSP from a NAL
+ * unit
+ * @return {Uint8Array} the RBSP without any Emulation
+ * Prevention Bytes
+ */
+
+
+ discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
+ var length = data.byteLength,
+ emulationPreventionBytesPositions = [],
+ i = 1,
+ newLength,
+ newData; // Find all `Emulation Prevention Bytes`
+
+ while (i < length - 2) {
+ if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
+ emulationPreventionBytesPositions.push(i + 2);
+ i += 2;
+ } else {
+ i++;
+ }
+ } // If no Emulation Prevention Bytes were found just return the original
+ // array
+
+
+ if (emulationPreventionBytesPositions.length === 0) {
+ return data;
+ } // Create a new array to hold the NAL unit data
+
+
+ newLength = length - emulationPreventionBytesPositions.length;
+ newData = new Uint8Array(newLength);
+ var sourceIndex = 0;
+
+ for (i = 0; i < newLength; sourceIndex++, i++) {
+ if (sourceIndex === emulationPreventionBytesPositions[0]) {
+ // Skip this byte
+ sourceIndex++; // Remove this position index
+
+ emulationPreventionBytesPositions.shift();
+ }
+
+ newData[i] = data[sourceIndex];
+ }
+
+ return newData;
+ };
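+ // For example, the RBSP bytes [0x00, 0x00, 0x03, 0x01] contain one
+ // emulation prevention byte (the 0x03 at index 2), so the function above
+ // returns [0x00, 0x00, 0x01].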
+ /**
+ * Read a sequence parameter set and return some interesting video
+ * properties. A sequence parameter set is the H264 metadata that
+ * describes the properties of upcoming video frames.
+ * @param data {Uint8Array} the bytes of a sequence parameter set
+ * @return {object} an object with configuration parsed from the
+ * sequence parameter set, including the dimensions of the
+ * associated video frames.
+ */
+
+
+ readSequenceParameterSet = function readSequenceParameterSet(data) {
+ var frameCropLeftOffset = 0,
+ frameCropRightOffset = 0,
+ frameCropTopOffset = 0,
+ frameCropBottomOffset = 0,
+ expGolombDecoder,
+ profileIdc,
+ levelIdc,
+ profileCompatibility,
+ chromaFormatIdc,
+ picOrderCntType,
+ numRefFramesInPicOrderCntCycle,
+ picWidthInMbsMinus1,
+ picHeightInMapUnitsMinus1,
+ frameMbsOnlyFlag,
+ scalingListCount,
+ sarRatio = [1, 1],
+ aspectRatioIdc,
+ i;
+ expGolombDecoder = new expGolomb(data);
+ profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
+
+ profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
+
+ levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
+
+ expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
+ // some profiles have more optional data we don't need
+
+ if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
+ chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
+
+ if (chromaFormatIdc === 3) {
+ expGolombDecoder.skipBits(1); // separate_colour_plane_flag
+ }
+
+ expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
+
+ expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
+
+ expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
+
+ if (expGolombDecoder.readBoolean()) {
+ // seq_scaling_matrix_present_flag
+ scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
+
+ for (i = 0; i < scalingListCount; i++) {
+ if (expGolombDecoder.readBoolean()) {
+ // seq_scaling_list_present_flag[ i ]
+ if (i < 6) {
+ skipScalingList(16, expGolombDecoder);
+ } else {
+ skipScalingList(64, expGolombDecoder);
+ }
+ }
+ }
+ }
+ }
+
+ expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
+
+ picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
+
+ if (picOrderCntType === 0) {
+ expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
+ } else if (picOrderCntType === 1) {
+ expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
+
+ expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
+
+ expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
+
+ numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
+
+ for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
+ expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
+ }
+ }
+
+ expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
+
+ expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
+
+ picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
+ picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
+ frameMbsOnlyFlag = expGolombDecoder.readBits(1);
+
+ if (frameMbsOnlyFlag === 0) {
+ expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
+ }
+
+ expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
+
+ if (expGolombDecoder.readBoolean()) {
+ // frame_cropping_flag
+ frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
+ frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
+ frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
+ frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
+ }
+
+ if (expGolombDecoder.readBoolean()) {
+ // vui_parameters_present_flag
+ if (expGolombDecoder.readBoolean()) {
+ // aspect_ratio_info_present_flag
+ aspectRatioIdc = expGolombDecoder.readUnsignedByte();
+
+ switch (aspectRatioIdc) {
+ case 1:
+ sarRatio = [1, 1];
+ break;
+
+ case 2:
+ sarRatio = [12, 11];
+ break;
+
+ case 3:
+ sarRatio = [10, 11];
+ break;
+
+ case 4:
+ sarRatio = [16, 11];
+ break;
+
+ case 5:
+ sarRatio = [40, 33];
+ break;
+
+ case 6:
+ sarRatio = [24, 11];
+ break;
+
+ case 7:
+ sarRatio = [20, 11];
+ break;
+
+ case 8:
+ sarRatio = [32, 11];
+ break;
+
+ case 9:
+ sarRatio = [80, 33];
+ break;
+
+ case 10:
+ sarRatio = [18, 11];
+ break;
+
+ case 11:
+ sarRatio = [15, 11];
+ break;
+
+ case 12:
+ sarRatio = [64, 33];
+ break;
+
+ case 13:
+ sarRatio = [160, 99];
+ break;
+
+ case 14:
+ sarRatio = [4, 3];
+ break;
+
+ case 15:
+ sarRatio = [3, 2];
+ break;
+
+ case 16:
+ sarRatio = [2, 1];
+ break;
+
+ case 255:
+ {
+ sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
+ break;
+ }
+ }
+
+ if (sarRatio) {
+ sarRatio[0] / sarRatio[1];
+ }
+ }
+ }
+
+ return {
+ profileIdc: profileIdc,
+ levelIdc: levelIdc,
+ profileCompatibility: profileCompatibility,
+ width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
+ height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
+ // sar is sample aspect ratio
+ sarRatio: sarRatio
+ };
+ };
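+ // Worked example of the width/height math above: a progressive 1280x720
+ // stream typically carries pic_width_in_mbs_minus1 = 79 and
+ // pic_height_in_map_units_minus1 = 44 with frame_mbs_only_flag = 1 and no
+ // cropping, giving (79 + 1) * 16 = 1280 and (2 - 1) * (44 + 1) * 16 = 720.
+ // 1080p video is coded as 1088 rows with frame_crop_bottom_offset = 4,
+ // and 1088 - 4 * 2 = 1080.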
+ };
+
+ _H264Stream.prototype = new stream();
+ var h264 = {
+ H264Stream: _H264Stream,
+ NalByteStream: _NalByteStream
+ };
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ *
+ * Utilities to detect basic properties and metadata about Aac data.
+ */
+
+ var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
+
+ var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
+ var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
+ flags = header[byteIndex + 5],
+ footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
+
+ returnSize = returnSize >= 0 ? returnSize : 0;
+
+ if (footerPresent) {
+ return returnSize + 20;
+ }
+
+ return returnSize + 10;
+ };
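+ // ID3v2 sizes are "syncsafe": only the low 7 bits of each byte are used
+ // (the high bit is always 0). For example, size bytes 00 00 02 01 decode
+ // to (2 << 7) | 1 = 257, and parseId3TagSize then adds the 10-byte tag
+ // header (or 20 bytes when a footer is present).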
+
+ var getId3Offset = function getId3Offset(data, offset) {
+ if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
+ return offset;
+ }
+
+ offset += parseId3TagSize(data, offset);
+ return getId3Offset(data, offset);
+ }; // TODO: use vhs-utils
+
+
+ var isLikelyAacData$1 = function isLikelyAacData(data) {
+ var offset = getId3Offset(data, 0);
+ return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the 2 layer bits are 0, aka this
+ // is not mp3 data but aac data.
+ (data[offset + 1] & 0x16) === 0x10;
+ };
+
+ var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
+ return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
+ }; // return a percent-encoded representation of the specified byte range
+ // @see http://en.wikipedia.org/wiki/Percent-encoding
+
+
+ var percentEncode = function percentEncode(bytes, start, end) {
+ var i,
+ result = '';
+
+ for (i = start; i < end; i++) {
+ result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
+ }
+
+ return result;
+ }; // return the string representation of the specified byte range,
+ // interpreted as ISO-8859-1.
+
+
+ var parseIso88591 = function parseIso88591(bytes, start, end) {
+ return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
+ };
+
+ var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
+ var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
+ middle = header[byteIndex + 4] << 3,
+ highTwo = (header[byteIndex + 3] & 0x3) << 11; // mask before shifting: the low 2 bits of byte 3 are the top 2 bits of the 13-bit frame length
+ return highTwo | middle | lowThree;
+ };
+
+ var parseType$2 = function parseType(header, byteIndex) {
+ if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
+ return 'timed-metadata';
+ } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
+ return 'audio';
+ }
+
+ return null;
+ };
+
+ var parseSampleRate = function parseSampleRate(packet) {
+ var i = 0;
+
+ while (i + 5 < packet.length) {
+ if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
+ // If a valid header was not found, jump one forward and attempt to
+ // find a valid ADTS header starting at the next byte
+ i++;
+ continue;
+ }
+
+ return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
+ }
+
+ return null;
+ };
+
+ var parseAacTimestamp = function parseAacTimestamp(packet) {
+ var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
+
+ frameStart = 10;
+
+ if (packet[5] & 0x40) {
+ // advance the frame start past the extended header
+ frameStart += 4; // header size field
+
+ frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
+ } // parse one or more ID3 frames
+ // http://id3.org/id3v2.3.0#ID3v2_frame_overview
+
+
+ do {
+ // determine the number of bytes in this frame
+ frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
+
+ if (frameSize < 1) {
+ return null;
+ }
+
+ frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
+
+ if (frameHeader === 'PRIV') {
+ frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
+
+ for (var i = 0; i < frame.byteLength; i++) {
+ if (frame[i] === 0) {
+ var owner = parseIso88591(frame, 0, i);
+
+ if (owner === 'com.apple.streaming.transportStreamTimestamp') {
+ var d = frame.subarray(i + 1);
+ var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
+ size *= 4;
+ size += d[7] & 0x03;
+ return size;
+ }
+
+ break;
+ }
+ }
+ }
+
+ frameStart += 10; // advance past the frame header
+
+ frameStart += frameSize; // advance past the frame body
+ } while (frameStart < packet.byteLength);
+
+ return null;
+ };
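+ // The value returned above is the 33-bit MPEG-TS presentation timestamp
+ // (90kHz clock) carried in a 'com.apple.streaming.transportStreamTimestamp'
+ // PRIV frame; it uses the same multiply-by-4-then-add-the-low-bits trick as
+ // the PES parser so the 33rd bit survives JavaScript's 32-bit bitwise
+ // operations.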
+
+ var utils = {
+ isLikelyAacData: isLikelyAacData$1,
+ parseId3TagSize: parseId3TagSize,
+ parseAdtsSize: parseAdtsSize,
+ parseType: parseType$2,
+ parseSampleRate: parseSampleRate,
+ parseAacTimestamp: parseAacTimestamp
+ };
+
+ var _AacStream;
+ /**
+ * Splits an incoming stream of binary data into ADTS and ID3 Frames.
+ */
+
+
+ _AacStream = function AacStream() {
+ var everything = new Uint8Array(),
+ timeStamp = 0;
+
+ _AacStream.prototype.init.call(this);
+
+ this.setTimestamp = function (timestamp) {
+ timeStamp = timestamp;
+ };
+
+ this.push = function (bytes) {
+ var frameSize = 0,
+ byteIndex = 0,
+ bytesLeft,
+ chunk,
+ packet,
+ tempLength; // If there are bytes remaining from the last segment, prepend them to the
+ // bytes that were pushed in
+
+ if (everything.length) {
+ tempLength = everything.length;
+ everything = new Uint8Array(bytes.byteLength + tempLength);
+ everything.set(everything.subarray(0, tempLength));
+ everything.set(bytes, tempLength);
+ } else {
+ everything = bytes;
+ }
+
+ while (everything.length - byteIndex >= 3) {
+ if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
+ // Exit early because we don't have enough to parse
+ // the ID3 tag header
+ if (everything.length - byteIndex < 10) {
+ break;
+ } // check framesize
+
+
+ frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
+ // to emit a full packet
+ // Add to byteIndex to support multiple ID3 tags in sequence
+
+ if (byteIndex + frameSize > everything.length) {
+ break;
+ }
+
+ chunk = {
+ type: 'timed-metadata',
+ data: everything.subarray(byteIndex, byteIndex + frameSize)
+ };
+ this.trigger('data', chunk);
+ byteIndex += frameSize;
+ continue;
+ } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
+ // Exit early because we don't have enough to parse
+ // the ADTS frame header
+ if (everything.length - byteIndex < 7) {
+ break;
+ }
+
+ frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
+ // to emit a full packet
+
+ if (byteIndex + frameSize > everything.length) {
+ break;
+ }
+
+ packet = {
+ type: 'audio',
+ data: everything.subarray(byteIndex, byteIndex + frameSize),
+ pts: timeStamp,
+ dts: timeStamp
+ };
+ this.trigger('data', packet);
+ byteIndex += frameSize;
+ continue;
+ }
+
+ byteIndex++;
+ }
+
+ bytesLeft = everything.length - byteIndex;
+
+ if (bytesLeft > 0) {
+ everything = everything.subarray(byteIndex);
+ } else {
+ everything = new Uint8Array();
+ }
+ };
+
+ this.reset = function () {
+ everything = new Uint8Array();
+ this.trigger('reset');
+ };
+
+ this.endTimeline = function () {
+ everything = new Uint8Array();
+ this.trigger('endedtimeline');
+ };
+ };
+
+ _AacStream.prototype = new stream();
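+ // Illustrative usage sketch (`aacBytes` is a hypothetical Uint8Array of raw
+ // AAC data, possibly prefixed with ID3 tags):
+ //
+ //   var aacStream = new _AacStream();
+ //   aacStream.setTimestamp(0);
+ //   aacStream.on('data', function (chunk) {
+ //     // chunk.type is 'timed-metadata' (one ID3 tag) or 'audio' (one ADTS frame)
+ //   });
+ //   aacStream.push(aacBytes);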
+ var aac = _AacStream; // constants
+
+ var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
+ var audioProperties = AUDIO_PROPERTIES;
+ var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
+ var videoProperties = VIDEO_PROPERTIES;
+ var H264Stream = h264.H264Stream;
+ var isLikelyAacData = utils.isLikelyAacData;
+ var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types
+
+ var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
+
+ var retriggerForStream = function retriggerForStream(key, event) {
+ event.stream = key;
+ this.trigger('log', event);
+ };
+
+ var addPipelineLogRetriggers = function addPipelineLogRetriggers(transmuxer, pipeline) {
+ var keys = Object.keys(pipeline);
+
+ for (var i = 0; i < keys.length; i++) {
+ var key = keys[i]; // skip non-stream keys and headOfPipeline
+ // which is just a duplicate
+
+ if (key === 'headOfPipeline' || !pipeline[key].on) {
+ continue;
+ }
+
+ pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
+ }
+ };
+ /**
+ * Compare two arrays (even typed) for same-ness
+ */
+
+
+ var arrayEquals = function arrayEquals(a, b) {
+ var i;
+
+ if (a.length !== b.length) {
+ return false;
+ } // compare the value of each element in the array
+
+
+ for (i = 0; i < a.length; i++) {
+ if (a[i] !== b[i]) {
+ return false;
+ }
+ }
+
+ return true;
+ };
+
+ var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
+ var ptsOffsetFromDts = startPts - startDts,
+ decodeDuration = endDts - startDts,
+ presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
+ // however, the player time values will reflect a start from the baseMediaDecodeTime.
+ // In order to provide relevant values for the player times, base timing info on the
+ // baseMediaDecodeTime and the DTS and PTS durations of the segment.
+
+ return {
+ start: {
+ dts: baseMediaDecodeTime,
+ pts: baseMediaDecodeTime + ptsOffsetFromDts
+ },
+ end: {
+ dts: baseMediaDecodeTime + decodeDuration,
+ pts: baseMediaDecodeTime + presentationDuration
+ },
+ prependedContentDuration: prependedContentDuration,
+ baseMediaDecodeTime: baseMediaDecodeTime
+ };
+ };
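+ // For example, with baseMediaDecodeTime 0, startDts 90000, startPts 93000,
+ // endDts 180000 and endPts 183000, the helper above reports
+ // start {dts: 0, pts: 3000} and end {dts: 90000, pts: 90000}: player-facing
+ // times are rebased onto baseMediaDecodeTime while the DTS and PTS
+ // durations of the segment are preserved.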
+ /**
+ * Constructs a single-track, ISO BMFF media segment from AAC data
+ * events. The output of this stream can be fed to a SourceBuffer
+ * configured with a suitable initialization segment.
+ * @param track {object} track metadata configuration
+ * @param options {object} transmuxer options object
+ * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
+ * in the source; false to adjust the first segment to start at 0.
+ */
+
+
+ _AudioSegmentStream = function AudioSegmentStream(track, options) {
+ var adtsFrames = [],
+ sequenceNumber,
+ earliestAllowedDts = 0,
+ audioAppendStartTs = 0,
+ videoBaseMediaDecodeTime = Infinity;
+ options = options || {};
+ sequenceNumber = options.firstSequenceNumber || 0;
+
+ _AudioSegmentStream.prototype.init.call(this);
+
+ this.push = function (data) {
+ trackDecodeInfo.collectDtsInfo(track, data);
+
+ if (track) {
+ audioProperties.forEach(function (prop) {
+ track[prop] = data[prop];
+ });
+ } // buffer audio data until end() is called
+
+
+ adtsFrames.push(data);
+ };
+
+ this.setEarliestDts = function (earliestDts) {
+ earliestAllowedDts = earliestDts;
+ };
+
+ this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
+ videoBaseMediaDecodeTime = baseMediaDecodeTime;
+ };
+
+ this.setAudioAppendStart = function (timestamp) {
+ audioAppendStartTs = timestamp;
+ };
+
+ this.flush = function () {
+ var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed
+
+ if (adtsFrames.length === 0) {
+ this.trigger('done', 'AudioSegmentStream');
+ return;
+ }
+
+ frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
+ track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // amount of silence prefixed to the audio; the value is in video clock rather than audio clock
+
+ videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
+ // samples (that is, adts frames) in the audio data
+
+ track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
+
+ mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
+ adtsFrames = [];
+ moof = mp4Generator.moof(sequenceNumber, [track]);
+ boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time
+
+ sequenceNumber++;
+ boxes.set(moof);
+ boxes.set(mdat, moof.byteLength);
+ trackDecodeInfo.clearDtsInfo(track);
+ frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
+ // tests) on adding the timingInfo event. However, it seems unlikely that there's a
+ // valid use-case where an init segment/data should be triggered without associated
+ // frames. Leaving for now, but should be looked into.
+
+ if (frames.length) {
+ segmentDuration = frames.length * frameDuration;
+ this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
+ // frame info is in video clock cycles. Convert to match expectation of
+ // listeners (that all timestamps will be based on video clock cycles).
+ clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
+ frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
+ this.trigger('timingInfo', {
+ start: frames[0].pts,
+ end: frames[0].pts + segmentDuration
+ });
+ }
+
+ this.trigger('data', {
+ track: track,
+ boxes: boxes
+ });
+ this.trigger('done', 'AudioSegmentStream');
+ };
+
+ this.reset = function () {
+ trackDecodeInfo.clearDtsInfo(track);
+ adtsFrames = [];
+ this.trigger('reset');
+ };
+ };
+
+ _AudioSegmentStream.prototype = new stream();
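+ // Each AudioSegmentStream flush() emits a 'data' event whose `boxes` are a
+ // complete moof+mdat pair, ready to append after a matching init segment.
+ // The per-frame duration used above is measured in the 90kHz video clock:
+ // for 48000 Hz AAC, Math.ceil(90000 * 1024 / 48000) = 1920 ticks per frame.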
+ /**
+ * Constructs a single-track, ISO BMFF media segment from H264 data
+ * events. The output of this stream can be fed to a SourceBuffer
+ * configured with a suitable initialization segment.
+ * @param track {object} track metadata configuration
+ * @param options {object} transmuxer options object
+ * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
+ * gopsToAlignWith list when attempting to align gop pts
+ * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
+ * in the source; false to adjust the first segment to start at 0.
+ */
+
+ _VideoSegmentStream = function VideoSegmentStream(track, options) {
+ var sequenceNumber,
+ nalUnits = [],
+ gopsToAlignWith = [],
+ config,
+ pps;
+ options = options || {};
+ sequenceNumber = options.firstSequenceNumber || 0;
+
+ _VideoSegmentStream.prototype.init.call(this);
+
+ delete track.minPTS;
+ this.gopCache_ = [];
+ /**
+ * Constructs a ISO BMFF segment given H264 nalUnits
+ * @param {Object} nalUnit A data event representing a nalUnit
+ * @param {String} nalUnit.nalUnitType
+ * @param {Object} nalUnit.config Properties for a mp4 track
+ * @param {Uint8Array} nalUnit.data The nalUnit bytes
+ * @see lib/codecs/h264.js
+ **/
+
+ this.push = function (nalUnit) {
+ trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
+
+ if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
+ config = nalUnit.config;
+ track.sps = [nalUnit.data];
+ videoProperties.forEach(function (prop) {
+ track[prop] = config[prop];
+ }, this);
+ }
+
+ if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
+ pps = nalUnit.data;
+ track.pps = [nalUnit.data];
+ } // buffer video until flush() is called
+
+
+ nalUnits.push(nalUnit);
+ };
+ /**
+ * Pass constructed ISO BMFF track and boxes on to the
+ * next stream in the pipeline
+ **/
+
+
+ this.flush = function () {
+ var frames,
+ gopForFusion,
+ gops,
+ moof,
+ mdat,
+ boxes,
+ prependedContentDuration = 0,
+ firstGop,
+ lastGop; // Throw away nalUnits at the start of the byte stream until
+ // we find the first AUD
+
+ while (nalUnits.length) {
+ if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
+ break;
+ }
+
+ nalUnits.shift();
+ } // Return early if no video data has been observed
+
+
+ if (nalUnits.length === 0) {
+ this.resetStream_();
+ this.trigger('done', 'VideoSegmentStream');
+ return;
+ } // Organize the raw nal-units into arrays that represent
+ // higher-level constructs such as frames and gops
+ // (group-of-pictures)
+
+
+ frames = frameUtils.groupNalsIntoFrames(nalUnits);
+ gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
+ // a problem since MSE (on Chrome) requires a leading keyframe.
+ //
+ // We have two approaches to repairing this situation:
+ // 1) GOP-FUSION:
+ // This is where we keep track of the GOPS (group-of-pictures)
+ // from previous fragments and attempt to find one that we can
+ // prepend to the current fragment in order to create a valid
+ // fragment.
+ // 2) KEYFRAME-PULLING:
+ // Here we search for the first keyframe in the fragment and
+ // throw away all the frames between the start of the fragment
+ // and that keyframe. We then extend the duration and pull the
+ // PTS of the keyframe forward so that it covers the time range
+ // of the frames that were disposed of.
+ //
+ // #1 is far preferable to #2, which can cause "stuttering" but
+ // requires more things to be just right.
+
+ if (!gops[0][0].keyFrame) {
+ // Search for a gop for fusion from our gopCache
+ gopForFusion = this.getGopForFusion_(nalUnits[0], track);
+
+ if (gopForFusion) {
+ // in order to provide more accurate timing information about the segment, save
+ // the number of seconds prepended to the original segment due to GOP fusion
+ prependedContentDuration = gopForFusion.duration;
+ gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
+ // new gop at the beginning
+
+ gops.byteLength += gopForFusion.byteLength;
+ gops.nalCount += gopForFusion.nalCount;
+ gops.pts = gopForFusion.pts;
+ gops.dts = gopForFusion.dts;
+ gops.duration += gopForFusion.duration;
+ } else {
+ // If we didn't find a candidate gop fall back to keyframe-pulling
+ gops = frameUtils.extendFirstKeyFrame(gops);
+ }
+ } // Trim gops to align with gopsToAlignWith
+
+
+ if (gopsToAlignWith.length) {
+ var alignedGops;
+
+ if (options.alignGopsAtEnd) {
+ alignedGops = this.alignGopsAtEnd_(gops);
+ } else {
+ alignedGops = this.alignGopsAtStart_(gops);
+ }
+
+ if (!alignedGops) {
+ // save all the nals in the last GOP into the gop cache
+ this.gopCache_.unshift({
+ gop: gops.pop(),
+ pps: track.pps,
+ sps: track.sps
+ }); // Keep a maximum of 6 GOPs in the cache
+
+ this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
+
+ nalUnits = []; // return early; no gops can be aligned with the desired gopsToAlignWith
+
+ this.resetStream_();
+ this.trigger('done', 'VideoSegmentStream');
+ return;
+ } // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
+ // when recalculated before sending off to CoalesceStream
+
+
+ trackDecodeInfo.clearDtsInfo(track);
+ gops = alignedGops;
+ }
+
+ trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
+ // samples (that is, frames) in the video data
+
+ track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
+
+ mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
+ track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
+ this.trigger('processedGopsInfo', gops.map(function (gop) {
+ return {
+ pts: gop.pts,
+ dts: gop.dts,
+ byteLength: gop.byteLength
+ };
+ }));
+ firstGop = gops[0];
+ lastGop = gops[gops.length - 1];
+ this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
+ this.trigger('timingInfo', {
+ start: gops[0].pts,
+ end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
+ }); // save all the nals in the last GOP into the gop cache
+
+ this.gopCache_.unshift({
+ gop: gops.pop(),
+ pps: track.pps,
+ sps: track.sps
+ }); // Keep a maximum of 6 GOPs in the cache
+
+ this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
+
+ nalUnits = [];
+ this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
+ this.trigger('timelineStartInfo', track.timelineStartInfo);
+ moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
+ // throwing away hundreds of media segment fragments
+
+ boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
+
+ sequenceNumber++;
+ boxes.set(moof);
+ boxes.set(mdat, moof.byteLength);
+ this.trigger('data', {
+ track: track,
+ boxes: boxes
+ });
+ this.resetStream_(); // Continue with the flush process now
+
+ this.trigger('done', 'VideoSegmentStream');
+ };
+
+ this.reset = function () {
+ this.resetStream_();
+ nalUnits = [];
+ this.gopCache_.length = 0;
+ gopsToAlignWith.length = 0;
+ this.trigger('reset');
+ };
+
+ this.resetStream_ = function () {
+ trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
+ // for instance, when we are rendition switching
+
+ config = undefined;
+ pps = undefined;
+ }; // Search for a candidate Gop for gop-fusion from the gop cache and
+ // return it or return null if no good candidate was found
+
+
+ this.getGopForFusion_ = function (nalUnit) {
+ var halfSecond = 45000,
+ // Half-a-second in a 90khz clock
+ allowableOverlap = 10000,
+ // About 3 frames @ 30fps
+ nearestDistance = Infinity,
+ dtsDistance,
+ nearestGopObj,
+ currentGop,
+ currentGopObj,
+ i; // Search for the GOP nearest to the beginning of this nal unit
+
+ for (i = 0; i < this.gopCache_.length; i++) {
+ currentGopObj = this.gopCache_[i];
+ currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
+
+ if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
+ continue;
+ } // Reject Gops that would require a negative baseMediaDecodeTime
+
+
+ if (currentGop.dts < track.timelineStartInfo.dts) {
+ continue;
+ } // The distance between the end of the gop and the start of the nalUnit
+
+
+ dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within
+ // a half-second of the nal unit
+
+ if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
+ // Always use the closest GOP we found if there is more than
+ // one candidate
+ if (!nearestGopObj || nearestDistance > dtsDistance) {
+ nearestGopObj = currentGopObj;
+ nearestDistance = dtsDistance;
+ }
+ }
+ }
+
+ if (nearestGopObj) {
+ return nearestGopObj.gop;
+ }
+
+ return null;
+ }; // trim gop list to the first gop found that has a matching pts with a gop in the list
+ // of gopsToAlignWith starting from the START of the list
+
+
+ this.alignGopsAtStart_ = function (gops) {
+ var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
+ byteLength = gops.byteLength;
+ nalCount = gops.nalCount;
+ duration = gops.duration;
+ alignIndex = gopIndex = 0;
+
+ while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
+ align = gopsToAlignWith[alignIndex];
+ gop = gops[gopIndex];
+
+ if (align.pts === gop.pts) {
+ break;
+ }
+
+ if (gop.pts > align.pts) {
+ // this current gop starts after the current gop we want to align on, so increment
+ // align index
+ alignIndex++;
+ continue;
+ } // current gop starts before the current gop we want to align on. so increment gop
+ // index
+
+
+ gopIndex++;
+ byteLength -= gop.byteLength;
+ nalCount -= gop.nalCount;
+ duration -= gop.duration;
+ }
+
+ if (gopIndex === 0) {
+ // no gops to trim
+ return gops;
+ }
+
+ if (gopIndex === gops.length) {
+ // all gops trimmed, skip appending all gops
+ return null;
+ }
+
+ alignedGops = gops.slice(gopIndex);
+ alignedGops.byteLength = byteLength;
+ alignedGops.duration = duration;
+ alignedGops.nalCount = nalCount;
+ alignedGops.pts = alignedGops[0].pts;
+ alignedGops.dts = alignedGops[0].dts;
+ return alignedGops;
+ }; // trim gop list to the first gop found that has a matching pts with a gop in the list
+ // of gopsToAlignWith starting from the END of the list
+
+
+ this.alignGopsAtEnd_ = function (gops) {
+ var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
+ alignIndex = gopsToAlignWith.length - 1;
+ gopIndex = gops.length - 1;
+ alignEndIndex = null;
+ matchFound = false;
+
+ while (alignIndex >= 0 && gopIndex >= 0) {
+ align = gopsToAlignWith[alignIndex];
+ gop = gops[gopIndex];
+
+ if (align.pts === gop.pts) {
+ matchFound = true;
+ break;
+ }
+
+ if (align.pts > gop.pts) {
+ alignIndex--;
+ continue;
+ }
+
+ if (alignIndex === gopsToAlignWith.length - 1) {
+ // gop.pts is greater than the last alignment candidate. If no match is found
+ // by the end of this loop, we still want to append gops that come after this
+ // point
+ alignEndIndex = gopIndex;
+ }
+
+ gopIndex--;
+ }
+
+ if (!matchFound && alignEndIndex === null) {
+ return null;
+ }
+
+ var trimIndex;
+
+ if (matchFound) {
+ trimIndex = gopIndex;
+ } else {
+ trimIndex = alignEndIndex;
+ }
+
+ if (trimIndex === 0) {
+ return gops;
+ }
+
+ var alignedGops = gops.slice(trimIndex);
+ var metadata = alignedGops.reduce(function (total, gop) {
+ total.byteLength += gop.byteLength;
+ total.duration += gop.duration;
+ total.nalCount += gop.nalCount;
+ return total;
+ }, {
+ byteLength: 0,
+ duration: 0,
+ nalCount: 0
+ });
+ alignedGops.byteLength = metadata.byteLength;
+ alignedGops.duration = metadata.duration;
+ alignedGops.nalCount = metadata.nalCount;
+ alignedGops.pts = alignedGops[0].pts;
+ alignedGops.dts = alignedGops[0].dts;
+ return alignedGops;
+ };
+
+ this.alignGopsWith = function (newGopsToAlignWith) {
+ gopsToAlignWith = newGopsToAlignWith;
+ };
+ };
+
+ _VideoSegmentStream.prototype = new stream();
+ /**
+ * A Stream that can combine multiple streams (ie. audio & video)
+ * into a single output segment for MSE. Also supports audio-only
+ * and video-only streams.
+ * @param options {object} transmuxer options object
+ * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
+ * in the source; false to adjust the first segment to start at media timeline start.
+ */
+
+ _CoalesceStream = function CoalesceStream(options, metadataStream) {
+ // Number of Tracks per output segment
+ // If greater than 1, we combine multiple
+ // tracks into a single segment
+ this.numberOfTracks = 0;
+ this.metadataStream = metadataStream;
+ options = options || {};
+
+ if (typeof options.remux !== 'undefined') {
+ this.remuxTracks = !!options.remux;
+ } else {
+ this.remuxTracks = true;
+ }
+
+ if (typeof options.keepOriginalTimestamps === 'boolean') {
+ this.keepOriginalTimestamps = options.keepOriginalTimestamps;
+ } else {
+ this.keepOriginalTimestamps = false;
+ }
+
+ this.pendingTracks = [];
+ this.videoTrack = null;
+ this.pendingBoxes = [];
+ this.pendingCaptions = [];
+ this.pendingMetadata = [];
+ this.pendingBytes = 0;
+ this.emittedTracks = 0;
+
+ _CoalesceStream.prototype.init.call(this); // Take output from multiple
+
+
+ this.push = function (output) {
+ // buffer incoming captions until the associated video segment
+ // finishes
+ if (output.text) {
+ return this.pendingCaptions.push(output);
+ } // buffer incoming id3 tags until the final flush
+
+
+ if (output.frames) {
+ return this.pendingMetadata.push(output);
+ } // Add this track to the list of pending tracks and store
+ // important information required for the construction of
+ // the final segment
+
+
+ this.pendingTracks.push(output.track);
+ this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against chrome?
+ // We unshift audio and push video because, as of Chrome 75, when switching
+ // from one init segment to another, if the video mdat does not appear after
+ // the audio mdat, only audio will play for the duration of our transmux.
+
+ if (output.track.type === 'video') {
+ this.videoTrack = output.track;
+ this.pendingBoxes.push(output.boxes);
+ }
+
+ if (output.track.type === 'audio') {
+ this.audioTrack = output.track;
+ this.pendingBoxes.unshift(output.boxes);
+ }
+ };
+ };
+
+ _CoalesceStream.prototype = new stream();
+
+ _CoalesceStream.prototype.flush = function (flushSource) {
+ var offset = 0,
+ event = {
+ captions: [],
+ captionStreams: {},
+ metadata: [],
+ info: {}
+ },
+ caption,
+ id3,
+ initSegment,
+ timelineStartPts = 0,
+ i;
+
+ if (this.pendingTracks.length < this.numberOfTracks) {
+ if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
+ // Return because we haven't received a flush from a data-generating
+ // portion of the segment (meaning that we have only received meta-data
+ // or captions.)
+ return;
+ } else if (this.remuxTracks) {
+ // Return until we have enough tracks from the pipeline to remux (if we
+ // are remuxing audio and video into a single MP4)
+ return;
+ } else if (this.pendingTracks.length === 0) {
+ // In the case where we receive a flush without any data having been
+ // received we consider it an emitted track for the purposes of coalescing
+ // `done` events.
+ // We do this for the case where there is an audio and video track in the
+ // segment but no audio data. (seen in several playlists with alternate
+ // audio tracks and no audio present in the main TS segments.)
+ this.emittedTracks++;
+
+ if (this.emittedTracks >= this.numberOfTracks) {
+ this.trigger('done');
+ this.emittedTracks = 0;
+ }
+
+ return;
+ }
+ }
+
+ if (this.videoTrack) {
+ timelineStartPts = this.videoTrack.timelineStartInfo.pts;
+ videoProperties.forEach(function (prop) {
+ event.info[prop] = this.videoTrack[prop];
+ }, this);
+ } else if (this.audioTrack) {
+ timelineStartPts = this.audioTrack.timelineStartInfo.pts;
+ audioProperties.forEach(function (prop) {
+ event.info[prop] = this.audioTrack[prop];
+ }, this);
+ }
+
+ if (this.videoTrack || this.audioTrack) {
+ if (this.pendingTracks.length === 1) {
+ event.type = this.pendingTracks[0].type;
+ } else {
+ event.type = 'combined';
+ }
+
+ this.emittedTracks += this.pendingTracks.length;
+ initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment
+
+ event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
+ // and track definitions
+
+ event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats
+
+ event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together
+
+ for (i = 0; i < this.pendingBoxes.length; i++) {
+ event.data.set(this.pendingBoxes[i], offset);
+ offset += this.pendingBoxes[i].byteLength;
+ } // Translate caption PTS times into second offsets to match the
+ // video timeline for the segment, and add track info
+
+
+ for (i = 0; i < this.pendingCaptions.length; i++) {
+ caption = this.pendingCaptions[i];
+ caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
+ caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
+ event.captionStreams[caption.stream] = true;
+ event.captions.push(caption);
+ } // Translate ID3 frame PTS times into second offsets to match the
+ // video timeline for the segment
+
+
+ for (i = 0; i < this.pendingMetadata.length; i++) {
+ id3 = this.pendingMetadata[i];
+ id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
+ event.metadata.push(id3);
+ } // We add this to every single emitted segment even though we only need
+ // it for the first
+
+
+ event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
+
+ this.pendingTracks.length = 0;
+ this.videoTrack = null;
+ this.pendingBoxes.length = 0;
+ this.pendingCaptions.length = 0;
+ this.pendingBytes = 0;
+ this.pendingMetadata.length = 0; // Emit the built segment
+ // We include captions and ID3 tags for backwards compatibility,
+ // ideally we should send only video and audio in the data event
+
+ this.trigger('data', event); // Emit each caption to the outside world
+ // Ideally, this would happen immediately on parsing captions,
+ // but we need to ensure that video data is sent back first
+ // so that caption timing can be adjusted to match video timing
+
+ for (i = 0; i < event.captions.length; i++) {
+ caption = event.captions[i];
+ this.trigger('caption', caption);
+ } // Emit each id3 tag to the outside world
+ // Ideally, this would happen immediately on parsing the tag,
+ // but we need to ensure that video data is sent back first
+ // so that ID3 frame timing can be adjusted to match video timing
+
+
+ for (i = 0; i < event.metadata.length; i++) {
+ id3 = event.metadata[i];
+ this.trigger('id3Frame', id3);
+ }
+ } // Only emit `done` if all tracks have been flushed and emitted
+
+
+ if (this.emittedTracks >= this.numberOfTracks) {
+ this.trigger('done');
+ this.emittedTracks = 0;
+ }
+ };
+
+ _CoalesceStream.prototype.setRemux = function (val) {
+ this.remuxTracks = val;
+ };
+ /**
+ * A Stream that expects MP2T binary data as input and produces
+ * corresponding media segments, suitable for use with Media Source
+ * Extension (MSE) implementations that support the ISO BMFF byte
+ * stream format, like Chrome.
+ */
+
+
+ _Transmuxer = function Transmuxer(options) {
+ var self = this,
+ hasFlushed = true,
+ videoTrack,
+ audioTrack;
+
+ _Transmuxer.prototype.init.call(this);
+
+ options = options || {};
+ this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
+ this.transmuxPipeline_ = {};
+
+ this.setupAacPipeline = function () {
+ var pipeline = {};
+ this.transmuxPipeline_ = pipeline;
+ pipeline.type = 'aac';
+ pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
+
+ pipeline.aacStream = new aac();
+ pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
+ pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
+ pipeline.adtsStream = new adts();
+ pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
+ pipeline.headOfPipeline = pipeline.aacStream;
+ pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
+ pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
+ pipeline.metadataStream.on('timestamp', function (frame) {
+ pipeline.aacStream.setTimestamp(frame.timeStamp);
+ });
+ pipeline.aacStream.on('data', function (data) {
+ if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
+ return;
+ }
+
+ audioTrack = audioTrack || {
+ timelineStartInfo: {
+ baseMediaDecodeTime: self.baseMediaDecodeTime
+ },
+ codec: 'adts',
+ type: 'audio'
+ }; // hook up the audio segment stream to the first track with aac data
+
+ pipeline.coalesceStream.numberOfTracks++;
+ pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
+ pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
+ pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
+
+ pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
+
+ self.trigger('trackinfo', {
+ hasAudio: !!audioTrack,
+ hasVideo: !!videoTrack
+ });
+ }); // Re-emit any data coming from the coalesce stream to the outside world
+
+ pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
+
+ pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
+ addPipelineLogRetriggers(this, pipeline);
+ };
+
+ this.setupTsPipeline = function () {
+ var pipeline = {};
+ this.transmuxPipeline_ = pipeline;
+ pipeline.type = 'ts';
+ pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
+
+ pipeline.packetStream = new m2ts_1.TransportPacketStream();
+ pipeline.parseStream = new m2ts_1.TransportParseStream();
+ pipeline.elementaryStream = new m2ts_1.ElementaryStream();
+ pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
+ pipeline.adtsStream = new adts();
+ pipeline.h264Stream = new H264Stream();
+ pipeline.captionStream = new m2ts_1.CaptionStream(options);
+ pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
+ pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
+
+ pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
+ // demux the streams
+
+ pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
+ pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
+ pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
+
+ pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
+ pipeline.elementaryStream.on('data', function (data) {
+ var i;
+
+ if (data.type === 'metadata') {
+ i = data.tracks.length; // scan the tracks listed in the metadata
+
+ while (i--) {
+ if (!videoTrack && data.tracks[i].type === 'video') {
+ videoTrack = data.tracks[i];
+ videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
+ } else if (!audioTrack && data.tracks[i].type === 'audio') {
+ audioTrack = data.tracks[i];
+ audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
+ }
+ } // hook up the video segment stream to the first track with h264 data
+
+
+ if (videoTrack && !pipeline.videoSegmentStream) {
+ pipeline.coalesceStream.numberOfTracks++;
+ pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
+ pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
+ pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
+ // When video emits timelineStartInfo data after a flush, we forward that
+ // info to the AudioSegmentStream, if it exists, because video timeline
+ // data takes precedence. Do not do this if keepOriginalTimestamps is set,
+ // because this is a particularly subtle form of timestamp alteration.
+ if (audioTrack && !options.keepOriginalTimestamps) {
+ audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
+ // very earliest DTS we have seen in video because Chrome will
+ // interpret any video track with a baseMediaDecodeTime that is
+ // non-zero as a gap.
+
+ pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
+ }
+ });
+ pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
+ pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
+ pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
+ if (audioTrack) {
+ pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
+ }
+ });
+ pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
+
+ pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
+ }
+
+ if (audioTrack && !pipeline.audioSegmentStream) {
+ // hook up the audio segment stream to the first track with aac data
+ pipeline.coalesceStream.numberOfTracks++;
+ pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
+ pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
+ pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
+ pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
+
+ pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
+ } // emit pmt info
+
+
+ self.trigger('trackinfo', {
+ hasAudio: !!audioTrack,
+ hasVideo: !!videoTrack
+ });
+ }
+ }); // Re-emit any data coming from the coalesce stream to the outside world
+
+ pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
+ pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
+ id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
+ self.trigger('id3Frame', id3Frame);
+ });
+ pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
+
+ pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
+ addPipelineLogRetriggers(this, pipeline);
+ }; // hook up the segment streams once track metadata is delivered
+
+
+ this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
+ var pipeline = this.transmuxPipeline_;
+
+ if (!options.keepOriginalTimestamps) {
+ this.baseMediaDecodeTime = baseMediaDecodeTime;
+ }
+
+ if (audioTrack) {
+ audioTrack.timelineStartInfo.dts = undefined;
+ audioTrack.timelineStartInfo.pts = undefined;
+ trackDecodeInfo.clearDtsInfo(audioTrack);
+
+ if (pipeline.audioTimestampRolloverStream) {
+ pipeline.audioTimestampRolloverStream.discontinuity();
+ }
+ }
+
+ if (videoTrack) {
+ if (pipeline.videoSegmentStream) {
+ pipeline.videoSegmentStream.gopCache_ = [];
+ }
+
+ videoTrack.timelineStartInfo.dts = undefined;
+ videoTrack.timelineStartInfo.pts = undefined;
+ trackDecodeInfo.clearDtsInfo(videoTrack);
+ pipeline.captionStream.reset();
+ }
+
+ if (pipeline.timestampRolloverStream) {
+ pipeline.timestampRolloverStream.discontinuity();
+ }
+ };
+
+ this.setAudioAppendStart = function (timestamp) {
+ if (audioTrack) {
+ this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
+ }
+ };
+
+ this.setRemux = function (val) {
+ var pipeline = this.transmuxPipeline_;
+ options.remux = val;
+
+ if (pipeline && pipeline.coalesceStream) {
+ pipeline.coalesceStream.setRemux(val);
+ }
+ };
+
+ this.alignGopsWith = function (gopsToAlignWith) {
+ if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
+ this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
+ }
+ };
+
+ this.getLogTrigger_ = function (key) {
+ var self = this;
+ return function (event) {
+ event.stream = key;
+ self.trigger('log', event);
+ };
+ }; // feed incoming data to the front of the parsing pipeline
+
+
+ this.push = function (data) {
+ if (hasFlushed) {
+ var isAac = isLikelyAacData(data);
+
+ if (isAac && this.transmuxPipeline_.type !== 'aac') {
+ this.setupAacPipeline();
+ } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
+ this.setupTsPipeline();
+ }
+
+ hasFlushed = false;
+ }
+
+ this.transmuxPipeline_.headOfPipeline.push(data);
+ }; // flush any buffered data
+
+
+ this.flush = function () {
+ hasFlushed = true; // Start at the top of the pipeline and flush all pending work
+
+ this.transmuxPipeline_.headOfPipeline.flush();
+ };
+
+ this.endTimeline = function () {
+ this.transmuxPipeline_.headOfPipeline.endTimeline();
+ };
+
+ this.reset = function () {
+ if (this.transmuxPipeline_.headOfPipeline) {
+ this.transmuxPipeline_.headOfPipeline.reset();
+ }
+ }; // Caption data has to be reset when seeking outside buffered range
+
+
+ this.resetCaptions = function () {
+ if (this.transmuxPipeline_.captionStream) {
+ this.transmuxPipeline_.captionStream.reset();
+ }
+ };
+ };
+
+ _Transmuxer.prototype = new stream();
+ var transmuxer = {
+ Transmuxer: _Transmuxer,
+ VideoSegmentStream: _VideoSegmentStream,
+ AudioSegmentStream: _AudioSegmentStream,
+ AUDIO_PROPERTIES: audioProperties,
+ VIDEO_PROPERTIES: videoProperties,
+ // exported for testing
+ generateSegmentTimingInfo: generateSegmentTimingInfo
+ };
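+
+ // Usage sketch (illustrative, not part of the upstream mux.js source); the
+ // tsBytes Uint8Array below is an assumed MPEG2-TS segment:
+ //
+ //   var t = new transmuxer.Transmuxer({ remux: true });
+ //   t.on('data', function (segment) {
+ //     // segment.initSegment (moov) and segment.data (moof+mdat) are
+ //     // Uint8Arrays built by CoalesceStream.prototype.flush above
+ //   });
+ //   t.on('done', function () { /* all tracks flushed */ });
+ //   t.push(tsBytes);
+ //   t.flush();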
+ /**
+ * mux.js
+ *
+ * Copyright (c) Brightcove
+ * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
+ */
+
+ var toUnsigned$3 = function toUnsigned(value) {
+ return value >>> 0;
+ };
+
+ var toHexString$1 = function toHexString(value) {
+ return ('00' + value.toString(16)).slice(-2);
+ };
+
+ var bin = {
+ toUnsigned: toUnsigned$3,
+ toHexString: toHexString$1
+ };
+
+ var parseType$1 = function parseType(buffer) {
+ var result = '';
+ result += String.fromCharCode(buffer[0]);
+ result += String.fromCharCode(buffer[1]);
+ result += String.fromCharCode(buffer[2]);
+ result += String.fromCharCode(buffer[3]);
+ return result;
+ };
+
+ var parseType_1 = parseType$1;
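+
+ // Example (illustrative bytes): the four bytes [0x6d, 0x6f, 0x6f, 0x76] spell
+ // 'moov', so parseType_1(new Uint8Array([0x6d, 0x6f, 0x6f, 0x76])) === 'moov'.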
+ var toUnsigned$2 = bin.toUnsigned;
+
+ var findBox = function findBox(data, path) {
+ var results = [],
+ i,
+ size,
+ type,
+ end,
+ subresults;
+
+ if (!path.length) {
+ // short-circuit the search for empty paths
+ return null;
+ }
+
+ for (i = 0; i < data.byteLength;) {
+ size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
+ type = parseType_1(data.subarray(i + 4, i + 8));
+ end = size > 1 ? i + size : data.byteLength;
+
+ if (type === path[0]) {
+ if (path.length === 1) {
+ // this is the end of the path and we've found the box we were
+ // looking for
+ results.push(data.subarray(i + 8, end));
+ } else {
+ // recursively search for the next box along the path
+ subresults = findBox(data.subarray(i + 8, end), path.slice(1));
+
+ if (subresults.length) {
+ results = results.concat(subresults);
+ }
+ }
+ }
+
+ i = end;
+ } // we've finished searching all of data
+
+
+ return results;
+ };
+
+ var findBox_1 = findBox;
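+
+ // Usage sketch (illustrative; initSegment is an assumed Uint8Array holding an
+ // fmp4 init segment): findBox_1(initSegment, ['moov', 'trak', 'mdia', 'mdhd'])
+ // walks the nested box structure and returns an array containing the payload
+ // (the bytes after each 8-byte size/type header) of every mdhd box on that path.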
+ var toUnsigned$1 = bin.toUnsigned;
+ var getUint64$1 = numbers.getUint64;
+
+ var tfdt = function tfdt(data) {
+ var result = {
+ version: data[0],
+ flags: new Uint8Array(data.subarray(1, 4))
+ };
+
+ if (result.version === 1) {
+ result.baseMediaDecodeTime = getUint64$1(data.subarray(4));
+ } else {
+ result.baseMediaDecodeTime = toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]);
+ }
+
+ return result;
+ };
+
+ var parseTfdt = tfdt;
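+
+ // Example (illustrative bytes): a version-0 tfdt payload
+ //   parseTfdt(new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x5f, 0x90]))
+ // yields baseMediaDecodeTime 90000, since 0x00015f90 === 90000; version-1
+ // payloads carry a 64-bit value and are read with getUint64 instead.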
+
+ var parseSampleFlags = function parseSampleFlags(flags) {
+ return {
+ isLeading: (flags[0] & 0x0c) >>> 2,
+ dependsOn: flags[0] & 0x03,
+ isDependedOn: (flags[1] & 0xc0) >>> 6,
+ hasRedundancy: (flags[1] & 0x30) >>> 4,
+ paddingValue: (flags[1] & 0x0e) >>> 1,
+ isNonSyncSample: flags[1] & 0x01,
+ degradationPriority: flags[2] << 8 | flags[3]
+ };
+ };
+
+ var parseSampleFlags_1 = parseSampleFlags;
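+
+ // Example (illustrative bytes):
+ //   parseSampleFlags_1(new Uint8Array([0x02, 0x01, 0x00, 0x00]))
+ //   // => { isLeading: 0, dependsOn: 2, isDependedOn: 0, hasRedundancy: 0,
+ //   //      paddingValue: 0, isNonSyncSample: 1, degradationPriority: 0 }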
+
+ var trun = function trun(data) {
+ var result = {
+ version: data[0],
+ flags: new Uint8Array(data.subarray(1, 4)),
+ samples: []
+ },
+ view = new DataView(data.buffer, data.byteOffset, data.byteLength),
+ // Flag interpretation
+ dataOffsetPresent = result.flags[2] & 0x01,
+ // compare with 2nd byte of 0x1
+ firstSampleFlagsPresent = result.flags[2] & 0x04,
+ // compare with 2nd byte of 0x4
+ sampleDurationPresent = result.flags[1] & 0x01,
+ // compare with 2nd byte of 0x100
+ sampleSizePresent = result.flags[1] & 0x02,
+ // compare with 2nd byte of 0x200
+ sampleFlagsPresent = result.flags[1] & 0x04,
+ // compare with 2nd byte of 0x400
+ sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
+ // compare with 2nd byte of 0x800
+ sampleCount = view.getUint32(4),
+ offset = 8,
+ sample;
+
+ if (dataOffsetPresent) {
+ // 32 bit signed integer
+ result.dataOffset = view.getInt32(offset);
+ offset += 4;
+ } // Overrides the flags for the first sample only. The order of
+ // optional values will be: duration, size, compositionTimeOffset
+
+
+ if (firstSampleFlagsPresent && sampleCount) {
+ sample = {
+ flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
+ };
+ offset += 4;
+
+ if (sampleDurationPresent) {
+ sample.duration = view.getUint32(offset);
+ offset += 4;
+ }
+
+ if (sampleSizePresent) {
+ sample.size = view.getUint32(offset);
+ offset += 4;
+ }
+
+ if (sampleCompositionTimeOffsetPresent) {
+ if (result.version === 1) {
+ sample.compositionTimeOffset = view.getInt32(offset);
+ } else {
+ sample.compositionTimeOffset = view.getUint32(offset);
+ }
+
+ offset += 4;
+ }
+
+ result.samples.push(sample);
+ sampleCount--;
+ }
+
+ while (sampleCount--) {
+ sample = {};
+
+ if (sampleDurationPresent) {
+ sample.duration = view.getUint32(offset);
+ offset += 4;
+ }
+
+ if (sampleSizePresent) {
+ sample.size = view.getUint32(offset);
+ offset += 4;
+ }
+
+ if (sampleFlagsPresent) {
+ sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
+ offset += 4;
+ }
+
+ if (sampleCompositionTimeOffsetPresent) {
+ if (result.version === 1) {
+ sample.compositionTimeOffset = view.getInt32(offset);
+ } else {
+ sample.compositionTimeOffset = view.getUint32(offset);
+ }
+
+ offset += 4;
+ }
+
+ result.samples.push(sample);
+ }
+
+ return result;
+ };
+
+ var parseTrun = trun;
+
+ var tfhd = function tfhd(data) {
+ var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
+ result = {
+ version: data[0],
+ flags: new Uint8Array(data.subarray(1, 4)),
+ trackId: view.getUint32(4)
+ },
+ baseDataOffsetPresent = result.flags[2] & 0x01,
+ sampleDescriptionIndexPresent = result.flags[2] & 0x02,
+ defaultSampleDurationPresent = result.flags[2] & 0x08,
+ defaultSampleSizePresent = result.flags[2] & 0x10,
+ defaultSampleFlagsPresent = result.flags[2] & 0x20,
+ durationIsEmpty = result.flags[0] & 0x010000,
+ defaultBaseIsMoof = result.flags[0] & 0x020000,
+ i;
+ i = 8;
+
+ if (baseDataOffsetPresent) {
+ i += 4; // truncate top 4 bytes
+ // FIXME: should we read the full 64 bits?
+
+ result.baseDataOffset = view.getUint32(12);
+ i += 4;
+ }
+
+ if (sampleDescriptionIndexPresent) {
+ result.sampleDescriptionIndex = view.getUint32(i);
+ i += 4;
+ }
+
+ if (defaultSampleDurationPresent) {
+ result.defaultSampleDuration = view.getUint32(i);
+ i += 4;
+ }
+
+ if (defaultSampleSizePresent) {
+ result.defaultSampleSize = view.getUint32(i);
+ i += 4;
+ }
+
+ if (defaultSampleFlagsPresent) {
+ result.defaultSampleFlags = view.getUint32(i);
+ }
+
+ if (durationIsEmpty) {
+ result.durationIsEmpty = true;
+ }
+
+ if (!baseDataOffsetPresent && defaultBaseIsMoof) {
+ result.baseDataOffsetIsMoof = true;
+ }
+
+ return result;
+ };
+
+ var parseTfhd = tfhd;
+ var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
+ var win;
+
+ if (typeof window !== "undefined") {
+ win = window;
+ } else if (typeof commonjsGlobal !== "undefined") {
+ win = commonjsGlobal;
+ } else if (typeof self !== "undefined") {
+ win = self;
+ } else {
+ win = {};
+ }
+
+ var window_1 = win;
+ var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
+ var CaptionStream = captionStream.CaptionStream;
+ /**
+ * Maps an offset in the mdat to a sample based on the size of the samples.
+ * Assumes that `parseSamples` has been called first.
+ *
+ * @param {Number} offset - The offset into the mdat
+ * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
+ * @return {?Object} The matching sample, or null if no match was found.
+ *
+ * @see ISO-BMFF-12/2015, Section 8.8.8
+ **/
+
+ var mapToSample = function mapToSample(offset, samples) {
+ var approximateOffset = offset;
+
+ for (var i = 0; i < samples.length; i++) {
+ var sample = samples[i];
+
+ if (approximateOffset < sample.size) {
+ return sample;
+ }
+
+ approximateOffset -= sample.size;
+ }
+
+ return null;
+ };
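+
+ // Example (illustrative sizes): given samples of sizes [100, 200, 300] (as
+ // produced by parseSamples below), mapToSample(150, samples) subtracts the
+ // first sample's 100 bytes, sees the remaining offset 50 fall inside the
+ // second sample and returns samples[1]; offsets past the last sample yield null.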
+ /**
+ * Finds SEI nal units contained in a Media Data Box.
+ * Assumes that `parseSamples` has been called first.
+ *
+ * @param {Uint8Array} avcStream - The bytes of the mdat
+ * @param {Object[]} samples - The samples parsed out by `parseSamples`
+ * @param {Number} trackId - The trackId of this video track
+ * @return {Object[]} seiNals - the parsed SEI NALUs found.
+ * The contents of the seiNal should match what is expected by
+ * CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
+ *
+ * @see ISO-BMFF-12/2015, Section 8.1.1
+ * @see Rec. ITU-T H.264, 7.3.2.3.1
+ **/
+
+
+ var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
+ var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
+ result = {
+ logs: [],
+ seiNals: []
+ },
+ seiNal,
+ i,
+ length,
+ lastMatchedSample;
+
+ for (i = 0; i + 4 < avcStream.length; i += length) {
+ length = avcView.getUint32(i);
+ i += 4; // Bail if this doesn't appear to be an H264 stream
+
+ if (length <= 0) {
+ continue;
+ }
+
+ switch (avcStream[i] & 0x1F) {
+ case 0x06:
+ var data = avcStream.subarray(i + 1, i + 1 + length);
+ var matchingSample = mapToSample(i, samples);
+ seiNal = {
+ nalUnitType: 'sei_rbsp',
+ size: length,
+ data: data,
+ escapedRBSP: discardEmulationPreventionBytes(data),
+ trackId: trackId
+ };
+
+ if (matchingSample) {
+ seiNal.pts = matchingSample.pts;
+ seiNal.dts = matchingSample.dts;
+ lastMatchedSample = matchingSample;
+ } else if (lastMatchedSample) {
+ // If a matching sample cannot be found, use the last
+ // sample's values as they should be as close as possible
+ seiNal.pts = lastMatchedSample.pts;
+ seiNal.dts = lastMatchedSample.dts;
+ } else {
+ result.logs.push({
+ level: 'warn',
+ message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
+ });
+ break;
+ }
+
+ result.seiNals.push(seiNal);
+ break;
+ }
+ }
+
+ return result;
+ };
+ /**
+ * Parses sample information out of Track Run Boxes and calculates
+ * the absolute presentation and decode timestamps of each sample.
+ *
+ * @param {Array} truns - The Track Run (trun) boxes to be parsed
+ * @param {Number|BigInt} baseMediaDecodeTime - base media decode time from tfdt
+ * @see ISO-BMFF-12/2015, Section 8.8.12
+ * @param {Object} tfhd - The parsed Track Fragment Header
+ * @see inspect.parseTfhd
+ * @return {Object[]} the parsed samples
+ *
+ * @see ISO-BMFF-12/2015, Section 8.8.8
+ **/
+
+
+ var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
+ var currentDts = baseMediaDecodeTime;
+ var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
+ var defaultSampleSize = tfhd.defaultSampleSize || 0;
+ var trackId = tfhd.trackId;
+ var allSamples = [];
+ truns.forEach(function (trun) {
+ // Note: We currently do not parse the sample table as well
+ // as the trun. It's possible some sources will require this.
+ // moov > trak > mdia > minf > stbl
+ var trackRun = parseTrun(trun);
+ var samples = trackRun.samples;
+ samples.forEach(function (sample) {
+ if (sample.duration === undefined) {
+ sample.duration = defaultSampleDuration;
+ }
+
+ if (sample.size === undefined) {
+ sample.size = defaultSampleSize;
+ }
+
+ sample.trackId = trackId;
+ sample.dts = currentDts;
+
+ if (sample.compositionTimeOffset === undefined) {
+ sample.compositionTimeOffset = 0;
+ }
+
+ if (typeof currentDts === 'bigint') {
+ sample.pts = currentDts + window_1.BigInt(sample.compositionTimeOffset);
+ currentDts += window_1.BigInt(sample.duration);
+ } else {
+ sample.pts = currentDts + sample.compositionTimeOffset;
+ currentDts += sample.duration;
+ }
+ });
+ allSamples = allSamples.concat(samples);
+ });
+ return allSamples;
+ };
+ /**
+ * Parses out caption nals from an FMP4 segment's video tracks.
+ *
+ * @param {Uint8Array} segment - The bytes of a single segment
+ * @param {Number} videoTrackId - The trackId of a video track in the segment
+ * @return {Object.<Number, Object[]>} A mapping of video trackId to
+ * a list of seiNals found in that track
+ **/
+
+
+ var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
+ // To get the samples
+ var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units
+
+ var mdats = findBox_1(segment, ['mdat']);
+ var captionNals = {};
+ var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs
+
+ mdats.forEach(function (mdat, index) {
+ var matchingTraf = trafs[index];
+ mdatTrafPairs.push({
+ mdat: mdat,
+ traf: matchingTraf
+ });
+ });
+ mdatTrafPairs.forEach(function (pair) {
+ var mdat = pair.mdat;
+ var traf = pair.traf;
+ var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf
+
+ var headerInfo = parseTfhd(tfhd[0]);
+ var trackId = headerInfo.trackId;
+ var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
+
+ var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
+ var truns = findBox_1(traf, ['trun']);
+ var samples;
+ var result; // Only parse video data for the chosen video track
+
+ if (videoTrackId === trackId && truns.length > 0) {
+ samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
+ result = findSeiNals(mdat, samples, trackId);
+
+ if (!captionNals[trackId]) {
+ captionNals[trackId] = {
+ seiNals: [],
+ logs: []
+ };
+ }
+
+ captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
+ captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
+ }
+ });
+ return captionNals;
+ };
+ /**
+ * Parses out inband captions from an MP4 container and returns
+ * caption objects that can be used by WebVTT and the TextTrack API.
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
+ * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
+ *
+ * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
+ * @param {Number} trackId - The id of the video track to parse
+ * @param {Number} timescale - The timescale for the video track from the init segment
+ *
+ * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
+ * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
+ * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
+ * @return {String} parsedCaptions[].text - The visible content of the caption
+ **/
+
+
+ var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
+ var captionNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
+
+ if (trackId === null) {
+ return null;
+ }
+
+ captionNals = parseCaptionNals(segment, trackId);
+ var trackNals = captionNals[trackId] || {};
+ return {
+ seiNals: trackNals.seiNals,
+ logs: trackNals.logs,
+ timescale: timescale
+ };
+ };
+ /**
+ * Converts SEI NALUs into captions that can be used by video.js
+ **/
+
+
+ var CaptionParser = function CaptionParser() {
+ var isInitialized = false;
+ var captionStream; // Stores segments seen before trackId and timescale are set
+
+ var segmentCache; // Stores video track ID of the track being parsed
+
+ var trackId; // Stores the timescale of the track being parsed
+
+ var timescale; // Stores captions parsed so far
+
+ var parsedCaptions; // Stores whether we are receiving partial data or not
+
+ var parsingPartial;
+ /**
+ * A method to indicate whether a CaptionParser has been initialized
+ * @returns {Boolean}
+ **/
+
+ this.isInitialized = function () {
+ return isInitialized;
+ };
+ /**
+ * Initializes the underlying CaptionStream, SEI NAL parsing
+ * and management, and caption collection
+ **/
+
+
+ this.init = function (options) {
+ captionStream = new CaptionStream();
+ isInitialized = true;
+ parsingPartial = options ? options.isPartial : false; // Collect dispatched captions
+
+ captionStream.on('data', function (event) {
+ // Convert to seconds in the source's timescale
+ event.startTime = event.startPts / timescale;
+ event.endTime = event.endPts / timescale;
+ parsedCaptions.captions.push(event);
+ parsedCaptions.captionStreams[event.stream] = true;
+ });
+ captionStream.on('log', function (log) {
+ parsedCaptions.logs.push(log);
+ });
+ };
+ /**
+ * Determines if a new video track will be selected
+ * or if the timescale changed
+ * @return {Boolean}
+ **/
+
+
+ this.isNewInit = function (videoTrackIds, timescales) {
+ if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
+ return false;
+ }
+
+ return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
+ };
+ /**
+ * Parses out SEI captions and interacts with underlying
+ * CaptionStream to return dispatched captions
+ *
+ * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
+ * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
+ * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
+ * @see parseEmbeddedCaptions
+ * @see m2ts/caption-stream.js
+ **/
+
+
+ this.parse = function (segment, videoTrackIds, timescales) {
+ var parsedData;
+
+ if (!this.isInitialized()) {
+ return null; // This is not likely to be a video segment
+ } else if (!videoTrackIds || !timescales) {
+ return null;
+ } else if (this.isNewInit(videoTrackIds, timescales)) {
+ // Use the first video track only as there is no
+ // mechanism to switch to other video tracks
+ trackId = videoTrackIds[0];
+ timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
+ // data until we have one.
+ // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
+ } else if (trackId === null || !timescale) {
+ segmentCache.push(segment);
+ return null;
+ } // Now that a timescale and trackId is set, parse cached segments
+
+
+ while (segmentCache.length > 0) {
+ var cachedSegment = segmentCache.shift();
+ this.parse(cachedSegment, videoTrackIds, timescales);
+ }
+
+ parsedData = parseEmbeddedCaptions(segment, trackId, timescale);
+
+ if (parsedData && parsedData.logs) {
+ parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
+ }
+
+ if (parsedData === null || !parsedData.seiNals) {
+ if (parsedCaptions.logs.length) {
+ return {
+ logs: parsedCaptions.logs,
+ captions: [],
+ captionStreams: []
+ };
+ }
+
+ return null;
+ }
+
+ this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched
+
+ this.flushStream();
+ return parsedCaptions;
+ };
+ /**
+ * Pushes SEI NALUs onto CaptionStream
+ * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
+ * Assumes that `parseCaptionNals` has been called first
+ * @see m2ts/caption-stream.js
+ **/
+
+
+ this.pushNals = function (nals) {
+ if (!this.isInitialized() || !nals || nals.length === 0) {
+ return null;
+ }
+
+ nals.forEach(function (nal) {
+ captionStream.push(nal);
+ });
+ };
+ /**
+ * Flushes underlying CaptionStream to dispatch processed, displayable captions
+ * @see m2ts/caption-stream.js
+ **/
+
+
+ this.flushStream = function () {
+ if (!this.isInitialized()) {
+ return null;
+ }
+
+ if (!parsingPartial) {
+ captionStream.flush();
+ } else {
+ captionStream.partialFlush();
+ }
+ };
+ /**
+ * Reset caption buckets for new data
+ **/
+
+
+ this.clearParsedCaptions = function () {
+ parsedCaptions.captions = [];
+ parsedCaptions.captionStreams = {};
+ parsedCaptions.logs = [];
+ };
+ /**
+ * Resets underlying CaptionStream
+ * @see m2ts/caption-stream.js
+ **/
+
+
+ this.resetCaptionStream = function () {
+ if (!this.isInitialized()) {
+ return null;
+ }
+
+ captionStream.reset();
+ };
+ /**
+ * Convenience method to clear all captions flushed from the
+ * CaptionStream and still being parsed
+ * @see m2ts/caption-stream.js
+ **/
+
+
+ this.clearAllCaptions = function () {
+ this.clearParsedCaptions();
+ this.resetCaptionStream();
+ };
+ /**
+ * Reset caption parser
+ **/
+
+
+ this.reset = function () {
+ segmentCache = [];
+ trackId = null;
+ timescale = null;
+
+ if (!parsedCaptions) {
+ parsedCaptions = {
+ captions: [],
+ // CC1, CC2, CC3, CC4
+ captionStreams: {},
+ logs: []
+ };
+ } else {
+ this.clearParsedCaptions();
+ }
+
+ this.resetCaptionStream();
+ };
+
+ this.reset();
+ };
+
+ var captionParser = CaptionParser;
+ var toUnsigned = bin.toUnsigned;
+ var toHexString = bin.toHexString;
+ var getUint64 = numbers.getUint64;
+ var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader;
+ /**
+ * Parses an MP4 initialization segment and extracts the timescale
+ * values for any declared tracks. Timescale values indicate the
+ * number of clock ticks per second to assume for time-based values
+ * elsewhere in the MP4.
+ *
+ * To determine the start time of an MP4, you need two pieces of
+ * information: the timescale unit and the earliest base media decode
+ * time. Multiple timescales can be specified within an MP4 but the
+ * base media decode time is always expressed in the timescale from
+ * the media header box for the track:
+ * ```
+ * moov > trak > mdia > mdhd.timescale
+ * ```
+ * @param init {Uint8Array} the bytes of the init segment
+ * @return {object} a hash of track ids to timescale values or null if
+ * the init segment is malformed.
+ */
+
+ timescale = function timescale(init) {
+ var result = {},
+ traks = findBox_1(init, ['moov', 'trak']); // mdhd timescale
+
+ return traks.reduce(function (result, trak) {
+ var tkhd, version, index, id, mdhd;
+ tkhd = findBox_1(trak, ['tkhd'])[0];
+
+ if (!tkhd) {
+ return null;
+ }
+
+ version = tkhd[0];
+ index = version === 0 ? 12 : 20;
+ id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
+ mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
+
+ if (!mdhd) {
+ return null;
+ }
+
+ version = mdhd[0];
+ index = version === 0 ? 12 : 20;
+ result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
+ return result;
+ }, result);
+ };
+ /**
+ * Determine the base media decode start time, in seconds, for an MP4
+ * fragment. If multiple fragments are specified, the earliest time is
+ * returned.
+ *
+ * The base media decode time can be parsed from track fragment
+ * metadata:
+ * ```
+ * moof > traf > tfdt.baseMediaDecodeTime
+ * ```
+ * It requires the timescale value from the mdhd to interpret.
+ *
+ * @param timescale {object} a hash of track ids to timescale values.
+ * @return {number} the earliest base media decode start time for the
+ * fragment, in seconds
+ */
+
+
+ startTime = function startTime(timescale, fragment) {
+ var trafs; // we need info from two children of each track fragment box
+
+ trafs = findBox_1(fragment, ['moof', 'traf']); // determine the start times for each track
+
+ var lowestTime = trafs.reduce(function (acc, traf) {
+ var tfhd = findBox_1(traf, ['tfhd'])[0]; // get the track id from the tfhd
+
+ var id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified
+
+ var scale = timescale[id] || 90e3; // get the base media decode time from the tfdt
+
+ var tfdt = findBox_1(traf, ['tfdt'])[0];
+ var dv = new DataView(tfdt.buffer, tfdt.byteOffset, tfdt.byteLength);
+ var baseTime; // version 1 is 64 bit
+
+ if (tfdt[0] === 1) {
+ baseTime = getUint64(tfdt.subarray(4, 12));
+ } else {
+ baseTime = dv.getUint32(4);
+ } // convert base time to seconds if it is a valid number.
+
+
+ var seconds;
+
+ if (typeof baseTime === 'bigint') {
+ seconds = baseTime / window_1.BigInt(scale);
+ } else if (typeof baseTime === 'number' && !isNaN(baseTime)) {
+ seconds = baseTime / scale;
+ }
+
+ if (seconds < Number.MAX_SAFE_INTEGER) {
+ seconds = Number(seconds);
+ }
+
+ if (seconds < acc) {
+ acc = seconds;
+ }
+
+ return acc;
+ }, Infinity);
+ return typeof lowestTime === 'bigint' || isFinite(lowestTime) ? lowestTime : 0;
+ };
+ /**
+ * Determine the composition start, in seconds, for an MP4
+ * fragment.
+ *
+ * The composition start time of a fragment can be calculated using the base
+ * media decode time, composition time offset, and timescale, as follows:
+ *
+ * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
+ *
+ * All of the aforementioned information is contained within a media fragment's
+ * `traf` box, except for timescale info, which comes from the initialization
+ * segment, so a track id (also contained within a `traf`) is also necessary to
+ * associate it with a timescale
+ *
+ *
+ * @param timescales {object} - a hash of track ids to timescale values.
+ * @param fragment {Uint8Array} - the bytes of a media segment
+ * @return {number} the composition start time for the fragment, in seconds
+ **/
+
+
+ compositionStartTime = function compositionStartTime(timescales, fragment) {
+ var trafBoxes = findBox_1(fragment, ['moof', 'traf']);
+ var baseMediaDecodeTime = 0;
+ var compositionTimeOffset = 0;
+ var trackId;
+
+ if (trafBoxes && trafBoxes.length) {
+ // The spec states that track run samples contained within a `traf` box are contiguous, but
+ // it does not explicitly state whether the `traf` boxes themselves are contiguous.
+ // We will assume that they are, so we only need the first to calculate start time.
+ var tfhd = findBox_1(trafBoxes[0], ['tfhd'])[0];
+ var trun = findBox_1(trafBoxes[0], ['trun'])[0];
+ var tfdt = findBox_1(trafBoxes[0], ['tfdt'])[0];
+
+ if (tfhd) {
+ var parsedTfhd = parseTfhd(tfhd);
+ trackId = parsedTfhd.trackId;
+ }
+
+ if (tfdt) {
+ var parsedTfdt = parseTfdt(tfdt);
+ baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
+ }
+
+ if (trun) {
+ var parsedTrun = parseTrun(trun);
+
+ if (parsedTrun.samples && parsedTrun.samples.length) {
+ compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
+ }
+ }
+ } // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
+ // specified.
+
+
+ var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds
+
+ if (typeof baseMediaDecodeTime === 'bigint') {
+ compositionTimeOffset = window_1.BigInt(compositionTimeOffset);
+ timescale = window_1.BigInt(timescale);
+ }
+
+ var result = (baseMediaDecodeTime + compositionTimeOffset) / timescale;
+
+ if (typeof result === 'bigint' && result < Number.MAX_SAFE_INTEGER) {
+ result = Number(result);
+ }
+
+ return result;
+ };
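+
+ // Worked example (illustrative numbers): with baseMediaDecodeTime = 90000,
+ // compositionTimeOffset = 3000 and a track timescale of 90000 ticks/second,
+ // compositionStartTime returns (90000 + 3000) / 90000 = 1.0333... seconds.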
+ /**
+ * Find the trackIds of the video tracks in this source.
+ * Found by parsing the Handler Reference and Track Header Boxes:
+ * moov > trak > mdia > hdlr
+ * moov > trak > tkhd
+ *
+ * @param {Uint8Array} init - The bytes of the init segment for this source
+ * @return {Number[]} A list of trackIds
+ *
+ * @see ISO-BMFF-12/2015, Section 8.4.3
+ **/
+
+
+ getVideoTrackIds = function getVideoTrackIds(init) {
+ var traks = findBox_1(init, ['moov', 'trak']);
+ var videoTrackIds = [];
+ traks.forEach(function (trak) {
+ var hdlrs = findBox_1(trak, ['mdia', 'hdlr']);
+ var tkhds = findBox_1(trak, ['tkhd']);
+ hdlrs.forEach(function (hdlr, index) {
+ var handlerType = parseType_1(hdlr.subarray(8, 12));
+ var tkhd = tkhds[index];
+ var view;
+ var version;
+ var trackId;
+
+ if (handlerType === 'vide') {
+ view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
+ version = view.getUint8(0);
+ trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);
+ videoTrackIds.push(trackId);
+ }
+ });
+ });
+ return videoTrackIds;
+ };
+
+ getTimescaleFromMediaHeader = function getTimescaleFromMediaHeader(mdhd) {
+ // mdhd is a FullBox, meaning it will have its own version as the first byte
+ var version = mdhd[0];
+ var index = version === 0 ? 12 : 20;
+ return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
+ };
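+
+ // Example (illustrative bytes): for a version-0 mdhd payload whose bytes 12-15
+ // are [0x00, 0x00, 0xac, 0x44], getTimescaleFromMediaHeader returns 0x0000ac44,
+ // i.e. 44100 ticks per second (a common audio timescale).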
+ /**
+ * Get all the video, audio, and hint tracks from a non fragmented
+ * mp4 segment
+ */
+
+
+ getTracks = function getTracks(init) {
+ var traks = findBox_1(init, ['moov', 'trak']);
+ var tracks = [];
+ traks.forEach(function (trak) {
+ var track = {};
+ var tkhd = findBox_1(trak, ['tkhd'])[0];
+ var view, tkhdVersion; // id
+
+ if (tkhd) {
+ view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
+ tkhdVersion = view.getUint8(0);
+ track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
+ }
+
+ var hdlr = findBox_1(trak, ['mdia', 'hdlr'])[0]; // type
+
+ if (hdlr) {
+ var type = parseType_1(hdlr.subarray(8, 12));
+
+ if (type === 'vide') {
+ track.type = 'video';
+ } else if (type === 'soun') {
+ track.type = 'audio';
+ } else {
+ track.type = type;
+ }
+ } // codec
+
+
+ var stsd = findBox_1(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
+
+ if (stsd) {
+ var sampleDescriptions = stsd.subarray(8); // gives the codec type string
+
+ track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
+ var codecBox = findBox_1(sampleDescriptions, [track.codec])[0];
+ var codecConfig, codecConfigType;
+
+ if (codecBox) {
+ // https://tools.ietf.org/html/rfc6381#section-3.3
+ if (/^[asm]vc[1-9]$/i.test(track.codec)) {
+ // we don't need anything but the "config" parameter of the
+ // avc1 codecBox
+ codecConfig = codecBox.subarray(78);
+ codecConfigType = parseType_1(codecConfig.subarray(4, 8));
+
+ if (codecConfigType === 'avcC' && codecConfig.length > 11) {
+ track.codec += '.'; // left padded with zeroes for single digit hex
+ // profile idc
+
+ track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags
+
+ track.codec += toHexString(codecConfig[10]); // level idc
+
+ track.codec += toHexString(codecConfig[11]);
+ } else {
+ // TODO: show a warning that we couldn't parse the codec
+ // and are using the default
+ track.codec = 'avc1.4d400d';
+ }
+ } else if (/^mp4[a,v]$/i.test(track.codec)) {
+ // we do not need anything but the streamDescriptor of the mp4a codecBox
+ codecConfig = codecBox.subarray(28);
+ codecConfigType = parseType_1(codecConfig.subarray(4, 8));
+
+ if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
+ track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit
+
+ track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
+ } else {
+ // TODO: show a warning that we couldn't parse the codec
+ // and are using the default
+ track.codec = 'mp4a.40.2';
+ }
+ } else {
+ // flac, opus, etc
+ track.codec = track.codec.toLowerCase();
+ }
+ }
+ }
+
+ var mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
+
+ if (mdhd) {
+ track.timescale = getTimescaleFromMediaHeader(mdhd);
+ }
+
+ tracks.push(track);
+ });
+ return tracks;
+ };
+
+ var probe$2 = {
+ // export mp4 inspector's findBox and parseType for backwards compatibility
+ findBox: findBox_1,
+ parseType: parseType_1,
+ timescale: timescale,
+ startTime: startTime,
+ compositionStartTime: compositionStartTime,
+ videoTrackIds: getVideoTrackIds,
+ tracks: getTracks,
+ getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
+ };
+
+ var parsePid = function parsePid(packet) {
+ var pid = packet[1] & 0x1f;
+ pid <<= 8;
+ pid |= packet[2];
+ return pid;
+ };
+
+ var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
+ return !!(packet[1] & 0x40);
+ };
+
+ var parseAdaptionField = function parseAdaptionField(packet) {
+ var offset = 0; // if an adaption field is present, its length is specified by the
+ // fifth byte of the TS packet header. The adaptation field is
+ // used to add stuffing to PES packets that don't fill a complete
+ // TS packet, and to specify some forms of timing and control data
+ // that we do not currently use.
+
+ if ((packet[3] & 0x30) >>> 4 > 0x01) {
+ offset += packet[4] + 1;
+ }
+
+ return offset;
+ };
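+
+ // Worked example (illustrative header bytes): for a TS packet starting with
+ // [0x47, 0x41, 0x00, 0x30, 0x07, ...], parsePid returns (0x01 << 8) | 0x00 = 256,
+ // parsePayloadUnitStartIndicator returns true (0x41 & 0x40 is set), and
+ // parseAdaptionField returns 0x07 + 1 = 8 because the adaptation_field_control
+ // bits ((0x30 & 0x30) >>> 4 === 3) indicate an adaptation field precedes the payload.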
+
+ var parseType = function parseType(packet, pmtPid) {
+ var pid = parsePid(packet);
+
+ if (pid === 0) {
+ return 'pat';
+ } else if (pid === pmtPid) {
+ return 'pmt';
+ } else if (pmtPid) {
+ return 'pes';
+ }
+
+ return null;
+ };
+
+ var parsePat = function parsePat(packet) {
+ var pusi = parsePayloadUnitStartIndicator(packet);
+ var offset = 4 + parseAdaptionField(packet);
+
+ if (pusi) {
+ offset += packet[offset] + 1;
+ }
+
+ return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
+ };
+
+ var parsePmt = function parsePmt(packet) {
+ var programMapTable = {};
+ var pusi = parsePayloadUnitStartIndicator(packet);
+ var payloadOffset = 4 + parseAdaptionField(packet);
+
+ if (pusi) {
+ payloadOffset += packet[payloadOffset] + 1;
+ } // PMTs can be sent ahead of the time when they should actually
+ // take effect. We don't believe this should ever be the case
+ // for HLS but we'll ignore "forward" PMT declarations if we see
+ // them. Future PMT declarations have the current_next_indicator
+ // set to zero.
+
+
+ if (!(packet[payloadOffset + 5] & 0x01)) {
+ return;
+ }
+
+ var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current section
+
+ sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
+ tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
+ // long the program info descriptors are
+
+ programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping table
+
+ var offset = 12 + programInfoLength;
+
+ while (offset < tableEnd) {
+ var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_type
+
+ programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry
+ // skip past the elementary stream descriptors, if present
+
+ offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
+ }
+
+ return programMapTable;
+ };
+
+ var parsePesType = function parsePesType(packet, programMapTable) {
+ var pid = parsePid(packet);
+ var type = programMapTable[pid];
+
+ switch (type) {
+ case streamTypes.H264_STREAM_TYPE:
+ return 'video';
+
+ case streamTypes.ADTS_STREAM_TYPE:
+ return 'audio';
+
+ case streamTypes.METADATA_STREAM_TYPE:
+ return 'timed-metadata';
+
+ default:
+ return null;
+ }
+ };
+
+ var parsePesTime = function parsePesTime(packet) {
+ var pusi = parsePayloadUnitStartIndicator(packet);
+
+ if (!pusi) {
+ return null;
+ }
+
+ var offset = 4 + parseAdaptionField(packet);
+
+ if (offset >= packet.byteLength) {
+ // From the H.222.0 MPEG-TS spec
+ // "For transport stream packets carrying PES packets, stuffing is needed when there
+ // is insufficient PES packet data to completely fill the transport stream packet
+ // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
+ // the sum of the lengths of the data elements in it, so that the payload bytes
+ // remaining after the adaptation field exactly accommodates the available PES packet
+ // data."
+ //
+ // If the offset is >= the length of the packet, then the packet contains no data
+ // and instead is just adaption field stuffing bytes
+ return null;
+ }
+
+ var pes = null;
+ var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value
+ // and a DTS value. Determine what combination of values is
+ // available to work with.
+
+ ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
+ // performs all bitwise operations on 32-bit integers but javascript
+ // supports a much greater range (52-bits) of integer using standard
+ // mathematical operations.
+ // We construct a 31-bit value using bitwise operators over the 31
+ // most significant bits and then multiply by 4 (equal to a left-shift
+ // of 2) before we add the final 2 least significant bits of the
+ // timestamp (equal to an OR.)
+
+ if (ptsDtsFlags & 0xC0) {
+ pes = {}; // the PTS and DTS are not written out directly. For information
+ // on how they are encoded, see
+ // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
+
+ pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
+ pes.pts *= 4; // Left shift by 2
+
+ pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
+
+ pes.dts = pes.pts;
+
+ if (ptsDtsFlags & 0x40) {
+ pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
+ pes.dts *= 4; // Left shift by 2
+
+ pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
+ }
+ }
+
+ return pes;
+ };
+
+ var parseNalUnitType = function parseNalUnitType(type) {
+ switch (type) {
+ case 0x05:
+ return 'slice_layer_without_partitioning_rbsp_idr';
+
+ case 0x06:
+ return 'sei_rbsp';
+
+ case 0x07:
+ return 'seq_parameter_set_rbsp';
+
+ case 0x08:
+ return 'pic_parameter_set_rbsp';
+
+ case 0x09:
+ return 'access_unit_delimiter_rbsp';
+
+ default:
+ return null;
+ }
+ };
+
+ var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
+ var offset = 4 + parseAdaptionField(packet);
+ var frameBuffer = packet.subarray(offset);
+ var frameI = 0;
+ var frameSyncPoint = 0;
+ var foundKeyFrame = false;
+ var nalType; // advance the sync point to a NAL start, if necessary
+
+ for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
+ if (frameBuffer[frameSyncPoint + 2] === 1) {
+ // the sync point is properly aligned
+ frameI = frameSyncPoint + 5;
+ break;
+ }
+ }
+
+ while (frameI < frameBuffer.byteLength) {
+ // look at the current byte to determine if we've hit the end of
+ // a NAL unit boundary
+ switch (frameBuffer[frameI]) {
+ case 0:
+ // skip past non-sync sequences
+ if (frameBuffer[frameI - 1] !== 0) {
+ frameI += 2;
+ break;
+ } else if (frameBuffer[frameI - 2] !== 0) {
+ frameI++;
+ break;
+ }
+
+ if (frameSyncPoint + 3 !== frameI - 2) {
+ nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
+
+ if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
+ foundKeyFrame = true;
+ }
+ } // drop trailing zeroes
+
+
+ do {
+ frameI++;
+ } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
+
+ frameSyncPoint = frameI - 2;
+ frameI += 3;
+ break;
+
+ case 1:
+ // skip past non-sync sequences
+ if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
+ frameI += 3;
+ break;
+ }
+
+ nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
+
+ if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
+ foundKeyFrame = true;
+ }
+
+ frameSyncPoint = frameI - 2;
+ frameI += 3;
+ break;
+
+ default:
+ // the current byte isn't a one or zero, so it cannot be part
+ // of a sync sequence
+ frameI += 3;
+ break;
+ }
+ }
+
+ frameBuffer = frameBuffer.subarray(frameSyncPoint);
+ frameI -= frameSyncPoint;
+ frameSyncPoint = 0; // parse the final nal
+
+ if (frameBuffer && frameBuffer.byteLength > 3) {
+ nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
+
+ if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
+ foundKeyFrame = true;
+ }
+ }
+
+ return foundKeyFrame;
+ };
+
+ var probe$1 = {
+ parseType: parseType,
+ parsePat: parsePat,
+ parsePmt: parsePmt,
+ parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
+ parsePesType: parsePesType,
+ parsePesTime: parsePesTime,
+ videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
+ };
+ var handleRollover = timestampRolloverStream.handleRollover;
+ var probe = {};
+ probe.ts = probe$1;
+ probe.aac = utils;
+ var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
+ var MP2T_PACKET_LENGTH = 188,
+ // bytes
+ SYNC_BYTE = 0x47;
+ /**
+ * walks through segment data looking for pat and pmt packets to parse out
+ * program map table information
+ */
+
+ var parsePsi_ = function parsePsi_(bytes, pmt) {
+ var startIndex = 0,
+ endIndex = MP2T_PACKET_LENGTH,
+ packet,
+ type;
+
+ while (endIndex < bytes.byteLength) {
+ // Look for a pair of start and end sync bytes in the data..
+ if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
+ // We found a packet
+ packet = bytes.subarray(startIndex, endIndex);
+ type = probe.ts.parseType(packet, pmt.pid);
+
+ switch (type) {
+ case 'pat':
+ pmt.pid = probe.ts.parsePat(packet);
+ break;
+
+ case 'pmt':
+ var table = probe.ts.parsePmt(packet);
+ pmt.table = pmt.table || {};
+ Object.keys(table).forEach(function (key) {
+ pmt.table[key] = table[key];
+ });
+ break;
+ }
+
+ startIndex += MP2T_PACKET_LENGTH;
+ endIndex += MP2T_PACKET_LENGTH;
+ continue;
+ } // If we get here, we have somehow become de-synchronized and we need to step
+ // forward one byte at a time until we find a pair of sync bytes that denote
+ // a packet
+
+
+ startIndex++;
+ endIndex++;
+ }
+ };
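+ // Illustrative sketch of parsePsi_'s side effect, assuming `segmentBytes` is a
+ // Uint8Array of MPEG2-TS data (PIDs and values below are examples only):
+ //
+ //   var pmt = { pid: null, table: null };
+ //   parsePsi_(segmentBytes, pmt);
+ //   // pmt.pid   -> the PMT PID found in the PAT, e.g. 4096
+ //   // pmt.table -> map of elementary PIDs to stream types,
+ //   //              e.g. { 256: 0x1b /* H.264 */, 257: 0x0f /* ADTS audio */ }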
+ /**
+ * walks through the segment data from the start and end to get timing information
+ * for the first and last audio pes packets
+ */
+
+
+ var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
+ var startIndex = 0,
+ endIndex = MP2T_PACKET_LENGTH,
+ packet,
+ type,
+ pesType,
+ pusi,
+ parsed;
+ var endLoop = false; // Start walking from start of segment to get first audio packet
+
+ while (endIndex <= bytes.byteLength) {
+ // Look for a pair of start and end sync bytes in the data..
+ if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
+ // We found a packet
+ packet = bytes.subarray(startIndex, endIndex);
+ type = probe.ts.parseType(packet, pmt.pid);
+
+ switch (type) {
+ case 'pes':
+ pesType = probe.ts.parsePesType(packet, pmt.table);
+ pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
+
+ if (pesType === 'audio' && pusi) {
+ parsed = probe.ts.parsePesTime(packet);
+
+ if (parsed) {
+ parsed.type = 'audio';
+ result.audio.push(parsed);
+ endLoop = true;
+ }
+ }
+
+ break;
+ }
+
+ if (endLoop) {
+ break;
+ }
+
+ startIndex += MP2T_PACKET_LENGTH;
+ endIndex += MP2T_PACKET_LENGTH;
+ continue;
+ } // If we get here, we have somehow become de-synchronized and we need to step
+ // forward one byte at a time until we find a pair of sync bytes that denote
+ // a packet
+
+
+ startIndex++;
+ endIndex++;
+ } // Start walking from end of segment to get last audio packet
+
+
+ endIndex = bytes.byteLength;
+ startIndex = endIndex - MP2T_PACKET_LENGTH;
+ endLoop = false;
+
+ while (startIndex >= 0) {
+ // Look for a pair of start and end sync bytes in the data..
+ if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
+ // We found a packet
+ packet = bytes.subarray(startIndex, endIndex);
+ type = probe.ts.parseType(packet, pmt.pid);
+
+ switch (type) {
+ case 'pes':
+ pesType = probe.ts.parsePesType(packet, pmt.table);
+ pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
+
+ if (pesType === 'audio' && pusi) {
+ parsed = probe.ts.parsePesTime(packet);
+
+ if (parsed) {
+ parsed.type = 'audio';
+ result.audio.push(parsed);
+ endLoop = true;
+ }
+ }
+
+ break;
+ }
+
+ if (endLoop) {
+ break;
+ }
+
+ startIndex -= MP2T_PACKET_LENGTH;
+ endIndex -= MP2T_PACKET_LENGTH;
+ continue;
+ } // If we get here, we have somehow become de-synchronized and we need to step
+ // backward one byte at a time until we find a pair of sync bytes that denote
+ // a packet
+
+
+ startIndex--;
+ endIndex--;
+ }
+ };
+ /**
+ * walks through the segment data from the start and end to get timing information
+ * for the first and last video pes packets as well as timing information for the first
+ * key frame.
+ */
+
+
+ var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
+ var startIndex = 0,
+ endIndex = MP2T_PACKET_LENGTH,
+ packet,
+ type,
+ pesType,
+ pusi,
+ parsed,
+ frame,
+ i,
+ pes;
+ var endLoop = false;
+ var currentFrame = {
+ data: [],
+ size: 0
+ }; // Start walking from start of segment to get first video packet
+
+ while (endIndex < bytes.byteLength) {
+ // Look for a pair of start and end sync bytes in the data..
+ if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
+ // We found a packet
+ packet = bytes.subarray(startIndex, endIndex);
+ type = probe.ts.parseType(packet, pmt.pid);
+
+ switch (type) {
+ case 'pes':
+ pesType = probe.ts.parsePesType(packet, pmt.table);
+ pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
+
+ if (pesType === 'video') {
+ if (pusi && !endLoop) {
+ parsed = probe.ts.parsePesTime(packet);
+
+ if (parsed) {
+ parsed.type = 'video';
+ result.video.push(parsed);
+ endLoop = true;
+ }
+ }
+
+ if (!result.firstKeyFrame) {
+ if (pusi) {
+ if (currentFrame.size !== 0) {
+ frame = new Uint8Array(currentFrame.size);
+ i = 0;
+
+ while (currentFrame.data.length) {
+ pes = currentFrame.data.shift();
+ frame.set(pes, i);
+ i += pes.byteLength;
+ }
+
+ if (probe.ts.videoPacketContainsKeyFrame(frame)) {
+ var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
+ // the keyframe seems to work fine with HLS playback
+ // and definitely preferable to a crash with TypeError...
+
+ if (firstKeyFrame) {
+ result.firstKeyFrame = firstKeyFrame;
+ result.firstKeyFrame.type = 'video';
+ } else {
+ // eslint-disable-next-line
+ console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
+ }
+ }
+
+ currentFrame.size = 0;
+ }
+ }
+
+ currentFrame.data.push(packet);
+ currentFrame.size += packet.byteLength;
+ }
+ }
+
+ break;
+ }
+
+ if (endLoop && result.firstKeyFrame) {
+ break;
+ }
+
+ startIndex += MP2T_PACKET_LENGTH;
+ endIndex += MP2T_PACKET_LENGTH;
+ continue;
+ } // If we get here, we have somehow become de-synchronized and we need to step
+ // forward one byte at a time until we find a pair of sync bytes that denote
+ // a packet
+
+
+ startIndex++;
+ endIndex++;
+ } // Start walking from end of segment to get last video packet
+
+
+ endIndex = bytes.byteLength;
+ startIndex = endIndex - MP2T_PACKET_LENGTH;
+ endLoop = false;
+
+ while (startIndex >= 0) {
+ // Look for a pair of start and end sync bytes in the data..
+ if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
+ // We found a packet
+ packet = bytes.subarray(startIndex, endIndex);
+ type = probe.ts.parseType(packet, pmt.pid);
+
+ switch (type) {
+ case 'pes':
+ pesType = probe.ts.parsePesType(packet, pmt.table);
+ pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
+
+ if (pesType === 'video' && pusi) {
+ parsed = probe.ts.parsePesTime(packet);
+
+ if (parsed) {
+ parsed.type = 'video';
+ result.video.push(parsed);
+ endLoop = true;
+ }
+ }
+
+ break;
+ }
+
+ if (endLoop) {
+ break;
+ }
+
+ startIndex -= MP2T_PACKET_LENGTH;
+ endIndex -= MP2T_PACKET_LENGTH;
+ continue;
+ } // If we get here, we have somehow become de-synchronized and we need to step
+ // backward one byte at a time until we find a pair of sync bytes that denote
+ // a packet
+
+
+ startIndex--;
+ endIndex--;
+ }
+ };
+ /**
+ * Adjusts the timestamp information for the segment to account for
+ * rollover and convert to seconds based on pes packet timescale (90khz clock)
+ */
+
+
+ var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
+ if (segmentInfo.audio && segmentInfo.audio.length) {
+ var audioBaseTimestamp = baseTimestamp;
+
+ if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
+ audioBaseTimestamp = segmentInfo.audio[0].dts;
+ }
+
+ segmentInfo.audio.forEach(function (info) {
+ info.dts = handleRollover(info.dts, audioBaseTimestamp);
+ info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in seconds
+
+ info.dtsTime = info.dts / ONE_SECOND_IN_TS;
+ info.ptsTime = info.pts / ONE_SECOND_IN_TS;
+ });
+ }
+
+ if (segmentInfo.video && segmentInfo.video.length) {
+ var videoBaseTimestamp = baseTimestamp;
+
+ if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
+ videoBaseTimestamp = segmentInfo.video[0].dts;
+ }
+
+ segmentInfo.video.forEach(function (info) {
+ info.dts = handleRollover(info.dts, videoBaseTimestamp);
+ info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in seconds
+
+ info.dtsTime = info.dts / ONE_SECOND_IN_TS;
+ info.ptsTime = info.pts / ONE_SECOND_IN_TS;
+ });
+
+ if (segmentInfo.firstKeyFrame) {
+ var frame = segmentInfo.firstKeyFrame;
+ frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
+ frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in seconds
+
+ frame.dtsTime = frame.dts / ONE_SECOND_IN_TS;
+ frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;
+ }
+ }
+ };
+ /**
+ * inspects the aac data stream for start and end time information
+ */
+
+
+ var inspectAac_ = function inspectAac_(bytes) {
+ var endLoop = false,
+ audioCount = 0,
+ sampleRate = null,
+ timestamp = null,
+ frameSize = 0,
+ byteIndex = 0,
+ packet;
+
+ while (bytes.length - byteIndex >= 3) {
+ var type = probe.aac.parseType(bytes, byteIndex);
+
+ switch (type) {
+ case 'timed-metadata':
+ // Exit early because we don't have enough to parse
+ // the ID3 tag header
+ if (bytes.length - byteIndex < 10) {
+ endLoop = true;
+ break;
+ }
+
+ frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
+ // to emit a full packet
+
+ if (frameSize > bytes.length) {
+ endLoop = true;
+ break;
+ }
+
+ if (timestamp === null) {
+ packet = bytes.subarray(byteIndex, byteIndex + frameSize);
+ timestamp = probe.aac.parseAacTimestamp(packet);
+ }
+
+ byteIndex += frameSize;
+ break;
+
+ case 'audio':
+ // Exit early because we don't have enough to parse
+ // the ADTS frame header
+ if (bytes.length - byteIndex < 7) {
+ endLoop = true;
+ break;
+ }
+
+ frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
+ // to emit a full packet
+
+ if (frameSize > bytes.length) {
+ endLoop = true;
+ break;
+ }
+
+ if (sampleRate === null) {
+ packet = bytes.subarray(byteIndex, byteIndex + frameSize);
+ sampleRate = probe.aac.parseSampleRate(packet);
+ }
+
+ audioCount++;
+ byteIndex += frameSize;
+ break;
+
+ default:
+ byteIndex++;
+ break;
+ }
+
+ if (endLoop) {
+ return null;
+ }
+ }
+
+ if (sampleRate === null || timestamp === null) {
+ return null;
+ }
+
+ var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
+ var result = {
+ audio: [{
+ type: 'audio',
+ dts: timestamp,
+ pts: timestamp
+ }, {
+ type: 'audio',
+ dts: timestamp + audioCount * 1024 * audioTimescale,
+ pts: timestamp + audioCount * 1024 * audioTimescale
+ }]
+ };
+ return result;
+ };
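+ // Worked example of the end-time arithmetic above: each ADTS frame carries
+ // 1024 samples, so at a 44100 Hz sample rate one frame spans roughly
+ // 1024 * (90000 / 44100) ~= 2090 ticks of the 90khz clock, and the second
+ // entry's timestamp is the first timestamp plus audioCount frames of that
+ // duration.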
+ /**
+ * inspects the transport stream segment data for start and end time information
+ * of the audio and video tracks (when present) as well as the first key frame's
+ * start time.
+ */
+
+
+ var inspectTs_ = function inspectTs_(bytes) {
+ var pmt = {
+ pid: null,
+ table: null
+ };
+ var result = {};
+ parsePsi_(bytes, pmt);
+
+ for (var pid in pmt.table) {
+ if (pmt.table.hasOwnProperty(pid)) {
+ var type = pmt.table[pid];
+
+ switch (type) {
+ case streamTypes.H264_STREAM_TYPE:
+ result.video = [];
+ parseVideoPes_(bytes, pmt, result);
+
+ if (result.video.length === 0) {
+ delete result.video;
+ }
+
+ break;
+
+ case streamTypes.ADTS_STREAM_TYPE:
+ result.audio = [];
+ parseAudioPes_(bytes, pmt, result);
+
+ if (result.audio.length === 0) {
+ delete result.audio;
+ }
+
+ break;
+ }
+ }
+ }
+
+ return result;
+ };
+ /**
+ * Inspects segment byte data and returns an object with start and end timing information
+ *
+ * @param {Uint8Array} bytes The segment byte data
+ * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
+ * timestamps for rollover. This value must be in 90khz clock.
+ * @return {Object} Object containing start and end frame timing info of segment.
+ */
+
+
+ var inspect = function inspect(bytes, baseTimestamp) {
+ var isAacData = probe.aac.isLikelyAacData(bytes);
+ var result;
+
+ if (isAacData) {
+ result = inspectAac_(bytes);
+ } else {
+ result = inspectTs_(bytes);
+ }
+
+ if (!result || !result.audio && !result.video) {
+ return null;
+ }
+
+ adjustTimestamp_(result, baseTimestamp);
+ return result;
+ };
+
+ var tsInspector = {
+ inspect: inspect,
+ parseAudioPes_: parseAudioPes_
+ };
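+ // Usage sketch for the inspector above, assuming `tsBytes` is a Uint8Array of
+ // MPEG2-TS data and `baseSeconds` is a number of seconds (this mirrors the
+ // probeTs handler further down):
+ //
+ //   var timeInfo = tsInspector.inspect(tsBytes, baseSeconds * ONE_SECOND_IN_TS);
+ //   if (timeInfo && timeInfo.video && timeInfo.video.length === 2) {
+ //     var videoStartSeconds = timeInfo.video[0].ptsTime;
+ //   }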
+ /* global self */
+
+ /**
+ * Re-emits transmuxer events by converting them into messages to the
+ * world outside the worker.
+ *
+ * @param {Object} transmuxer the transmuxer to wire events on
+ * @private
+ */
+
+ var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer) {
+ transmuxer.on('data', function (segment) {
+ // transfer ownership of the underlying ArrayBuffer
+ // instead of doing a copy to save memory
+ // ArrayBuffers are transferable but generic TypedArrays are not
+ // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
+ var initArray = segment.initSegment;
+ segment.initSegment = {
+ data: initArray.buffer,
+ byteOffset: initArray.byteOffset,
+ byteLength: initArray.byteLength
+ };
+ var typedArray = segment.data;
+ segment.data = typedArray.buffer;
+ self.postMessage({
+ action: 'data',
+ segment: segment,
+ byteOffset: typedArray.byteOffset,
+ byteLength: typedArray.byteLength
+ }, [segment.data]);
+ });
+ transmuxer.on('done', function (data) {
+ self.postMessage({
+ action: 'done'
+ });
+ });
+ transmuxer.on('gopInfo', function (gopInfo) {
+ self.postMessage({
+ action: 'gopInfo',
+ gopInfo: gopInfo
+ });
+ });
+ transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
+ var videoSegmentTimingInfo = {
+ start: {
+ decode: clock.videoTsToSeconds(timingInfo.start.dts),
+ presentation: clock.videoTsToSeconds(timingInfo.start.pts)
+ },
+ end: {
+ decode: clock.videoTsToSeconds(timingInfo.end.dts),
+ presentation: clock.videoTsToSeconds(timingInfo.end.pts)
+ },
+ baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
+ };
+
+ if (timingInfo.prependedContentDuration) {
+ videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
+ }
+
+ self.postMessage({
+ action: 'videoSegmentTimingInfo',
+ videoSegmentTimingInfo: videoSegmentTimingInfo
+ });
+ });
+ transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
+ // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
+ var audioSegmentTimingInfo = {
+ start: {
+ decode: clock.videoTsToSeconds(timingInfo.start.dts),
+ presentation: clock.videoTsToSeconds(timingInfo.start.pts)
+ },
+ end: {
+ decode: clock.videoTsToSeconds(timingInfo.end.dts),
+ presentation: clock.videoTsToSeconds(timingInfo.end.pts)
+ },
+ baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
+ };
+
+ if (timingInfo.prependedContentDuration) {
+ audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
+ }
+
+ self.postMessage({
+ action: 'audioSegmentTimingInfo',
+ audioSegmentTimingInfo: audioSegmentTimingInfo
+ });
+ });
+ transmuxer.on('id3Frame', function (id3Frame) {
+ self.postMessage({
+ action: 'id3Frame',
+ id3Frame: id3Frame
+ });
+ });
+ transmuxer.on('caption', function (caption) {
+ self.postMessage({
+ action: 'caption',
+ caption: caption
+ });
+ });
+ transmuxer.on('trackinfo', function (trackInfo) {
+ self.postMessage({
+ action: 'trackinfo',
+ trackInfo: trackInfo
+ });
+ });
+ transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
+ // convert to video TS since we prioritize video time over audio
+ self.postMessage({
+ action: 'audioTimingInfo',
+ audioTimingInfo: {
+ start: clock.videoTsToSeconds(audioTimingInfo.start),
+ end: clock.videoTsToSeconds(audioTimingInfo.end)
+ }
+ });
+ });
+ transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
+ self.postMessage({
+ action: 'videoTimingInfo',
+ videoTimingInfo: {
+ start: clock.videoTsToSeconds(videoTimingInfo.start),
+ end: clock.videoTsToSeconds(videoTimingInfo.end)
+ }
+ });
+ });
+ transmuxer.on('log', function (log) {
+ self.postMessage({
+ action: 'log',
+ log: log
+ });
+ });
+ };
+ /**
+ * All incoming messages route through this hash. If no function exists
+ * to handle an incoming message, then we ignore the message.
+ *
+ * @class MessageHandlers
+ * @param {Object} options the options to initialize with
+ */
+
+
+ var MessageHandlers = /*#__PURE__*/function () {
+ function MessageHandlers(self, options) {
+ this.options = options || {};
+ this.self = self;
+ this.init();
+ }
+ /**
+ * initialize our web worker and wire all the events.
+ */
+
+
+ var _proto = MessageHandlers.prototype;
+
+ _proto.init = function init() {
+ if (this.transmuxer) {
+ this.transmuxer.dispose();
+ }
+
+ this.transmuxer = new transmuxer.Transmuxer(this.options);
+ wireTransmuxerEvents(this.self, this.transmuxer);
+ };
+
+ _proto.pushMp4Captions = function pushMp4Captions(data) {
+ if (!this.captionParser) {
+ this.captionParser = new captionParser();
+ this.captionParser.init();
+ }
+
+ var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
+ var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
+ this.self.postMessage({
+ action: 'mp4Captions',
+ captions: parsed && parsed.captions || [],
+ logs: parsed && parsed.logs || [],
+ data: segment.buffer
+ }, [segment.buffer]);
+ };
+
+ _proto.probeMp4StartTime = function probeMp4StartTime(_ref) {
+ var timescales = _ref.timescales,
+ data = _ref.data;
+ var startTime = probe$2.startTime(timescales, data);
+ this.self.postMessage({
+ action: 'probeMp4StartTime',
+ startTime: startTime,
+ data: data
+ }, [data.buffer]);
+ };
+
+ _proto.probeMp4Tracks = function probeMp4Tracks(_ref2) {
+ var data = _ref2.data;
+ var tracks = probe$2.tracks(data);
+ this.self.postMessage({
+ action: 'probeMp4Tracks',
+ tracks: tracks,
+ data: data
+ }, [data.buffer]);
+ }
+ /**
+ * Probe an mpeg2-ts segment to determine the start time of the segment in its
+ * internal "media time," as well as whether it contains video and/or audio.
+ *
+ * @private
+ * @param {Uint8Array} bytes - segment bytes
+ * @param {number} baseStartTime
+ * Relative reference timestamp used when adjusting frame timestamps for rollover.
+ * This value should be in seconds, as it's converted to a 90khz clock within the
+ * function body.
+ * @return {Object} The start time of the current segment in "media time" as well as
+ * whether it contains video and/or audio
+ */
+ ;
+
+ _proto.probeTs = function probeTs(_ref3) {
+ var data = _ref3.data,
+ baseStartTime = _ref3.baseStartTime;
+ var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
+ var timeInfo = tsInspector.inspect(data, tsStartTime);
+ var result = null;
+
+ if (timeInfo) {
+ result = {
+ // each type's time info comes back as an array of 2 times, start and end
+ hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
+ hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
+ };
+
+ if (result.hasVideo) {
+ result.videoStart = timeInfo.video[0].ptsTime;
+ }
+
+ if (result.hasAudio) {
+ result.audioStart = timeInfo.audio[0].ptsTime;
+ }
+ }
+
+ this.self.postMessage({
+ action: 'probeTs',
+ result: result,
+ data: data
+ }, [data.buffer]);
+ };
+
+ _proto.clearAllMp4Captions = function clearAllMp4Captions() {
+ if (this.captionParser) {
+ this.captionParser.clearAllCaptions();
+ }
+ };
+
+ _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
+ if (this.captionParser) {
+ this.captionParser.clearParsedCaptions();
+ }
+ }
+ /**
+ * Adds data (a ts segment) to the start of the transmuxer pipeline for
+ * processing.
+ *
+ * @param {ArrayBuffer} data data to push into the muxer
+ */
+ ;
+
+ _proto.push = function push(data) {
+ // Cast array buffer to correct type for transmuxer
+ var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
+ this.transmuxer.push(segment);
+ }
+ /**
+ * Recreate the transmuxer so that the next segment added via `push`
+ * starts with a fresh transmuxer.
+ */
+ ;
+
+ _proto.reset = function reset() {
+ this.transmuxer.reset();
+ }
+ /**
+ * Set the value that will be used as the `baseMediaDecodeTime` time for the
+ * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
+ * set relative to the first based on the PTS values.
+ *
+ * @param {Object} data used to set the timestamp offset in the muxer
+ */
+ ;
+
+ _proto.setTimestampOffset = function setTimestampOffset(data) {
+ var timestampOffset = data.timestampOffset || 0;
+ this.transmuxer.setBaseMediaDecodeTime(Math.round(clock.secondsToVideoTs(timestampOffset)));
+ };
+
+ _proto.setAudioAppendStart = function setAudioAppendStart(data) {
+ this.transmuxer.setAudioAppendStart(Math.ceil(clock.secondsToVideoTs(data.appendStart)));
+ };
+
+ _proto.setRemux = function setRemux(data) {
+ this.transmuxer.setRemux(data.remux);
+ }
+ /**
+ * Forces the pipeline to finish processing the last segment and emit its
+ * results.
+ *
+ * @param {Object} data event data, not really used
+ */
+ ;
+
+ _proto.flush = function flush(data) {
+ this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
+
+ self.postMessage({
+ action: 'done',
+ type: 'transmuxed'
+ });
+ };
+
+ _proto.endTimeline = function endTimeline() {
+ this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
+ // timelines
+
+ self.postMessage({
+ action: 'endedtimeline',
+ type: 'transmuxed'
+ });
+ };
+
+ _proto.alignGopsWith = function alignGopsWith(data) {
+ this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
+ };
+
+ return MessageHandlers;
+ }();
+ /**
+ * Our web worker interface so that things can talk to mux.js
+ * that will be running in a web worker. The scope is passed to this by
+ * webworkify.
+ *
+ * @param {Object} self the scope for the web worker
+ */
+
+
+ self.onmessage = function (event) {
+ if (event.data.action === 'init' && event.data.options) {
+ this.messageHandlers = new MessageHandlers(self, event.data.options);
+ return;
+ }
+
+ if (!this.messageHandlers) {
+ this.messageHandlers = new MessageHandlers(self);
+ }
+
+ if (event.data && event.data.action && event.data.action !== 'init') {
+ if (this.messageHandlers[event.data.action]) {
+ this.messageHandlers[event.data.action](event.data);
+ }
+ }
+ };
+}));
+var TransmuxWorker = factory(workerCode$1);
+/* rollup-plugin-worker-factory end for worker!/Users/bclifford/Code/vhs-release-test/src/transmuxer-worker.js */
+
+var handleData_ = function handleData_(event, transmuxedData, callback) {
+ var _event$data$segment = event.data.segment,
+ type = _event$data$segment.type,
+ initSegment = _event$data$segment.initSegment,
+ captions = _event$data$segment.captions,
+ captionStreams = _event$data$segment.captionStreams,
+ metadata = _event$data$segment.metadata,
+ videoFrameDtsTime = _event$data$segment.videoFrameDtsTime,
+ videoFramePtsTime = _event$data$segment.videoFramePtsTime;
+ transmuxedData.buffer.push({
+ captions: captions,
+ captionStreams: captionStreams,
+ metadata: metadata
+ });
+ var boxes = event.data.segment.boxes || {
+ data: event.data.segment.data
+ };
+ var result = {
+ type: type,
+ // cast ArrayBuffer to TypedArray
+ data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
+ initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
+ };
+
+ if (typeof videoFrameDtsTime !== 'undefined') {
+ result.videoFrameDtsTime = videoFrameDtsTime;
+ }
+
+ if (typeof videoFramePtsTime !== 'undefined') {
+ result.videoFramePtsTime = videoFramePtsTime;
+ }
+
+ callback(result);
+};
+
+var handleDone_ = function handleDone_(_ref) {
+ var transmuxedData = _ref.transmuxedData,
+ callback = _ref.callback; // Previously we only returned data on data events,
+ // not on done events. Clear out the buffer to keep that consistent.
+
+ transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
+ // have received
+
+ callback(transmuxedData);
+};
+
+var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
+ transmuxedData.gopInfo = event.data.gopInfo;
+};
+
+var processTransmux = function processTransmux(options) {
+ var transmuxer = options.transmuxer,
+ bytes = options.bytes,
+ audioAppendStart = options.audioAppendStart,
+ gopsToAlignWith = options.gopsToAlignWith,
+ remux = options.remux,
+ onData = options.onData,
+ onTrackInfo = options.onTrackInfo,
+ onAudioTimingInfo = options.onAudioTimingInfo,
+ onVideoTimingInfo = options.onVideoTimingInfo,
+ onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
+ onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
+ onId3 = options.onId3,
+ onCaptions = options.onCaptions,
+ onDone = options.onDone,
+ onEndedTimeline = options.onEndedTimeline,
+ onTransmuxerLog = options.onTransmuxerLog,
+ isEndOfTimeline = options.isEndOfTimeline;
+ var transmuxedData = {
+ buffer: []
+ };
+ var waitForEndedTimelineEvent = isEndOfTimeline;
+
+ var handleMessage = function handleMessage(event) {
+ if (transmuxer.currentTransmux !== options) {
+ // disposed
+ return;
+ }
+
+ if (event.data.action === 'data') {
+ handleData_(event, transmuxedData, onData);
+ }
+
+ if (event.data.action === 'trackinfo') {
+ onTrackInfo(event.data.trackInfo);
+ }
+
+ if (event.data.action === 'gopInfo') {
+ handleGopInfo_(event, transmuxedData);
+ }
+
+ if (event.data.action === 'audioTimingInfo') {
+ onAudioTimingInfo(event.data.audioTimingInfo);
+ }
+
+ if (event.data.action === 'videoTimingInfo') {
+ onVideoTimingInfo(event.data.videoTimingInfo);
+ }
+
+ if (event.data.action === 'videoSegmentTimingInfo') {
+ onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
+ }
+
+ if (event.data.action === 'audioSegmentTimingInfo') {
+ onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
+ }
+
+ if (event.data.action === 'id3Frame') {
+ onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
+ }
+
+ if (event.data.action === 'caption') {
+ onCaptions(event.data.caption);
+ }
+
+ if (event.data.action === 'endedtimeline') {
+ waitForEndedTimelineEvent = false;
+ onEndedTimeline();
+ }
+
+ if (event.data.action === 'log') {
+ onTransmuxerLog(event.data.log);
+ } // wait for the transmuxed event since we may have audio and video
+
+
+ if (event.data.type !== 'transmuxed') {
+ return;
+ } // If the "endedtimeline" event has not yet fired, and this segment represents the end
+ // of a timeline, that means there may still be data events before the segment
+ // processing can be considered complete. In that case, the final event should be
+ // an "endedtimeline" event with the type "transmuxed."
+
+
+ if (waitForEndedTimelineEvent) {
+ return;
+ }
+
+ transmuxer.onmessage = null;
+ handleDone_({
+ transmuxedData: transmuxedData,
+ callback: onDone
+ });
+ /* eslint-disable no-use-before-define */
+
+ dequeue(transmuxer);
+ /* eslint-enable */
+ };
+
+ transmuxer.onmessage = handleMessage;
+
+ if (audioAppendStart) {
+ transmuxer.postMessage({
+ action: 'setAudioAppendStart',
+ appendStart: audioAppendStart
+ });
+ } // allow empty arrays to be passed to clear out GOPs
+
+
+ if (Array.isArray(gopsToAlignWith)) {
+ transmuxer.postMessage({
+ action: 'alignGopsWith',
+ gopsToAlignWith: gopsToAlignWith
+ });
+ }
+
+ if (typeof remux !== 'undefined') {
+ transmuxer.postMessage({
+ action: 'setRemux',
+ remux: remux
+ });
+ }
+
+ if (bytes.byteLength) {
+ var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
+ var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
+ transmuxer.postMessage({
+ action: 'push',
+ // Send the typed-array of data as an ArrayBuffer so that
+ // it can be sent as a "Transferable" and avoid the costly
+ // memory copy
+ data: buffer,
+ // To recreate the original typed-array, we need information
+ // about what portion of the ArrayBuffer it was a view into
+ byteOffset: byteOffset,
+ byteLength: bytes.byteLength
+ }, [buffer]);
+ }
+
+ if (isEndOfTimeline) {
+ transmuxer.postMessage({
+ action: 'endTimeline'
+ });
+ } // even if we didn't push any bytes, we have to make sure we flush in case we reached
+ // the end of the segment
+
+
+ transmuxer.postMessage({
+ action: 'flush'
+ });
+};
+
+var dequeue = function dequeue(transmuxer) {
+ transmuxer.currentTransmux = null;
+
+ if (transmuxer.transmuxQueue.length) {
+ transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
+
+ if (typeof transmuxer.currentTransmux === 'function') {
+ transmuxer.currentTransmux();
+ } else {
+ processTransmux(transmuxer.currentTransmux);
+ }
+ }
+};
+
+var processAction = function processAction(transmuxer, action) {
+ transmuxer.postMessage({
+ action: action
+ });
+ dequeue(transmuxer);
+};
+
+var enqueueAction = function enqueueAction(action, transmuxer) {
+ if (!transmuxer.currentTransmux) {
+ transmuxer.currentTransmux = action;
+ processAction(transmuxer, action);
+ return;
+ }
+
+ transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
+};
+
+var reset = function reset(transmuxer) {
+ enqueueAction('reset', transmuxer);
+};
+
+var endTimeline = function endTimeline(transmuxer) {
+ enqueueAction('endTimeline', transmuxer);
+};
+
+var transmux = function transmux(options) {
+ if (!options.transmuxer.currentTransmux) {
+ options.transmuxer.currentTransmux = options;
+ processTransmux(options);
+ return;
+ }
+
+ options.transmuxer.transmuxQueue.push(options);
+};
+
+var createTransmuxer = function createTransmuxer(options) {
+ var transmuxer = new TransmuxWorker();
+ transmuxer.currentTransmux = null;
+ transmuxer.transmuxQueue = [];
+ var term = transmuxer.terminate;
+
+ transmuxer.terminate = function () {
+ transmuxer.currentTransmux = null;
+ transmuxer.transmuxQueue.length = 0;
+ return term.call(transmuxer);
+ };
+
+ transmuxer.postMessage({
+ action: 'init',
+ options: options
+ });
+ return transmuxer;
+};
+
+var segmentTransmuxer = {
+ reset: reset,
+ endTimeline: endTimeline,
+ transmux: transmux,
+ createTransmuxer: createTransmuxer
+};
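+// Rough usage sketch for the helpers above. `segmentBytes` is illustrative, and
+// every `on*` callback read by processTransmux is expected to be supplied:
+//
+//   var transmuxer = segmentTransmuxer.createTransmuxer({ remux: false });
+//   segmentTransmuxer.transmux({
+//     transmuxer: transmuxer,
+//     bytes: segmentBytes, // Uint8Array of TS data
+//     isEndOfTimeline: false,
+//     onData: function (result) { /* fmp4 data plus initSegment */ },
+//     onDone: function (result) { /* both pipelines flushed */ },
+//     // ...plus onTrackInfo, onAudioTimingInfo, onVideoTimingInfo,
+//     // onVideoSegmentTimingInfo, onAudioSegmentTimingInfo, onId3, onCaptions,
+//     // onEndedTimeline and onTransmuxerLog
+//   });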
+
+var workerCallback = function workerCallback(options) {
+ var transmuxer = options.transmuxer;
+ var endAction = options.endAction || options.action;
+ var callback = options.callback;
+
+ var message = _extends({}, options, {
+ endAction: null,
+ transmuxer: null,
+ callback: null
+ });
+
+ var listenForEndEvent = function listenForEndEvent(event) {
+ if (event.data.action !== endAction) {
+ return;
+ }
+
+ transmuxer.removeEventListener('message', listenForEndEvent); // transfer ownership of bytes back to us.
+
+ if (event.data.data) {
+ event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);
+
+ if (options.data) {
+ options.data = event.data.data;
+ }
+ }
+
+ callback(event.data);
+ };
+
+ transmuxer.addEventListener('message', listenForEndEvent);
+
+ if (options.data) {
+ var isArrayBuffer = options.data instanceof ArrayBuffer;
+ message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
+ message.byteLength = options.data.byteLength;
+ var transfers = [isArrayBuffer ? options.data : options.data.buffer];
+ transmuxer.postMessage(message, transfers);
+ } else {
+ transmuxer.postMessage(message);
+ }
+};
+
+var REQUEST_ERRORS = {
+ FAILURE: 2,
+ TIMEOUT: -101,
+ ABORTED: -102
+};
+/**
+ * Abort all requests
+ *
+ * @param {Object} activeXhrs - an object that tracks all XHR requests
+ */
+
+var abortAll = function abortAll(activeXhrs) {
+ activeXhrs.forEach(function (xhr) {
+ xhr.abort();
+ });
+};
+/**
+ * Gather important bandwidth stats once a request has completed
+ *
+ * @param {Object} request - the XHR request from which to gather stats
+ */
+
+
+var getRequestStats = function getRequestStats(request) {
+ return {
+ bandwidth: request.bandwidth,
+ bytesReceived: request.bytesReceived || 0,
+ roundTripTime: request.roundTripTime || 0
+ };
+};
+/**
+ * If possible gather bandwidth stats as a request is in
+ * progress
+ *
+ * @param {Event} progressEvent - an event object from an XHR's progress event
+ */
+
+
+var getProgressStats = function getProgressStats(progressEvent) {
+ var request = progressEvent.target;
+ var roundTripTime = Date.now() - request.requestTime;
+ var stats = {
+ bandwidth: Infinity,
+ bytesReceived: 0,
+ roundTripTime: roundTripTime || 0
+ };
+ stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok
+ // because we should only use bandwidth stats on progress to determine when to
+ // abort a request early due to insufficient bandwidth
+
+ stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
+ return stats;
+};
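+// For example, 500000 bytes received 1000ms after the request started works out
+// to Math.floor(500000 / 1000 * 8 * 1000) = 4,000,000 bits per second.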
+/**
+ * Handle all error conditions in one place and return an object
+ * with all the information
+ *
+ * @param {Error|null} error - if non-null signals an error occurred with the XHR
+ * @param {Object} request - the XHR request that possibly generated the error
+ */
+
+
+var handleErrors = function handleErrors(error, request) {
+ if (request.timedout) {
+ return {
+ status: request.status,
+ message: 'HLS request timed-out at URL: ' + request.uri,
+ code: REQUEST_ERRORS.TIMEOUT,
+ xhr: request
+ };
+ }
+
+ if (request.aborted) {
+ return {
+ status: request.status,
+ message: 'HLS request aborted at URL: ' + request.uri,
+ code: REQUEST_ERRORS.ABORTED,
+ xhr: request
+ };
+ }
+
+ if (error) {
+ return {
+ status: request.status,
+ message: 'HLS request errored at URL: ' + request.uri,
+ code: REQUEST_ERRORS.FAILURE,
+ xhr: request
+ };
+ }
+
+ if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
+ return {
+ status: request.status,
+ message: 'Empty HLS response at URL: ' + request.uri,
+ code: REQUEST_ERRORS.FAILURE,
+ xhr: request
+ };
+ }
+
+ return null;
+};
+/**
+ * Handle responses for key data and convert the key data to the correct format
+ * for the decryption step later
+ *
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ * @param {Array} objects - objects to add the key bytes to.
+ * @param {Function} finishProcessingFn - a callback to execute to continue processing
+ * this request
+ */
+
+
+var handleKeyResponse = function handleKeyResponse(segment, objects, finishProcessingFn) {
+ return function (error, request) {
+ var response = request.response;
+ var errorObj = handleErrors(error, request);
+
+ if (errorObj) {
+ return finishProcessingFn(errorObj, segment);
+ }
+
+ if (response.byteLength !== 16) {
+ return finishProcessingFn({
+ status: request.status,
+ message: 'Invalid HLS key at URL: ' + request.uri,
+ code: REQUEST_ERRORS.FAILURE,
+ xhr: request
+ }, segment);
+ }
+
+ var view = new DataView(response);
+ var bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
+
+ for (var i = 0; i < objects.length; i++) {
+ objects[i].bytes = bytes;
+ }
+
+ return finishProcessingFn(null, segment);
+ };
+};
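+// Note that DataView.getUint32 reads big-endian by default, so the 16-byte key
+// response is repacked above into four big-endian 32-bit words before being
+// handed to the decryption worker later on (see decrypt below).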
+
+var parseInitSegment = function parseInitSegment(segment, _callback) {
+ var type = detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
+ // only know how to parse mp4 init segments at the moment
+
+ if (type !== 'mp4') {
+ var uri = segment.map.resolvedUri || segment.map.uri;
+ return _callback({
+ internal: true,
+ message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + uri,
+ code: REQUEST_ERRORS.FAILURE
+ });
+ }
+
+ workerCallback({
+ action: 'probeMp4Tracks',
+ data: segment.map.bytes,
+ transmuxer: segment.transmuxer,
+ callback: function callback(_ref) {
+ var tracks = _ref.tracks,
+ data = _ref.data; // transfer bytes back to us
+
+ segment.map.bytes = data;
+ tracks.forEach(function (track) {
+ segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
+
+ if (segment.map.tracks[track.type]) {
+ return;
+ }
+
+ segment.map.tracks[track.type] = track;
+
+ if (typeof track.id === 'number' && track.timescale) {
+ segment.map.timescales = segment.map.timescales || {};
+ segment.map.timescales[track.id] = track.timescale;
+ }
+ });
+ return _callback(null);
+ }
+ });
+};
+/**
+ * Handle init-segment responses
+ *
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ * @param {Function} finishProcessingFn - a callback to execute to continue processing
+ * this request
+ */
+
+
+var handleInitSegmentResponse = function handleInitSegmentResponse(_ref2) {
+ var segment = _ref2.segment,
+ finishProcessingFn = _ref2.finishProcessingFn;
+ return function (error, request) {
+ var errorObj = handleErrors(error, request);
+
+ if (errorObj) {
+ return finishProcessingFn(errorObj, segment);
+ }
+
+ var bytes = new Uint8Array(request.response); // init segment is encrypted, we will have to wait
+ // until the key request is done to decrypt.
+
+ if (segment.map.key) {
+ segment.map.encryptedBytes = bytes;
+ return finishProcessingFn(null, segment);
+ }
+
+ segment.map.bytes = bytes;
+ parseInitSegment(segment, function (parseError) {
+ if (parseError) {
+ parseError.xhr = request;
+ parseError.status = request.status;
+ return finishProcessingFn(parseError, segment);
+ }
+
+ finishProcessingFn(null, segment);
+ });
+ };
+};
+/**
+ * Response handler for segment-requests being sure to set the correct
+ * property depending on whether the segment is encrypted or not
+ * Also records and keeps track of stats that are used for ABR purposes
+ *
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ * @param {Function} finishProcessingFn - a callback to execute to continue processing
+ * this request
+ */
+
+
+var handleSegmentResponse = function handleSegmentResponse(_ref3) {
+ var segment = _ref3.segment,
+ finishProcessingFn = _ref3.finishProcessingFn,
+ responseType = _ref3.responseType;
+ return function (error, request) {
+ var errorObj = handleErrors(error, request);
+
+ if (errorObj) {
+ return finishProcessingFn(errorObj, segment);
+ }
+
+ var newBytes = // although responseText "should" exist, this guard serves to prevent an error being
+ // thrown for two primary cases:
+ // 1. the mime type override stops working, or is not implemented for a specific
+ // browser
+ // 2. when using mock XHR libraries like sinon that do not allow the override behavior
+ responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
+ segment.stats = getRequestStats(request);
+
+ if (segment.key) {
+ segment.encryptedBytes = new Uint8Array(newBytes);
+ } else {
+ segment.bytes = new Uint8Array(newBytes);
+ }
+
+ return finishProcessingFn(null, segment);
+ };
+};
+
+var transmuxAndNotify = function transmuxAndNotify(_ref4) {
+ var segment = _ref4.segment,
+ bytes = _ref4.bytes,
+ trackInfoFn = _ref4.trackInfoFn,
+ timingInfoFn = _ref4.timingInfoFn,
+ videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
+ id3Fn = _ref4.id3Fn,
+ captionsFn = _ref4.captionsFn,
+ isEndOfTimeline = _ref4.isEndOfTimeline,
+ endedTimelineFn = _ref4.endedTimelineFn,
+ dataFn = _ref4.dataFn,
+ doneFn = _ref4.doneFn,
+ onTransmuxerLog = _ref4.onTransmuxerLog;
+ var fmp4Tracks = segment.map && segment.map.tracks || {};
+ var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
+ // One reason for this is that in the case of full segments, we want to trust start
+ // times from the probe, rather than the transmuxer.
+
+ var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
+ var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
+ var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
+ var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');
+
+ var finish = function finish() {
+ return transmux({
+ bytes: bytes,
+ transmuxer: segment.transmuxer,
+ audioAppendStart: segment.audioAppendStart,
+ gopsToAlignWith: segment.gopsToAlignWith,
+ remux: isMuxed,
+ onData: function onData(result) {
+ result.type = result.type === 'combined' ? 'video' : result.type;
+ dataFn(segment, result);
+ },
+ onTrackInfo: function onTrackInfo(trackInfo) {
+ if (trackInfoFn) {
+ if (isMuxed) {
+ trackInfo.isMuxed = true;
+ }
+
+ trackInfoFn(segment, trackInfo);
+ }
+ },
+ onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
+ // we only want the first start value we encounter
+ if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
+ audioStartFn(audioTimingInfo.start);
+ audioStartFn = null;
+ } // we want to continually update the end time
+
+
+ if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
+ audioEndFn(audioTimingInfo.end);
+ }
+ },
+ onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
+ // we only want the first start value we encounter
+ if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
+ videoStartFn(videoTimingInfo.start);
+ videoStartFn = null;
+ } // we want to continually update the end time
+
+
+ if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
+ videoEndFn(videoTimingInfo.end);
+ }
+ },
+ onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
+ videoSegmentTimingInfoFn(videoSegmentTimingInfo);
+ },
+ onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
+ audioSegmentTimingInfoFn(audioSegmentTimingInfo);
+ },
+ onId3: function onId3(id3Frames, dispatchType) {
+ id3Fn(segment, id3Frames, dispatchType);
+ },
+ onCaptions: function onCaptions(captions) {
+ captionsFn(segment, [captions]);
+ },
+ isEndOfTimeline: isEndOfTimeline,
+ onEndedTimeline: function onEndedTimeline() {
+ endedTimelineFn();
+ },
+ onTransmuxerLog: onTransmuxerLog,
+ onDone: function onDone(result) {
+ if (!doneFn) {
+ return;
+ }
+
+ result.type = result.type === 'combined' ? 'video' : result.type;
+ doneFn(null, segment, result);
+ }
+ });
+ }; // In the transmuxer, we don't yet have the ability to extract a "proper" start time.
+ // Meaning cached frame data may corrupt our notion of where this segment
+ // really starts. To get around this, probe for the info needed.
+
+
+ workerCallback({
+ action: 'probeTs',
+ transmuxer: segment.transmuxer,
+ data: bytes,
+ baseStartTime: segment.baseStartTime,
+ callback: function callback(data) {
+ segment.bytes = bytes = data.data;
+ var probeResult = data.result;
+
+ if (probeResult) {
+ trackInfoFn(segment, {
+ hasAudio: probeResult.hasAudio,
+ hasVideo: probeResult.hasVideo,
+ isMuxed: isMuxed
+ });
+ trackInfoFn = null;
+
+ if (probeResult.hasAudio && !isMuxed) {
+ audioStartFn(probeResult.audioStart);
+ }
+
+ if (probeResult.hasVideo) {
+ videoStartFn(probeResult.videoStart);
+ }
+
+ audioStartFn = null;
+ videoStartFn = null;
+ }
+
+ finish();
+ }
+ });
+};
+
+var handleSegmentBytes = function handleSegmentBytes(_ref5) {
+ var segment = _ref5.segment,
+ bytes = _ref5.bytes,
+ trackInfoFn = _ref5.trackInfoFn,
+ timingInfoFn = _ref5.timingInfoFn,
+ videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
+ id3Fn = _ref5.id3Fn,
+ captionsFn = _ref5.captionsFn,
+ isEndOfTimeline = _ref5.isEndOfTimeline,
+ endedTimelineFn = _ref5.endedTimelineFn,
+ dataFn = _ref5.dataFn,
+ doneFn = _ref5.doneFn,
+ onTransmuxerLog = _ref5.onTransmuxerLog;
+ var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
+ // We should have a handler that fetches the number of bytes required
+ // to check if something is fmp4. This will allow us to save bandwidth
+ // because we can only blacklist a playlist and abort requests
+ // by codec after trackinfo triggers.
+
+ if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
+ segment.isFmp4 = true;
+ var tracks = segment.map.tracks;
+ var trackInfo = {
+ isFmp4: true,
+ hasVideo: !!tracks.video,
+ hasAudio: !!tracks.audio
+ }; // if we have a audio track, with a codec that is not set to
+ // encrypted audio
+
+ if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
+ trackInfo.audioCodec = tracks.audio.codec;
+ } // if we have a video track, with a codec that is not set to
+ // encrypted video
+
+
+ if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
+ trackInfo.videoCodec = tracks.video.codec;
+ }
+
+ if (tracks.video && tracks.audio) {
+ trackInfo.isMuxed = true;
+ } // since we don't support appending fmp4 data on progress, we know we have the full
+ // segment here
+
+
+ trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
+ // time. The end time can be roughly calculated by the receiver using the duration.
+ //
+ // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
+ // that is the true start of the segment (where the playback engine should begin
+ // decoding).
+
+ var finishLoading = function finishLoading(captions) {
+ // if the track still has audio at this point it is only possible
+ // for it to be audio only. See `tracks.video && tracks.audio` if statement
+ // above.
+ // we make sure to use segment.bytes here as that
+ dataFn(segment, {
+ data: bytesAsUint8Array,
+ type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
+ });
+
+ if (captions && captions.length) {
+ captionsFn(segment, captions);
+ }
+
+ doneFn(null, segment, {});
+ };
+
+ workerCallback({
+ action: 'probeMp4StartTime',
+ timescales: segment.map.timescales,
+ data: bytesAsUint8Array,
+ transmuxer: segment.transmuxer,
+ callback: function callback(_ref6) {
+ var data = _ref6.data,
+ startTime = _ref6.startTime; // transfer bytes back to us
+
+ bytes = data.buffer;
+ segment.bytes = bytesAsUint8Array = data;
+
+ if (trackInfo.hasAudio && !trackInfo.isMuxed) {
+ timingInfoFn(segment, 'audio', 'start', startTime);
+ }
+
+ if (trackInfo.hasVideo) {
+ timingInfoFn(segment, 'video', 'start', startTime);
+ } // Run through the CaptionParser in case there are captions.
+ // Initialize CaptionParser if it hasn't been yet
+
+
+ if (!tracks.video || !data.byteLength || !segment.transmuxer) {
+ finishLoading();
+ return;
+ }
+
+ workerCallback({
+ action: 'pushMp4Captions',
+ endAction: 'mp4Captions',
+ transmuxer: segment.transmuxer,
+ data: bytesAsUint8Array,
+ timescales: segment.map.timescales,
+ trackIds: [tracks.video.id],
+ callback: function callback(message) {
+ // transfer bytes back to us
+ bytes = message.data.buffer;
+ segment.bytes = bytesAsUint8Array = message.data;
+ message.logs.forEach(function (log) {
+ onTransmuxerLog(videojs.mergeOptions(log, {
+ stream: 'mp4CaptionParser'
+ }));
+ });
+ finishLoading(message.captions);
+ }
+ });
+ }
+ });
+ return;
+ } // VTT or other segments that don't need processing
+
+
+ if (!segment.transmuxer) {
+ doneFn(null, segment, {});
+ return;
+ }
+
+ if (typeof segment.container === 'undefined') {
+ segment.container = detectContainerForBytes(bytesAsUint8Array);
+ }
+
+ if (segment.container !== 'ts' && segment.container !== 'aac') {
+ trackInfoFn(segment, {
+ hasAudio: false,
+ hasVideo: false
+ });
+ doneFn(null, segment, {});
+ return;
+ } // ts or aac
+
+
+ transmuxAndNotify({
+ segment: segment,
+ bytes: bytes,
+ trackInfoFn: trackInfoFn,
+ timingInfoFn: timingInfoFn,
+ videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
+ id3Fn: id3Fn,
+ captionsFn: captionsFn,
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: endedTimelineFn,
+ dataFn: dataFn,
+ doneFn: doneFn,
+ onTransmuxerLog: onTransmuxerLog
+ });
+};
+
+var decrypt = function decrypt(_ref7, callback) {
+ var id = _ref7.id,
+ key = _ref7.key,
+ encryptedBytes = _ref7.encryptedBytes,
+ decryptionWorker = _ref7.decryptionWorker;
+
+ var decryptionHandler = function decryptionHandler(event) {
+ if (event.data.source === id) {
+ decryptionWorker.removeEventListener('message', decryptionHandler);
+ var decrypted = event.data.decrypted;
+ callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
+ }
+ };
+
+ decryptionWorker.addEventListener('message', decryptionHandler);
+ var keyBytes;
+
+ if (key.bytes.slice) {
+ keyBytes = key.bytes.slice();
+ } else {
+ keyBytes = new Uint32Array(Array.prototype.slice.call(key.bytes));
+ } // incrementally decrypt the bytes
+
+
+ decryptionWorker.postMessage(createTransferableMessage({
+ source: id,
+ encrypted: encryptedBytes,
+ key: keyBytes,
+ iv: key.iv
+ }), [encryptedBytes.buffer, keyBytes.buffer]);
+};
+/**
+ * Decrypt the segment via the decryption web worker
+ *
+ * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
+ * routines
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ * @param {Function} trackInfoFn - a callback that receives track info
+ * @param {Function} timingInfoFn - a callback that receives timing info
+ * @param {Function} videoSegmentTimingInfoFn
+ * a callback that receives video timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {Function} audioSegmentTimingInfoFn
+ * a callback that receives audio timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {boolean} isEndOfTimeline
+ * true if this segment represents the last segment in a timeline
+ * @param {Function} endedTimelineFn
+ * a callback made when a timeline is ended, will only be called if
+ * isEndOfTimeline is true
+ * @param {Function} dataFn - a callback that is executed when segment bytes are available
+ * and ready to use
+ * @param {Function} doneFn - a callback that is executed after decryption has completed
+ */
+
+
+var decryptSegment = function decryptSegment(_ref8) {
+ var decryptionWorker = _ref8.decryptionWorker,
+ segment = _ref8.segment,
+ trackInfoFn = _ref8.trackInfoFn,
+ timingInfoFn = _ref8.timingInfoFn,
+ videoSegmentTimingInfoFn = _ref8.videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn = _ref8.audioSegmentTimingInfoFn,
+ id3Fn = _ref8.id3Fn,
+ captionsFn = _ref8.captionsFn,
+ isEndOfTimeline = _ref8.isEndOfTimeline,
+ endedTimelineFn = _ref8.endedTimelineFn,
+ dataFn = _ref8.dataFn,
+ doneFn = _ref8.doneFn,
+ onTransmuxerLog = _ref8.onTransmuxerLog;
+ decrypt({
+ id: segment.requestId,
+ key: segment.key,
+ encryptedBytes: segment.encryptedBytes,
+ decryptionWorker: decryptionWorker
+ }, function (decryptedBytes) {
+ segment.bytes = decryptedBytes;
+ handleSegmentBytes({
+ segment: segment,
+ bytes: segment.bytes,
+ trackInfoFn: trackInfoFn,
+ timingInfoFn: timingInfoFn,
+ videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
+ id3Fn: id3Fn,
+ captionsFn: captionsFn,
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: endedTimelineFn,
+ dataFn: dataFn,
+ doneFn: doneFn,
+ onTransmuxerLog: onTransmuxerLog
+ });
+ });
+};
+/**
+ * This function waits for all XHRs to finish (with either success or failure)
+ * before continuing processing via its callback. The function gathers errors
+ * from each request into a single errors array so that the error status for
+ * each request can be examined later.
+ *
+ * @param {Object} activeXhrs - an object that tracks all XHR requests
+ * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
+ * routines
+ * @param {Function} trackInfoFn - a callback that receives track info
+ * @param {Function} timingInfoFn - a callback that receives timing info
+ * @param {Function} videoSegmentTimingInfoFn
+ * a callback that receives video timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {Function} audioSegmentTimingInfoFn
+ * a callback that receives audio timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {Function} id3Fn - a callback that receives ID3 metadata
+ * @param {Function} captionsFn - a callback that receives captions
+ * @param {boolean} isEndOfTimeline
+ * true if this segment represents the last segment in a timeline
+ * @param {Function} endedTimelineFn
+ * a callback made when a timeline is ended, will only be called if
+ * isEndOfTimeline is true
+ * @param {Function} dataFn - a callback that is executed when segment bytes are available
+ * and ready to use
+ * @param {Function} doneFn - a callback that is executed after all resources have been
+ * downloaded and any decryption completed
+ */
+
+
+var waitForCompletion = function waitForCompletion(_ref9) {
+ var activeXhrs = _ref9.activeXhrs,
+ decryptionWorker = _ref9.decryptionWorker,
+ trackInfoFn = _ref9.trackInfoFn,
+ timingInfoFn = _ref9.timingInfoFn,
+ videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
+ id3Fn = _ref9.id3Fn,
+ captionsFn = _ref9.captionsFn,
+ isEndOfTimeline = _ref9.isEndOfTimeline,
+ endedTimelineFn = _ref9.endedTimelineFn,
+ dataFn = _ref9.dataFn,
+ doneFn = _ref9.doneFn,
+ onTransmuxerLog = _ref9.onTransmuxerLog;
+ var count = 0;
+ var didError = false;
+ return function (error, segment) {
+ if (didError) {
+ return;
+ }
+
+ if (error) {
+ didError = true; // If there are errors, we have to abort any outstanding requests
+
+ abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
+ // handle the aborted events from those requests, there are some cases where we may
+ // never get an aborted event. For instance, if the network connection is lost and
+ // there were two requests, the first may have triggered an error immediately, while
+ // the second request remains unsent. In that case, the aborted algorithm will not
+ // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
+ //
+ // We also can't rely on the ready state of the XHR, since the request that
+ // triggered the connection error may also show as a ready state of 0 (unsent).
+ // Therefore, we have to finish this group of requests immediately after the first
+ // seen error.
+
+ return doneFn(error, segment);
+ }
+
+ count += 1;
+
+ if (count === activeXhrs.length) {
+ var segmentFinish = function segmentFinish() {
+ if (segment.encryptedBytes) {
+ return decryptSegment({
+ decryptionWorker: decryptionWorker,
+ segment: segment,
+ trackInfoFn: trackInfoFn,
+ timingInfoFn: timingInfoFn,
+ videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
+ id3Fn: id3Fn,
+ captionsFn: captionsFn,
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: endedTimelineFn,
+ dataFn: dataFn,
+ doneFn: doneFn,
+ onTransmuxerLog: onTransmuxerLog
+ });
+ } // Otherwise, everything is ready just continue
+
+
+ handleSegmentBytes({
+ segment: segment,
+ bytes: segment.bytes,
+ trackInfoFn: trackInfoFn,
+ timingInfoFn: timingInfoFn,
+ videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
+ id3Fn: id3Fn,
+ captionsFn: captionsFn,
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: endedTimelineFn,
+ dataFn: dataFn,
+ doneFn: doneFn,
+ onTransmuxerLog: onTransmuxerLog
+ });
+ }; // Keep track of when *all* of the requests have completed
+
+
+ segment.endOfAllRequests = Date.now();
+
+ if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
+ return decrypt({
+ decryptionWorker: decryptionWorker,
+ // add -init to the "id" to differentiate between segment
+ // and init segment decryption, just in case they happen
+ // at the same time at some point in the future.
+ id: segment.requestId + '-init',
+ encryptedBytes: segment.map.encryptedBytes,
+ key: segment.map.key
+ }, function (decryptedBytes) {
+ segment.map.bytes = decryptedBytes;
+ parseInitSegment(segment, function (parseError) {
+ if (parseError) {
+ abortAll(activeXhrs);
+ return doneFn(parseError, segment);
+ }
+
+ segmentFinish();
+ });
+ });
+ }
+
+ segmentFinish();
+ }
+ };
+};
+/**
+ * Calls the abort callback if any request within the batch was aborted. Will only call
+ * the callback once per batch of requests, even if multiple were aborted.
+ *
+ * @param {Object} loadendState - state to check to see if the abort function was called
+ * @param {Function} abortFn - callback to call for abort
+ */
+
+
+var handleLoadEnd = function handleLoadEnd(_ref10) {
+ var loadendState = _ref10.loadendState,
+ abortFn = _ref10.abortFn;
+ return function (event) {
+ var request = event.target;
+
+ if (request.aborted && abortFn && !loadendState.calledAbortFn) {
+ abortFn();
+ loadendState.calledAbortFn = true;
+ }
+ };
+};
+/**
+ * Simple progress event callback handler that gathers some stats before
+ * executing a provided callback with the `segment` object
+ *
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ * @param {Function} progressFn - a callback that is executed each time a progress event
+ * is received
+ * @param {Function} trackInfoFn - a callback that receives track info
+ * @param {Function} timingInfoFn - a callback that receives timing info
+ * @param {Function} videoSegmentTimingInfoFn
+ * a callback that receives video timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {Function} audioSegmentTimingInfoFn
+ * a callback that receives audio timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {boolean} isEndOfTimeline
+ * true if this segment represents the last segment in a timeline
+ * @param {Function} endedTimelineFn
+ * a callback made when a timeline is ended, will only be called if
+ * isEndOfTimeline is true
+ * @param {Function} dataFn - a callback that is executed when segment bytes are available
+ * and ready to use
+ * @param {Event} event - the progress event object from XMLHttpRequest
+ */
+
+
+var handleProgress = function handleProgress(_ref11) {
+ var segment = _ref11.segment,
+ progressFn = _ref11.progressFn;
+ _ref11.trackInfoFn;
+ _ref11.timingInfoFn;
+ _ref11.videoSegmentTimingInfoFn;
+ _ref11.audioSegmentTimingInfoFn;
+ _ref11.id3Fn;
+ _ref11.captionsFn;
+ _ref11.isEndOfTimeline;
+ _ref11.endedTimelineFn;
+ _ref11.dataFn;
+ return function (event) {
+ var request = event.target;
+
+ if (request.aborted) {
+ return;
+ }
+
+ segment.stats = videojs.mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
+
+ if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
+ segment.stats.firstBytesReceivedAt = Date.now();
+ }
+
+ return progressFn(event, segment);
+ };
+};
+/**
+ * Loads all resources and does any processing necessary for a media segment
+ *
+ * Features:
+ * decrypts the media-segment if it has a key uri and an iv
+ * aborts *all* requests if *any* one request fails
+ *
+ * The segment object, at minimum, has the following format:
+ * {
+ * resolvedUri: String,
+ * [transmuxer]: Object,
+ * [byterange]: {
+ * offset: Number,
+ * length: Number
+ * },
+ * [key]: {
+ * resolvedUri: String
+ * [byterange]: {
+ * offset: Number,
+ * length: Number
+ * },
+ * iv: {
+ * bytes: Uint32Array
+ * }
+ * },
+ * [map]: {
+ * resolvedUri: String,
+ * [byterange]: {
+ * offset: Number,
+ * length: Number
+ * },
+ * [bytes]: Uint8Array
+ * }
+ * }
+ * ...where [name] denotes optional properties
+ *
+ * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
+ * @param {Object} xhrOptions - the base options to provide to all xhr requests
+ * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
+ * decryption routines
+ * @param {Object} segment - a simplified copy of the segmentInfo object
+ * from SegmentLoader
+ * @param {Function} abortFn - a callback called (only once) if any piece of a request was
+ * aborted
+ * @param {Function} progressFn - a callback that receives progress events from the main
+ * segment's xhr request
+ * @param {Function} trackInfoFn - a callback that receives track info
+ * @param {Function} timingInfoFn - a callback that receives timing info
+ * @param {Function} videoSegmentTimingInfoFn
+ * a callback that receives video timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {Function} audioSegmentTimingInfoFn
+ * a callback that receives audio timing info based on media times and
+ * any adjustments made by the transmuxer
+ * @param {Function} id3Fn - a callback that receives ID3 metadata
+ * @param {Function} captionsFn - a callback that receives captions
+ * @param {boolean} isEndOfTimeline
+ * true if this segment represents the last segment in a timeline
+ * @param {Function} endedTimelineFn
+ * a callback made when a timeline is ended, will only be called if
+ * isEndOfTimeline is true
+ * @param {Function} dataFn - a callback that receives data from the main segment's xhr
+ * request, transmuxed if needed
+ * @param {Function} doneFn - a callback that is executed only once all requests have
+ * succeeded or failed
+ * @return {Function} a function that, when invoked, immediately aborts all
+ * outstanding requests
+ */
+
+
+var mediaSegmentRequest = function mediaSegmentRequest(_ref12) {
+ var xhr = _ref12.xhr,
+ xhrOptions = _ref12.xhrOptions,
+ decryptionWorker = _ref12.decryptionWorker,
+ segment = _ref12.segment,
+ abortFn = _ref12.abortFn,
+ progressFn = _ref12.progressFn,
+ trackInfoFn = _ref12.trackInfoFn,
+ timingInfoFn = _ref12.timingInfoFn,
+ videoSegmentTimingInfoFn = _ref12.videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn = _ref12.audioSegmentTimingInfoFn,
+ id3Fn = _ref12.id3Fn,
+ captionsFn = _ref12.captionsFn,
+ isEndOfTimeline = _ref12.isEndOfTimeline,
+ endedTimelineFn = _ref12.endedTimelineFn,
+ dataFn = _ref12.dataFn,
+ doneFn = _ref12.doneFn,
+ onTransmuxerLog = _ref12.onTransmuxerLog;
+ var activeXhrs = [];
+ var finishProcessingFn = waitForCompletion({
+ activeXhrs: activeXhrs,
+ decryptionWorker: decryptionWorker,
+ trackInfoFn: trackInfoFn,
+ timingInfoFn: timingInfoFn,
+ videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
+ id3Fn: id3Fn,
+ captionsFn: captionsFn,
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: endedTimelineFn,
+ dataFn: dataFn,
+ doneFn: doneFn,
+ onTransmuxerLog: onTransmuxerLog
+ }); // optionally, request the decryption key
+
+ if (segment.key && !segment.key.bytes) {
+ var objects = [segment.key];
+
+ if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
+ objects.push(segment.map.key);
+ }
+
+ var keyRequestOptions = videojs.mergeOptions(xhrOptions, {
+ uri: segment.key.resolvedUri,
+ responseType: 'arraybuffer'
+ });
+ var keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
+ var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
+ activeXhrs.push(keyXhr);
+ } // optionally, request the associated media init segment
+
+
+ if (segment.map && !segment.map.bytes) {
+ var differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);
+
+ if (differentMapKey) {
+ var mapKeyRequestOptions = videojs.mergeOptions(xhrOptions, {
+ uri: segment.map.key.resolvedUri,
+ responseType: 'arraybuffer'
+ });
+ var mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
+ var mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
+ activeXhrs.push(mapKeyXhr);
+ }
+
+ var initSegmentOptions = videojs.mergeOptions(xhrOptions, {
+ uri: segment.map.resolvedUri,
+ responseType: 'arraybuffer',
+ headers: segmentXhrHeaders(segment.map)
+ });
+ var initSegmentRequestCallback = handleInitSegmentResponse({
+ segment: segment,
+ finishProcessingFn: finishProcessingFn
+ });
+ var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
+ activeXhrs.push(initSegmentXhr);
+ }
+
+ var segmentRequestOptions = videojs.mergeOptions(xhrOptions, {
+ uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
+ responseType: 'arraybuffer',
+ headers: segmentXhrHeaders(segment)
+ });
+ var segmentRequestCallback = handleSegmentResponse({
+ segment: segment,
+ finishProcessingFn: finishProcessingFn,
+ responseType: segmentRequestOptions.responseType
+ });
+ var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
+ segmentXhr.addEventListener('progress', handleProgress({
+ segment: segment,
+ progressFn: progressFn,
+ trackInfoFn: trackInfoFn,
+ timingInfoFn: timingInfoFn,
+ videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
+ audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
+ id3Fn: id3Fn,
+ captionsFn: captionsFn,
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: endedTimelineFn,
+ dataFn: dataFn
+ }));
+ activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but should not make callbacks
+ // multiple times, provide a shared state object
+
+ var loadendState = {};
+ activeXhrs.forEach(function (activeXhr) {
+ activeXhr.addEventListener('loadend', handleLoadEnd({
+ loadendState: loadendState,
+ abortFn: abortFn
+ }));
+ });
+ return function () {
+ return abortAll(activeXhrs);
+ };
+};
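+// Illustrative sketch (not part of the bundled library): a hypothetical call to
+// mediaSegmentRequest. The uri, xhr wrapper, decryption worker, and callbacks below are
+// all placeholders; the value returned aborts every outstanding request for the segment.
+var mediaSegmentRequestExample_ = function (xhr, decryptionWorker) {
+  var noop = function () {};
+  var abort = mediaSegmentRequest({
+    xhr: xhr,
+    xhrOptions: { timeout: 45000 },
+    decryptionWorker: decryptionWorker,
+    segment: { resolvedUri: 'https://example.com/segment-0.ts', stats: {} },
+    abortFn: noop,
+    progressFn: noop,
+    trackInfoFn: noop,
+    timingInfoFn: noop,
+    videoSegmentTimingInfoFn: noop,
+    audioSegmentTimingInfoFn: noop,
+    id3Fn: noop,
+    captionsFn: noop,
+    isEndOfTimeline: false,
+    endedTimelineFn: noop,
+    dataFn: noop,
+    doneFn: noop,
+    onTransmuxerLog: noop
+  });
+  return abort; // calling abort() cancels the key, init segment, and media segment requests
+};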
+/**
+ * @file - codecs.js - Handles tasks regarding codec strings, such as translating legacy
+ * codec strings to standard ones, or translating codec strings into objects that can be examined.
+ */
+
+
+var logFn$1 = logger('CodecUtils');
+/**
+ * Returns a set of codec strings parsed from the playlist or the default
+ * codec strings if no codecs were specified in the playlist
+ *
+ * @param {Playlist} media the current media playlist
+ * @return {Object} an object with the video and audio codecs
+ */
+
+var getCodecs = function getCodecs(media) {
+ // if the codecs were explicitly specified, use them instead of the
+ // defaults
+ var mediaAttributes = media.attributes || {};
+
+ if (mediaAttributes.CODECS) {
+ return parseCodecs(mediaAttributes.CODECS);
+ }
+};
+
+var isMaat = function isMaat(master, media) {
+ var mediaAttributes = media.attributes || {};
+ return master && master.mediaGroups && master.mediaGroups.AUDIO && mediaAttributes.AUDIO && master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
+};
+
+var isMuxed = function isMuxed(master, media) {
+ if (!isMaat(master, media)) {
+ return true;
+ }
+
+ var mediaAttributes = media.attributes || {};
+ var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
+
+ for (var groupId in audioGroup) {
+ // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
+ // or there are listed playlists (the case for DASH, as the manifest will have already
+ // provided all of the details necessary to generate the audio playlist, as opposed to
+ // HLS' externally requested playlists), then the content is demuxed.
+ if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
+ return true;
+ }
+ }
+
+ return false;
+};
+
+var unwrapCodecList = function unwrapCodecList(codecList) {
+ var codecs = {};
+ codecList.forEach(function (_ref) {
+ var mediaType = _ref.mediaType,
+ type = _ref.type,
+ details = _ref.details;
+ codecs[mediaType] = codecs[mediaType] || [];
+ codecs[mediaType].push(translateLegacyCodec("" + type + details));
+ });
+ Object.keys(codecs).forEach(function (mediaType) {
+ if (codecs[mediaType].length > 1) {
+ logFn$1("multiple " + mediaType + " codecs found as attributes: " + codecs[mediaType].join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
+ codecs[mediaType] = null;
+ return;
+ }
+
+ codecs[mediaType] = codecs[mediaType][0];
+ });
+ return codecs;
+};
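+// Illustrative sketch (not part of the bundled library): unwrapCodecList keys the
+// translated codec strings by media type. The parsed entries below are hypothetical but
+// follow the { mediaType, type, details } shape destructured above; a media type that
+// lists more than one codec is set to null so that real codecs can be probed later.
+var unwrapCodecListExample_ = function () {
+  var parsed = [{
+    mediaType: 'video',
+    type: 'avc1',
+    details: '.4d400d'
+  }, {
+    mediaType: 'audio',
+    type: 'mp4a',
+    details: '.40.2'
+  }];
+  // expected result: { video: 'avc1.4d400d', audio: 'mp4a.40.2' }
+  return unwrapCodecList(parsed);
+};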
+
+var codecCount = function codecCount(codecObj) {
+ var count = 0;
+
+ if (codecObj.audio) {
+ count++;
+ }
+
+ if (codecObj.video) {
+ count++;
+ }
+
+ return count;
+};
+/**
+ * Calculates the codec strings for a working configuration of
+ * SourceBuffers to play variant streams in a master playlist. If
+ * there is no possible working configuration, an empty object will be
+ * returned.
+ *
+ * @param master {Object} the m3u8 object for the master playlist
+ * @param media {Object} the m3u8 object for the variant playlist
+ * @return {Object} the codec strings.
+ *
+ * @private
+ */
+
+
+var codecsForPlaylist = function codecsForPlaylist(master, media) {
+ var mediaAttributes = media.attributes || {};
+ var codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
+ // Put another way, there is no way to have a video-only multiple-audio HLS!
+
+ if (isMaat(master, media) && !codecInfo.audio) {
+ if (!isMuxed(master, media)) {
+ // It is possible for codecs to be specified on the audio media group playlist but
+ // not on the rendition playlist. This is mostly the case for DASH, where audio and
+ // video are always separate (and separately specified).
+ var defaultCodecs = unwrapCodecList(codecsFromDefault(master, mediaAttributes.AUDIO) || []);
+
+ if (defaultCodecs.audio) {
+ codecInfo.audio = defaultCodecs.audio;
+ }
+ }
+ }
+
+ return codecInfo;
+};
+
+var logFn = logger('PlaylistSelector');
+
+var representationToString = function representationToString(representation) {
+ if (!representation || !representation.playlist) {
+ return;
+ }
+
+ var playlist = representation.playlist;
+ return JSON.stringify({
+ id: playlist.id,
+ bandwidth: representation.bandwidth,
+ width: representation.width,
+ height: representation.height,
+ codecs: playlist.attributes && playlist.attributes.CODECS || ''
+ });
+}; // Utilities
+
+/**
+ * Returns the CSS value for the specified property on an element
+ * using `getComputedStyle`. Firefox has a long-standing issue where
+ * getComputedStyle() may return null when running in an iframe with
+ * `display: none`.
+ *
+ * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
+ * @param {HTMLElement} el the HTMLElement to work on
+ * @param {string} property the property to get the style for
+ */
+
+
+var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
+ if (!el) {
+ return '';
+ }
+
+ var result = window$1.getComputedStyle(el);
+
+ if (!result) {
+ return '';
+ }
+
+ return result[property];
+};
+/**
+ * Reusable stable sort function
+ *
+ * @param {Playlists} array
+ * @param {Function} sortFn the comparator function to sort with
+ * @function stableSort
+ */
+
+
+var stableSort = function stableSort(array, sortFn) {
+ var newArray = array.slice();
+ array.sort(function (left, right) {
+ var cmp = sortFn(left, right);
+
+ if (cmp === 0) {
+ return newArray.indexOf(left) - newArray.indexOf(right);
+ }
+
+ return cmp;
+ });
+};
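+// Illustrative sketch (not part of the bundled library): stableSort keeps a copy of the
+// original array and falls back to the original indexes when the comparator ties, so
+// equal elements keep their relative order. The rep objects below are hypothetical.
+var stableSortExample_ = function () {
+  var reps = [{ id: 'a', bandwidth: 2e6 }, { id: 'b', bandwidth: 1e6 }, { id: 'c', bandwidth: 2e6 }];
+  stableSort(reps, function (left, right) {
+    return left.bandwidth - right.bandwidth;
+  });
+  // reps is now b, a, c; 'a' stays ahead of 'c' because their bandwidths tie
+  return reps;
+};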
+/**
+ * A comparator function to sort two playlist object by bandwidth.
+ *
+ * @param {Object} left a media playlist object
+ * @param {Object} right a media playlist object
+ * @return {number} Greater than zero if the bandwidth attribute of
+ * left is greater than the corresponding attribute of right. Less
+ * than zero if the bandwidth of right is greater than left and
+ * exactly zero if the two are equal.
+ */
+
+
+var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
+ var leftBandwidth;
+ var rightBandwidth;
+
+ if (left.attributes.BANDWIDTH) {
+ leftBandwidth = left.attributes.BANDWIDTH;
+ }
+
+ leftBandwidth = leftBandwidth || window$1.Number.MAX_VALUE;
+
+ if (right.attributes.BANDWIDTH) {
+ rightBandwidth = right.attributes.BANDWIDTH;
+ }
+
+ rightBandwidth = rightBandwidth || window$1.Number.MAX_VALUE;
+ return leftBandwidth - rightBandwidth;
+};
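+// Illustrative sketch (not part of the bundled library): a playlist without a BANDWIDTH
+// attribute falls back to Number.MAX_VALUE, so it always sorts after playlists that
+// declare one. The attribute values below are hypothetical.
+var comparePlaylistBandwidthExample_ = function () {
+  var declared = { attributes: { BANDWIDTH: 4e6 } };
+  var undeclared = { attributes: {} };
+  // negative result: the playlist that declares a bandwidth sorts first
+  return comparePlaylistBandwidth(declared, undeclared);
+};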
+/**
+ * A comparator function to sort two playlist object by resolution (width).
+ *
+ * @param {Object} left a media playlist object
+ * @param {Object} right a media playlist object
+ * @return {number} Greater than zero if the resolution.width attribute of
+ * left is greater than the corresponding attribute of right. Less
+ * than zero if the resolution.width of right is greater than left and
+ * exactly zero if the two are equal.
+ */
+
+
+var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
+ var leftWidth;
+ var rightWidth;
+
+ if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
+ leftWidth = left.attributes.RESOLUTION.width;
+ }
+
+ leftWidth = leftWidth || window$1.Number.MAX_VALUE;
+
+ if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
+ rightWidth = right.attributes.RESOLUTION.width;
+ }
+
+ rightWidth = rightWidth || window$1.Number.MAX_VALUE; // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
+ // have the same media dimensions/ resolution
+
+ if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
+ return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
+ }
+
+ return leftWidth - rightWidth;
+};
+/**
+ * Chooses the appropriate media playlist based on bandwidth and player size
+ *
+ * @param {Object} master
+ * Object representation of the master manifest
+ * @param {number} playerBandwidth
+ * Current calculated bandwidth of the player
+ * @param {number} playerWidth
+ * Current width of the player element (should account for the device pixel ratio)
+ * @param {number} playerHeight
+ * Current height of the player element (should account for the device pixel ratio)
+ * @param {boolean} limitRenditionByPlayerDimensions
+ * True if the player width and height should be used during the selection, false otherwise
+ * @param {Object} masterPlaylistController
+ * the current masterPlaylistController object
+ * @return {Playlist} the highest bitrate playlist less than the
+ * currently detected bandwidth, accounting for some amount of
+ * bandwidth variance
+ */
+
+
+var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
+ // If we end up getting called before `master` is available, exit early
+ if (!master) {
+ return;
+ }
+
+ var options = {
+ bandwidth: playerBandwidth,
+ width: playerWidth,
+ height: playerHeight,
+ limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
+ };
+ var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.
+
+ if (Playlist.isAudioOnly(master)) {
+ playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
+    // at the bottom of this function for debugging.
+
+ options.audioOnly = true;
+ } // convert the playlists to an intermediary representation to make comparisons easier
+
+
+ var sortedPlaylistReps = playlists.map(function (playlist) {
+ var bandwidth;
+ var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
+ var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
+ bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
+ bandwidth = bandwidth || window$1.Number.MAX_VALUE;
+ return {
+ bandwidth: bandwidth,
+ width: width,
+ height: height,
+ playlist: playlist
+ };
+ });
+ stableSort(sortedPlaylistReps, function (left, right) {
+ return left.bandwidth - right.bandwidth;
+ }); // filter out any playlists that have been excluded due to
+ // incompatible configurations
+
+ sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
+ return !Playlist.isIncompatible(rep.playlist);
+ }); // filter out any playlists that have been disabled manually through the representations
+ // api or blacklisted temporarily due to playback errors.
+
+ var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
+ return Playlist.isEnabled(rep.playlist);
+ });
+
+ if (!enabledPlaylistReps.length) {
+ // if there are no enabled playlists, then they have all been blacklisted or disabled
+ // by the user through the representations api. In this case, ignore blacklisting and
+ // fallback to what the user wants by using playlists the user has not disabled.
+ enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
+ return !Playlist.isDisabled(rep.playlist);
+ });
+ } // filter out any variant that has greater effective bitrate
+ // than the current estimated bandwidth
+
+
+ var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
+ return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
+ });
+ var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
+  // and then take the very first element
+
+ var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
+ return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
+ })[0]; // if we're not going to limit renditions by player size, make an early decision.
+
+ if (limitRenditionByPlayerDimensions === false) {
+ var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
+
+ if (_chosenRep && _chosenRep.playlist) {
+ var type = 'sortedPlaylistReps';
+
+ if (bandwidthBestRep) {
+ type = 'bandwidthBestRep';
+ }
+
+ if (enabledPlaylistReps[0]) {
+ type = 'enabledPlaylistReps';
+ }
+
+ logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
+ return _chosenRep.playlist;
+ }
+
+ logFn('could not choose a playlist with options', options);
+ return null;
+ } // filter out playlists without resolution information
+
+
+ var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
+ return rep.width && rep.height;
+ }); // sort variants by resolution
+
+ stableSort(haveResolution, function (left, right) {
+ return left.width - right.width;
+ }); // if we have the exact resolution as the player use it
+
+ var resolutionBestRepList = haveResolution.filter(function (rep) {
+ return rep.width === playerWidth && rep.height === playerHeight;
+ });
+  highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that has the exact resolution
+
+ var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
+ return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
+ })[0];
+ var resolutionPlusOneList;
+ var resolutionPlusOneSmallest;
+ var resolutionPlusOneRep; // find the smallest variant that is larger than the player
+ // if there is no match of exact resolution
+
+ if (!resolutionBestRep) {
+ resolutionPlusOneList = haveResolution.filter(function (rep) {
+ return rep.width > playerWidth || rep.height > playerHeight;
+    }); // find all the variants that have the same smallest resolution
+
+ resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
+ return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
+ }); // ensure that we also pick the highest bandwidth variant that
+ // is just-larger-than the video player
+
+ highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
+ resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
+ return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
+ })[0];
+ }
+
+ var leastPixelDiffRep; // If this selector proves to be better than others,
+ // resolutionPlusOneRep and resolutionBestRep and all
+ // the code involving them should be removed.
+
+ if (masterPlaylistController.experimentalLeastPixelDiffSelector) {
+ // find the variant that is closest to the player's pixel size
+ var leastPixelDiffList = haveResolution.map(function (rep) {
+ rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
+ return rep;
+ }); // get the highest bandwidth, closest resolution playlist
+
+ stableSort(leastPixelDiffList, function (left, right) {
+ // sort by highest bandwidth if pixelDiff is the same
+ if (left.pixelDiff === right.pixelDiff) {
+ return right.bandwidth - left.bandwidth;
+ }
+
+ return left.pixelDiff - right.pixelDiff;
+ });
+ leastPixelDiffRep = leastPixelDiffList[0];
+ } // fallback chain of variants
+
+
+ var chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
+
+ if (chosenRep && chosenRep.playlist) {
+ var _type = 'sortedPlaylistReps';
+
+ if (leastPixelDiffRep) {
+ _type = 'leastPixelDiffRep';
+ } else if (resolutionPlusOneRep) {
+ _type = 'resolutionPlusOneRep';
+ } else if (resolutionBestRep) {
+ _type = 'resolutionBestRep';
+ } else if (bandwidthBestRep) {
+ _type = 'bandwidthBestRep';
+ } else if (enabledPlaylistReps[0]) {
+ _type = 'enabledPlaylistReps';
+ }
+
+ logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
+ return chosenRep.playlist;
+ }
+
+ logFn('could not choose a playlist with options', options);
+ return null;
+};
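+// Illustrative sketch (not part of the bundled library): a hypothetical call to
+// simpleSelector. The bandwidth estimate and player dimensions are made up; in the
+// selectors below they are read from the VhsHandler instance and scaled by the device
+// pixel ratio when useDevicePixelRatio is enabled.
+var simpleSelectorExample_ = function (master, masterPlaylistController) {
+  var bandwidthEstimate = 5e6; // bits per second
+  var playerWidth = 1280;
+  var playerHeight = 720;
+  // returns the highest-bandwidth rendition that fits the estimate and, because the
+  // fifth argument is true, the player dimensions
+  return simpleSelector(master, bandwidthEstimate, playerWidth, playerHeight, true, masterPlaylistController);
+};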
+/**
+ * Chooses the appropriate media playlist based on the most recent
+ * bandwidth estimate and the player size.
+ *
+ * Expects to be called within the context of an instance of VhsHandler
+ *
+ * @return {Playlist} the highest bitrate playlist less than the
+ * currently detected bandwidth, accounting for some amount of
+ * bandwidth variance
+ */
+
+
+var lastBandwidthSelector = function lastBandwidthSelector() {
+ var pixelRatio = this.useDevicePixelRatio ? window$1.devicePixelRatio || 1 : 1;
+ return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
+};
+/**
+ * Chooses the appropriate media playlist based on an
+ * exponential-weighted moving average of the bandwidth after
+ * filtering for player size.
+ *
+ * Expects to be called within the context of an instance of VhsHandler
+ *
+ * @param {number} decay - a number between 0 and 1. Higher values of
+ * this parameter will cause previous bandwidth estimates to lose
+ * significance more quickly.
+ * @return {Function} a function which can be invoked to create a new
+ * playlist selector function.
+ * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
+ */
+
+
+var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
+ var average = -1;
+ var lastSystemBandwidth = -1;
+
+ if (decay < 0 || decay > 1) {
+ throw new Error('Moving average bandwidth decay must be between 0 and 1.');
+ }
+
+ return function () {
+ var pixelRatio = this.useDevicePixelRatio ? window$1.devicePixelRatio || 1 : 1;
+
+ if (average < 0) {
+ average = this.systemBandwidth;
+ lastSystemBandwidth = this.systemBandwidth;
+ } // stop the average value from decaying for every 250ms
+ // when the systemBandwidth is constant
+ // and
+ // stop average from setting to a very low value when the
+ // systemBandwidth becomes 0 in case of chunk cancellation
+
+
+ if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
+ average = decay * this.systemBandwidth + (1 - decay) * average;
+ lastSystemBandwidth = this.systemBandwidth;
+ }
+
+ return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
+ };
+};
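+// Illustrative sketch (not part of the bundled library): the exponential moving average
+// used above, applied to a hypothetical sequence of bandwidth samples with decay = 0.5.
+// Only samples that are positive and different from the previous one update the average,
+// mirroring the guards in the selector.
+var movingAverageExample_ = function () {
+  var decay = 0.5;
+  var average = -1;
+  var last = -1;
+  [4e6, 4e6, 0, 6e6].forEach(function (sample) {
+    if (average < 0) {
+      average = sample;
+      last = sample;
+    }
+    if (sample > 0 && sample !== last) {
+      average = decay * sample + (1 - decay) * average;
+      last = sample;
+    }
+  });
+  return average; // 5e6: the repeated 4e6 sample and the 0 sample do not decay the average
+};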
+/**
+ * Chooses the appropriate media playlist based on the potential to rebuffer
+ *
+ * @param {Object} settings
+ * Object of information required to use this selector
+ * @param {Object} settings.master
+ * Object representation of the master manifest
+ * @param {number} settings.currentTime
+ * The current time of the player
+ * @param {number} settings.bandwidth
+ * Current measured bandwidth
+ * @param {number} settings.duration
+ * Duration of the media
+ * @param {number} settings.segmentDuration
+ * Segment duration to be used in round trip time calculations
+ * @param {number} settings.timeUntilRebuffer
+ * Time left in seconds until the player has to rebuffer
+ * @param {number} settings.currentTimeline
+ * The current timeline segments are being loaded from
+ * @param {SyncController} settings.syncController
+ * SyncController for determining if we have a sync point for a given playlist
+ * @return {Object|null}
+ * {Object} return.playlist
+ * The highest bandwidth playlist with the least amount of rebuffering
+ * {Number} return.rebufferingImpact
+ * The amount of time in seconds switching to this playlist will rebuffer. A
+ * negative value means that switching will cause zero rebuffering.
+ */
+
+
+var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
+ var master = settings.master,
+ currentTime = settings.currentTime,
+ bandwidth = settings.bandwidth,
+ duration = settings.duration,
+ segmentDuration = settings.segmentDuration,
+ timeUntilRebuffer = settings.timeUntilRebuffer,
+ currentTimeline = settings.currentTimeline,
+ syncController = settings.syncController; // filter out any playlists that have been excluded due to
+ // incompatible configurations
+
+ var compatiblePlaylists = master.playlists.filter(function (playlist) {
+ return !Playlist.isIncompatible(playlist);
+ }); // filter out any playlists that have been disabled manually through the representations
+ // api or blacklisted temporarily due to playback errors.
+
+ var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
+
+ if (!enabledPlaylists.length) {
+ // if there are no enabled playlists, then they have all been blacklisted or disabled
+ // by the user through the representations api. In this case, ignore blacklisting and
+ // fallback to what the user wants by using playlists the user has not disabled.
+ enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
+ return !Playlist.isDisabled(playlist);
+ });
+ }
+
+ var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
+ var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
+ var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
+ // sync request first. This will double the request time
+
+ var numRequests = syncPoint ? 1 : 2;
+ var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
+ var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
+ return {
+ playlist: playlist,
+ rebufferingImpact: rebufferingImpact
+ };
+ });
+ var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
+ return estimate.rebufferingImpact <= 0;
+ }); // Sort by bandwidth DESC
+
+ stableSort(noRebufferingPlaylists, function (a, b) {
+ return comparePlaylistBandwidth(b.playlist, a.playlist);
+ });
+
+ if (noRebufferingPlaylists.length) {
+ return noRebufferingPlaylists[0];
+ }
+
+ stableSort(rebufferingEstimates, function (a, b) {
+ return a.rebufferingImpact - b.rebufferingImpact;
+ });
+ return rebufferingEstimates[0] || null;
+};
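+// Illustrative sketch (not part of the bundled library): the rebuffering-impact
+// arithmetic used above, with hypothetical numbers. A playlist without a sync point
+// needs two requests (a sync request plus the segment itself), doubling the estimated
+// request time before it is compared against the time left until rebuffering.
+var rebufferingImpactExample_ = function () {
+  var requestTimeEstimate = 2; // seconds to fetch one segment at the current bandwidth
+  var timeUntilRebuffer = 3; // seconds of buffer left
+  var withSyncPoint = requestTimeEstimate * 1 - timeUntilRebuffer; // -1: no rebuffering
+  var withoutSyncPoint = requestTimeEstimate * 2 - timeUntilRebuffer; // 1: one second of rebuffering
+  return { withSyncPoint: withSyncPoint, withoutSyncPoint: withoutSyncPoint };
+};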
+/**
+ * Chooses the appropriate media playlist, which in this case is the lowest bitrate
+ * one with video. If no renditions with video exist, return the lowest audio rendition.
+ *
+ * Expects to be called within the context of an instance of VhsHandler
+ *
+ * @return {Object|null}
+ * {Object} return.playlist
+ * The lowest bitrate playlist that contains a video codec. If no such rendition
+ * exists pick the lowest audio rendition.
+ */
+
+
+var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
+ var _this = this; // filter out any playlists that have been excluded due to
+ // incompatible configurations or playback errors
+
+
+ var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
+
+ stableSort(playlists, function (a, b) {
+ return comparePlaylistBandwidth(a, b);
+ }); // Parse and assume that playlists with no video codec have no video
+ // (this is not necessarily true, although it is generally true).
+ //
+ // If an entire manifest has no valid videos everything will get filtered
+ // out.
+
+ var playlistsWithVideo = playlists.filter(function (playlist) {
+ return !!codecsForPlaylist(_this.playlists.master, playlist).video;
+ });
+ return playlistsWithVideo[0] || null;
+};
+/**
+ * Combine all segments into a single Uint8Array
+ *
+ * @param {Object} segmentObj
+ * @return {Uint8Array} concatenated bytes
+ * @private
+ */
+
+
+var concatSegments = function concatSegments(segmentObj) {
+ var offset = 0;
+ var tempBuffer;
+
+ if (segmentObj.bytes) {
+ tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
+
+ segmentObj.segments.forEach(function (segment) {
+ tempBuffer.set(segment, offset);
+ offset += segment.byteLength;
+ });
+ }
+
+ return tempBuffer;
+};
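+// Illustrative sketch (not part of the bundled library): the segmentObj shape expected
+// by concatSegments, using hypothetical byte values. `bytes` is the total byte length
+// and `segments` holds the individual typed arrays to be copied in order.
+var concatSegmentsExample_ = function () {
+  var segmentObj = {
+    bytes: 4,
+    segments: [new Uint8Array([1, 2]), new Uint8Array([3, 4])]
+  };
+  // returns Uint8Array [1, 2, 3, 4]
+  return concatSegments(segmentObj);
+};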
+/**
+ * @file text-tracks.js
+ */
+
+/**
+ * Create captions text tracks on video.js if they do not exist
+ *
+ * @param {Object} inbandTextTracks a reference to current inbandTextTracks
+ * @param {Object} tech the video.js tech
+ * @param {Object} captionStream the caption stream to create
+ * @private
+ */
+
+
+var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
+ if (!inbandTextTracks[captionStream]) {
+ tech.trigger({
+ type: 'usage',
+ name: 'vhs-608'
+ });
+ tech.trigger({
+ type: 'usage',
+ name: 'hls-608'
+ });
+ var instreamId = captionStream; // we need to translate SERVICEn for 708 to how mux.js currently labels them
+
+ if (/^cc708_/.test(captionStream)) {
+ instreamId = 'SERVICE' + captionStream.split('_')[1];
+ }
+
+ var track = tech.textTracks().getTrackById(instreamId);
+
+ if (track) {
+      // Reuse an existing track with a CC# id because this was
+ // very likely created by videojs-contrib-hls from information
+ // in the m3u8 for us to use
+ inbandTextTracks[captionStream] = track;
+ } else {
+ // This section gets called when we have caption services that aren't specified in the manifest.
+ // Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
+ var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
+ var label = captionStream;
+ var language = captionStream;
+ var def = false;
+ var captionService = captionServices[instreamId];
+
+ if (captionService) {
+ label = captionService.label;
+ language = captionService.language;
+ def = captionService["default"];
+ } // Otherwise, create a track with the default `CC#` label and
+ // without a language
+
+
+ inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
+ kind: 'captions',
+ id: instreamId,
+ // TODO: investigate why this doesn't seem to turn the caption on by default
+ "default": def,
+ label: label,
+ language: language
+ }, false).track;
+ }
+ }
+};
+/**
+ * Add caption text track data to a source handler given an array of captions
+ *
+ * @param {Object}
+ * @param {Object} inbandTextTracks the inband text tracks
+ * @param {number} timestampOffset the timestamp offset of the source buffer
+ * @param {Array} captionArray an array of caption data
+ * @private
+ */
+
+
+var addCaptionData = function addCaptionData(_ref) {
+ var inbandTextTracks = _ref.inbandTextTracks,
+ captionArray = _ref.captionArray,
+ timestampOffset = _ref.timestampOffset;
+
+ if (!captionArray) {
+ return;
+ }
+
+ var Cue = window$1.WebKitDataCue || window$1.VTTCue;
+ captionArray.forEach(function (caption) {
+ var track = caption.stream;
+ inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
+ });
+};
+/**
+ * Define properties on a cue for backwards compatibility,
+ * but warn the user that the way that they are using it
+ * is deprecated and will be removed at a later date.
+ *
+ * @param {Cue} cue the cue to add the properties on
+ * @private
+ */
+
+
+var deprecateOldCue = function deprecateOldCue(cue) {
+ Object.defineProperties(cue.frame, {
+ id: {
+ get: function get() {
+ videojs.log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
+ return cue.value.key;
+ }
+ },
+ value: {
+ get: function get() {
+ videojs.log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
+ return cue.value.data;
+ }
+ },
+ privateData: {
+ get: function get() {
+ videojs.log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
+ return cue.value.data;
+ }
+ }
+ });
+};
+/**
+ * Add metadata text track data to a source handler given an array of metadata
+ *
+ * @param {Object}
+ * @param {Object} inbandTextTracks the inband text tracks
+ * @param {Array} metadataArray an array of meta data
+ * @param {number} timestampOffset the timestamp offset of the source buffer
+ * @param {number} videoDuration the duration of the video
+ * @private
+ */
+
+
+var addMetadata = function addMetadata(_ref2) {
+ var inbandTextTracks = _ref2.inbandTextTracks,
+ metadataArray = _ref2.metadataArray,
+ timestampOffset = _ref2.timestampOffset,
+ videoDuration = _ref2.videoDuration;
+
+ if (!metadataArray) {
+ return;
+ }
+
+ var Cue = window$1.WebKitDataCue || window$1.VTTCue;
+ var metadataTrack = inbandTextTracks.metadataTrack_;
+
+ if (!metadataTrack) {
+ return;
+ }
+
+ metadataArray.forEach(function (metadata) {
+ var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
+ // ignore this bit of metadata.
+    // This likely occurs when you have a non-timed ID3 tag like TIT2,
+ // which is the "Title/Songname/Content description" frame
+
+ if (typeof time !== 'number' || window$1.isNaN(time) || time < 0 || !(time < Infinity)) {
+ return;
+ }
+
+ metadata.frames.forEach(function (frame) {
+ var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
+ cue.frame = frame;
+ cue.value = frame;
+ deprecateOldCue(cue);
+ metadataTrack.addCue(cue);
+ });
+ });
+
+ if (!metadataTrack.cues || !metadataTrack.cues.length) {
+ return;
+  } // Update the metadata cues so that
+  // the endTime of each cue is the startTime of the next cue and
+  // the endTime of the last cue is the duration of the video
+
+
+ var cues = metadataTrack.cues;
+ var cuesArray = []; // Create a copy of the TextTrackCueList...
+ // ...disregarding cues with a falsey value
+
+ for (var i = 0; i < cues.length; i++) {
+ if (cues[i]) {
+ cuesArray.push(cues[i]);
+ }
+ } // Group cues by their startTime value
+
+
+ var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
+ var timeSlot = obj[cue.startTime] || [];
+ timeSlot.push(cue);
+ obj[cue.startTime] = timeSlot;
+ return obj;
+ }, {}); // Sort startTimes by ascending order
+
+ var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
+ return Number(a) - Number(b);
+ }); // Map each cue group's endTime to the next group's startTime
+
+ sortedStartTimes.forEach(function (startTime, idx) {
+ var cueGroup = cuesGroupedByStartTime[startTime];
+ var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime the next group's startTime
+
+ cueGroup.forEach(function (cue) {
+ cue.endTime = nextTime;
+ });
+ });
+};
+/**
+ * Create metadata text track on video.js if it does not exist
+ *
+ * @param {Object} inbandTextTracks a reference to current inbandTextTracks
+ * @param {string} dispatchType the inband metadata track dispatch type
+ * @param {Object} tech the video.js tech
+ * @private
+ */
+
+
+var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
+ if (inbandTextTracks.metadataTrack_) {
+ return;
+ }
+
+ inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
+ kind: 'metadata',
+ label: 'Timed Metadata'
+ }, false).track;
+ inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
+};
+/**
+ * Remove cues from a track on video.js.
+ *
+ * @param {Double} start start of where we should remove the cue
+ * @param {Double} end end of where the we should remove the cue
+ * @param {Object} track the text track to remove the cues from
+ * @private
+ */
+
+
+var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
+ var i;
+ var cue;
+
+ if (!track) {
+ return;
+ }
+
+ if (!track.cues) {
+ return;
+ }
+
+ i = track.cues.length;
+
+ while (i--) {
+ cue = track.cues[i]; // Remove any cue within the provided start and end time
+
+ if (cue.startTime >= start && cue.endTime <= end) {
+ track.removeCue(cue);
+ }
+ }
+};
+/**
+ * Remove duplicate cues from a track on video.js (a cue is considered a
+ * duplicate if it has the same time interval and text as another)
+ *
+ * @param {Object} track the text track to remove the duplicate cues from
+ * @private
+ */
+
+
+var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
+ var cues = track.cues;
+
+ if (!cues) {
+ return;
+ }
+
+ for (var i = 0; i < cues.length; i++) {
+ var duplicates = [];
+ var occurrences = 0;
+
+ for (var j = 0; j < cues.length; j++) {
+ if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
+ occurrences++;
+
+ if (occurrences > 1) {
+ duplicates.push(cues[j]);
+ }
+ }
+ }
+
+ if (duplicates.length) {
+ duplicates.forEach(function (dupe) {
+ return track.removeCue(dupe);
+ });
+ }
+ }
+};
+/**
+ * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
+ * front of current time.
+ *
+ * @param {Array} buffer
+ * The current buffer of gop information
+ * @param {number} currentTime
+ * The current time
+ * @param {Double} mapping
+ * Offset to map display time to stream presentation time
+ * @return {Array}
+ * List of gops considered safe to append over
+ */
+
+
+var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
+ if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
+ return [];
+ } // pts value for current time + 3 seconds to give a bit more wiggle room
+
+
+ var currentTimePts = Math.ceil((currentTime - mapping + 3) * ONE_SECOND_IN_TS);
+ var i;
+
+ for (i = 0; i < buffer.length; i++) {
+ if (buffer[i].pts > currentTimePts) {
+ break;
+ }
+ }
+
+ return buffer.slice(i);
+};
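+// Illustrative sketch (not part of the bundled library): converting the three-second
+// lookahead used above into a PTS value, with hypothetical times. ONE_SECOND_IN_TS is
+// the transmuxer's 90kHz MPEG-TS clock rate.
+var gopLookaheadPtsExample_ = function () {
+  var currentTime = 10; // seconds of display time
+  var mapping = 0; // display-to-stream offset
+  // gops with a pts greater than this value are considered safe to align with
+  return Math.ceil((currentTime - mapping + 3) * ONE_SECOND_IN_TS);
+};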
+/**
+ * Appends gop information (timing and byteLength) received by the transmuxer for the
+ * gops appended in the last call to appendBuffer
+ *
+ * @param {Array} buffer
+ * The current buffer of gop information
+ * @param {Array} gops
+ * List of new gop information
+ * @param {boolean} replace
+ * If true, replace the buffer with the new gop information. If false, append the
+ * new gop information to the buffer in the right location of time.
+ * @return {Array}
+ * Updated list of gop information
+ */
+
+
+var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
+ if (!gops.length) {
+ return buffer;
+ }
+
+ if (replace) {
+ // If we are in safe append mode, then completely overwrite the gop buffer
+    // with the most recently appended data. This will make sure that when appending
+ // future segments, we only try to align with gops that are both ahead of current
+ // time and in the last segment appended.
+ return gops.slice();
+ }
+
+ var start = gops[0].pts;
+ var i = 0;
+
+ for (i; i < buffer.length; i++) {
+ if (buffer[i].pts >= start) {
+ break;
+ }
+ }
+
+ return buffer.slice(0, i).concat(gops);
+};
+/**
+ * Removes gop information in buffer that overlaps with provided start and end
+ *
+ * @param {Array} buffer
+ * The current buffer of gop information
+ * @param {Double} start
+ * position to start the remove at
+ * @param {Double} end
+ * position to end the remove at
+ * @param {Double} mapping
+ * Offset to map display time to stream presentation time
+ */
+
+
+var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
+ var startPts = Math.ceil((start - mapping) * ONE_SECOND_IN_TS);
+ var endPts = Math.ceil((end - mapping) * ONE_SECOND_IN_TS);
+ var updatedBuffer = buffer.slice();
+ var i = buffer.length;
+
+ while (i--) {
+ if (buffer[i].pts <= endPts) {
+ break;
+ }
+ }
+
+ if (i === -1) {
+ // no removal because end of remove range is before start of buffer
+ return updatedBuffer;
+ }
+
+ var j = i + 1;
+
+ while (j--) {
+ if (buffer[j].pts <= startPts) {
+ break;
+ }
+ } // clamp remove range start to 0 index
+
+
+ j = Math.max(j, 0);
+ updatedBuffer.splice(j, i - j + 1);
+ return updatedBuffer;
+};
+
+var shallowEqual = function shallowEqual(a, b) {
+ // if both are undefined
+ // or one or the other is undefined
+ // they are not equal
+ if (!a && !b || !a && b || a && !b) {
+ return false;
+ } // they are the same object and thus, equal
+
+
+ if (a === b) {
+ return true;
+ } // sort keys so we can make sure they have
+ // all the same keys later.
+
+
+ var akeys = Object.keys(a).sort();
+ var bkeys = Object.keys(b).sort(); // different number of keys, not equal
+
+ if (akeys.length !== bkeys.length) {
+ return false;
+ }
+
+ for (var i = 0; i < akeys.length; i++) {
+ var key = akeys[i]; // different sorted keys, not equal
+
+ if (key !== bkeys[i]) {
+ return false;
+ } // different values, not equal
+
+
+ if (a[key] !== b[key]) {
+ return false;
+ }
+ }
+
+ return true;
+}; // https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
+
+
+var QUOTA_EXCEEDED_ERR = 22;
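+// Illustrative sketch (not part of the bundled library): the shallowEqual helper above
+// compares own keys and values one level deep, so nested objects only match by
+// reference. The objects below are hypothetical.
+var shallowEqualExample_ = function () {
+  var equal = shallowEqual({ hasAudio: true, hasVideo: true }, { hasVideo: true, hasAudio: true }); // true
+  var notEqual = shallowEqual({ codec: { id: 1 } }, { codec: { id: 1 } }); // false: different object references
+  return { equal: equal, notEqual: notEqual };
+};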
+/**
+ * The segment loader has no recourse except to fetch a segment in the
+ * current playlist and use the internal timestamps in that segment to
+ * generate a syncPoint. This function returns a good candidate index
+ * for that process.
+ *
+ * @param {Array} segments - the segments array from a playlist.
+ * @return {number} An index of a segment from the playlist to load
+ */
+
+var getSyncSegmentCandidate = function getSyncSegmentCandidate(currentTimeline, segments, targetTime) {
+ segments = segments || [];
+ var timelineSegments = [];
+ var time = 0;
+
+ for (var i = 0; i < segments.length; i++) {
+ var segment = segments[i];
+
+ if (currentTimeline === segment.timeline) {
+ timelineSegments.push(i);
+ time += segment.duration;
+
+ if (time > targetTime) {
+ return i;
+ }
+ }
+ }
+
+ if (timelineSegments.length === 0) {
+ return 0;
+ } // default to the last timeline segment
+
+
+ return timelineSegments[timelineSegments.length - 1];
+}; // In the event of a quota exceeded error, keep at least one second of back buffer. This
+// number was arbitrarily chosen and may be updated in the future, but seemed reasonable
+// as a start to prevent any potential issues with removing content too close to the
+// playhead.
+
+
+var MIN_BACK_BUFFER = 1; // in seconds
+
+// in ms
+var CHECK_BUFFER_DELAY = 500;
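+// Illustrative sketch (not part of the bundled library): the getSyncSegmentCandidate
+// helper above walks the segments that share the current timeline and returns the first
+// index whose accumulated duration passes the target time. The durations are hypothetical.
+var syncSegmentCandidateExample_ = function () {
+  var segments = [
+    { timeline: 0, duration: 6 },
+    { timeline: 0, duration: 6 },
+    { timeline: 0, duration: 6 }
+  ];
+  // 6 + 6 > 10, so index 1 is returned as the candidate to fetch for timestamps
+  return getSyncSegmentCandidate(0, segments, 10);
+};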
+
+var finite = function finite(num) {
+ return typeof num === 'number' && isFinite(num);
+}; // With most content hovering around 30fps, if a segment has a duration less than a half
+// frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
+// not accurately reflect the rest of the content.
+
+
+var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
+
+var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
+ // Although these checks should most likely cover non 'main' types, for now it narrows
+ // the scope of our checks.
+ if (loaderType !== 'main' || !startingMedia || !trackInfo) {
+ return null;
+ }
+
+ if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
+ return 'Neither audio nor video found in segment.';
+ }
+
+ if (startingMedia.hasVideo && !trackInfo.hasVideo) {
+ return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
+ }
+
+ if (!startingMedia.hasVideo && trackInfo.hasVideo) {
+ return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
+ }
+
+ return null;
+};
+/**
+ * Calculates a time value that is safe to remove from the back buffer without interrupting
+ * playback.
+ *
+ * @param {TimeRange} seekable
+ * The current seekable range
+ * @param {number} currentTime
+ * The current time of the player
+ * @param {number} targetDuration
+ * The target duration of the current playlist
+ * @return {number}
+ * Time that is safe to remove from the back buffer without interrupting playback
+ */
+
+
+var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
+ // 30 seconds before the playhead provides a safe default for trimming.
+ //
+ // Choosing a reasonable default is particularly important for high bitrate content and
+ // VOD videos/live streams with large windows, as the buffer may end up overfilled and
+ // throw an APPEND_BUFFER_ERR.
+ var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
+
+ if (seekable.length) {
+ // Some live playlists may have a shorter window of content than the full allowed back
+ // buffer. For these playlists, don't save content that's no longer within the window.
+ trimTime = Math.max(trimTime, seekable.start(0));
+ } // Don't remove within target duration of the current time to avoid the possibility of
+ // removing the GOP currently being played, as removing it can cause playback stalls.
+
+
+ var maxTrimTime = currentTime - targetDuration;
+ return Math.min(maxTrimTime, trimTime);
+};
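+// Illustrative sketch (not part of the bundled library): the trim-time arithmetic used
+// above, with hypothetical values and Config.BACK_BUFFER_LENGTH at the 30 second default
+// noted in the comment above.
+var safeTrimTimeExample_ = function () {
+  var currentTime = 70;
+  var seekableStart = 50; // live window start, later than currentTime - 30
+  var targetDuration = 6;
+  var trimTime = Math.max(currentTime - 30, seekableStart); // 50
+  // never trim closer than one target duration behind the playhead
+  return Math.min(currentTime - targetDuration, trimTime); // 50
+};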
+
+var segmentInfoString = function segmentInfoString(segmentInfo) {
+ var startOfSegment = segmentInfo.startOfSegment,
+ duration = segmentInfo.duration,
+ segment = segmentInfo.segment,
+ part = segmentInfo.part,
+ _segmentInfo$playlist = segmentInfo.playlist,
+ seq = _segmentInfo$playlist.mediaSequence,
+ id = _segmentInfo$playlist.id,
+ _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
+ segments = _segmentInfo$playlist2 === void 0 ? [] : _segmentInfo$playlist2,
+ index = segmentInfo.mediaIndex,
+ partIndex = segmentInfo.partIndex,
+ timeline = segmentInfo.timeline;
+ var segmentLen = segments.length - 1;
+ var selection = 'mediaIndex/partIndex increment';
+
+ if (segmentInfo.getMediaInfoForTime) {
+ selection = "getMediaInfoForTime (" + segmentInfo.getMediaInfoForTime + ")";
+ } else if (segmentInfo.isSyncRequest) {
+ selection = 'getSyncSegmentCandidate (isSyncRequest)';
+ }
+
+ if (segmentInfo.independent) {
+ selection += " with independent " + segmentInfo.independent;
+ }
+
+ var hasPartIndex = typeof partIndex === 'number';
+ var name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
+ var zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
+ preloadSegment: segment
+ }) - 1 : 0;
+ return name + " [" + (seq + index) + "/" + (seq + segmentLen) + "]" + (hasPartIndex ? " part [" + partIndex + "/" + zeroBasedPartCount + "]" : '') + (" segment start/end [" + segment.start + " => " + segment.end + "]") + (hasPartIndex ? " part start/end [" + part.start + " => " + part.end + "]" : '') + (" startOfSegment [" + startOfSegment + "]") + (" duration [" + duration + "]") + (" timeline [" + timeline + "]") + (" selected by [" + selection + "]") + (" playlist [" + id + "]");
+};
+
+var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
+ return mediaType + "TimingInfo";
+};
+/**
+ * Returns the timestamp offset to use for the segment.
+ *
+ * @param {number} segmentTimeline
+ * The timeline of the segment
+ * @param {number} currentTimeline
+ * The timeline currently being followed by the loader
+ * @param {number} startOfSegment
+ * The estimated segment start
+ * @param {TimeRange[]} buffered
+ * The loader's buffer
+ * @param {boolean} overrideCheck
+ * If true, no checks are made to see if the timestamp offset value should be set,
+ * but sets it directly to a value.
+ *
+ * @return {number|null}
+ * Either a number representing a new timestamp offset, or null if the segment is
+ * part of the same timeline
+ */
+
+
+var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
+ var segmentTimeline = _ref.segmentTimeline,
+ currentTimeline = _ref.currentTimeline,
+ startOfSegment = _ref.startOfSegment,
+ buffered = _ref.buffered,
+ overrideCheck = _ref.overrideCheck; // Check to see if we are crossing a discontinuity to see if we need to set the
+ // timestamp offset on the transmuxer and source buffer.
+ //
+ // Previously, we changed the timestampOffset if the start of this segment was less than
+ // the currently set timestampOffset, but this isn't desirable as it can produce bad
+ // behavior, especially around long running live streams.
+
+ if (!overrideCheck && segmentTimeline === currentTimeline) {
+ return null;
+ } // When changing renditions, it's possible to request a segment on an older timeline. For
+ // instance, given two renditions with the following:
+ //
+ // #EXTINF:10
+ // segment1
+ // #EXT-X-DISCONTINUITY
+ // #EXTINF:10
+ // segment2
+ // #EXTINF:10
+ // segment3
+ //
+ // And the current player state:
+ //
+ // current time: 8
+ // buffer: 0 => 20
+ //
+ // The next segment on the current rendition would be segment3, filling the buffer from
+ // 20s onwards. However, if a rendition switch happens after segment2 was requested,
+ // then the next segment to be requested will be segment1 from the new rendition in
+ // order to fill time 8 and onwards. Using the buffered end would result in repeated
+ // content (since it would position segment1 of the new rendition starting at 20s). This
+ // case can be identified when the new segment's timeline is a prior value. Instead of
+ // using the buffered end, the startOfSegment can be used, which, hopefully, will be
+ // more accurate to the actual start time of the segment.
+
+
+ if (segmentTimeline < currentTimeline) {
+ return startOfSegment;
+ } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
+ // value uses the end of the last segment if it is available. While this value
+ // should often be correct, it's better to rely on the buffered end, as the new
+ // content post discontinuity should line up with the buffered end as if it were
+ // time 0 for the new content.
+
+
+ return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
+};
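+// Illustrative sketch (not part of the bundled library): the three outcomes of
+// timestampOffsetForSegment, using hypothetical timelines and a minimal stub standing in
+// for the loader's buffered TimeRanges (only `length` and `end()` are consulted above).
+var timestampOffsetExample_ = function () {
+  var buffered = { length: 1, end: function () { return 20; } };
+  // same timeline, no override: null, keep the current offset
+  var same = timestampOffsetForSegment({ segmentTimeline: 1, currentTimeline: 1, startOfSegment: 20, buffered: buffered, overrideCheck: false });
+  // older timeline (rendition switch backwards): use the estimated segment start (8)
+  var older = timestampOffsetForSegment({ segmentTimeline: 0, currentTimeline: 1, startOfSegment: 8, buffered: buffered, overrideCheck: false });
+  // newer timeline: use the buffered end (20) so new content lines up with the buffer
+  var newer = timestampOffsetForSegment({ segmentTimeline: 2, currentTimeline: 1, startOfSegment: 20, buffered: buffered, overrideCheck: false });
+  return { same: same, older: older, newer: newer };
+};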
+/**
+ * Returns whether or not the loader should wait for a timeline change from the timeline
+ * change controller before processing the segment.
+ *
+ * Primary timing in VHS goes by video. This is different from most media players, as
+ * audio is more often used as the primary timing source. For the foreseeable future, VHS
+ * will continue to use video as the primary timing source, due to the current logic and
+ * expectations built around it.
+ *
+ * Since the timing follows video, in order to maintain sync, the video loader is
+ * responsible for setting both audio and video source buffer timestamp offsets.
+ *
+ * Setting different values for audio and video source buffers could lead to
+ * desyncing. The following examples demonstrate some of the situations where this
+ * distinction is important. Note that all of these cases involve demuxed content. When
+ * content is muxed, the audio and video are packaged together, therefore syncing
+ * separate media playlists is not an issue.
+ *
+ * CASE 1: Audio prepares to load a new timeline before video:
+ *
+ * Timeline: 0 1
+ * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
+ * Audio Loader: ^
+ * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
+ * Video Loader ^
+ *
+ * In the above example, the audio loader is preparing to load the 6th segment, the first
+ * after a discontinuity, while the video loader is still loading the 5th segment, before
+ * the discontinuity.
+ *
+ * If the audio loader goes ahead and loads and appends the 6th segment before the video
+ * loader crosses the discontinuity, then when appended, the 6th audio segment will use
+ * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
+ * the audio loader must provide the audioAppendStart value to trim the content in the
+ * transmuxer, and that value relies on the audio timestamp offset. Since the audio
+ * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
+ * segment until that value is provided.
+ *
+ * CASE 2: Video prepares to load a new timeline before audio:
+ *
+ * Timeline: 0 1
+ * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
+ * Audio Loader: ^
+ * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
+ * Video Loader ^
+ *
+ * In the above example, the video loader is preparing to load the 6th segment, the first
+ * after a discontinuity, while the audio loader is still loading the 5th segment, before
+ * the discontinuity.
+ *
+ * If the video loader goes ahead and loads and appends the 6th segment, then once the
+ * segment is loaded and processed, both the video and audio timestamp offsets will be
+ * set, since video is used as the primary timing source. This is to ensure content lines
+ * up appropriately, as any modifications to the video timing are reflected by audio when
+ * the video loader sets the audio and video timestamp offsets to the same value. However,
+ * setting the timestamp offset for audio before audio has had a chance to change
+ * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
+ * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
+ *
+ * CASE 3: When seeking, audio prepares to load a new timeline before video
+ *
+ * Timeline: 0 1
+ * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
+ * Audio Loader: ^
+ * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
+ * Video Loader ^
+ *
+ * In the above example, both audio and video loaders are loading segments from timeline
+ * 0, but imagine that the seek originated from timeline 1.
+ *
+ * When seeking to a new timeline, the timestamp offset will be set based on the expected
+ * segment start of the loaded video segment. In order to maintain sync, the audio loader
+ * must wait for the video loader to load its segment and update both the audio and video
+ * timestamp offsets before it may load and append its own segment. This is the case
+ * whether the seek results in a mismatched segment request (e.g., the audio loader
+ * chooses to load segment 3 and the video loader chooses to load segment 4) or the
+ * loaders choose to load the same segment index from each playlist, as the segments may
+ * not be aligned perfectly, even for matching segment indexes.
+ *
+ * @param {Object} timelinechangeController
+ * @param {number} currentTimeline
+ * The timeline currently being followed by the loader
+ * @param {number} segmentTimeline
+ * The timeline of the segment being loaded
+ * @param {('main'|'audio')} loaderType
+ * The loader type
+ * @param {boolean} audioDisabled
+ * Whether the audio is disabled for the loader. This should only be true when the
+ * loader may have muxed audio in its segment, but should not append it, e.g., for
+ * the main loader when an alternate audio playlist is active.
+ *
+ * @return {boolean}
+ * Whether the loader should wait for a timeline change from the timeline change
+ * controller before processing the segment
+ */
+
+
+var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
+ var timelineChangeController = _ref2.timelineChangeController,
+ currentTimeline = _ref2.currentTimeline,
+ segmentTimeline = _ref2.segmentTimeline,
+ loaderType = _ref2.loaderType,
+ audioDisabled = _ref2.audioDisabled;
+
+ if (currentTimeline === segmentTimeline) {
+ return false;
+ }
+
+ if (loaderType === 'audio') {
+ var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
+ type: 'main'
+ }); // Audio loader should wait if:
+ //
+ // * main hasn't had a timeline change yet (thus has not loaded its first segment)
+ // * main hasn't yet changed to the timeline audio is looking to load
+
+ return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
+ } // The main loader only needs to wait for timeline changes if there's demuxed audio.
+ // Otherwise, there's nothing to wait for, since audio would be muxed into the main
+ // loader's segments (or the content is audio/video only and handled by the main
+ // loader).
+
+
+ if (loaderType === 'main' && audioDisabled) {
+ var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
+ type: 'audio'
+ }); // Main loader should wait for the audio loader if audio is not pending a timeline
+ // change to the current timeline.
+ //
+ // Since the main loader is responsible for setting the timestamp offset for both
+ // audio and video, the main loader must wait for audio to be about to change to its
+ // timeline before setting the offset, otherwise, if audio is behind in loading,
+ // segments from the previous timeline would be adjusted by the new timestamp offset.
+ //
+ // This requirement means that video will not cross a timeline until the audio is
+ // about to cross to it, so that way audio and video will always cross the timeline
+ // together.
+ //
+ // In addition to normal timeline changes, these rules also apply to the start of a
+ // stream (going from a non-existent timeline, -1, to timeline 0). It's important
+ // that these rules apply to the first timeline change because if they did not, it's
+ // possible that the main loader will cross two timelines before the audio loader has
+ // crossed one. Logic may be implemented to handle the startup as a special case, but
+ // it's easier to simply treat all timeline changes the same.
+
+ if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
+ return false;
+ }
+
+ return true;
+ }
+
+ return false;
+};
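+
+// Illustrative trace (editorial, hypothetical values): for CASE 1 above, the audio
+// loader calls this with currentTimeline: 0 and segmentTimeline: 1. While main's last
+// timeline change is still, say, { type: 'main', from: -1, to: 0 }, the check
+// lastMainTimelineChange.to !== segmentTimeline holds, the function returns true, and
+// the audio load is queued. Once main crosses the discontinuity and records
+// { from: 0, to: 1 }, the same call returns false and the queued load proceeds. For
+// CASE 2 with demuxed audio (loaderType 'main', audioDisabled true), main keeps
+// returning true until audio registers a pending change to timeline 1, so both loaders
+// cross the discontinuity together.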
+
+var mediaDuration = function mediaDuration(timingInfos) {
+ var maxDuration = 0;
+ ['video', 'audio'].forEach(function (type) {
+ var typeTimingInfo = timingInfos[type + "TimingInfo"];
+
+ if (!typeTimingInfo) {
+ return;
+ }
+
+ var start = typeTimingInfo.start,
+ end = typeTimingInfo.end;
+ var duration;
+
+ if (typeof start === 'bigint' || typeof end === 'bigint') {
+ duration = window$1.BigInt(end) - window$1.BigInt(start);
+ } else if (typeof start === 'number' && typeof end === 'number') {
+ duration = end - start;
+ }
+
+ if (typeof duration !== 'undefined' && duration > maxDuration) {
+ maxDuration = duration;
+ }
+ }); // convert back to a number if it is lower than MAX_SAFE_INTEGER
+ // as we only need BigInt when we are above that.
+
+ if (typeof maxDuration === 'bigint' && maxDuration < Number.MAX_SAFE_INTEGER) {
+ maxDuration = Number(maxDuration);
+ }
+
+ return maxDuration;
+};
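+
+// Worked example (editorial, hypothetical values):
+//
+//   mediaDuration({
+//     videoTimingInfo: { start: 10, end: 16 },
+//     audioTimingInfo: { start: 10, end: 15.9 }
+//   }); // -> 6, the larger of the two spans
+//
+// A BigInt result is only kept when the difference is still at or above
+// Number.MAX_SAFE_INTEGER; otherwise it is converted back to a plain number above.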
+
+var segmentTooLong = function segmentTooLong(_ref3) {
+ var segmentDuration = _ref3.segmentDuration,
+ maxDuration = _ref3.maxDuration; // 0 duration segments are most likely due to metadata only segments or a lack of
+ // information.
+
+ if (!segmentDuration) {
+ return false;
+ } // For HLS:
+ //
+ // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
+ // The EXTINF duration of each Media Segment in the Playlist
+ // file, when rounded to the nearest integer, MUST be less than or equal
+ // to the target duration; longer segments can trigger playback stalls
+ // or other errors.
+ //
+ // For DASH, the mpd-parser uses the largest reported segment duration as the target
+ // duration. Although that reported duration is occasionally approximate (i.e., not
+ // exact), a strict check may report that a segment is too long more often in DASH.
+
+
+ return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
+};
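+
+// Worked example (editorial, hypothetical values, assuming TIME_FUDGE_FACTOR is a small
+// fraction of a second): with a target duration of 6, a segmentDuration of 6.4 rounds
+// to 6 and is not flagged, while segmentTooLong({ segmentDuration: 6.6, maxDuration: 6 })
+// rounds to 7 and returns true.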
+
+var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
+ // Right now we aren't following DASH's timing model exactly, so only perform
+ // this check for HLS content.
+ if (sourceType !== 'hls') {
+ return null;
+ }
+
+ var segmentDuration = mediaDuration({
+ audioTimingInfo: segmentInfo.audioTimingInfo,
+ videoTimingInfo: segmentInfo.videoTimingInfo
+ }); // Don't report if we lack information.
+ //
+ // If the segment has a duration of 0 it is either a lack of information or a
+ // metadata only segment and shouldn't be reported here.
+
+ if (!segmentDuration) {
+ return null;
+ }
+
+ var targetDuration = segmentInfo.playlist.targetDuration;
+ var isSegmentWayTooLong = segmentTooLong({
+ segmentDuration: segmentDuration,
+ maxDuration: targetDuration * 2
+ });
+ var isSegmentSlightlyTooLong = segmentTooLong({
+ segmentDuration: segmentDuration,
+ maxDuration: targetDuration
+ });
+ var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
+
+ if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
+ return {
+ severity: isSegmentWayTooLong ? 'warn' : 'info',
+ message: segmentTooLongMessage
+ };
+ }
+
+ return null;
+};
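+
+// Illustrative note (editorial, hypothetical values): with targetDuration 6, a computed
+// duration of 6.6 produces severity 'info' (slightly over the target), while anything
+// rounding above 2 * targetDuration, e.g. 13, produces 'warn'. Non-HLS sources and
+// zero/unknown durations return null and are never reported.
+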
+/**
+ * An object that manages segment loading and appending.
+ *
+ * @class SegmentLoader
+ * @param {Object} options required and optional options
+ * @extends videojs.EventTarget
+ */
+
+
+var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
+ _inheritsLoose(SegmentLoader, _videojs$EventTarget);
+
+ function SegmentLoader(settings, options) {
+ var _this;
+
+ _this = _videojs$EventTarget.call(this) || this; // check pre-conditions
+
+ if (!settings) {
+ throw new TypeError('Initialization settings are required');
+ }
+
+ if (typeof settings.currentTime !== 'function') {
+ throw new TypeError('No currentTime getter specified');
+ }
+
+ if (!settings.mediaSource) {
+ throw new TypeError('No MediaSource specified');
+ } // public properties
+
+
+ _this.bandwidth = settings.bandwidth;
+ _this.throughput = {
+ rate: 0,
+ count: 0
+ };
+ _this.roundTrip = NaN;
+
+ _this.resetStats_();
+
+ _this.mediaIndex = null;
+ _this.partIndex = null; // private settings
+
+ _this.hasPlayed_ = settings.hasPlayed;
+ _this.currentTime_ = settings.currentTime;
+ _this.seekable_ = settings.seekable;
+ _this.seeking_ = settings.seeking;
+ _this.duration_ = settings.duration;
+ _this.mediaSource_ = settings.mediaSource;
+ _this.vhs_ = settings.vhs;
+ _this.loaderType_ = settings.loaderType;
+ _this.currentMediaInfo_ = void 0;
+ _this.startingMediaInfo_ = void 0;
+ _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
+ _this.goalBufferLength_ = settings.goalBufferLength;
+ _this.sourceType_ = settings.sourceType;
+ _this.sourceUpdater_ = settings.sourceUpdater;
+ _this.inbandTextTracks_ = settings.inbandTextTracks;
+ _this.state_ = 'INIT';
+ _this.timelineChangeController_ = settings.timelineChangeController;
+ _this.shouldSaveSegmentTimingInfo_ = true;
+ _this.parse708captions_ = settings.parse708captions;
+ _this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;
+ _this.captionServices_ = settings.captionServices;
+ _this.experimentalExactManifestTimings = settings.experimentalExactManifestTimings; // private instance variables
+
+ _this.checkBufferTimeout_ = null;
+ _this.error_ = void 0;
+ _this.currentTimeline_ = -1;
+ _this.pendingSegment_ = null;
+ _this.xhrOptions_ = null;
+ _this.pendingSegments_ = [];
+ _this.audioDisabled_ = false;
+ _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller
+
+ _this.gopBuffer_ = [];
+ _this.timeMapping_ = 0;
+ _this.safeAppend_ = videojs.browser.IE_VERSION >= 11;
+ _this.appendInitSegment_ = {
+ audio: true,
+ video: true
+ };
+ _this.playlistOfLastInitSegment_ = {
+ audio: null,
+ video: null
+ };
+ _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
+ // information yet to start the loading process (e.g., if the audio loader wants to
+ // load a segment from the next timeline but the main loader hasn't yet crossed that
+ // timeline), then the load call will be added to the queue until it is ready to be
+ // processed.
+
+ _this.loadQueue_ = [];
+ _this.metadataQueue_ = {
+ id3: [],
+ caption: []
+ };
+ _this.waitingOnRemove_ = false;
+ _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback
+
+ _this.activeInitSegmentId_ = null;
+ _this.initSegments_ = {}; // HLSe playback
+
+ _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
+ _this.keyCache_ = {};
+ _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
+ // between a time in the display time and a segment index within
+ // a playlist
+
+ _this.syncController_ = settings.syncController;
+ _this.syncPoint_ = {
+ segmentIndex: 0,
+ time: 0
+ };
+ _this.transmuxer_ = _this.createTransmuxer_();
+
+ _this.triggerSyncInfoUpdate_ = function () {
+ return _this.trigger('syncinfoupdate');
+ };
+
+ _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);
+
+ _this.mediaSource_.addEventListener('sourceopen', function () {
+ if (!_this.isEndOfStream_()) {
+ _this.ended_ = false;
+ }
+ }); // ...for determining the fetch location
+
+
+ _this.fetchAtBuffer_ = false;
+ _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
+ Object.defineProperty(_assertThisInitialized(_this), 'state', {
+ get: function get() {
+ return this.state_;
+ },
+ set: function set(newState) {
+ if (newState !== this.state_) {
+ this.logger_(this.state_ + " -> " + newState);
+ this.state_ = newState;
+ this.trigger('statechange');
+ }
+ }
+ });
+
+ _this.sourceUpdater_.on('ready', function () {
+ if (_this.hasEnoughInfoToAppend_()) {
+ _this.processCallQueue_();
+ }
+ }); // Only the main loader needs to listen for pending timeline changes, as the main
+ // loader should wait for audio to be ready to change its timeline so that both main
+ // and audio timelines change together. For more details, see the
+ // shouldWaitForTimelineChange function.
+
+
+ if (_this.loaderType_ === 'main') {
+ _this.timelineChangeController_.on('pendingtimelinechange', function () {
+ if (_this.hasEnoughInfoToAppend_()) {
+ _this.processCallQueue_();
+ }
+ });
+ } // The main loader only listens on pending timeline changes, but the audio loader,
+ // since its loads follow main, needs to listen on timeline changes. For more details,
+ // see the shouldWaitForTimelineChange function.
+
+
+ if (_this.loaderType_ === 'audio') {
+ _this.timelineChangeController_.on('timelinechange', function () {
+ if (_this.hasEnoughInfoToLoad_()) {
+ _this.processLoadQueue_();
+ }
+
+ if (_this.hasEnoughInfoToAppend_()) {
+ _this.processCallQueue_();
+ }
+ });
+ }
+
+ return _this;
+ }
+
+ var _proto = SegmentLoader.prototype;
+
+ _proto.createTransmuxer_ = function createTransmuxer_() {
+ return segmentTransmuxer.createTransmuxer({
+ remux: false,
+ alignGopsAtEnd: this.safeAppend_,
+ keepOriginalTimestamps: true,
+ parse708captions: this.parse708captions_,
+ captionServices: this.captionServices_
+ });
+ }
+ /**
+ * reset all of our media stats
+ *
+ * @private
+ */
+ ;
+
+ _proto.resetStats_ = function resetStats_() {
+ this.mediaBytesTransferred = 0;
+ this.mediaRequests = 0;
+ this.mediaRequestsAborted = 0;
+ this.mediaRequestsTimedout = 0;
+ this.mediaRequestsErrored = 0;
+ this.mediaTransferDuration = 0;
+ this.mediaSecondsLoaded = 0;
+ this.mediaAppends = 0;
+ }
+ /**
+ * dispose of the SegmentLoader and reset to the default state
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ this.trigger('dispose');
+ this.state = 'DISPOSED';
+ this.pause();
+ this.abort_();
+
+ if (this.transmuxer_) {
+ this.transmuxer_.terminate();
+ }
+
+ this.resetStats_();
+
+ if (this.checkBufferTimeout_) {
+ window$1.clearTimeout(this.checkBufferTimeout_);
+ }
+
+ if (this.syncController_ && this.triggerSyncInfoUpdate_) {
+ this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
+ }
+
+ this.off();
+ };
+
+ _proto.setAudio = function setAudio(enable) {
+ this.audioDisabled_ = !enable;
+
+ if (enable) {
+ this.appendInitSegment_.audio = true;
+ } else {
+ // remove current track audio if it gets disabled
+ this.sourceUpdater_.removeAudio(0, this.duration_());
+ }
+ }
+ /**
+   * abort anything that is currently going on with the SegmentLoader
+ * and reset to a default state
+ */
+ ;
+
+ _proto.abort = function abort() {
+ if (this.state !== 'WAITING') {
+ if (this.pendingSegment_) {
+ this.pendingSegment_ = null;
+ }
+
+ return;
+ }
+
+ this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
+ // since we are no longer "waiting" on any requests. XHR callback is not always run
+ // when the request is aborted. This will prevent the loader from being stuck in the
+ // WAITING state indefinitely.
+
+ this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
+ // next segment
+
+ if (!this.paused()) {
+ this.monitorBuffer_();
+ }
+ }
+ /**
+   * abort all pending xhr requests and null any pending segments
+ *
+ * @private
+ */
+ ;
+
+ _proto.abort_ = function abort_() {
+ if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
+ this.pendingSegment_.abortRequests();
+ } // clear out the segment being processed
+
+
+ this.pendingSegment_ = null;
+ this.callQueue_ = [];
+ this.loadQueue_ = [];
+ this.metadataQueue_.id3 = [];
+ this.metadataQueue_.caption = [];
+ this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
+ this.waitingOnRemove_ = false;
+ window$1.clearTimeout(this.quotaExceededErrorRetryTimeout_);
+ this.quotaExceededErrorRetryTimeout_ = null;
+ };
+
+ _proto.checkForAbort_ = function checkForAbort_(requestId) {
+ // If the state is APPENDING, then aborts will not modify the state, meaning the first
+ // callback that happens should reset the state to READY so that loading can continue.
+ if (this.state === 'APPENDING' && !this.pendingSegment_) {
+ this.state = 'READY';
+ return true;
+ }
+
+ if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
+ return true;
+ }
+
+ return false;
+ }
+ /**
+   * set an error on the segment loader and null out any pending segments
+ *
+ * @param {Error} error the error to set on the SegmentLoader
+ * @return {Error} the error that was set or that is currently set
+ */
+ ;
+
+ _proto.error = function error(_error) {
+ if (typeof _error !== 'undefined') {
+ this.logger_('error occurred:', _error);
+ this.error_ = _error;
+ }
+
+ this.pendingSegment_ = null;
+ return this.error_;
+ };
+
+ _proto.endOfStream = function endOfStream() {
+ this.ended_ = true;
+
+ if (this.transmuxer_) {
+ // need to clear out any cached data to prepare for the new segment
+ segmentTransmuxer.reset(this.transmuxer_);
+ }
+
+ this.gopBuffer_.length = 0;
+ this.pause();
+ this.trigger('ended');
+ }
+ /**
+ * Indicates which time ranges are buffered
+ *
+ * @return {TimeRange}
+ * TimeRange object representing the current buffered ranges
+ */
+ ;
+
+ _proto.buffered_ = function buffered_() {
+ var trackInfo = this.getMediaInfo_();
+
+ if (!this.sourceUpdater_ || !trackInfo) {
+ return videojs.createTimeRanges();
+ }
+
+ if (this.loaderType_ === 'main') {
+ var hasAudio = trackInfo.hasAudio,
+ hasVideo = trackInfo.hasVideo,
+ isMuxed = trackInfo.isMuxed;
+
+ if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
+ return this.sourceUpdater_.buffered();
+ }
+
+ if (hasVideo) {
+ return this.sourceUpdater_.videoBuffered();
+ }
+ } // One case that can be ignored for now is audio only with alt audio,
+ // as we don't yet have proper support for that.
+
+
+ return this.sourceUpdater_.audioBuffered();
+ }
+ /**
+ * Gets and sets init segment for the provided map
+ *
+ * @param {Object} map
+ * The map object representing the init segment to get or set
+ * @param {boolean=} set
+ * If true, the init segment for the provided map should be saved
+ * @return {Object}
+ * map object for desired init segment
+ */
+ ;
+
+ _proto.initSegmentForMap = function initSegmentForMap(map, set) {
+ if (set === void 0) {
+ set = false;
+ }
+
+ if (!map) {
+ return null;
+ }
+
+ var id = initSegmentId(map);
+ var storedMap = this.initSegments_[id];
+
+ if (set && !storedMap && map.bytes) {
+ this.initSegments_[id] = storedMap = {
+ resolvedUri: map.resolvedUri,
+ byterange: map.byterange,
+ bytes: map.bytes,
+ tracks: map.tracks,
+ timescales: map.timescales
+ };
+ }
+
+ return storedMap || map;
+ }
+ /**
+ * Gets and sets key for the provided key
+ *
+ * @param {Object} key
+ * The key object representing the key to get or set
+ * @param {boolean=} set
+ * If true, the key for the provided key should be saved
+ * @return {Object}
+ * Key object for desired key
+ */
+ ;
+
+ _proto.segmentKey = function segmentKey(key, set) {
+ if (set === void 0) {
+ set = false;
+ }
+
+ if (!key) {
+ return null;
+ }
+
+ var id = segmentKeyId(key);
+ var storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
+ // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
+
+ if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
+ this.keyCache_[id] = storedKey = {
+ resolvedUri: key.resolvedUri,
+ bytes: key.bytes
+ };
+ }
+
+ var result = {
+ resolvedUri: (storedKey || key).resolvedUri
+ };
+
+ if (storedKey) {
+ result.bytes = storedKey.bytes;
+ }
+
+ return result;
+ }
+ /**
+ * Returns true if all configuration required for loading is present, otherwise false.
+ *
+   * @return {boolean} True if all the configuration is ready for loading
+ * @private
+ */
+ ;
+
+ _proto.couldBeginLoading_ = function couldBeginLoading_() {
+ return this.playlist_ && !this.paused();
+ }
+ /**
+ * load a playlist and start to fill the buffer
+ */
+ ;
+
+ _proto.load = function load() {
+ // un-pause
+ this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
+ // specified
+
+ if (!this.playlist_) {
+ return;
+ } // if all the configuration is ready, initialize and begin loading
+
+
+ if (this.state === 'INIT' && this.couldBeginLoading_()) {
+ return this.init_();
+ } // if we're in the middle of processing a segment already, don't
+ // kick off an additional segment request
+
+
+ if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
+ return;
+ }
+
+ this.state = 'READY';
+ }
+ /**
+ * Once all the starting parameters have been specified, begin
+ * operation. This method should only be invoked from the INIT
+ * state.
+ *
+ * @private
+ */
+ ;
+
+ _proto.init_ = function init_() {
+ this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
+ // audio data from the muxed content should be removed
+
+ this.resetEverything();
+ return this.monitorBuffer_();
+ }
+ /**
+ * set a playlist on the segment loader
+ *
+ * @param {PlaylistLoader} media the playlist to set on the segment loader
+ */
+ ;
+
+ _proto.playlist = function playlist(newPlaylist, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ if (!newPlaylist) {
+ return;
+ }
+
+ var oldPlaylist = this.playlist_;
+ var segmentInfo = this.pendingSegment_;
+ this.playlist_ = newPlaylist;
+ this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
+ // is always our zero-time so force a sync update each time the playlist
+ // is refreshed from the server
+ //
+ // Use the INIT state to determine if playback has started, as the playlist sync info
+ // should be fixed once requests begin (as sync points are generated based on sync
+ // info), but not before then.
+
+ if (this.state === 'INIT') {
+ newPlaylist.syncInfo = {
+ mediaSequence: newPlaylist.mediaSequence,
+ time: 0
+ }; // Setting the date time mapping means mapping the program date time (if available)
+ // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
+ // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
+ // be updated as the playlist is refreshed before the loader starts loading, the
+ // program date time mapping needs to be updated as well.
+ //
+ // This mapping is only done for the main loader because a program date time should
+ // map equivalently between playlists.
+
+ if (this.loaderType_ === 'main') {
+ this.syncController_.setDateTimeMappingForStart(newPlaylist);
+ }
+ }
+
+ var oldId = null;
+
+ if (oldPlaylist) {
+ if (oldPlaylist.id) {
+ oldId = oldPlaylist.id;
+ } else if (oldPlaylist.uri) {
+ oldId = oldPlaylist.uri;
+ }
+ }
+
+ this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
+ // in LIVE, we always want to update with new playlists (including refreshes)
+
+ this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
+ // buffering now
+
+ if (this.state === 'INIT' && this.couldBeginLoading_()) {
+ return this.init_();
+ }
+
+ if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
+ if (this.mediaIndex !== null) {
+ // we must reset/resync the segment loader when we switch renditions and
+ // the segment loader is already synced to the previous rendition
+ // on playlist changes we want it to be possible to fetch
+ // at the buffer for vod but not for live. So we use resetLoader
+ // for live and resyncLoader for vod. We want this because
+ // if a playlist uses independent and non-independent segments/parts the
+ // buffer may not accurately reflect the next segment that we should try
+ // downloading.
+ if (!newPlaylist.endList) {
+ this.resetLoader();
+ } else {
+ this.resyncLoader();
+ }
+ }
+
+ this.currentMediaInfo_ = void 0;
+ this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined
+
+ return;
+ } // we reloaded the same playlist so we are in a live scenario
+ // and we will likely need to adjust the mediaIndex
+
+
+ var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
+ this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
+ // this is important because we can abort a request and this value must be
+ // equal to the last appended mediaIndex
+
+ if (this.mediaIndex !== null) {
+ this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
+ // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
+ // new playlist was incremented by 1.
+
+ if (this.mediaIndex < 0) {
+ this.mediaIndex = null;
+ this.partIndex = null;
+ } else {
+ var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
+ // unless parts fell off of the playlist for this segment.
+ // In that case we need to reset partIndex and resync
+
+ if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
+ var mediaIndex = this.mediaIndex;
+ this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
+ this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
+          // as the part was dropped from our current playlist's segment.
+ // The mediaIndex will still be valid so keep that around.
+
+ this.mediaIndex = mediaIndex;
+ }
+ }
+ } // update the mediaIndex on the SegmentInfo object
+ // this is important because we will update this.mediaIndex with this value
+ // in `handleAppendsDone_` after the segment has been successfully appended
+
+
+ if (segmentInfo) {
+ segmentInfo.mediaIndex -= mediaSequenceDiff;
+
+ if (segmentInfo.mediaIndex < 0) {
+ segmentInfo.mediaIndex = null;
+ segmentInfo.partIndex = null;
+ } else {
+ // we need to update the referenced segment so that timing information is
+ // saved for the new playlist's segment, however, if the segment fell off the
+ // playlist, we can leave the old reference and just lose the timing info
+ if (segmentInfo.mediaIndex >= 0) {
+ segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
+ }
+
+ if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
+ segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
+ }
+ }
+ }
+
+ this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
+ }
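+
+  // Worked example (editorial, hypothetical values): if a live refresh moves
+  // mediaSequence from 100 to 102, mediaSequenceDiff is 2 and a loader that was at
+  // mediaIndex 5 shifts to 3, still pointing at the same segment. If the shift would
+  // make the index negative (the segment fell out of the live window), mediaIndex and
+  // partIndex are reset to null so the loader falls back to its sync point.
+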
+ /**
+ * Prevent the loader from fetching additional segments. If there
+ * is a segment request outstanding, it will finish processing
+ * before the loader halts. A segment loader can be unpaused by
+ * calling load().
+ */
+ ;
+
+ _proto.pause = function pause() {
+ if (this.checkBufferTimeout_) {
+ window$1.clearTimeout(this.checkBufferTimeout_);
+ this.checkBufferTimeout_ = null;
+ }
+ }
+ /**
+ * Returns whether the segment loader is fetching additional
+ * segments when given the opportunity. This property can be
+ * modified through calls to pause() and load().
+ */
+ ;
+
+ _proto.paused = function paused() {
+ return this.checkBufferTimeout_ === null;
+ }
+ /**
+ * Delete all the buffered data and reset the SegmentLoader
+ *
+ * @param {Function} [done] an optional callback to be executed when the remove
+ * operation is complete
+ */
+ ;
+
+ _proto.resetEverything = function resetEverything(done) {
+ this.ended_ = false;
+ this.appendInitSegment_ = {
+ audio: true,
+ video: true
+ };
+ this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
+ // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
+ // we then clamp the value to duration if necessary.
+
+ this.remove(0, Infinity, done); // clears fmp4 captions
+
+ if (this.transmuxer_) {
+ this.transmuxer_.postMessage({
+ action: 'clearAllMp4Captions'
+ }); // reset the cache in the transmuxer
+
+ this.transmuxer_.postMessage({
+ action: 'reset'
+ });
+ }
+ }
+ /**
+ * Force the SegmentLoader to resync and start loading around the currentTime instead
+ * of starting at the end of the buffer
+ *
+ * Useful for fast quality changes
+ */
+ ;
+
+ _proto.resetLoader = function resetLoader() {
+ this.fetchAtBuffer_ = false;
+ this.resyncLoader();
+ }
+ /**
+ * Force the SegmentLoader to restart synchronization and make a conservative guess
+ * before returning to the simple walk-forward method
+ */
+ ;
+
+ _proto.resyncLoader = function resyncLoader() {
+ if (this.transmuxer_) {
+ // need to clear out any cached data to prepare for the new segment
+ segmentTransmuxer.reset(this.transmuxer_);
+ }
+
+ this.mediaIndex = null;
+ this.partIndex = null;
+ this.syncPoint_ = null;
+ this.isPendingTimestampOffset_ = false;
+ this.callQueue_ = [];
+ this.loadQueue_ = [];
+ this.metadataQueue_.id3 = [];
+ this.metadataQueue_.caption = [];
+ this.abort();
+
+ if (this.transmuxer_) {
+ this.transmuxer_.postMessage({
+ action: 'clearParsedMp4Captions'
+ });
+ }
+ }
+ /**
+ * Remove any data in the source buffer between start and end times
+ *
+ * @param {number} start - the start time of the region to remove from the buffer
+ * @param {number} end - the end time of the region to remove from the buffer
+ * @param {Function} [done] - an optional callback to be executed when the remove
+   * operation is complete
+   * @param {boolean} force - force all remove operations to happen
+ */
+ ;
+
+ _proto.remove = function remove(start, end, done, force) {
+ if (done === void 0) {
+ done = function done() {};
+ }
+
+ if (force === void 0) {
+ force = false;
+ } // clamp end to duration if we need to remove everything.
+ // This is due to a browser bug that causes issues if we remove to Infinity.
+ // videojs/videojs-contrib-hls#1225
+
+
+ if (end === Infinity) {
+ end = this.duration_();
+ } // skip removes that would throw an error
+ // commonly happens during a rendition switch at the start of a video
+ // from start 0 to end 0
+
+
+ if (end <= start) {
+      this.logger_("skipping remove because end " + end + " is <= start " + start);
+ return;
+ }
+
+ if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
+ this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
+
+ return;
+ } // set it to one to complete this function's removes
+
+
+ var removesRemaining = 1;
+
+ var removeFinished = function removeFinished() {
+ removesRemaining--;
+
+ if (removesRemaining === 0) {
+ done();
+ }
+ };
+
+ if (force || !this.audioDisabled_) {
+ removesRemaining++;
+ this.sourceUpdater_.removeAudio(start, end, removeFinished);
+ } // While it would be better to only remove video if the main loader has video, this
+ // should be safe with audio only as removeVideo will call back even if there's no
+ // video buffer.
+ //
+ // In theory we can check to see if there's video before calling the remove, but in
+ // the event that we're switching between renditions and from video to audio only
+ // (when we add support for that), we may need to clear the video contents despite
+ // what the new media will contain.
+
+
+ if (force || this.loaderType_ === 'main') {
+ this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
+ removesRemaining++;
+ this.sourceUpdater_.removeVideo(start, end, removeFinished);
+ } // remove any captions and ID3 tags
+
+
+ for (var track in this.inbandTextTracks_) {
+ removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
+ }
+
+ removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
+
+ removeFinished();
+ }
+ /**
+ * (re-)schedule monitorBufferTick_ to run as soon as possible
+ *
+ * @private
+ */
+ ;
+
+ _proto.monitorBuffer_ = function monitorBuffer_() {
+ if (this.checkBufferTimeout_) {
+ window$1.clearTimeout(this.checkBufferTimeout_);
+ }
+
+ this.checkBufferTimeout_ = window$1.setTimeout(this.monitorBufferTick_.bind(this), 1);
+ }
+ /**
+ * As long as the SegmentLoader is in the READY state, periodically
+ * invoke fillBuffer_().
+ *
+ * @private
+ */
+ ;
+
+ _proto.monitorBufferTick_ = function monitorBufferTick_() {
+ if (this.state === 'READY') {
+ this.fillBuffer_();
+ }
+
+ if (this.checkBufferTimeout_) {
+ window$1.clearTimeout(this.checkBufferTimeout_);
+ }
+
+ this.checkBufferTimeout_ = window$1.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
+ }
+ /**
+   * fill the buffer with segments unless the sourceBuffers are
+ * currently updating
+ *
+ * Note: this function should only ever be called by monitorBuffer_
+ * and never directly
+ *
+ * @private
+ */
+ ;
+
+ _proto.fillBuffer_ = function fillBuffer_() {
+ // TODO since the source buffer maintains a queue, and we shouldn't call this function
+ // except when we're ready for the next segment, this check can most likely be removed
+ if (this.sourceUpdater_.updating()) {
+ return;
+ } // see if we need to begin loading immediately
+
+
+ var segmentInfo = this.chooseNextRequest_();
+
+ if (!segmentInfo) {
+ return;
+ }
+
+ if (typeof segmentInfo.timestampOffset === 'number') {
+ this.isPendingTimestampOffset_ = false;
+ this.timelineChangeController_.pendingTimelineChange({
+ type: this.loaderType_,
+ from: this.currentTimeline_,
+ to: segmentInfo.timeline
+ });
+ }
+
+ this.loadSegment_(segmentInfo);
+ }
+ /**
+ * Determines if we should call endOfStream on the media source based
+   * on the state of the buffer or if the appended segment was the final
+ * segment in the playlist.
+ *
+ * @param {number} [mediaIndex] the media index of segment we last appended
+ * @param {Object} [playlist] a media playlist object
+ * @return {boolean} do we need to call endOfStream on the MediaSource
+ */
+ ;
+
+ _proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
+ if (mediaIndex === void 0) {
+ mediaIndex = this.mediaIndex;
+ }
+
+ if (playlist === void 0) {
+ playlist = this.playlist_;
+ }
+
+ if (partIndex === void 0) {
+ partIndex = this.partIndex;
+ }
+
+ if (!playlist || !this.mediaSource_) {
+ return false;
+ }
+
+ var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero based but length is 1 based
+
+ var appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.
+
+ var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
+ // so that MediaSources can trigger the `ended` event when it runs out of
+    // buffered data instead of waiting for more data to be appended
+
+ return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
+ }
+ /**
+ * Determines what request should be made given current segment loader state.
+ *
+ * @return {Object} a request object that describes the segment/part to load
+ */
+ ;
+
+ _proto.chooseNextRequest_ = function chooseNextRequest_() {
+ var buffered = this.buffered_();
+ var bufferedEnd = lastBufferedEnd(buffered) || 0;
+ var bufferedTime = timeAheadOf(buffered, this.currentTime_());
+ var preloaded = !this.hasPlayed_() && bufferedTime >= 1;
+ var haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
+ var segments = this.playlist_.segments; // return no segment if:
+ // 1. we don't have segments
+ // 2. The video has not yet played and we already downloaded a segment
+ // 3. we already have enough buffered time
+
+ if (!segments.length || preloaded || haveEnoughBuffer) {
+ return null;
+ }
+
+ this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
+ var next = {
+ partIndex: null,
+ mediaIndex: null,
+ startOfSegment: null,
+ playlist: this.playlist_,
+ isSyncRequest: Boolean(!this.syncPoint_)
+ };
+
+ if (next.isSyncRequest) {
+ next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
+ } else if (this.mediaIndex !== null) {
+ var segment = segments[this.mediaIndex];
+ var partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
+ next.startOfSegment = segment.end ? segment.end : bufferedEnd;
+
+ if (segment.parts && segment.parts[partIndex + 1]) {
+ next.mediaIndex = this.mediaIndex;
+ next.partIndex = partIndex + 1;
+ } else {
+ next.mediaIndex = this.mediaIndex + 1;
+ }
+ } else {
+ // Find the segment containing the end of the buffer or current time.
+ var _Playlist$getMediaInf = Playlist.getMediaInfoForTime({
+ experimentalExactManifestTimings: this.experimentalExactManifestTimings,
+ playlist: this.playlist_,
+ currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),
+ startingPartIndex: this.syncPoint_.partIndex,
+ startingSegmentIndex: this.syncPoint_.segmentIndex,
+ startTime: this.syncPoint_.time
+ }),
+ segmentIndex = _Playlist$getMediaInf.segmentIndex,
+ startTime = _Playlist$getMediaInf.startTime,
+ _partIndex = _Playlist$getMediaInf.partIndex;
+
+ next.getMediaInfoForTime = this.fetchAtBuffer_ ? "bufferedEnd " + bufferedEnd : "currentTime " + this.currentTime_();
+ next.mediaIndex = segmentIndex;
+ next.startOfSegment = startTime;
+ next.partIndex = _partIndex;
+ }
+
+ var nextSegment = segments[next.mediaIndex];
+ var nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
+ // the next partIndex is invalid do not choose a next segment.
+
+ if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
+ return null;
+ } // if the next segment has parts, and we don't have a partIndex.
+ // Set partIndex to 0
+
+
+ if (typeof next.partIndex !== 'number' && nextSegment.parts) {
+ next.partIndex = 0;
+ nextPart = nextSegment.parts[0];
+ } // if we have no buffered data then we need to make sure
+ // that the next part we append is "independent" if possible.
+ // So we check if the previous part is independent, and request
+ // it if it is.
+
+
+ if (!bufferedTime && nextPart && !nextPart.independent) {
+ if (next.partIndex === 0) {
+ var lastSegment = segments[next.mediaIndex - 1];
+ var lastSegmentLastPart = lastSegment.parts && lastSegment.parts.length && lastSegment.parts[lastSegment.parts.length - 1];
+
+ if (lastSegmentLastPart && lastSegmentLastPart.independent) {
+ next.mediaIndex -= 1;
+ next.partIndex = lastSegment.parts.length - 1;
+ next.independent = 'previous segment';
+ }
+ } else if (nextSegment.parts[next.partIndex - 1].independent) {
+ next.partIndex -= 1;
+ next.independent = 'previous part';
+ }
+ }
+
+ var ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
+ // 1. this is the last segment in the playlist
+ // 2. end of stream has been called on the media source already
+ // 3. the player is not seeking
+
+ if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
+ return null;
+ }
+
+ return this.generateSegmentInfo_(next);
+ };
+
+ _proto.generateSegmentInfo_ = function generateSegmentInfo_(options) {
+ var independent = options.independent,
+ playlist = options.playlist,
+ mediaIndex = options.mediaIndex,
+ startOfSegment = options.startOfSegment,
+ isSyncRequest = options.isSyncRequest,
+ partIndex = options.partIndex,
+ forceTimestampOffset = options.forceTimestampOffset,
+ getMediaInfoForTime = options.getMediaInfoForTime;
+ var segment = playlist.segments[mediaIndex];
+ var part = typeof partIndex === 'number' && segment.parts[partIndex];
+ var segmentInfo = {
+ requestId: 'segment-loader-' + Math.random(),
+ // resolve the segment URL relative to the playlist
+ uri: part && part.resolvedUri || segment.resolvedUri,
+ // the segment's mediaIndex at the time it was requested
+ mediaIndex: mediaIndex,
+ partIndex: part ? partIndex : null,
+ // whether or not to update the SegmentLoader's state with this
+ // segment's mediaIndex
+ isSyncRequest: isSyncRequest,
+ startOfSegment: startOfSegment,
+ // the segment's playlist
+ playlist: playlist,
+ // unencrypted bytes of the segment
+ bytes: null,
+ // when a key is defined for this segment, the encrypted bytes
+ encryptedBytes: null,
+ // The target timestampOffset for this segment when we append it
+ // to the source buffer
+ timestampOffset: null,
+ // The timeline that the segment is in
+ timeline: segment.timeline,
+ // The expected duration of the segment in seconds
+ duration: part && part.duration || segment.duration,
+ // retain the segment in case the playlist updates while doing an async process
+ segment: segment,
+ part: part,
+ byteLength: 0,
+ transmuxer: this.transmuxer_,
+ // type of getMediaInfoForTime that was used to get this segment
+ getMediaInfoForTime: getMediaInfoForTime,
+ independent: independent
+ };
+ var overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
+ segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
+ segmentTimeline: segment.timeline,
+ currentTimeline: this.currentTimeline_,
+ startOfSegment: startOfSegment,
+ buffered: this.buffered_(),
+ overrideCheck: overrideCheck
+ });
+ var audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());
+
+ if (typeof audioBufferedEnd === 'number') {
+ // since the transmuxer is using the actual timing values, but the buffer is
+ // adjusted by the timestamp offset, we must adjust the value here
+ segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
+ }
+
+ if (this.sourceUpdater_.videoBuffered().length) {
+ segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
+ // adjusted by the timestmap offset, we must adjust the value here
+ this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
+ }
+
+ return segmentInfo;
+ } // get the timestampoffset for a segment,
+ // added so that vtt segment loader can override and prevent
+ // adding timestamp offsets.
+ ;
+
+ _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_(options) {
+ return timestampOffsetForSegment(options);
+ }
+ /**
+ * Determines if the network has enough bandwidth to complete the current segment
+ * request in a timely manner. If not, the request will be aborted early and bandwidth
+ * updated to trigger a playlist switch.
+ *
+ * @param {Object} stats
+ * Object containing stats about the request timing and size
+ * @private
+ */
+ ;
+
+ _proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
+ if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
+ // TODO: Replace using timeout with a boolean indicating whether this playlist is
+ // the lowestEnabledRendition.
+ !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
+ !this.playlist_.attributes.BANDWIDTH) {
+ return;
+ } // Wait at least 1 second since the first byte of data has been received before
+ // using the calculated bandwidth from the progress event to allow the bitrate
+ // to stabilize
+
+
+ if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
+ return;
+ }
+
+ var currentTime = this.currentTime_();
+ var measuredBandwidth = stats.bandwidth;
+ var segmentDuration = this.pendingSegment_.duration;
+ var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
+ // if we are only left with less than 1 second when the request completes.
+ // A negative timeUntilRebuffering indicates we are already rebuffering
+
+ var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
+ // is larger than the estimated time until the player runs out of forward buffer
+
+ if (requestTimeRemaining <= timeUntilRebuffer$1) {
+ return;
+ }
+
+ var switchCandidate = minRebufferMaxBandwidthSelector({
+ master: this.vhs_.playlists.master,
+ currentTime: currentTime,
+ bandwidth: measuredBandwidth,
+ duration: this.duration_(),
+ segmentDuration: segmentDuration,
+ timeUntilRebuffer: timeUntilRebuffer$1,
+ currentTimeline: this.currentTimeline_,
+ syncController: this.syncController_
+ });
+
+ if (!switchCandidate) {
+ return;
+ }
+
+ var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
+ var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
+ var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
+ // potential round trip time of the new request so that we are not too aggressive
+ // with switching to a playlist that might save us a fraction of a second.
+
+ if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
+ minimumTimeSaving = 1;
+ }
+
+ if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
+ return;
+ } // set the bandwidth to that of the desired playlist being sure to scale by
+ // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
+    // don't trigger a bandwidthupdate as the bandwidth is artificial
+
+
+ this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
+ this.trigger('earlyabort');
+ };
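+
+  // Illustrative trace (editorial, hypothetical numbers): if the measured bandwidth
+  // implies roughly 10 more seconds are needed to finish this request, but the forward
+  // buffer only covers about 4 seconds of playback (3 after the 1 second margin above),
+  // the request risks a rebuffer. When minRebufferMaxBandwidthSelector finds a playlist
+  // whose switch would save at least minimumTimeSaving (0.5s, or 1s when already
+  // rebuffering), the loader adopts that playlist's BANDWIDTH * Config.BANDWIDTH_VARIANCE
+  // + 1 as its bandwidth and triggers 'earlyabort' so a rendition switch can happen.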
+
+ _proto.handleAbort_ = function handleAbort_(segmentInfo) {
+ this.logger_("Aborting " + segmentInfoString(segmentInfo));
+ this.mediaRequestsAborted += 1;
+ }
+ /**
+ * XHR `progress` event handler
+ *
+ * @param {Event}
+ * The XHR `progress` event
+ * @param {Object} simpleSegment
+ * A simplified segment object copy
+ * @private
+ */
+ ;
+
+ _proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
+ this.earlyAbortWhenNeeded_(simpleSegment.stats);
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ }
+
+ this.trigger('progress');
+ };
+
+ _proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
+ this.earlyAbortWhenNeeded_(simpleSegment.stats);
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ }
+
+ if (this.checkForIllegalMediaSwitch(trackInfo)) {
+ return;
+ }
+
+ trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
+ // Guard against cases where we're not getting track info at all until we are
+ // certain that all streams will provide it.
+
+ if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
+ this.appendInitSegment_ = {
+ audio: true,
+ video: true
+ };
+ this.startingMediaInfo_ = trackInfo;
+ this.currentMediaInfo_ = trackInfo;
+ this.logger_('trackinfo update', trackInfo);
+ this.trigger('trackinfo');
+ } // trackinfo may cause an abort if the trackinfo
+ // causes a codec change to an unsupported codec.
+
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ } // set trackinfo on the pending segment so that
+ // it can append.
+
+
+ this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info
+
+ if (this.hasEnoughInfoToAppend_()) {
+ this.processCallQueue_();
+ }
+ };
+
+ _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
+ this.earlyAbortWhenNeeded_(simpleSegment.stats);
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_;
+ var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
+ segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
+ segmentInfo[timingInfoProperty][timeType] = time;
+ this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info
+
+ if (this.hasEnoughInfoToAppend_()) {
+ this.processCallQueue_();
+ }
+ };
+
+ _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
+ var _this2 = this;
+
+ this.earlyAbortWhenNeeded_(simpleSegment.stats);
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ } // This could only happen with fmp4 segments, but
+ // should still not happen in general
+
+
+ if (captionData.length === 0) {
+ this.logger_('SegmentLoader received no captions from a caption event');
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
+ // can be adjusted by the timestamp offset
+
+ if (!segmentInfo.hasAppendedData_) {
+ this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
+ return;
+ }
+
+ var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
+ var captionTracks = {}; // get total start/end and captions for each track/stream
+
+ captionData.forEach(function (caption) {
+ // caption.stream is actually a track name...
+ // set to the existing values in tracks or default values
+ captionTracks[caption.stream] = captionTracks[caption.stream] || {
+ // Infinity, as any other value will be less than this
+ startTime: Infinity,
+ captions: [],
+        // 0, as any other value will be more than this
+ endTime: 0
+ };
+ var captionTrack = captionTracks[caption.stream];
+ captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
+ captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
+ captionTrack.captions.push(caption);
+ });
+ Object.keys(captionTracks).forEach(function (trackName) {
+ var _captionTracks$trackN = captionTracks[trackName],
+ startTime = _captionTracks$trackN.startTime,
+ endTime = _captionTracks$trackN.endTime,
+ captions = _captionTracks$trackN.captions;
+ var inbandTextTracks = _this2.inbandTextTracks_;
+
+ _this2.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);
+
+ createCaptionsTrackIfNotExists(inbandTextTracks, _this2.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
+ // We do this because a rendition change that also changes the timescale for captions
+ // will result in captions being re-parsed for certain segments. If we add them again
+ // without clearing we will have two of the same captions visible.
+
+ removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
+ addCaptionData({
+ captionArray: captions,
+ inbandTextTracks: inbandTextTracks,
+ timestampOffset: timestampOffset
+ });
+ }); // Reset stored captions since we added parsed
+ // captions to a text track at this point
+
+ if (this.transmuxer_) {
+ this.transmuxer_.postMessage({
+ action: 'clearParsedMp4Captions'
+ });
+ }
+ };
+
+ _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
+ this.earlyAbortWhenNeeded_(simpleSegment.stats);
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
+
+ if (!segmentInfo.hasAppendedData_) {
+ this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
+ return;
+ }
+
+ var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
+ // audio/video source with a metadata track, and an alt audio with a metadata track.
+ // However, this probably won't happen, and if it does it can be handled then.
+
+ createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
+ addMetadata({
+ inbandTextTracks: this.inbandTextTracks_,
+ metadataArray: id3Frames,
+ timestampOffset: timestampOffset,
+ videoDuration: this.duration_()
+ });
+ };
+
+ _proto.processMetadataQueue_ = function processMetadataQueue_() {
+ this.metadataQueue_.id3.forEach(function (fn) {
+ return fn();
+ });
+ this.metadataQueue_.caption.forEach(function (fn) {
+ return fn();
+ });
+ this.metadataQueue_.id3 = [];
+ this.metadataQueue_.caption = [];
+ };
+
+ _proto.processCallQueue_ = function processCallQueue_() {
+ var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
+ // functions may check the length of the load queue and default to pushing themselves
+ // back onto the queue.
+
+ this.callQueue_ = [];
+ callQueue.forEach(function (fun) {
+ return fun();
+ });
+ };
+
+ _proto.processLoadQueue_ = function processLoadQueue_() {
+ var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
+ // functions may check the length of the load queue and default to pushing themselves
+ // back onto the queue.
+
+ this.loadQueue_ = [];
+ loadQueue.forEach(function (fun) {
+ return fun();
+ });
+ }
+ /**
+ * Determines whether the loader has enough info to load the next segment.
+ *
+ * @return {boolean}
+ * Whether or not the loader has enough info to load the next segment
+ */
+ ;
+
+ _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
+ // Since primary timing goes by video, only the audio loader potentially needs to wait
+ // to load.
+ if (this.loaderType_ !== 'audio') {
+ return true;
+ }
+
+ var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
+ // enough info to load.
+
+ if (!segmentInfo) {
+ return false;
+ } // The first segment can and should be loaded immediately so that source buffers are
+ // created together (before appending). Source buffer creation uses the presence of
+ // audio and video data to determine whether to create audio/video source buffers, and
+ // uses processed (transmuxed or parsed) media to determine the types required.
+
+
+ if (!this.getCurrentMediaInfo_()) {
+ return true;
+ }
+
+ if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
+ // can be requested and downloaded and only wait before it is transmuxed or parsed.
+ // But in practice, there are a few reasons why it is better to wait until a loader
+ // is ready to append that segment before requesting and downloading:
+ //
+ // 1. Because audio and main loaders cross discontinuities together, if this loader
+ // is waiting for the other to catch up, then instead of requesting another
+ // segment and using up more bandwidth, by not yet loading, more bandwidth is
+ // allotted to the loader currently behind.
+ // 2. media-segment-request doesn't have to have logic to consider whether a segment
+ // is ready to be processed or not, isolating the queueing behavior to the loader.
+ // 3. The audio loader bases some of its segment properties on timing information
+ // provided by the main loader, meaning that, if the logic for waiting on
+ // processing was in media-segment-request, then it would also need to know how
+ // to re-generate the segment information after the main loader caught up.
+ shouldWaitForTimelineChange({
+ timelineChangeController: this.timelineChangeController_,
+ currentTimeline: this.currentTimeline_,
+ segmentTimeline: segmentInfo.timeline,
+ loaderType: this.loaderType_,
+ audioDisabled: this.audioDisabled_
+ })) {
+ return false;
+ }
+
+ return true;
+ };
+
+ _proto.getCurrentMediaInfo_ = function getCurrentMediaInfo_(segmentInfo) {
+ if (segmentInfo === void 0) {
+ segmentInfo = this.pendingSegment_;
+ }
+
+ return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;
+ };
+
+ _proto.getMediaInfo_ = function getMediaInfo_(segmentInfo) {
+ if (segmentInfo === void 0) {
+ segmentInfo = this.pendingSegment_;
+ }
+
+ return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;
+ };
+
+ _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
+ if (!this.sourceUpdater_.ready()) {
+ return false;
+ } // If content needs to be removed or the loader is waiting on an append reattempt,
+ // then no additional content should be appended until the prior append is resolved.
+
+
+ if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
+ return false;
+ }
+
+ var segmentInfo = this.pendingSegment_;
+ var trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or
+ // we do not have information on this specific
+ // segment yet
+
+ if (!segmentInfo || !trackInfo) {
+ return false;
+ }
+
+ var hasAudio = trackInfo.hasAudio,
+ hasVideo = trackInfo.hasVideo,
+ isMuxed = trackInfo.isMuxed;
+
+ if (hasVideo && !segmentInfo.videoTimingInfo) {
+ return false;
+ } // muxed content only relies on video timing information for now.
+
+
+ if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
+ return false;
+ }
+
+ if (shouldWaitForTimelineChange({
+ timelineChangeController: this.timelineChangeController_,
+ currentTimeline: this.currentTimeline_,
+ segmentTimeline: segmentInfo.timeline,
+ loaderType: this.loaderType_,
+ audioDisabled: this.audioDisabled_
+ })) {
+ return false;
+ }
+
+ return true;
+ };
+
+ _proto.handleData_ = function handleData_(simpleSegment, result) {
+ this.earlyAbortWhenNeeded_(simpleSegment.stats);
+
+ if (this.checkForAbort_(simpleSegment.requestId)) {
+ return;
+ } // If there's anything in the call queue, then this data came later and should be
+ // executed after the calls currently queued.
+
+
+ if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
+ this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time
+
+ this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats
+
+ this.updateMediaSecondsLoaded_(segmentInfo.part || segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
+ // logic may change behavior depending on the state, and changing state too early may
+ // inflate our estimates of bandwidth. In the future this should be re-examined to
+ // note more granular states.
+ // don't process and append data if the mediaSource is closed
+
+ if (this.mediaSource_.readyState === 'closed') {
+ return;
+ } // if this request included an initialization segment, save that data
+ // to the initSegment cache
+
+
+ if (simpleSegment.map) {
+ simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request
+
+ segmentInfo.segment.map = simpleSegment.map;
+ } // if this request included a segment key, save that data in the cache
+
+
+ if (simpleSegment.key) {
+ this.segmentKey(simpleSegment.key, true);
+ }
+
+ segmentInfo.isFmp4 = simpleSegment.isFmp4;
+ segmentInfo.timingInfo = segmentInfo.timingInfo || {};
+
+ if (segmentInfo.isFmp4) {
+ this.trigger('fmp4');
+ segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
+ } else {
+ var trackInfo = this.getCurrentMediaInfo_();
+ var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
+ var firstVideoFrameTimeForData;
+
+ if (useVideoTimingInfo) {
+ firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
+ } // Segment loader knows more about segment timing than the transmuxer (in certain
+ // aspects), so make any changes required for a more accurate start time.
+ // Don't set the end time yet, as the segment may not be finished processing.
+
+
+ segmentInfo.timingInfo.start = this.trueSegmentStart_({
+ currentStart: segmentInfo.timingInfo.start,
+ playlist: segmentInfo.playlist,
+ mediaIndex: segmentInfo.mediaIndex,
+ currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
+ useVideoTimingInfo: useVideoTimingInfo,
+ firstVideoFrameTimeForData: firstVideoFrameTimeForData,
+ videoTimingInfo: segmentInfo.videoTimingInfo,
+ audioTimingInfo: segmentInfo.audioTimingInfo
+ });
+ } // Init segments for audio and video only need to be appended in certain cases. Now
+ // that data is about to be appended, we can check the final cases to determine
+ // whether we should append an init segment.
+
+
+ this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
+ // as we use the start of the segment to offset the best guess (playlist provided)
+ // timestamp offset.
+
+ this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should
+ // be appended or not.
+
+ if (segmentInfo.isSyncRequest) {
+ // first save/update our timing info for this segment.
+ // this is what allows us to choose an accurate segment
+ // and the main reason we make a sync request.
+ this.updateTimingInfoEnd_(segmentInfo);
+ this.syncController_.saveSegmentTimingInfo({
+ segmentInfo: segmentInfo,
+ shouldSaveTimelineMapping: this.loaderType_ === 'main'
+ });
+ var next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
+ // after taking into account its timing info, do not append it.
+
+ if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
+ this.logger_('sync segment was incorrect, not appending');
+ return;
+ } // otherwise append it like any other segment as our guess was correct.
+
+
+ this.logger_('sync segment was correct, appending');
+ } // Save some state so that in the future anything waiting on first append (and/or
+ // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
+ // we need some notion of whether the timestamp offset or other relevant information
+ // has had a chance to be set.
+
+
+ segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.
+
+ this.processMetadataQueue_();
+ this.appendData_(segmentInfo, result);
+ };
+
+ _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
+ // alt audio doesn't manage timestamp offset
+ if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
+ // segment for each chunk
+ !segmentInfo.changedTimestampOffset) {
+ // if the timestamp offset changed, the timeline may have changed, so we have to re-
+ // append init segments
+ this.appendInitSegment_ = {
+ audio: true,
+ video: true
+ };
+ }
+
+ if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
+ // make sure we append init segment on playlist changes, in case the media config
+ // changed
+ this.appendInitSegment_[type] = true;
+ }
+ };
+
+ _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
+ var type = _ref4.type,
+ initSegment = _ref4.initSegment,
+ map = _ref4.map,
+ playlist = _ref4.playlist; // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
+ // (Section 3) required to parse the applicable Media Segments. It applies to every
+ // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
+ // or until the end of the playlist."
+ // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
+
+ if (map) {
+ var id = initSegmentId(map);
+
+ if (this.activeInitSegmentId_ === id) {
+ // don't need to re-append the init segment if the ID matches
+ return null;
+ } // a map-specified init segment takes priority over any transmuxed (or otherwise
+ // obtained) init segment
+ //
+ // this also caches the init segment for later use
+
+
+ initSegment = this.initSegmentForMap(map, true).bytes;
+ this.activeInitSegmentId_ = id;
+ } // We used to always prepend init segments for video, however, that shouldn't be
+ // necessary. Instead, we should only append on changes, similar to what we've always
+ // done for audio. This is more important (though may not be that important) for
+ // frame-by-frame appending for LHLS, simply because of the increased quantity of
+ // appends.
+
+
+ if (initSegment && this.appendInitSegment_[type]) {
+ // Make sure we track the playlist that we last used for the init segment, so that
+ // we can re-append the init segment in the event that we get data from a new
+ // playlist. Discontinuities and track changes are handled in other sections.
+ this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type. Until a change is necessary.
+
+ this.appendInitSegment_[type] = false; // we need to clear out the fmp4 active init segment id, since
+ // we are appending the muxer init segment
+
+ this.activeInitSegmentId_ = null;
+ return initSegment;
+ }
+
+ return null;
+ };
+
+ _proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
+ var _this3 = this;
+
+ var segmentInfo = _ref5.segmentInfo,
+ type = _ref5.type,
+ bytes = _ref5.bytes;
+ var audioBuffered = this.sourceUpdater_.audioBuffered();
+ var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
+ // should be cleared out during the buffer removals. However, log in case it helps
+ // debug.
+
+ if (audioBuffered.length > 1) {
+ this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
+ }
+
+ if (videoBuffered.length > 1) {
+ this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
+ }
+
+ var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
+ var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
+ var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
+ var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;
+
+ if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
+ // Can't remove enough buffer to make room for new segment (or the browser doesn't
+ // allow for appends of segments this size). In the future, it may be possible to
+ // split up the segment and append in pieces, but for now, error out this playlist
+ // in an attempt to switch to a more manageable rendition.
+ this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ') + ", "));
+ this.error({
+ message: 'Quota exceeded error with append of a single segment of content',
+ excludeUntil: Infinity
+ });
+ this.trigger('error');
+ return;
+ } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
+ // that the segment-loader should block on future events until this one is handled, so
+ // that it doesn't keep moving onto further segments. Adding the call to the call
+ // queue will prevent further appends until waitingOnRemove_ and
+ // quotaExceededErrorRetryTimeout_ are cleared.
+ //
+ // Note that this will only block the current loader. In the case of demuxed content,
+ // the other load may keep filling as fast as possible. In practice, this should be
+ // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
+ // source buffer, or video fills without enough room for audio to append (and without
+ // the availability of clearing out seconds of back buffer to make room for audio).
+ // But it might still be good to handle this case in the future as a TODO.
+
+
+ this.waitingOnRemove_ = true;
+ this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
+ segmentInfo: segmentInfo,
+ type: type,
+ bytes: bytes
+ }));
+ var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
+ // before retrying.
+
+ var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
+ this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
+ this.remove(0, timeToRemoveUntil, function () {
+ _this3.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");
+
+ _this3.waitingOnRemove_ = false; // wait the length of time allotted in the back buffer to prevent wasted
+ // attempts (since we can't clear less than the minimum)
+
+ _this3.quotaExceededErrorRetryTimeout_ = window$1.setTimeout(function () {
+ _this3.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');
+
+ _this3.quotaExceededErrorRetryTimeout_ = null;
+
+ _this3.processCallQueue_();
+ }, MIN_BACK_BUFFER * 1000);
+ }, true);
+ };
+
+ _proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
+ var segmentInfo = _ref6.segmentInfo,
+ type = _ref6.type,
+ bytes = _ref6.bytes; // if there's no error, nothing to do
+
+ if (!error) {
+ return;
+ }
+
+ if (error.code === QUOTA_EXCEEDED_ERR) {
+ this.handleQuotaExceededError_({
+ segmentInfo: segmentInfo,
+ type: type,
+ bytes: bytes
+ }); // A quota exceeded error should be recoverable with a future re-append, so no need
+ // to trigger an append error.
+
+ return;
+ }
+
+ this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
+ this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id)); // If an append errors, we often can't recover.
+ // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
+ //
+ // Trigger a special error so that it can be handled separately from normal,
+ // recoverable errors.
+
+ this.trigger('appenderror');
+ };
+
+ _proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
+ var segmentInfo = _ref7.segmentInfo,
+ type = _ref7.type,
+ initSegment = _ref7.initSegment,
+ data = _ref7.data,
+ bytes = _ref7.bytes; // If this is a re-append, bytes were already created and don't need to be recreated
+
+ if (!bytes) {
+ var segments = [data];
+ var byteLength = data.byteLength;
+
+ if (initSegment) {
+ // if the media initialization segment is changing, append it before the content
+ // segment
+ segments.unshift(initSegment);
+ byteLength += initSegment.byteLength;
+ } // Technically we should be OK appending the init segment separately, however, we
+ // haven't yet tested that, and prepending is how we have always done things.
+
+
+ bytes = concatSegments({
+ bytes: byteLength,
+ segments: segments
+ });
+ }
+
+ this.sourceUpdater_.appendBuffer({
+ segmentInfo: segmentInfo,
+ type: type,
+ bytes: bytes
+ }, this.handleAppendError_.bind(this, {
+ segmentInfo: segmentInfo,
+ type: type,
+ bytes: bytes
+ }));
+ };
+
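+ // Record transmuxer-reported timing (presentation/decode start and end, prepended
+ // duration) on the pending segment, ignoring callbacks from stale requests.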
+ _proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
+ if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
+ return;
+ }
+
+ var segment = this.pendingSegment_.segment;
+ var timingInfoProperty = type + "TimingInfo";
+
+ if (!segment[timingInfoProperty]) {
+ segment[timingInfoProperty] = {};
+ }
+
+ segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
+ segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
+ segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
+ segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
+ segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
+
+ segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
+ };
+
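+ // Append one type of transmuxed data for the segment, prepending an init segment only
+ // when getInitSegmentAndUpdateState_ determines one is required.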
+ _proto.appendData_ = function appendData_(segmentInfo, result) {
+ var type = result.type,
+ data = result.data;
+
+ if (!data || !data.byteLength) {
+ return;
+ }
+
+ if (type === 'audio' && this.audioDisabled_) {
+ return;
+ }
+
+ var initSegment = this.getInitSegmentAndUpdateState_({
+ type: type,
+ initSegment: result.initSegment,
+ playlist: segmentInfo.playlist,
+ map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
+ });
+ this.appendToSourceBuffer_({
+ segmentInfo: segmentInfo,
+ type: type,
+ initSegment: initSegment,
+ data: data
+ });
+ }
+ /**
+ * load a specific segment from a request into the buffer
+ *
+ * @private
+ */
+ ;
+
+ _proto.loadSegment_ = function loadSegment_(segmentInfo) {
+ var _this4 = this;
+
+ this.state = 'WAITING';
+ this.pendingSegment_ = segmentInfo;
+ this.trimBackBuffer_(segmentInfo);
+
+ if (typeof segmentInfo.timestampOffset === 'number') {
+ if (this.transmuxer_) {
+ this.transmuxer_.postMessage({
+ action: 'clearAllMp4Captions'
+ });
+ }
+ }
+
+ if (!this.hasEnoughInfoToLoad_()) {
+ this.loadQueue_.push(function () {
+ // regenerate the audioAppendStart, timestampOffset, etc as they
+ // may have changed since this function was added to the queue.
+ var options = _extends({}, segmentInfo, {
+ forceTimestampOffset: true
+ });
+
+ _extends(segmentInfo, _this4.generateSegmentInfo_(options));
+
+ _this4.isPendingTimestampOffset_ = false;
+
+ _this4.updateTransmuxerAndRequestSegment_(segmentInfo);
+ });
+ return;
+ }
+
+ this.updateTransmuxerAndRequestSegment_(segmentInfo);
+ };
+
+ _proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
+ var _this5 = this; // We'll update the source buffer's timestamp offset once we have transmuxed data, but
+ // the transmuxer still needs to be updated before then.
+ //
+ // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
+ // offset must be passed to the transmuxer for stream correcting adjustments.
+
+
+ if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
+ this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared
+
+ segmentInfo.gopsToAlignWith = [];
+ this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh
+
+ this.transmuxer_.postMessage({
+ action: 'reset'
+ });
+ this.transmuxer_.postMessage({
+ action: 'setTimestampOffset',
+ timestampOffset: segmentInfo.timestampOffset
+ });
+ }
+
+ var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
+ var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
+ var isWalkingForward = this.mediaIndex !== null;
+ var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
+ // the first timeline
+ segmentInfo.timeline > 0;
+ var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
+ this.logger_("Requesting " + segmentInfoString(segmentInfo)); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
+ // then this init segment has never been seen before and should be appended.
+ //
+ // At this point the content type (audio/video or both) is not yet known, but it should be safe to set
+ // both to true and leave the decision of whether to append the init segment to append time.
+
+ if (simpleSegment.map && !simpleSegment.map.bytes) {
+ this.logger_('going to request init segment.');
+ this.appendInitSegment_ = {
+ video: true,
+ audio: true
+ };
+ }
+
+ segmentInfo.abortRequests = mediaSegmentRequest({
+ xhr: this.vhs_.xhr,
+ xhrOptions: this.xhrOptions_,
+ decryptionWorker: this.decrypter_,
+ segment: simpleSegment,
+ abortFn: this.handleAbort_.bind(this, segmentInfo),
+ progressFn: this.handleProgress_.bind(this),
+ trackInfoFn: this.handleTrackInfo_.bind(this),
+ timingInfoFn: this.handleTimingInfo_.bind(this),
+ videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
+ audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
+ captionsFn: this.handleCaptions_.bind(this),
+ isEndOfTimeline: isEndOfTimeline,
+ endedTimelineFn: function endedTimelineFn() {
+ _this5.logger_('received endedtimeline callback');
+ },
+ id3Fn: this.handleId3_.bind(this),
+ dataFn: this.handleData_.bind(this),
+ doneFn: this.segmentRequestFinished_.bind(this),
+ onTransmuxerLog: function onTransmuxerLog(_ref8) {
+ var message = _ref8.message,
+ level = _ref8.level,
+ stream = _ref8.stream;
+
+ _this5.logger_(segmentInfoString(segmentInfo) + " logged from transmuxer stream " + stream + " as a " + level + ": " + message);
+ }
+ });
+ }
+ /**
+ * trim the back buffer so that we don't have too much data
+ * in the source buffer
+ *
+ * @private
+ *
+ * @param {Object} segmentInfo - the current segment
+ */
+ ;
+
+ _proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
+ var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
+ // buffer and a very conservative "garbage collector".
+ // We manually clear out the old buffer to ensure
+ // we don't trigger the QuotaExceeded error
+ // on the source buffer during subsequent appends
+
+ if (removeToTime > 0) {
+ this.remove(0, removeToTime);
+ }
+ }
+ /**
+ * creates a simplified copy of the segment object with just the
+ * information necessary to perform the XHR and decryption
+ *
+ * @private
+ *
+ * @param {Object} segmentInfo - the current segment
+ * @return {Object} a simplified segment object copy
+ */
+ ;
+
+ _proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
+ var segment = segmentInfo.segment;
+ var part = segmentInfo.part;
+ var simpleSegment = {
+ resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
+ byterange: part ? part.byterange : segment.byterange,
+ requestId: segmentInfo.requestId,
+ transmuxer: segmentInfo.transmuxer,
+ audioAppendStart: segmentInfo.audioAppendStart,
+ gopsToAlignWith: segmentInfo.gopsToAlignWith,
+ part: segmentInfo.part
+ };
+ var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
+
+ if (previousSegment && previousSegment.timeline === segment.timeline) {
+ // The baseStartTime of a segment is used to handle rollover when probing the TS
+ // segment to retrieve timing information. Since the probe only looks at the media's
+ // times (e.g., PTS and DTS values of the segment), and doesn't consider the
+ // player's time (e.g., player.currentTime()), baseStartTime should reflect the
+ // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
+ // seconds of media time, so should be used here. The previous segment is used since
+ // the end of the previous segment should represent the beginning of the current
+ // segment, so long as they are on the same timeline.
+ if (previousSegment.videoTimingInfo) {
+ simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
+ } else if (previousSegment.audioTimingInfo) {
+ simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
+ }
+ }
+
+ if (segment.key) {
+ // if the media sequence is greater than 2^32, the IV will be incorrect
+ // assuming 10s segments, that would be about 1300 years
+ var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
+ simpleSegment.key = this.segmentKey(segment.key);
+ simpleSegment.key.iv = iv;
+ }
+
+ if (segment.map) {
+ simpleSegment.map = this.initSegmentForMap(segment.map);
+ }
+
+ return simpleSegment;
+ };
+
+ _proto.saveTransferStats_ = function saveTransferStats_(stats) {
+ // every request counts as a media request even if it has been aborted
+ // or canceled due to a timeout
+ this.mediaRequests += 1;
+
+ if (stats) {
+ this.mediaBytesTransferred += stats.bytesReceived;
+ this.mediaTransferDuration += stats.roundTripTime;
+ }
+ };
+
+ _proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
+ // byteLength will be used for throughput, and should be based on bytes received,
+ // which we only know at the end of the request and should reflect total bytes
+ // downloaded rather than just bytes processed from components of the segment
+ this.pendingSegment_.byteLength = stats.bytesReceived;
+
+ if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
+ this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
+ return;
+ }
+
+ this.bandwidth = stats.bandwidth;
+ this.roundTrip = stats.roundTripTime;
+ };
+
+ _proto.handleTimeout_ = function handleTimeout_() {
+ // although the VTT segment loader bandwidth isn't really used, it's good to
+ // maintain consistent functionality between segment loaders
+ this.mediaRequestsTimedout += 1;
+ this.bandwidth = 1;
+ this.roundTrip = NaN;
+ this.trigger('bandwidthupdate');
+ }
+ /**
+ * Handle the callback from the segmentRequest function and set the
+ * associated SegmentLoader state and errors if necessary
+ *
+ * @private
+ */
+ ;
+
+ _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
+ // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
+ // check the call queue directly since this function doesn't need to deal with any
+ // data, and can continue even if the source buffers are not set up and we didn't get
+ // any data from the segment
+ if (this.callQueue_.length) {
+ this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
+ return;
+ }
+
+ this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset
+
+ if (!this.pendingSegment_) {
+ return;
+ } // the request was aborted and the SegmentLoader has already started
+ // another request. this can happen when the timeout for an aborted
+ // request triggers due to a limitation in the XHR library.
+ // do not count this as any sort of request or we risk double-counting
+
+
+ if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
+ return;
+ } // an error occurred from the active pendingSegment_ so reset everything
+
+
+ if (error) {
+ this.pendingSegment_ = null;
+ this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done
+
+ if (error.code === REQUEST_ERRORS.ABORTED) {
+ return;
+ }
+
+ this.pause(); // the error is really just that at least one of the requests timed out
+ // set the bandwidth to a very low value and trigger an ABR switch to
+ // take emergency action
+
+ if (error.code === REQUEST_ERRORS.TIMEOUT) {
+ this.handleTimeout_();
+ return;
+ } // if control-flow has arrived here, then the error is real
+ // emit an error event to blacklist the current playlist
+
+
+ this.mediaRequestsErrored += 1;
+ this.error(error);
+ this.trigger('error');
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
+ // generated for ABR purposes
+
+ this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
+ segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
+
+ if (result.gopInfo) {
+ this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
+ } // Although we may have already started appending on progress, we shouldn't switch the
+ // state away from loading until we are officially done loading the segment data.
+
+
+ this.state = 'APPENDING'; // used for testing
+
+ this.trigger('appending');
+ this.waitForAppendsToComplete_(segmentInfo);
+ };
+
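+ // Cache the sync controller's mapping for this timeline, used to translate between
+ // display time and media time, if one has been saved.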
+ _proto.setTimeMapping_ = function setTimeMapping_(timeline) {
+ var timelineMapping = this.syncController_.mappingForTimeline(timeline);
+
+ if (timelineMapping !== null) {
+ this.timeMapping_ = timelineMapping;
+ }
+ };
+
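+ // Count seconds of loaded media, preferring exact start/end times over the segment's
+ // declared duration.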
+ _proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
+ if (typeof segment.start === 'number' && typeof segment.end === 'number') {
+ this.mediaSecondsLoaded += segment.end - segment.start;
+ } else {
+ this.mediaSecondsLoaded += segment.duration;
+ }
+ };
+
+ _proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
+ if (timestampOffset === null) {
+ return false;
+ } // note that we're potentially using the same timestamp offset for both video and
+ // audio
+
+
+ if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
+ return true;
+ }
+
+ if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
+ return true;
+ }
+
+ return false;
+ };
+
+ _proto.trueSegmentStart_ = function trueSegmentStart_(_ref9) {
+ var currentStart = _ref9.currentStart,
+ playlist = _ref9.playlist,
+ mediaIndex = _ref9.mediaIndex,
+ firstVideoFrameTimeForData = _ref9.firstVideoFrameTimeForData,
+ currentVideoTimestampOffset = _ref9.currentVideoTimestampOffset,
+ useVideoTimingInfo = _ref9.useVideoTimingInfo,
+ videoTimingInfo = _ref9.videoTimingInfo,
+ audioTimingInfo = _ref9.audioTimingInfo;
+
+ if (typeof currentStart !== 'undefined') {
+ // if start was set once, keep using it
+ return currentStart;
+ }
+
+ if (!useVideoTimingInfo) {
+ return audioTimingInfo.start;
+ }
+
+ var previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
+ // within that segment. Since the transmuxer maintains a cache of incomplete data
+ // and/or the last frame seen, the start time may reflect a frame that starts
+ // in the previous segment. Check for that case and ensure the start time is
+ // accurate for the segment.
+
+ if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
+ return firstVideoFrameTimeForData;
+ }
+
+ return videoTimingInfo.start;
+ };
+
+ _proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
+ var trackInfo = this.getCurrentMediaInfo_(segmentInfo);
+
+ if (!trackInfo) {
+ this.error({
+ message: 'No starting media returned, likely due to an unsupported media format.',
+ blacklistDuration: Infinity
+ });
+ this.trigger('error');
+ return;
+ } // Although transmuxing is done, appends may not yet be finished. Throw a marker
+ // on each queue this loader is responsible for to ensure that the appends are
+ // complete.
+
+
+ var hasAudio = trackInfo.hasAudio,
+ hasVideo = trackInfo.hasVideo,
+ isMuxed = trackInfo.isMuxed;
+ var waitForVideo = this.loaderType_ === 'main' && hasVideo;
+ var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
+ segmentInfo.waitingOnAppends = 0; // segments with no data
+
+ if (!segmentInfo.hasAppendedData_) {
+ if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
+ // When there's no audio or video data in the segment, there's no audio or video
+ // timing information.
+ //
+ // If there's no audio or video timing information, then the timestamp offset
+ // can't be adjusted to the appropriate value for the transmuxer and source
+ // buffers.
+ //
+ // Therefore, the next segment should be used to set the timestamp offset.
+ this.isPendingTimestampOffset_ = true;
+ } // override settings for metadata only segments
+
+
+ segmentInfo.timingInfo = {
+ start: 0
+ };
+ segmentInfo.waitingOnAppends++;
+
+ if (!this.isPendingTimestampOffset_) {
+ // update the timestampoffset
+ this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
+ // no video/audio data.
+
+ this.processMetadataQueue_();
+ } // append is "done" instantly with no data.
+
+
+ this.checkAppendsDone_(segmentInfo);
+ return;
+ } // Since source updater could call back synchronously, do the increments first.
+
+
+ if (waitForVideo) {
+ segmentInfo.waitingOnAppends++;
+ }
+
+ if (waitForAudio) {
+ segmentInfo.waitingOnAppends++;
+ }
+
+ if (waitForVideo) {
+ this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
+ }
+
+ if (waitForAudio) {
+ this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
+ }
+ };
+
+ _proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
+ if (this.checkForAbort_(segmentInfo.requestId)) {
+ return;
+ }
+
+ segmentInfo.waitingOnAppends--;
+
+ if (segmentInfo.waitingOnAppends === 0) {
+ this.handleAppendsDone_();
+ }
+ };
+
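+ // If the incoming track info represents an unsupported media switch, error out and
+ // blacklist the playlist indefinitely.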
+ _proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
+ var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);
+
+ if (illegalMediaSwitchError) {
+ this.error({
+ message: illegalMediaSwitchError,
+ blacklistDuration: Infinity
+ });
+ this.trigger('error');
+ return true;
+ }
+
+ return false;
+ };
+
+ _proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
+ if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
+ // priority, timing-wise, so we must wait
+ typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
+ segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
+ this.loaderType_ !== 'main') {
+ return;
+ }
+
+ var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
+ // the timing info here comes from video. In the event that the audio is longer than
+ // the video, this will trim the start of the audio.
+ // This also trims any offset from 0 at the beginning of the media
+
+ segmentInfo.timestampOffset -= this.getSegmentStartTimeForTimestampOffsetCalculation_({
+ videoTimingInfo: segmentInfo.segment.videoTimingInfo,
+ audioTimingInfo: segmentInfo.segment.audioTimingInfo,
+ timingInfo: segmentInfo.timingInfo
+ }); // In the event that there are part segment downloads, each will try to update the
+ // timestamp offset. Retaining this bit of state prevents us from updating in the
+ // future (within the same segment), however, there may be a better way to handle it.
+
+ segmentInfo.changedTimestampOffset = true;
+
+ if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
+ this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
+ didChange = true;
+ }
+
+ if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
+ this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
+ didChange = true;
+ }
+
+ if (didChange) {
+ this.trigger('timestampoffset');
+ }
+ };
+
+ _proto.getSegmentStartTimeForTimestampOffsetCalculation_ = function getSegmentStartTimeForTimestampOffsetCalculation_(_ref10) {
+ var videoTimingInfo = _ref10.videoTimingInfo,
+ audioTimingInfo = _ref10.audioTimingInfo,
+ timingInfo = _ref10.timingInfo;
+
+ if (!this.useDtsForTimestampOffset_) {
+ return timingInfo.start;
+ }
+
+ if (videoTimingInfo && typeof videoTimingInfo.transmuxedDecodeStart === 'number') {
+ return videoTimingInfo.transmuxedDecodeStart;
+ } // handle audio only
+
+
+ if (audioTimingInfo && typeof audioTimingInfo.transmuxedDecodeStart === 'number') {
+ return audioTimingInfo.transmuxedDecodeStart;
+ } // handle content not transmuxed (e.g., MP4)
+
+
+ return timingInfo.start;
+ };
+
+ _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
+ segmentInfo.timingInfo = segmentInfo.timingInfo || {};
+ var trackInfo = this.getMediaInfo_();
+ var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
+ var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
+
+ if (!prioritizedTimingInfo) {
+ return;
+ }
+
+ segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
+ // current example is the case of fmp4), so use the rough duration to calculate an
+ // end time.
+ prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
+ }
+ /**
+ * callback to run when appendBuffer is finished. detects if we are
+ * in a good state to do things with the data we got, or if we need
+ * to wait for more
+ *
+ * @private
+ */
+ ;
+
+ _proto.handleAppendsDone_ = function handleAppendsDone_() {
+ // appendsdone can cause an abort
+ if (this.pendingSegment_) {
+ this.trigger('appendsdone');
+ }
+
+ if (!this.pendingSegment_) {
+ this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
+ // all appending cases?
+
+ if (!this.paused()) {
+ this.monitorBuffer_();
+ }
+
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
+ // best to wait until all appends are done so we're sure that the primary media is
+ // finished (and we have its end time).
+
+ this.updateTimingInfoEnd_(segmentInfo);
+
+ if (this.shouldSaveSegmentTimingInfo_) {
+ // Timeline mappings should only be saved for the main loader. This is for multiple
+ // reasons:
+ //
+ // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
+ // and the main loader try to save the timeline mapping, whichever comes later
+ // will overwrite the first. In theory this is OK, as the mappings should be the
+ // same, however, it breaks for (2)
+ // 2) In the event of a live stream, the initial live point will make for a somewhat
+ // arbitrary mapping. If audio and video streams are not perfectly in-sync, then
+ // the mapping will be off for one of the streams, dependent on which one was
+ // first saved (see (1)).
+ // 3) Primary timing goes by video in VHS, so the mapping should be video.
+ //
+ // Since the audio loader will wait for the main loader to load the first segment,
+ // the main loader will save the first timeline mapping, and ensure that there won't
+ // be a case where audio loads two segments without saving a mapping (thus leading
+ // to missing segment timing info).
+ this.syncController_.saveSegmentTimingInfo({
+ segmentInfo: segmentInfo,
+ shouldSaveTimelineMapping: this.loaderType_ === 'main'
+ });
+ }
+
+ var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
+
+ if (segmentDurationMessage) {
+ if (segmentDurationMessage.severity === 'warn') {
+ videojs.log.warn(segmentDurationMessage.message);
+ } else {
+ this.logger_(segmentDurationMessage.message);
+ }
+ }
+
+ this.recordThroughput_(segmentInfo);
+ this.pendingSegment_ = null;
+ this.state = 'READY';
+
+ if (segmentInfo.isSyncRequest) {
+ this.trigger('syncinfoupdate'); // if the sync request was not appended
+ // then it was not the correct segment.
+ // throw it away and use the data it gave us
+ // to get the correct one.
+
+ if (!segmentInfo.hasAppendedData_) {
+ this.logger_("Throwing away un-appended sync request " + segmentInfoString(segmentInfo));
+ return;
+ }
+ }
+
+ this.logger_("Appended " + segmentInfoString(segmentInfo));
+ this.addSegmentMetadataCue_(segmentInfo);
+ this.fetchAtBuffer_ = true;
+
+ if (this.currentTimeline_ !== segmentInfo.timeline) {
+ this.timelineChangeController_.lastTimelineChange({
+ type: this.loaderType_,
+ from: this.currentTimeline_,
+ to: segmentInfo.timeline
+ }); // If audio is not disabled, the main segment loader is responsible for updating
+ // the audio timeline as well. If the content is video only, this won't have any
+ // impact.
+
+ if (this.loaderType_ === 'main' && !this.audioDisabled_) {
+ this.timelineChangeController_.lastTimelineChange({
+ type: 'audio',
+ from: this.currentTimeline_,
+ to: segmentInfo.timeline
+ });
+ }
+ }
+
+ this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
+ // the following conditional otherwise it may consider this a bad "guess"
+ // and attempt to resync when the post-update seekable window and live
+ // point would mean that this was the perfect segment to fetch
+
+ this.trigger('syncinfoupdate');
+ var segment = segmentInfo.segment;
+ var part = segmentInfo.part;
+ var badSegmentGuess = segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3;
+ var badPartGuess = part && part.end && this.currentTime_() - part.end > segmentInfo.playlist.partTargetDuration * 3; // If we previously appended a segment/part that ends more than 3 part/targetDurations before
+ // the currentTime_ that means that our conservative guess was too conservative.
+ // In that case, reset the loader state so that we try to use any information gained
+ // from the previous request to create a new, more accurate, sync-point.
+
+ if (badSegmentGuess || badPartGuess) {
+ this.logger_("bad " + (badSegmentGuess ? 'segment' : 'part') + " " + segmentInfoString(segmentInfo));
+ this.resetEverything();
+ return;
+ }
+
+ var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
+ // and conservatively guess
+
+ if (isWalkingForward) {
+ this.trigger('bandwidthupdate');
+ }
+
+ this.trigger('progress');
+ this.mediaIndex = segmentInfo.mediaIndex;
+ this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
+ // buffer, end the stream. this ensures the "ended" event will
+ // fire if playback reaches that point.
+
+ if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
+ this.endOfStream();
+ } // used for testing
+
+
+ this.trigger('appended');
+
+ if (segmentInfo.hasAppendedData_) {
+ this.mediaAppends++;
+ }
+
+ if (!this.paused()) {
+ this.monitorBuffer_();
+ }
+ }
+ /**
+ * Records the current throughput of the decrypt, transmux, and append
+ * portion of the segment pipeline. `throughput.rate` is the cumulative
+ * moving average of the throughput. `throughput.count` is the number of
+ * data points in the average.
+ *
+ * @private
+ * @param {Object} segmentInfo the object returned by loadSegment
+ */
+ ;
+
+ _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
+ if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
+ this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
+ return;
+ }
+
+ var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
+ // by zero in the case where the throughput is ridiculously high
+
+ var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
+
+ var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
+ // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
+
+ this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
+ }
+ /**
+ * Adds a cue to the segment-metadata track with some metadata information about the
+ * segment
+ *
+ * @private
+ * @param {Object} segmentInfo
+ * the object returned by loadSegment
+ * @method addSegmentMetadataCue_
+ */
+ ;
+
+ _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
+ if (!this.segmentMetadataTrack_) {
+ return;
+ }
+
+ var segment = segmentInfo.segment;
+ var start = segment.start;
+ var end = segment.end; // Do not try adding the cue if the start and end times are invalid.
+
+ if (!finite(start) || !finite(end)) {
+ return;
+ }
+
+ removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
+ var Cue = window$1.WebKitDataCue || window$1.VTTCue;
+ var value = {
+ custom: segment.custom,
+ dateTimeObject: segment.dateTimeObject,
+ dateTimeString: segment.dateTimeString,
+ bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
+ resolution: segmentInfo.playlist.attributes.RESOLUTION,
+ codecs: segmentInfo.playlist.attributes.CODECS,
+ byteLength: segmentInfo.byteLength,
+ uri: segmentInfo.uri,
+ timeline: segmentInfo.timeline,
+ playlist: segmentInfo.playlist.id,
+ start: start,
+ end: end
+ };
+ var data = JSON.stringify(value);
+ var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency between
+ // the differences of WebKitDataCue in safari and VTTCue in other browsers
+
+ cue.value = value;
+ this.segmentMetadataTrack_.addCue(cue);
+ };
+
+ return SegmentLoader;
+}(videojs.EventTarget);
+
+function noop() {}
+
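+// Uppercase only the first character of a string (e.g. 'video' -> 'Video'); used to
+// build handler property names like onVideoUpdateEnd_.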
+var toTitleCase = function toTitleCase(string) {
+ if (typeof string !== 'string') {
+ return string;
+ }
+
+ return string.replace(/./, function (w) {
+ return w.toUpperCase();
+ });
+};
+
+var bufferTypes = ['video', 'audio'];
+
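+// A source buffer type is considered busy if its SourceBuffer is mid-update or it
+// already has a pending queue entry.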
+var _updating = function updating(type, sourceUpdater) {
+ var sourceBuffer = sourceUpdater[type + "Buffer"];
+ return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
+};
+
+var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
+ for (var i = 0; i < queue.length; i++) {
+ var queueEntry = queue[i];
+
+ if (queueEntry.type === 'mediaSource') {
+ // If the next entry is a media source entry (uses multiple source buffers), block
+ // processing to allow it to go through first.
+ return null;
+ }
+
+ if (queueEntry.type === type) {
+ return i;
+ }
+ }
+
+ return null;
+};
+
+var shiftQueue = function shiftQueue(type, sourceUpdater) {
+ if (sourceUpdater.queue.length === 0) {
+ return;
+ }
+
+ var queueIndex = 0;
+ var queueEntry = sourceUpdater.queue[queueIndex];
+
+ if (queueEntry.type === 'mediaSource') {
+ if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
+ sourceUpdater.queue.shift();
+ queueEntry.action(sourceUpdater);
+
+ if (queueEntry.doneFn) {
+ queueEntry.doneFn();
+ } // Only specific source buffer actions must wait for async updateend events. Media
+ // Source actions process synchronously. Therefore, both audio and video source
+ // buffers are now clear to process the next queue entries.
+
+
+ shiftQueue('audio', sourceUpdater);
+ shiftQueue('video', sourceUpdater);
+ } // Media Source actions require both source buffers, so if the media source action
+ // couldn't process yet (because one or both source buffers are busy), block other
+ // queue actions until both are available and the media source action can process.
+
+
+ return;
+ }
+
+ if (type === 'mediaSource') {
+ // If the queue was shifted by a media source action (this happens when pushing a
+ // media source action onto the queue), then it wasn't from an updateend event from an
+ // audio or video source buffer, so there's no change from previous state, and no
+ // processing should be done.
+ return;
+ } // Media source queue entries don't need to consider whether the source updater is
+ // started (i.e., source buffers are created) as they don't need the source buffers, but
+ // source buffer queue entries do.
+
+
+ if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
+ return;
+ }
+
+ if (queueEntry.type !== type) {
+ queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
+
+ if (queueIndex === null) {
+ // Either there's no queue entry that uses this source buffer type in the queue, or
+ // there's a media source queue entry before the next entry of this type, in which
+ // case wait for that action to process first.
+ return;
+ }
+
+ queueEntry = sourceUpdater.queue[queueIndex];
+ }
+
+ sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
+ //
+ // The queue pending operation must be set before the action is performed in the event
+ // that the action results in a synchronous event that is acted upon. For instance, if
+ // an exception is thrown that can be handled, it's possible that new actions will be
+ // appended to an empty queue and immediately executed, but would not have the correct
+ // pending information if this property was set after the action was performed.
+
+ sourceUpdater.queuePending[type] = queueEntry;
+ queueEntry.action(type, sourceUpdater);
+
+ if (!queueEntry.doneFn) {
+ // synchronous operation, process next entry
+ sourceUpdater.queuePending[type] = null;
+ shiftQueue(type, sourceUpdater);
+ return;
+ }
+};
+
+var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
+ var buffer = sourceUpdater[type + "Buffer"];
+ var titleType = toTitleCase(type);
+
+ if (!buffer) {
+ return;
+ }
+
+ buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
+ buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
+ sourceUpdater.codecs[type] = null;
+ sourceUpdater[type + "Buffer"] = null;
+};
+
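+// True only when the given SourceBuffer is still attached to the MediaSource's
+// sourceBuffers list.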
+var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
+ return mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
+};
+
+var actions = {
+ appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
+ return function (type, sourceUpdater) {
+ var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
+ // or the media source does not contain this source buffer.
+
+ if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
+ return;
+ }
+
+ sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");
+
+ try {
+ sourceBuffer.appendBuffer(bytes);
+ } catch (e) {
+ sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
+ sourceUpdater.queuePending[type] = null;
+ onError(e);
+ }
+ };
+ },
+ remove: function remove(start, end) {
+ return function (type, sourceUpdater) {
+ var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
+ // or the media source does not contain this source buffer.
+
+ if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
+ return;
+ }
+
+ sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");
+
+ try {
+ sourceBuffer.remove(start, end);
+ } catch (e) {
+ sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
+ }
+ };
+ },
+ timestampOffset: function timestampOffset(offset) {
+ return function (type, sourceUpdater) {
+ var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
+ // or the media source does not contain this source buffer.
+
+ if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
+ return;
+ }
+
+ sourceUpdater.logger_("Setting " + type + "timestampOffset to " + offset);
+ sourceBuffer.timestampOffset = offset;
+ };
+ },
+ callback: function callback(_callback) {
+ return function (type, sourceUpdater) {
+ _callback();
+ };
+ },
+ endOfStream: function endOfStream(error) {
+ return function (sourceUpdater) {
+ if (sourceUpdater.mediaSource.readyState !== 'open') {
+ return;
+ }
+
+ sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");
+
+ try {
+ sourceUpdater.mediaSource.endOfStream(error);
+ } catch (e) {
+ videojs.log.warn('Failed to call media source endOfStream', e);
+ }
+ };
+ },
+ duration: function duration(_duration) {
+ return function (sourceUpdater) {
+ sourceUpdater.logger_("Setting mediaSource duration to " + _duration);
+
+ try {
+ sourceUpdater.mediaSource.duration = _duration;
+ } catch (e) {
+ videojs.log.warn('Failed to set media source duration', e);
+ }
+ };
+ },
+ abort: function abort() {
+ return function (type, sourceUpdater) {
+ if (sourceUpdater.mediaSource.readyState !== 'open') {
+ return;
+ }
+
+ var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
+ // or the media source does not contain this source buffer.
+
+ if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
+ return;
+ }
+
+ sourceUpdater.logger_("calling abort on " + type + "Buffer");
+
+ try {
+ sourceBuffer.abort();
+ } catch (e) {
+ videojs.log.warn("Failed to abort on " + type + "Buffer", e);
+ }
+ };
+ },
+ addSourceBuffer: function addSourceBuffer(type, codec) {
+ return function (sourceUpdater) {
+ var titleType = toTitleCase(type);
+ var mime = getMimeForCodec(codec);
+ sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
+ var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
+ sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
+ sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
+ sourceUpdater.codecs[type] = codec;
+ sourceUpdater[type + "Buffer"] = sourceBuffer;
+ };
+ },
+ removeSourceBuffer: function removeSourceBuffer(type) {
+ return function (sourceUpdater) {
+ var sourceBuffer = sourceUpdater[type + "Buffer"];
+ cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
+ // or the media source does not contain this source buffer.
+
+ if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
+ return;
+ }
+
+ sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");
+
+ try {
+ sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
+ } catch (e) {
+ videojs.log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
+ }
+ };
+ },
+ changeType: function changeType(codec) {
+ return function (type, sourceUpdater) {
+ var sourceBuffer = sourceUpdater[type + "Buffer"];
+ var mime = getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
+ // or the media source does not contain this source buffer.
+
+ if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
+ return;
+ } // do not update codec if we don't need to.
+
+
+ if (sourceUpdater.codecs[type] === codec) {
+ return;
+ }
+
+ sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
+ sourceBuffer.changeType(mime);
+ sourceUpdater.codecs[type] = codec;
+ };
+ }
+};
+
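+// Queue an action for the given source buffer type and immediately attempt to run it.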
+var pushQueue = function pushQueue(_ref) {
+ var type = _ref.type,
+ sourceUpdater = _ref.sourceUpdater,
+ action = _ref.action,
+ doneFn = _ref.doneFn,
+ name = _ref.name;
+ sourceUpdater.queue.push({
+ type: type,
+ action: action,
+ doneFn: doneFn,
+ name: name
+ });
+ shiftQueue(type, sourceUpdater);
+};
+
+var onUpdateend = function onUpdateend(type, sourceUpdater) {
+ return function (e) {
+ // Although there should, in theory, be a pending action for any updateend received,
+ // there are some actions that may trigger updateend events without set definitions in
+ // the w3c spec. For instance, setting the duration on the media source may trigger
+ // updateend events on source buffers. This does not appear to be in the spec. As such,
+ // if we encounter an updateend without a corresponding pending action from our queue
+ // for that source buffer type, process the next action.
+ if (sourceUpdater.queuePending[type]) {
+ var doneFn = sourceUpdater.queuePending[type].doneFn;
+ sourceUpdater.queuePending[type] = null;
+
+ if (doneFn) {
+ // if there's an error, report it
+ doneFn(sourceUpdater[type + "Error_"]);
+ }
+ }
+
+ shiftQueue(type, sourceUpdater);
+ };
+};
+/**
+ * A queue of callbacks to be serialized and applied when a
+ * MediaSource and its associated SourceBuffers are not in the
+ * updating state. It is used by the segment loader to update the
+ * underlying SourceBuffers when new data is loaded, for instance.
+ *
+ * @class SourceUpdater
+ * @param {MediaSource} mediaSource the MediaSource to create the SourceBuffer from
+ * @param {string} mimeType the desired MIME type of the underlying SourceBuffer
+ */
+
+
+var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
+ _inheritsLoose(SourceUpdater, _videojs$EventTarget);
+
+ function SourceUpdater(mediaSource) {
+ var _this;
+
+ _this = _videojs$EventTarget.call(this) || this;
+ _this.mediaSource = mediaSource;
+
+ _this.sourceopenListener_ = function () {
+ return shiftQueue('mediaSource', _assertThisInitialized(_this));
+ };
+
+ _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);
+
+ _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0
+
+ _this.audioTimestampOffset_ = 0;
+ _this.videoTimestampOffset_ = 0;
+ _this.queue = [];
+ _this.queuePending = {
+ audio: null,
+ video: null
+ };
+ _this.delayedAudioAppendQueue_ = [];
+ _this.videoAppendQueued_ = false;
+ _this.codecs = {};
+ _this.onVideoUpdateEnd_ = onUpdateend('video', _assertThisInitialized(_this));
+ _this.onAudioUpdateEnd_ = onUpdateend('audio', _assertThisInitialized(_this));
+
+ _this.onVideoError_ = function (e) {
+ // used for debugging
+ _this.videoError_ = e;
+ };
+
+ _this.onAudioError_ = function (e) {
+ // used for debugging
+ _this.audioError_ = e;
+ };
+
+ _this.createdSourceBuffers_ = false;
+ _this.initializedEme_ = false;
+ _this.triggeredReady_ = false;
+ return _this;
+ }
+
+ var _proto = SourceUpdater.prototype;
+
+ _proto.initializedEme = function initializedEme() {
+ this.initializedEme_ = true;
+ this.triggerReady();
+ };
+
+ _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
+ // if false, likely waiting on one of the segment loaders to get enough data to create
+ // source buffers
+ return this.createdSourceBuffers_;
+ };
+
+ _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
+ return this.initializedEme_;
+ };
+
+ _proto.ready = function ready() {
+ return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
+ };
+
+ _proto.createSourceBuffers = function createSourceBuffers(codecs) {
+ if (this.hasCreatedSourceBuffers()) {
+ // already created them before
+ return;
+ } // the initial addOrChangeSourceBuffers will always be
+ // two add buffers.
+
+
+ this.addOrChangeSourceBuffers(codecs);
+ this.createdSourceBuffers_ = true;
+ this.trigger('createdsourcebuffers');
+ this.triggerReady();
+ };
+
+ _proto.triggerReady = function triggerReady() {
+ // only allow ready to be triggered once, this prevents the case
+ // where:
+ // 1. we trigger createdsourcebuffers
+ // 2. IE 11 synchronously initializes eme
+ // 3. the synchronous initialization causes us to trigger ready
+ // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
+ if (this.ready() && !this.triggeredReady_) {
+ this.triggeredReady_ = true;
+ this.trigger('ready');
+ }
+ }
+ /**
+ * Add a type of source buffer to the media source.
+ *
+ * @param {string} type
+ * The type of source buffer to add.
+ *
+ * @param {string} codec
+ * The codec to add the source buffer with.
+ */
+ ;
+
+ _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
+ pushQueue({
+ type: 'mediaSource',
+ sourceUpdater: this,
+ action: actions.addSourceBuffer(type, codec),
+ name: 'addSourceBuffer'
+ });
+ }
+ /**
+ * call abort on a source buffer.
+ *
+ * @param {string} type
+ * The type of source buffer to call abort on.
+ */
+ ;
+
+ _proto.abort = function abort(type) {
+ pushQueue({
+ type: type,
+ sourceUpdater: this,
+ action: actions.abort(type),
+ name: 'abort'
+ });
+ }
+ /**
+ * Call removeSourceBuffer and remove a specific type
+ * of source buffer on the mediaSource.
+ *
+ * @param {string} type
+ * The type of source buffer to remove.
+ */
+ ;
+
+ _proto.removeSourceBuffer = function removeSourceBuffer(type) {
+ if (!this.canRemoveSourceBuffer()) {
+ videojs.log.error('removeSourceBuffer is not supported!');
+ return;
+ }
+
+ pushQueue({
+ type: 'mediaSource',
+ sourceUpdater: this,
+ action: actions.removeSourceBuffer(type),
+ name: 'removeSourceBuffer'
+ });
+ }
+ /**
+ * Whether or not the removeSourceBuffer function is supported
+ * on the mediaSource.
+ *
+ * @return {boolean}
+ * if removeSourceBuffer can be called.
+ */
+ ;
+
+ _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
+ // IE reports that it supports removeSourceBuffer, but often throws
+ // errors when attempting to use the function. So we report that it
+ // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
+ // throws errors, so we report that it does not support this as well.
+ return !videojs.browser.IE_VERSION && !videojs.browser.IS_FIREFOX && window$1.MediaSource && window$1.MediaSource.prototype && typeof window$1.MediaSource.prototype.removeSourceBuffer === 'function';
+ }
+ /**
+ * Whether or not the changeType function is supported
+ * on our SourceBuffers.
+ *
+ * @return {boolean}
+ * if changeType can be called.
+ */
+ ;
+
+ SourceUpdater.canChangeType = function canChangeType() {
+ return window$1.SourceBuffer && window$1.SourceBuffer.prototype && typeof window$1.SourceBuffer.prototype.changeType === 'function';
+ }
+ /**
+ * Whether or not the changeType function is supported
+ * on our SourceBuffers.
+ *
+ * @return {boolean}
+ * if changeType can be called.
+ */
+ ;
+
+ _proto.canChangeType = function canChangeType() {
+ return this.constructor.canChangeType();
+ }
+ /**
+ * Call the changeType function on a source buffer, given the code and type.
+ *
+ * @param {string} type
+ * The type of source buffer to call changeType on.
+ *
+ * @param {string} codec
+ * The codec string to change type with on the source buffer.
+ */
+ ;
+
+ _proto.changeType = function changeType(type, codec) {
+ if (!this.canChangeType()) {
+ videojs.log.error('changeType is not supported!');
+ return;
+ }
+
+ pushQueue({
+ type: type,
+ sourceUpdater: this,
+ action: actions.changeType(codec),
+ name: 'changeType'
+ });
+ }
+ /**
+ * Add source buffers with a codec or, if they are already created,
+ * call changeType on them with the new codec.
+ *
+ * @param {Object} codecs
+ * Codecs to switch to
+ */
+ ;
+
+ _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
+ var _this2 = this;
+
+ if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
+ throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
+ }
+
+ Object.keys(codecs).forEach(function (type) {
+ var codec = codecs[type];
+
+ if (!_this2.hasCreatedSourceBuffers()) {
+ return _this2.addSourceBuffer(type, codec);
+ }
+
+ if (_this2.canChangeType()) {
+ _this2.changeType(type, codec);
+ }
+ });
+ }
+ /**
+ * Queue an update to append an ArrayBuffer.
+ *
+ * @param {Object} options object containing the segment `type` ('audio' or 'video'), the `bytes` to append and an optional `segmentInfo`
+ * @param {Function} doneFn the function to call when done
+ * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
+ */
+ ;
+
+ _proto.appendBuffer = function appendBuffer(options, doneFn) {
+ var _this3 = this;
+
+ var segmentInfo = options.segmentInfo,
+ type = options.type,
+ bytes = options.bytes;
+ this.processedAppend_ = true;
+
+ if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
+ this.delayedAudioAppendQueue_.push([options, doneFn]);
+ this.logger_("delayed audio append of " + bytes.length + " until video append");
+ return;
+ } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
+ // not be fired. This means that the queue will be blocked until the next action
+ // taken by the segment-loader. Provide a mechanism for segment-loader to handle
+ // these errors by calling the doneFn with the specific error.
+
+
+ var onError = doneFn;
+ pushQueue({
+ type: type,
+ sourceUpdater: this,
+ action: actions.appendBuffer(bytes, segmentInfo || {
+ mediaIndex: -1
+ }, onError),
+ doneFn: doneFn,
+ name: 'appendBuffer'
+ });
+
+ if (type === 'video') {
+ this.videoAppendQueued_ = true;
+
+ if (!this.delayedAudioAppendQueue_.length) {
+ return;
+ }
+
+ var queue = this.delayedAudioAppendQueue_.slice();
+ this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
+ this.delayedAudioAppendQueue_.length = 0;
+ queue.forEach(function (que) {
+ _this3.appendBuffer.apply(_this3, que);
+ });
+ }
+ }
+ /**
+ * Get the audio buffer's buffered timerange.
+ *
+ * @return {TimeRange}
+ * The audio buffer's buffered time range
+ */
+ ;
+
+ _proto.audioBuffered = function audioBuffered() {
+ // no media source/source buffer or it isn't in the media sources
+ // source buffer list
+ if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
+ return videojs.createTimeRange();
+ }
+
+ return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs.createTimeRange();
+ }
+ /**
+ * Get the video buffer's buffered timerange.
+ *
+ * @return {TimeRange}
+ * The video buffer's buffered time range
+ */
+ ;
+
+ _proto.videoBuffered = function videoBuffered() {
+ // no media source/source buffer or it isn't in the media sources
+ // source buffer list
+ if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
+ return videojs.createTimeRange();
+ }
+
+ return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs.createTimeRange();
+ }
+ /**
+ * Get a combined video/audio buffer's buffered timerange.
+ *
+ * @return {TimeRange}
+ * the combined time range
+ */
+ ;
+
+ _proto.buffered = function buffered() {
+ var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
+ var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
+
+ if (audio && !video) {
+ return this.audioBuffered();
+ }
+
+ if (video && !audio) {
+ return this.videoBuffered();
+ }
+
+ return bufferIntersection(this.audioBuffered(), this.videoBuffered());
+ }
+ /**
+ * Add a callback to the queue that will set duration on the mediaSource.
+ *
+ * @param {number} duration
+ * The duration to set
+ *
+ * @param {Function} [doneFn]
+ * function to run after duration has been set.
+ */
+ ;
+
+ _proto.setDuration = function setDuration(duration, doneFn) {
+ if (doneFn === void 0) {
+ doneFn = noop;
+ } // In order to set the duration on the media source, it's necessary to wait for all
+ // source buffers to no longer be updating. "If the updating attribute equals true on
+ // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
+ // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
+
+
+ pushQueue({
+ type: 'mediaSource',
+ sourceUpdater: this,
+ action: actions.duration(duration),
+ name: 'duration',
+ doneFn: doneFn
+ });
+ }
+ /**
+ * Add a mediaSource endOfStream call to the queue
+ *
+ * @param {Error} [error]
+ * Call endOfStream with an error
+ *
+ * @param {Function} [doneFn]
+ * A function that should be called when the
+ * endOfStream call has finished.
+ */
+ ;
+
+ _proto.endOfStream = function endOfStream(error, doneFn) {
+ if (error === void 0) {
+ error = null;
+ }
+
+ if (doneFn === void 0) {
+ doneFn = noop;
+ }
+
+ if (typeof error !== 'string') {
+ error = undefined;
+ } // In order to call endOfStream on the media source, it's necessary to wait for all
+ // source buffers to no longer be updating. "If the updating attribute equals true on
+ // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
+ // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
+
+
+ pushQueue({
+ type: 'mediaSource',
+ sourceUpdater: this,
+ action: actions.endOfStream(error),
+ name: 'endOfStream',
+ doneFn: doneFn
+ });
+ }
+ /**
+ * Queue an update to remove a time range from the buffer.
+ *
+ * @param {number} start where to start the removal
+ * @param {number} end where to end the removal
+ * @param {Function} [done=noop] optional callback to be executed when the remove
+ * operation is complete
+ * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
+ */
+ ;
+
+ _proto.removeAudio = function removeAudio(start, end, done) {
+ if (done === void 0) {
+ done = noop;
+ }
+
+ if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
+ done();
+ return;
+ }
+
+ pushQueue({
+ type: 'audio',
+ sourceUpdater: this,
+ action: actions.remove(start, end),
+ doneFn: done,
+ name: 'remove'
+ });
+ }
+ /**
+ * Queue an update to remove a time range from the buffer.
+ *
+ * @param {number} start where to start the removal
+ * @param {number} end where to end the removal
+ * @param {Function} [done=noop] optional callback to be executed when the remove
+ * operation is complete
+ * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
+ */
+ ;
+
+ _proto.removeVideo = function removeVideo(start, end, done) {
+ if (done === void 0) {
+ done = noop;
+ }
+
+ if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
+ done();
+ return;
+ }
+
+ pushQueue({
+ type: 'video',
+ sourceUpdater: this,
+ action: actions.remove(start, end),
+ doneFn: done,
+ name: 'remove'
+ });
+ }
+ /**
+ * Whether the underlying sourceBuffer is updating or not
+ *
+ * @return {boolean} the updating status of the SourceBuffer
+ */
+ ;
+
+ _proto.updating = function updating() {
+ // the audio/video source buffer is updating
+ if (_updating('audio', this) || _updating('video', this)) {
+ return true;
+ }
+
+ return false;
+ }
+ /**
+ * Set/get the timestampoffset on the audio SourceBuffer
+ *
+ * @return {number} the timestamp offset
+ */
+ ;
+
+ _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
+ if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
+ this.audioTimestampOffset_ !== offset) {
+ pushQueue({
+ type: 'audio',
+ sourceUpdater: this,
+ action: actions.timestampOffset(offset),
+ name: 'timestampOffset'
+ });
+ this.audioTimestampOffset_ = offset;
+ }
+
+ return this.audioTimestampOffset_;
+ }
+ /**
+ * Set/get the timestampoffset on the video SourceBuffer
+ *
+ * @return {number} the timestamp offset
+ */
+ ;
+
+ _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
+ if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
+ this.videoTimestampOffset_ !== offset) {
+ pushQueue({
+ type: 'video',
+ sourceUpdater: this,
+ action: actions.timestampOffset(offset),
+ name: 'timestampOffset'
+ });
+ this.videoTimestampOffset_ = offset;
+ }
+
+ return this.videoTimestampOffset_;
+ }
+ /**
+ * Add a function to the queue that will be called
+ * when it is its turn to run in the audio queue.
+ *
+ * @param {Function} callback
+ * The callback to queue.
+ */
+ ;
+
+ _proto.audioQueueCallback = function audioQueueCallback(callback) {
+ if (!this.audioBuffer) {
+ return;
+ }
+
+ pushQueue({
+ type: 'audio',
+ sourceUpdater: this,
+ action: actions.callback(callback),
+ name: 'callback'
+ });
+ }
+ /**
+ * Add a function to the queue that will be called
+ * when it is its turn to run in the video queue.
+ *
+ * @param {Function} callback
+ * The callback to queue.
+ */
+ ;
+
+ _proto.videoQueueCallback = function videoQueueCallback(callback) {
+ if (!this.videoBuffer) {
+ return;
+ }
+
+ pushQueue({
+ type: 'video',
+ sourceUpdater: this,
+ action: actions.callback(callback),
+ name: 'callback'
+ });
+ }
+ /**
+ * dispose of the source updater and the underlying sourceBuffer
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ var _this4 = this;
+
+ this.trigger('dispose');
+ bufferTypes.forEach(function (type) {
+ _this4.abort(type);
+
+ if (_this4.canRemoveSourceBuffer()) {
+ _this4.removeSourceBuffer(type);
+ } else {
+ _this4[type + "QueueCallback"](function () {
+ return cleanupBuffer(type, _this4);
+ });
+ }
+ });
+ this.videoAppendQueued_ = false;
+ this.delayedAudioAppendQueue_.length = 0;
+
+ if (this.sourceopenListener_) {
+ this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
+ }
+
+ this.off();
+ };
+
+ return SourceUpdater;
+}(videojs.EventTarget);
+
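+// A minimal usage sketch for SourceUpdater, assuming the caller supplies a
+// MediaSource plus already-transmuxed video and audio bytes (all names below are
+// hypothetical and illustrative only); it is never referenced elsewhere and only
+// shows the queueing API defined above.
+var exampleSourceUpdaterUsage = function exampleSourceUpdaterUsage(exampleMediaSource, exampleVideoBytes, exampleAudioBytes) {
+ var updater = new SourceUpdater(exampleMediaSource);
+
+ updater.on('ready', function () {
+ // appends are queued and only run while the matching source buffer is not updating
+ updater.appendBuffer({
+ type: 'video',
+ bytes: exampleVideoBytes
+ }, function (error) {
+ if (error) {
+ videojs.log.warn('example video append failed', error);
+ }
+ });
+ updater.appendBuffer({
+ type: 'audio',
+ bytes: exampleAudioBytes
+ }, function () {});
+ }); // 'ready' fires once the source buffers exist and eme initialization has been signalled
+
+ updater.createSourceBuffers({
+ video: 'avc1.4d401f',
+ audio: 'mp4a.40.2'
+ });
+ updater.initializedEme();
+ return updater;
+};
+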
+var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
+ return decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
+};
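+// For example, the three UTF-8 bytes 0xe2 0x82 0xac decode to a single
+// character: uint8ToUtf8(new Uint8Array([0xe2, 0x82, 0xac])) returns '€'.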
+
+var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (_char3) {
+ return _char3.charCodeAt(0);
+}));
+/**
+ * An object that manages segment loading and appending.
+ *
+ * @class VTTSegmentLoader
+ * @param {Object} options required and optional options
+ * @extends videojs.EventTarget
+ */
+
+var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
+ _inheritsLoose(VTTSegmentLoader, _SegmentLoader);
+
+ function VTTSegmentLoader(settings, options) {
+ var _this;
+
+ if (options === void 0) {
+ options = {};
+ }
+
+ _this = _SegmentLoader.call(this, settings, options) || this; // SegmentLoader requires a MediaSource be specified or it will throw an error;
+ // however, VTTSegmentLoader has no need of a media source, so delete the reference
+
+ _this.mediaSource_ = null;
+ _this.subtitlesTrack_ = null;
+ _this.loaderType_ = 'subtitle';
+ _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
+ // the sync controller leads to improper behavior.
+
+ _this.shouldSaveSegmentTimingInfo_ = false;
+ return _this;
+ }
+
+ var _proto = VTTSegmentLoader.prototype;
+
+ _proto.createTransmuxer_ = function createTransmuxer_() {
+ // don't need to transmux any subtitles
+ return null;
+ }
+ /**
+ * Indicates which time ranges are buffered
+ *
+ * @return {TimeRange}
+ * TimeRange object representing the current buffered ranges
+ */
+ ;
+
+ _proto.buffered_ = function buffered_() {
+ if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
+ return videojs.createTimeRanges();
+ }
+
+ var cues = this.subtitlesTrack_.cues;
+ var start = cues[0].startTime;
+ var end = cues[cues.length - 1].startTime;
+ return videojs.createTimeRanges([[start, end]]);
+ }
+ /**
+ * Gets and sets init segment for the provided map
+ *
+ * @param {Object} map
+ * The map object representing the init segment to get or set
+ * @param {boolean=} set
+ * If true, the init segment for the provided map should be saved
+ * @return {Object}
+ * map object for desired init segment
+ */
+ ;
+
+ _proto.initSegmentForMap = function initSegmentForMap(map, set) {
+ if (set === void 0) {
+ set = false;
+ }
+
+ if (!map) {
+ return null;
+ }
+
+ var id = initSegmentId(map);
+ var storedMap = this.initSegments_[id];
+
+ if (set && !storedMap && map.bytes) {
+ // append WebVTT line terminators to the media initialization segment if it exists
+ // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
+ // requires two or more WebVTT line terminators between the WebVTT header and the
+ // rest of the file
+ var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
+ var combinedSegment = new Uint8Array(combinedByteLength);
+ combinedSegment.set(map.bytes);
+ combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
+ this.initSegments_[id] = storedMap = {
+ resolvedUri: map.resolvedUri,
+ byterange: map.byterange,
+ bytes: combinedSegment
+ };
+ }
+
+ return storedMap || map;
+ }
+ /**
+ * Returns true if all configuration required for loading is present, otherwise false.
+ *
+ * @return {boolean} True if all the configuration required for loading is present
+ * @private
+ */
+ ;
+
+ _proto.couldBeginLoading_ = function couldBeginLoading_() {
+ return this.playlist_ && this.subtitlesTrack_ && !this.paused();
+ }
+ /**
+ * Once all the starting parameters have been specified, begin
+ * operation. This method should only be invoked from the INIT
+ * state.
+ *
+ * @private
+ */
+ ;
+
+ _proto.init_ = function init_() {
+ this.state = 'READY';
+ this.resetEverything();
+ return this.monitorBuffer_();
+ }
+ /**
+ * Set a subtitle track on the segment loader to add subtitles to
+ *
+ * @param {TextTrack=} track
+ * The text track to add loaded subtitles to
+ * @return {TextTrack}
+ * Returns the subtitles track
+ */
+ ;
+
+ _proto.track = function track(_track) {
+ if (typeof _track === 'undefined') {
+ return this.subtitlesTrack_;
+ }
+
+ this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
+ // buffering now
+
+ if (this.state === 'INIT' && this.couldBeginLoading_()) {
+ this.init_();
+ }
+
+ return this.subtitlesTrack_;
+ }
+ /**
+ * Remove any data in the source buffer between start and end times
+ *
+ * @param {number} start - the start time of the region to remove from the buffer
+ * @param {number} end - the end time of the region to remove from the buffer
+ */
+ ;
+
+ _proto.remove = function remove(start, end) {
+ removeCuesFromTrack(start, end, this.subtitlesTrack_);
+ }
+ /**
+ * fill the buffer with segments unless the sourceBuffers are
+ * currently updating
+ *
+ * Note: this function should only ever be called by monitorBuffer_
+ * and never directly
+ *
+ * @private
+ */
+ ;
+
+ _proto.fillBuffer_ = function fillBuffer_() {
+ var _this2 = this; // see if we need to begin loading immediately
+
+
+ var segmentInfo = this.chooseNextRequest_();
+
+ if (!segmentInfo) {
+ return;
+ }
+
+ if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
+ // We don't have the timestamp offset that we need to sync subtitles.
+ // Rerun on a timestamp offset or user interaction.
+ var checkTimestampOffset = function checkTimestampOffset() {
+ _this2.state = 'READY';
+
+ if (!_this2.paused()) {
+ // if not paused, queue a buffer check as soon as possible
+ _this2.monitorBuffer_();
+ }
+ };
+
+ this.syncController_.one('timestampoffset', checkTimestampOffset);
+ this.state = 'WAITING_ON_TIMELINE';
+ return;
+ }
+
+ this.loadSegment_(segmentInfo);
+ } // never set a timestamp offset for vtt segments.
+ ;
+
+ _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_() {
+ return null;
+ };
+
+ _proto.chooseNextRequest_ = function chooseNextRequest_() {
+ return this.skipEmptySegments_(_SegmentLoader.prototype.chooseNextRequest_.call(this));
+ }
+ /**
+ * Prevents the segment loader from requesting segments we know contain no subtitles
+ * by walking forward until we find the next segment whose emptiness is not yet
+ * known.
+ *
+ * @param {Object} segmentInfo
+ * a segment info object that describes the current segment
+ * @return {Object}
+ * a segment info object that describes the current segment
+ */
+ ;
+
+ _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
+ while (segmentInfo && segmentInfo.segment.empty) {
+ // stop at the last possible segmentInfo
+ if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
+ segmentInfo = null;
+ break;
+ }
+
+ segmentInfo = this.generateSegmentInfo_({
+ playlist: segmentInfo.playlist,
+ mediaIndex: segmentInfo.mediaIndex + 1,
+ startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
+ isSyncRequest: segmentInfo.isSyncRequest
+ });
+ }
+
+ return segmentInfo;
+ };
+
+ _proto.stopForError = function stopForError(error) {
+ this.error(error);
+ this.state = 'READY';
+ this.pause();
+ this.trigger('error');
+ }
+ /**
+ * append a decrypted segment to the SourceBuffer through a SourceUpdater
+ *
+ * @private
+ */
+ ;
+
+ _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
+ var _this3 = this;
+
+ if (!this.subtitlesTrack_) {
+ this.state = 'READY';
+ return;
+ }
+
+ this.saveTransferStats_(simpleSegment.stats); // the request was aborted
+
+ if (!this.pendingSegment_) {
+ this.state = 'READY';
+ this.mediaRequestsAborted += 1;
+ return;
+ }
+
+ if (error) {
+ if (error.code === REQUEST_ERRORS.TIMEOUT) {
+ this.handleTimeout_();
+ }
+
+ if (error.code === REQUEST_ERRORS.ABORTED) {
+ this.mediaRequestsAborted += 1;
+ } else {
+ this.mediaRequestsErrored += 1;
+ }
+
+ this.stopForError(error);
+ return;
+ }
+
+ var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
+ // maintain functionality between segment loaders
+
+ this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
+ this.state = 'APPENDING'; // used for tests
+
+ this.trigger('appending');
+ var segment = segmentInfo.segment;
+
+ if (segment.map) {
+ segment.map.bytes = simpleSegment.map.bytes;
+ }
+
+ segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded, otherwise, wait till it finished loading
+
+ if (typeof window$1.WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {
+ var loadHandler;
+
+ var errorHandler = function errorHandler() {
+ _this3.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
+
+ _this3.stopForError({
+ message: 'Error loading vtt.js'
+ });
+
+ return;
+ };
+
+ loadHandler = function loadHandler() {
+ _this3.subtitlesTrack_.tech_.off('vttjserror', errorHandler);
+
+ _this3.segmentRequestFinished_(error, simpleSegment, result);
+ };
+
+ this.state = 'WAITING_ON_VTTJS';
+ this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
+ this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);
+ return;
+ }
+
+ segment.requested = true;
+
+ try {
+ this.parseVTTCues_(segmentInfo);
+ } catch (e) {
+ this.stopForError({
+ message: e.message
+ });
+ return;
+ }
+
+ this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
+
+ if (segmentInfo.cues.length) {
+ segmentInfo.timingInfo = {
+ start: segmentInfo.cues[0].startTime,
+ end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
+ };
+ } else {
+ segmentInfo.timingInfo = {
+ start: segmentInfo.startOfSegment,
+ end: segmentInfo.startOfSegment + segmentInfo.duration
+ };
+ }
+
+ if (segmentInfo.isSyncRequest) {
+ this.trigger('syncinfoupdate');
+ this.pendingSegment_ = null;
+ this.state = 'READY';
+ return;
+ }
+
+ segmentInfo.byteLength = segmentInfo.bytes.byteLength;
+ this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
+ // the subtitle track
+
+ segmentInfo.cues.forEach(function (cue) {
+ _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window$1.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
+ }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
+ // cues to have identical time-intervals, but if the text is also identical
+ // we can safely assume it is a duplicate that can be removed (ex. when a cue
+ // "overlaps" VTT segments)
+
+ removeDuplicateCuesFromTrack(this.subtitlesTrack_);
+ this.handleAppendsDone_();
+ };
+
+ _proto.handleData_ = function handleData_() {// noop as we shouldn't be getting video/audio data captions
+ // that we do not support here.
+ };
+
+ _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
+ }
+ /**
+ * Uses the WebVTT parser to parse the segment response
+ *
+ * @param {Object} segmentInfo
+ * a segment info object that describes the current segment
+ * @private
+ */
+ ;
+
+ _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
+ var decoder;
+ var decodeBytesToString = false;
+
+ if (typeof window$1.TextDecoder === 'function') {
+ decoder = new window$1.TextDecoder('utf8');
+ } else {
+ decoder = window$1.WebVTT.StringDecoder();
+ decodeBytesToString = true;
+ }
+
+ var parser = new window$1.WebVTT.Parser(window$1, window$1.vttjs, decoder);
+ segmentInfo.cues = [];
+ segmentInfo.timestampmap = {
+ MPEGTS: 0,
+ LOCAL: 0
+ };
+ parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
+
+ parser.ontimestampmap = function (map) {
+ segmentInfo.timestampmap = map;
+ };
+
+ parser.onparsingerror = function (error) {
+ videojs.log.warn('Error encountered when parsing cues: ' + error.message);
+ };
+
+ if (segmentInfo.segment.map) {
+ var mapData = segmentInfo.segment.map.bytes;
+
+ if (decodeBytesToString) {
+ mapData = uint8ToUtf8(mapData);
+ }
+
+ parser.parse(mapData);
+ }
+
+ var segmentData = segmentInfo.bytes;
+
+ if (decodeBytesToString) {
+ segmentData = uint8ToUtf8(segmentData);
+ }
+
+ parser.parse(segmentData);
+ parser.flush();
+ }
+ /**
+ * Updates the start and end times of any cues parsed by the WebVTT parser using
+ * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
+ * from the SyncController
+ *
+ * @param {Object} segmentInfo
+ * a segment info object that describes the current segment
+ * @param {Object} mappingObj
+ * object containing a mapping from TS to media time
+ * @param {Object} playlist
+ * the playlist object containing the segment
+ * @private
+ */
+ ;
+
+ _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
+ var segment = segmentInfo.segment;
+
+ if (!mappingObj) {
+ // If the sync controller does not have a mapping of TS to Media Time for the
+ // timeline, then we don't have enough information to update the cue
+ // start/end times
+ return;
+ }
+
+ if (!segmentInfo.cues.length) {
+ // If there are no cues, we also do not have enough information to figure out
+ // segment timing. Mark that the segment contains no cues so we don't re-request
+ // an empty segment.
+ segment.empty = true;
+ return;
+ }
+
+ var timestampmap = segmentInfo.timestampmap;
+ var diff = timestampmap.MPEGTS / ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
+ segmentInfo.cues.forEach(function (cue) {
+ // First convert cue time to TS time using the timestamp-map provided within the vtt
+ cue.startTime += diff;
+ cue.endTime += diff;
+ });
+
+ if (!playlist.syncInfo) {
+ var firstStart = segmentInfo.cues[0].startTime;
+ var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
+ playlist.syncInfo = {
+ mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
+ time: Math.min(firstStart, lastStart - segment.duration)
+ };
+ }
+ };
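+ // Worked example of the mapping above, assuming ONE_SECOND_IN_TS is the 90kHz
+ // MPEG-TS clock (90000): with X-TIMESTAMP-MAP values MPEGTS=900000, LOCAL=0 and
+ // a timeline mapping of -5, diff = 900000 / 90000 - 0 + (-5) = 5, so a cue
+ // authored at 2s in the VTT is shifted to 7s of display time.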
+
+ return VTTSegmentLoader;
+}(SegmentLoader);
+/**
+ * @file ad-cue-tags.js
+ */
+
+/**
+ * Searches for an ad cue that overlaps with the given mediaTime
+ *
+ * @param {Object} track
+ * the track to find the cue for
+ *
+ * @param {number} mediaTime
+ * the time to find the cue at
+ *
+ * @return {Object|null}
+ * the found cue or null
+ */
+
+
+var findAdCue = function findAdCue(track, mediaTime) {
+ var cues = track.cues;
+
+ for (var i = 0; i < cues.length; i++) {
+ var cue = cues[i];
+
+ if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
+ return cue;
+ }
+ }
+
+ return null;
+};
+
+var updateAdCues = function updateAdCues(media, track, offset) {
+ if (offset === void 0) {
+ offset = 0;
+ }
+
+ if (!media.segments) {
+ return;
+ }
+
+ var mediaTime = offset;
+ var cue;
+
+ for (var i = 0; i < media.segments.length; i++) {
+ var segment = media.segments[i];
+
+ if (!cue) {
+ // Since the cues will span for at least the segment duration, adding a fudge
+ // factor of half segment duration will prevent duplicate cues from being
+ // created when timing info is not exact (e.g. cue start time initialized
+ // at 10.006677, but next call mediaTime is 10.003332 )
+ cue = findAdCue(track, mediaTime + segment.duration / 2);
+ }
+
+ if (cue) {
+ if ('cueIn' in segment) {
+ // Found a CUE-IN so end the cue
+ cue.endTime = mediaTime;
+ cue.adEndTime = mediaTime;
+ mediaTime += segment.duration;
+ cue = null;
+ continue;
+ }
+
+ if (mediaTime < cue.endTime) {
+ // Already processed this mediaTime for this cue
+ mediaTime += segment.duration;
+ continue;
+ } // otherwise extend cue until a CUE-IN is found
+
+
+ cue.endTime += segment.duration;
+ } else {
+ if ('cueOut' in segment) {
+ cue = new window$1.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
+ cue.adStartTime = mediaTime; // Assumes tag format to be
+ // #EXT-X-CUE-OUT:30
+
+ cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
+ track.addCue(cue);
+ }
+
+ if ('cueOutCont' in segment) {
+ // Entered into the middle of an ad cue
+ // Assumes tag format to be
+ // #EXT-X-CUE-OUT-CONT:10/30
+ var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
+ adOffset = _segment$cueOutCont$s[0],
+ adTotal = _segment$cueOutCont$s[1];
+
+ cue = new window$1.VTTCue(mediaTime, mediaTime + segment.duration, '');
+ cue.adStartTime = mediaTime - adOffset;
+ cue.adEndTime = cue.adStartTime + adTotal;
+ track.addCue(cue);
+ }
+ }
+
+ mediaTime += segment.duration;
+ }
+}; // synchronize expired playlist segments.
+// the max media sequence diff is 48 hours of live stream
+// content with two second segments. Anything larger than that
+// will likely be invalid.
+
+
+var MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;
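+// i.e. (48 hours * 3600 seconds per hour) / 2 second segments = 86400 segments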
+var syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
+// the equivalence display-time 0 === segment-index 0
+{
+ name: 'VOD',
+ run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
+ if (duration !== Infinity) {
+ var syncPoint = {
+ time: 0,
+ segmentIndex: 0,
+ partIndex: null
+ };
+ return syncPoint;
+ }
+
+ return null;
+ }
+}, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
+{
+ name: 'ProgramDateTime',
+ run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
+ if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
+ return null;
+ }
+
+ var syncPoint = null;
+ var lastDistance = null;
+ var partsAndSegments = getPartsAndSegments(playlist);
+ currentTime = currentTime || 0;
+
+ for (var i = 0; i < partsAndSegments.length; i++) {
+ // start from the end and loop backwards for live
+ // or start from the front and loop forwards for non-live
+ var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
+ var partAndSegment = partsAndSegments[index];
+ var segment = partAndSegment.segment;
+ var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];
+
+ if (!datetimeMapping || !segment.dateTimeObject) {
+ continue;
+ }
+
+ var segmentTime = segment.dateTimeObject.getTime() / 1000;
+ var start = segmentTime + datetimeMapping; // take part duration into account.
+
+ if (segment.parts && typeof partAndSegment.partIndex === 'number') {
+ for (var z = 0; z < partAndSegment.partIndex; z++) {
+ start += segment.parts[z].duration;
+ }
+ }
+
+ var distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
+ // currentTime and can stop looking for better candidates
+
+ if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
+ break;
+ }
+
+ lastDistance = distance;
+ syncPoint = {
+ time: start,
+ segmentIndex: partAndSegment.segmentIndex,
+ partIndex: partAndSegment.partIndex
+ };
+ }
+
+ return syncPoint;
+ }
+}, // Strategy "Segment": We have a known time mapping for a timeline and a
+// segment in the current timeline with timing data
+{
+ name: 'Segment',
+ run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
+ var syncPoint = null;
+ var lastDistance = null;
+ currentTime = currentTime || 0;
+ var partsAndSegments = getPartsAndSegments(playlist);
+
+ for (var i = 0; i < partsAndSegments.length; i++) {
+ // start from the end and loop backwards for live
+ // or start from the front and loop forwards for non-live
+ var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
+ var partAndSegment = partsAndSegments[index];
+ var segment = partAndSegment.segment;
+ var start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;
+
+ if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
+ var distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
+ // currentTime and can stop looking for better candidates
+
+ if (lastDistance !== null && lastDistance < distance) {
+ break;
+ }
+
+ if (!syncPoint || lastDistance === null || lastDistance >= distance) {
+ lastDistance = distance;
+ syncPoint = {
+ time: start,
+ segmentIndex: partAndSegment.segmentIndex,
+ partIndex: partAndSegment.partIndex
+ };
+ }
+ }
+ }
+
+ return syncPoint;
+ }
+}, // Strategy "Discontinuity": We have a discontinuity with a known
+// display-time
+{
+ name: 'Discontinuity',
+ run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
+ var syncPoint = null;
+ currentTime = currentTime || 0;
+
+ if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
+ var lastDistance = null;
+
+ for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
+ var segmentIndex = playlist.discontinuityStarts[i];
+ var discontinuity = playlist.discontinuitySequence + i + 1;
+ var discontinuitySync = syncController.discontinuities[discontinuity];
+
+ if (discontinuitySync) {
+ var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
+ // currentTime and can stop looking for better candidates
+
+ if (lastDistance !== null && lastDistance < distance) {
+ break;
+ }
+
+ if (!syncPoint || lastDistance === null || lastDistance >= distance) {
+ lastDistance = distance;
+ syncPoint = {
+ time: discontinuitySync.time,
+ segmentIndex: segmentIndex,
+ partIndex: null
+ };
+ }
+ }
+ }
+ }
+
+ return syncPoint;
+ }
+}, // Strategy "Playlist": We have a playlist with a known mapping of
+// segment index to display time
+{
+ name: 'Playlist',
+ run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
+ if (playlist.syncInfo) {
+ var syncPoint = {
+ time: playlist.syncInfo.time,
+ segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
+ partIndex: null
+ };
+ return syncPoint;
+ }
+
+ return null;
+ }
+}];
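+
+// Each strategy's run() returns either a candidate sync-point of the shape
+// { time, segmentIndex, partIndex } or null; runStrategies_ below collects the
+// non-null candidates and selectSyncPoint_ picks the one closest to the target.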
+
+var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
+ _inheritsLoose(SyncController, _videojs$EventTarget);
+
+ function SyncController(options) {
+ var _this;
+
+ _this = _videojs$EventTarget.call(this) || this; // ...for synching across variants
+
+ _this.timelines = [];
+ _this.discontinuities = [];
+ _this.timelineToDatetimeMappings = {};
+ _this.logger_ = logger('SyncController');
+ return _this;
+ }
+ /**
+ * Find a sync-point for the playlist specified
+ *
+ * A sync-point is defined as a known mapping from display-time to
+ * a segment-index in the current playlist.
+ *
+ * @param {Playlist} playlist
+ * The playlist that needs a sync-point
+ * @param {number} duration
+ * Duration of the MediaSource (Infinite if playing a live source)
+ * @param {number} currentTimeline
+ * The last timeline from which a segment was loaded
+ * @return {Object}
+ * A sync-point object
+ */
+
+
+ var _proto = SyncController.prototype;
+
+ _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
+ var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
+
+ if (!syncPoints.length) {
+ // Signal that we need to attempt to get a sync-point manually
+ // by fetching a segment in the playlist and constructing
+ // a sync-point from that information
+ return null;
+ } // Now find the sync-point that is closest to the currentTime because
+ // that should result in the most accurate guess about which segment
+ // to fetch
+
+
+ return this.selectSyncPoint_(syncPoints, {
+ key: 'time',
+ value: currentTime
+ });
+ }
+ /**
+ * Calculate the amount of time that has expired off the playlist during playback
+ *
+ * @param {Playlist} playlist
+ * Playlist object to calculate expired from
+ * @param {number} duration
+ * Duration of the MediaSource (Infinity if playing a live source)
+ * @return {number|null}
+ * The amount of time that has expired off the playlist during playback. Null
+ * if no sync-points for the playlist can be found.
+ */
+ ;
+
+ _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
+ if (!playlist || !playlist.segments) {
+ return null;
+ }
+
+ var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
+
+ if (!syncPoints.length) {
+ return null;
+ }
+
+ var syncPoint = this.selectSyncPoint_(syncPoints, {
+ key: 'segmentIndex',
+ value: 0
+ }); // If the sync-point is beyond the start of the playlist, we want to subtract the
+ // duration from index 0 to syncPoint.segmentIndex instead of adding.
+
+ if (syncPoint.segmentIndex > 0) {
+ syncPoint.time *= -1;
+ }
+
+ return Math.abs(syncPoint.time + sumDurations({
+ defaultDuration: playlist.targetDuration,
+ durationList: playlist.segments,
+ startIndex: syncPoint.segmentIndex,
+ endIndex: 0
+ }));
+ }
+ /**
+ * Runs each sync-point strategy and returns a list of sync-points returned by the
+ * strategies
+ *
+ * @private
+ * @param {Playlist} playlist
+ * The playlist that needs a sync-point
+ * @param {number} duration
+ * Duration of the MediaSource (Infinity if playing a live source)
+ * @param {number} currentTimeline
+ * The last timeline from which a segment was loaded
+ * @return {Array}
+ * A list of sync-point objects
+ */
+ ;
+
+ _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
+ var syncPoints = []; // Try to find a sync-point by utilizing various strategies...
+
+ for (var i = 0; i < syncPointStrategies.length; i++) {
+ var strategy = syncPointStrategies[i];
+ var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
+
+ if (syncPoint) {
+ syncPoint.strategy = strategy.name;
+ syncPoints.push({
+ strategy: strategy.name,
+ syncPoint: syncPoint
+ });
+ }
+ }
+
+ return syncPoints;
+ }
+ /**
+ * Selects the sync-point nearest the specified target
+ *
+ * @private
+ * @param {Array} syncPoints
+ * List of sync-points to select from
+ * @param {Object} target
+ * Object specifying the property and value we are targeting
+ * @param {string} target.key
+ * Specifies the property to target. Must be either 'time' or 'segmentIndex'
+ * @param {number} target.value
+ * The value to target for the specified key.
+ * @return {Object}
+ * The sync-point nearest the target
+ */
+ ;
+
+ _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
+ var bestSyncPoint = syncPoints[0].syncPoint;
+ var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
+ var bestStrategy = syncPoints[0].strategy;
+
+ for (var i = 1; i < syncPoints.length; i++) {
+ var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
+
+ if (newDistance < bestDistance) {
+ bestDistance = newDistance;
+ bestSyncPoint = syncPoints[i].syncPoint;
+ bestStrategy = syncPoints[i].strategy;
+ }
+ }
+
+ this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex) + (typeof bestSyncPoint.partIndex === 'number' ? ",partIndex:" + bestSyncPoint.partIndex : '') + ']');
+ return bestSyncPoint;
+ }
+ /**
+ * Save any meta-data present on the segments when segments leave
+ * the live window to the playlist to allow for synchronization at the
+ * playlist level later.
+ *
+ * @param {Playlist} oldPlaylist - The previous active playlist
+ * @param {Playlist} newPlaylist - The updated and most current playlist
+ */
+ ;
+
+ _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
+ var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gaps
+
+ if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
+ videojs.log.warn("Not saving expired segment info. Media sequence gap " + mediaSequenceDiff + " is too large.");
+ return;
+ } // When a segment expires from the playlist and it has a start time
+ // save that information as a possible sync-point reference in future
+
+
+ for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
+ var lastRemovedSegment = oldPlaylist.segments[i];
+
+ if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
+ newPlaylist.syncInfo = {
+ mediaSequence: oldPlaylist.mediaSequence + i,
+ time: lastRemovedSegment.start
+ };
+ this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
+ this.trigger('syncinfoupdate');
+ break;
+ }
+ }
+ }
+ /**
+ * Save the mapping from playlist's ProgramDateTime to display. This should only happen
+ * before segments start to load.
+ *
+ * @param {Playlist} playlist - The currently active playlist
+ */
+ ;
+
+ _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
+ // It's possible for the playlist to be updated before playback starts, meaning time
+ // zero is not yet set. If, during these playlist refreshes, a discontinuity is
+ // crossed, then the old time zero mapping (for the prior timeline) would be retained
+ // unless the mappings are cleared.
+ this.timelineToDatetimeMappings = {};
+
+ if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
+ var firstSegment = playlist.segments[0];
+ var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
+ this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
+ }
+ }
+ /**
+ * Calculates and saves timeline mappings, playlist sync info, and segment timing values
+ * based on the latest timing information.
+ *
+ * @param {Object} options
+ * Options object
+ * @param {SegmentInfo} options.segmentInfo
+ * The current active request information
+ * @param {boolean} options.shouldSaveTimelineMapping
+ * If there's a timeline change, determines if the timeline mapping should be
+ * saved for timeline mapping and program date time mappings.
+ */
+ ;
+
+ _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
+ var segmentInfo = _ref.segmentInfo,
+ shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
+ var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
+ var segment = segmentInfo.segment;
+
+ if (didCalculateSegmentTimeMapping) {
+ this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
+ // now with segment timing information
+
+ if (!segmentInfo.playlist.syncInfo) {
+ segmentInfo.playlist.syncInfo = {
+ mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
+ time: segment.start
+ };
+ }
+ }
+
+ var dateTime = segment.dateTimeObject;
+
+ if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
+ this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
+ }
+ };
+
+ _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
+ if (typeof this.timelines[timeline] === 'undefined') {
+ return null;
+ }
+
+ return this.timelines[timeline].time;
+ };
+
+ _proto.mappingForTimeline = function mappingForTimeline(timeline) {
+ if (typeof this.timelines[timeline] === 'undefined') {
+ return null;
+ }
+
+ return this.timelines[timeline].mapping;
+ }
+ /**
+ * Use the "media time" for a segment to generate a mapping to "display time" and
+ * save that display time to the segment.
+ *
+ * @private
+ * @param {SegmentInfo} segmentInfo
+ * The current active request information
+ * @param {Object} timingInfo
+ * The start and end time of the current segment in "media time"
+ * @param {boolean} shouldSaveTimelineMapping
+ * If there's a timeline change, determines if the timeline mapping should be
+ * saved in timelines.
+ * @return {boolean}
+ * Returns false if segment time mapping could not be calculated
+ */
+ ;
+
+ _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
+ // TODO: remove side effects
+ var segment = segmentInfo.segment;
+ var part = segmentInfo.part;
+ var mappingObj = this.timelines[segmentInfo.timeline];
+ var start;
+ var end;
+
+ if (typeof segmentInfo.timestampOffset === 'number') {
+ mappingObj = {
+ time: segmentInfo.startOfSegment,
+ mapping: segmentInfo.startOfSegment - timingInfo.start
+ };
+
+ if (shouldSaveTimelineMapping) {
+ this.timelines[segmentInfo.timeline] = mappingObj;
+ this.trigger('timestampoffset');
+ this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
+ }
+
+ start = segmentInfo.startOfSegment;
+ end = timingInfo.end + mappingObj.mapping;
+ } else if (mappingObj) {
+ start = timingInfo.start + mappingObj.mapping;
+ end = timingInfo.end + mappingObj.mapping;
+ } else {
+ return false;
+ }
+
+ if (part) {
+ part.start = start;
+ part.end = end;
+ } // If we don't have a segment start yet or the start value we got
+ // is less than our current segment.start value, save a new start value.
+ // We have to do this because parts will have segment timing info saved
+ // multiple times and we want segment start to be the earliest part start
+ // value for that segment.
+
+
+ if (!segment.start || start < segment.start) {
+ segment.start = start;
+ }
+
+ segment.end = end;
+ return true;
+ }
+ /**
+ * Each time we have a discontinuity in the playlist, attempt to calculate the location
+ * in display time of the start of the discontinuity and save that. We also save an accuracy
+ * value so that we keep the value with the most accuracy (closest to 0).
+ *
+ * @private
+ * @param {SegmentInfo} segmentInfo - The current active request information
+ */
+ ;
+
+ _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
+ var playlist = segmentInfo.playlist;
+ var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
+ // the range starts and its accuracy is 0 (greater accuracy values
+ // mean more approximation)
+
+ if (segment.discontinuity) {
+ this.discontinuities[segment.timeline] = {
+ time: segment.start,
+ accuracy: 0
+ };
+ } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
+ // Search for future discontinuities that we can provide better timing
+ // information for and save that information for sync purposes
+ for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
+ var segmentIndex = playlist.discontinuityStarts[i];
+ var discontinuity = playlist.discontinuitySequence + i + 1;
+ var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
+ var accuracy = Math.abs(mediaIndexDiff);
+
+ if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
+ var time = void 0;
+
+ if (mediaIndexDiff < 0) {
+ time = segment.start - sumDurations({
+ defaultDuration: playlist.targetDuration,
+ durationList: playlist.segments,
+ startIndex: segmentInfo.mediaIndex,
+ endIndex: segmentIndex
+ });
+ } else {
+ time = segment.end + sumDurations({
+ defaultDuration: playlist.targetDuration,
+ durationList: playlist.segments,
+ startIndex: segmentInfo.mediaIndex + 1,
+ endIndex: segmentIndex
+ });
+ }
+
+ this.discontinuities[discontinuity] = {
+ time: time,
+ accuracy: accuracy
+ };
+ }
+ }
+ }
+ };
+
+ _proto.dispose = function dispose() {
+ this.trigger('dispose');
+ this.off();
+ };
+
+ return SyncController;
+}(videojs.EventTarget);
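+
+// A minimal sketch of how a segment loader consults the sync controller, assuming
+// `playlist`, `duration`, `currentTimeline` and `currentTime` come from the
+// loader's current state:
+//
+//   var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime);
+//   // -> { time, segmentIndex, partIndex } or null when no strategy produced a candidate
+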
+/**
+ * The TimelineChangeController acts as a source for segment loaders to listen for and
+ * keep track of latest and pending timeline changes. This is useful to ensure proper
+ * sync, as each loader may need to make a consideration for what timeline the other
+ * loader is on before making changes which could impact the other loader's media.
+ *
+ * @class TimelineChangeController
+ * @extends videojs.EventTarget
+ */
+
+
+var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
+ _inheritsLoose(TimelineChangeController, _videojs$EventTarget);
+
+ function TimelineChangeController() {
+ var _this;
+
+ _this = _videojs$EventTarget.call(this) || this;
+ _this.pendingTimelineChanges_ = {};
+ _this.lastTimelineChanges_ = {};
+ return _this;
+ }
+
+ var _proto = TimelineChangeController.prototype;
+
+ _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
+ this.pendingTimelineChanges_[type] = null;
+ this.trigger('pendingtimelinechange');
+ };
+
+ _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
+ var type = _ref.type,
+ from = _ref.from,
+ to = _ref.to;
+
+ if (typeof from === 'number' && typeof to === 'number') {
+ this.pendingTimelineChanges_[type] = {
+ type: type,
+ from: from,
+ to: to
+ };
+ this.trigger('pendingtimelinechange');
+ }
+
+ return this.pendingTimelineChanges_[type];
+ };
+
+ _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
+ var type = _ref2.type,
+ from = _ref2.from,
+ to = _ref2.to;
+
+ if (typeof from === 'number' && typeof to === 'number') {
+ this.lastTimelineChanges_[type] = {
+ type: type,
+ from: from,
+ to: to
+ };
+ delete this.pendingTimelineChanges_[type];
+ this.trigger('timelinechange');
+ }
+
+ return this.lastTimelineChanges_[type];
+ };
+
+ _proto.dispose = function dispose() {
+ this.trigger('dispose');
+ this.pendingTimelineChanges_ = {};
+ this.lastTimelineChanges_ = {};
+ this.off();
+ };
+
+ return TimelineChangeController;
+}(videojs.EventTarget);
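+
+// A minimal sketch of the intended handshake, assuming a controller instance
+// `timelineChangeController` and a loader type such as 'main': a loader records a
+// pending change before appending across a timeline boundary and commits it once
+// the append completes:
+//
+//   timelineChangeController.pendingTimelineChange({ type: 'main', from: 0, to: 1 });
+//   // ...append the first segment of timeline 1...
+//   timelineChangeController.lastTimelineChange({ type: 'main', from: 0, to: 1 });
+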
+/* rollup-plugin-worker-factory start for worker!/Users/bclifford/Code/vhs-release-test/src/decrypter-worker.js */
+
+
+var workerCode = transform(getWorkerString(function () {
+ var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
+
+ function createCommonjsModule(fn, basedir, module) {
+ return module = {
+ path: basedir,
+ exports: {},
+ require: function require(path, base) {
+ return commonjsRequire(path, base === undefined || base === null ? module.path : base);
+ }
+ }, fn(module, module.exports), module.exports;
+ }
+
+ function commonjsRequire() {
+ throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
+ }
+
+ var createClass = createCommonjsModule(function (module) {
+ function _defineProperties(target, props) {
+ for (var i = 0; i < props.length; i++) {
+ var descriptor = props[i];
+ descriptor.enumerable = descriptor.enumerable || false;
+ descriptor.configurable = true;
+ if ("value" in descriptor) descriptor.writable = true;
+ Object.defineProperty(target, descriptor.key, descriptor);
+ }
+ }
+
+ function _createClass(Constructor, protoProps, staticProps) {
+ if (protoProps) _defineProperties(Constructor.prototype, protoProps);
+ if (staticProps) _defineProperties(Constructor, staticProps);
+ return Constructor;
+ }
+
+ module.exports = _createClass;
+ module.exports["default"] = module.exports, module.exports.__esModule = true;
+ });
+ var setPrototypeOf = createCommonjsModule(function (module) {
+ function _setPrototypeOf(o, p) {
+ module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
+ o.__proto__ = p;
+ return o;
+ };
+
+ module.exports["default"] = module.exports, module.exports.__esModule = true;
+ return _setPrototypeOf(o, p);
+ }
+
+ module.exports = _setPrototypeOf;
+ module.exports["default"] = module.exports, module.exports.__esModule = true;
+ });
+ var inheritsLoose = createCommonjsModule(function (module) {
+ function _inheritsLoose(subClass, superClass) {
+ subClass.prototype = Object.create(superClass.prototype);
+ subClass.prototype.constructor = subClass;
+ setPrototypeOf(subClass, superClass);
+ }
+
+ module.exports = _inheritsLoose;
+ module.exports["default"] = module.exports, module.exports.__esModule = true;
+ });
+ /**
+ * @file stream.js
+ */
+
+ /**
+ * A lightweight readable stream implementation that handles event dispatching.
+ *
+ * @class Stream
+ */
+
+ var Stream = /*#__PURE__*/function () {
+ function Stream() {
+ this.listeners = {};
+ }
+ /**
+ * Add a listener for a specified event type.
+ *
+ * @param {string} type the event name
+ * @param {Function} listener the callback to be invoked when an event of
+ * the specified type occurs
+ */
+
+
+ var _proto = Stream.prototype;
+
+ _proto.on = function on(type, listener) {
+ if (!this.listeners[type]) {
+ this.listeners[type] = [];
+ }
+
+ this.listeners[type].push(listener);
+ }
+ /**
+ * Remove a listener for a specified event type.
+ *
+ * @param {string} type the event name
+ * @param {Function} listener a function previously registered for this
+ * type of event through `on`
+ * @return {boolean} if we could turn it off or not
+ */
+ ;
+
+ _proto.off = function off(type, listener) {
+ if (!this.listeners[type]) {
+ return false;
+ }
+
+ var index = this.listeners[type].indexOf(listener); // TODO: which is better?
+ // In Video.js we slice listener functions
+ // on trigger so that it does not mess up the order
+ // while we loop through.
+ //
+ // Here we slice on off so that the loop in trigger
+ // can continue using its old reference to loop without
+ // messing up the order.
+
+ this.listeners[type] = this.listeners[type].slice(0);
+ this.listeners[type].splice(index, 1);
+ return index > -1;
+ }
+ /**
+ * Trigger an event of the specified type on this stream. Any additional
+ * arguments to this function are passed as parameters to event listeners.
+ *
+ * @param {string} type the event name
+ */
+ ;
+
+ _proto.trigger = function trigger(type) {
+ var callbacks = this.listeners[type];
+
+ if (!callbacks) {
+ return;
+ } // Slicing the arguments on every invocation of this method
+ // can add a significant amount of overhead. Avoid the
+ // intermediate object creation for the common case of a
+ // single callback argument
+
+
+ if (arguments.length === 2) {
+ var length = callbacks.length;
+
+ for (var i = 0; i < length; ++i) {
+ callbacks[i].call(this, arguments[1]);
+ }
+ } else {
+ var args = Array.prototype.slice.call(arguments, 1);
+ var _length = callbacks.length;
+
+ for (var _i = 0; _i < _length; ++_i) {
+ callbacks[_i].apply(this, args);
+ }
+ }
+ }
+ /**
+ * Destroys the stream and cleans up.
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ this.listeners = {};
+ }
+ /**
+ * Forwards all `data` events on this stream to the destination stream. The
+ * destination stream should provide a method `push` to receive the data
+ * events as they arrive.
+ *
+ * @param {Stream} destination the stream that will receive all `data` events
+ * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
+ */
+ ;
+
+ _proto.pipe = function pipe(destination) {
+ this.on('data', function (data) {
+ destination.push(data);
+ });
+ };
+
+ return Stream;
+ }();
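+ // Illustrative sketch of the Stream event API; `sink` is a hypothetical object
+ // exposing a push(data) method, as pipe() requires:
+ //
+ //   var s = new Stream();
+ //   var onData = function (bytes) { console.log('received', bytes); };
+ //   s.on('data', onData);
+ //   s.pipe(sink);                           // forwards every 'data' event to sink.push
+ //   s.trigger('data', new Uint8Array(16));  // both 'data' listeners fire
+ //   s.off('data', onData);                  // true, since onData was registered
+ //   s.dispose();                            // drops all remaining listeners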
+ /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
+
+ /**
+ * Returns the subarray of a Uint8Array without PKCS#7 padding.
+ *
+ * @param padded {Uint8Array} unencrypted bytes that have been padded
+ * @return {Uint8Array} the unpadded bytes
+ * @see http://tools.ietf.org/html/rfc5652
+ */
+
+
+ function unpad(padded) {
+ return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
+ }
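+ // Illustrative sketch: a 12-byte plaintext padded to a 16-byte AES block with
+ // PKCS#7 ends in four 0x04 bytes, so for such a buffer `padded` (hypothetical):
+ //
+ //   padded.byteLength;        // 16
+ //   padded[15];               // 4  (the pad length, read by unpad)
+ //   unpad(padded).byteLength; // 12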
+ /*! @name aes-decrypter @version 3.1.3 @license Apache-2.0 */
+
+ /**
+ * @file aes.js
+ *
+ * This file contains an adaptation of the AES decryption algorithm
+ * from the Stanford JavaScript Cryptography Library. That work is
+ * covered by the following copyright and permissions notice:
+ *
+ * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation
+ * are those of the authors and should not be interpreted as representing
+ * official policies, either expressed or implied, of the authors.
+ */
+
+ /**
+ * Expand the S-box tables.
+ *
+ * @private
+ */
+
+
+ var precompute = function precompute() {
+ var tables = [[[], [], [], [], []], [[], [], [], [], []]];
+ var encTable = tables[0];
+ var decTable = tables[1];
+ var sbox = encTable[4];
+ var sboxInv = decTable[4];
+ var i;
+ var x;
+ var xInv;
+ var d = [];
+ var th = [];
+ var x2;
+ var x4;
+ var x8;
+ var s;
+ var tEnc;
+ var tDec; // Compute double and third tables
+
+ for (i = 0; i < 256; i++) {
+ th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
+ }
+
+ for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
+ // Compute sbox
+ s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
+ s = s >> 8 ^ s & 255 ^ 99;
+ sbox[x] = s;
+ sboxInv[s] = x; // Compute MixColumns
+
+ x8 = d[x4 = d[x2 = d[x]]];
+ tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
+ tEnc = d[s] * 0x101 ^ s * 0x1010100;
+
+ for (i = 0; i < 4; i++) {
+ encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
+ decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
+ }
+ } // Compactify. Considerable speedup on Firefox.
+
+
+ for (i = 0; i < 5; i++) {
+ encTable[i] = encTable[i].slice(0);
+ decTable[i] = decTable[i].slice(0);
+ }
+
+ return tables;
+ };
+
+ var aesTables = null;
+ /**
+ * Schedule out an AES key for both encryption and decryption. This
+ * is a low-level class. Use a cipher mode to do bulk encryption.
+ *
+ * @class AES
+ * @param key {Array} The key as an array of 4, 6 or 8 words.
+ */
+
+ var AES = /*#__PURE__*/function () {
+ function AES(key) {
+ /**
+ * The expanded S-box and inverse S-box tables. These will be computed
+ * on the client so that we don't have to send them down the wire.
+ *
+ * There are two tables, _tables[0] is for encryption and
+ * _tables[1] is for decryption.
+ *
+ * The first 4 sub-tables are the expanded S-box with MixColumns. The
+ * last (_tables[01][4]) is the S-box itself.
+ *
+ * @private
+ */
+ // if we have yet to precompute the S-box tables
+ // do so now
+ if (!aesTables) {
+ aesTables = precompute();
+ } // then make a copy of that object for use
+
+
+ this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
+ var i;
+ var j;
+ var tmp;
+ var sbox = this._tables[0][4];
+ var decTable = this._tables[1];
+ var keyLen = key.length;
+ var rcon = 1;
+
+ if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
+ throw new Error('Invalid aes key size');
+ }
+
+ var encKey = key.slice(0);
+ var decKey = [];
+ this._key = [encKey, decKey]; // schedule encryption keys
+
+ for (i = keyLen; i < 4 * keyLen + 28; i++) {
+ tmp = encKey[i - 1]; // apply sbox
+
+ if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
+ tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
+
+ if (i % keyLen === 0) {
+ tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
+ rcon = rcon << 1 ^ (rcon >> 7) * 283;
+ }
+ }
+
+ encKey[i] = encKey[i - keyLen] ^ tmp;
+ } // schedule decryption keys
+
+
+ for (j = 0; i; j++, i--) {
+ tmp = encKey[j & 3 ? i : i - 4];
+
+ if (i <= 4 || j < 4) {
+ decKey[j] = tmp;
+ } else {
+ decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
+ }
+ }
+ }
+ /**
+ * Decrypt 16 bytes, specified as four 32-bit words.
+ *
+ * @param {number} encrypted0 the first word to decrypt
+ * @param {number} encrypted1 the second word to decrypt
+ * @param {number} encrypted2 the third word to decrypt
+ * @param {number} encrypted3 the fourth word to decrypt
+ * @param {Int32Array} out the array to write the decrypted words
+ * into
+ * @param {number} offset the offset into the output array to start
+ * writing results
+ * @return {Array} The plaintext.
+ */
+
+
+ var _proto = AES.prototype;
+
+ _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
+ var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
+
+ var a = encrypted0 ^ key[0];
+ var b = encrypted3 ^ key[1];
+ var c = encrypted2 ^ key[2];
+ var d = encrypted1 ^ key[3];
+ var a2;
+ var b2;
+ var c2; // key.length === 2 ?
+
+ var nInnerRounds = key.length / 4 - 2;
+ var i;
+ var kIndex = 4;
+ var table = this._tables[1]; // load up the tables
+
+ var table0 = table[0];
+ var table1 = table[1];
+ var table2 = table[2];
+ var table3 = table[3];
+ var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
+
+ for (i = 0; i < nInnerRounds; i++) {
+ a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
+ b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
+ c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
+ d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
+ kIndex += 4;
+ a = a2;
+ b = b2;
+ c = c2;
+ } // Last round.
+
+
+ for (i = 0; i < 4; i++) {
+ out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
+ a2 = a;
+ a = b;
+ b = c;
+ c = d;
+ d = a2;
+ }
+ };
+
+ return AES;
+ }();
+ /**
+ * A wrapper around the Stream class to use setTimeout
+ * and run stream "jobs" asynchronously
+ *
+ * @class AsyncStream
+ * @extends Stream
+ */
+
+
+ var AsyncStream = /*#__PURE__*/function (_Stream) {
+ inheritsLoose(AsyncStream, _Stream);
+
+ function AsyncStream() {
+ var _this;
+
+ _this = _Stream.call(this, Stream) || this;
+ _this.jobs = [];
+ _this.delay = 1;
+ _this.timeout_ = null;
+ return _this;
+ }
+ /**
+ * process an async job
+ *
+ * @private
+ */
+
+
+ var _proto = AsyncStream.prototype;
+
+ _proto.processJob_ = function processJob_() {
+ this.jobs.shift()();
+
+ if (this.jobs.length) {
+ this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
+ } else {
+ this.timeout_ = null;
+ }
+ }
+ /**
+ * push a job into the stream
+ *
+ * @param {Function} job the job to push into the stream
+ */
+ ;
+
+ _proto.push = function push(job) {
+ this.jobs.push(job);
+
+ if (!this.timeout_) {
+ this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
+ }
+ };
+
+ return AsyncStream;
+ }(Stream);
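+ // Illustrative sketch: jobs pushed onto an AsyncStream run one per setTimeout
+ // tick (this.delay, 1ms) rather than all at once on the current task;
+ // `decryptChunkA`/`decryptChunkB` are hypothetical zero-argument jobs:
+ //
+ //   var queue = new AsyncStream();
+ //   queue.push(decryptChunkA); // schedules processJob_ via setTimeout
+ //   queue.push(decryptChunkB); // queued; runs after decryptChunkA completes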
+ /**
+ * Convert network-order (big-endian) bytes into their little-endian
+ * representation.
+ */
+
+
+ var ntoh = function ntoh(word) {
+ return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
+ };
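+ // Illustrative sketch: ntoh swaps the byte order of a 32-bit word, e.g.
+ //
+ //   ntoh(0x01020304) === 0x04030201; // true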
+ /**
+ * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
+ *
+ * @param {Uint8Array} encrypted the encrypted bytes
+ * @param {Uint32Array} key the bytes of the decryption key
+ * @param {Uint32Array} initVector the initialization vector (IV) to
+ * use for the first round of CBC.
+ * @return {Uint8Array} the decrypted bytes
+ *
+ * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
+ * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
+ * @see https://tools.ietf.org/html/rfc2315
+ */
+
+
+ var decrypt = function decrypt(encrypted, key, initVector) {
+ // word-level access to the encrypted bytes
+ var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
+ var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
+
+ var decrypted = new Uint8Array(encrypted.byteLength);
+ var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
+ // decrypted data
+
+ var init0;
+ var init1;
+ var init2;
+ var init3;
+ var encrypted0;
+ var encrypted1;
+ var encrypted2;
+ var encrypted3; // iteration variable
+
+ var wordIx; // pull out the words of the IV to ensure we don't modify the
+ // passed-in reference and for easier access
+
+ init0 = initVector[0];
+ init1 = initVector[1];
+ init2 = initVector[2];
+ init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
+ // to each decrypted block
+
+ for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
+ // convert big-endian (network order) words into little-endian
+ // (javascript order)
+ encrypted0 = ntoh(encrypted32[wordIx]);
+ encrypted1 = ntoh(encrypted32[wordIx + 1]);
+ encrypted2 = ntoh(encrypted32[wordIx + 2]);
+ encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
+
+ decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
+ // plaintext
+
+ decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
+ decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
+ decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
+ decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
+
+ init0 = encrypted0;
+ init1 = encrypted1;
+ init2 = encrypted2;
+ init3 = encrypted3;
+ }
+
+ return decrypted;
+ };
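+ // Illustrative sketch of a direct, synchronous decrypt() call; the key and IV
+ // are viewed as four 32-bit words each, and `segmentBytes`/`keyBytes` are
+ // hypothetical inputs (not real test vectors):
+ //
+ //   var encrypted = new Uint8Array(segmentBytes);      // length a multiple of 16
+ //   var key = new Uint32Array(keyBytes.buffer, 0, 4);  // 16-byte AES-128 key
+ //   var iv = new Uint32Array([0, 0, 0, 0]);
+ //   var padded = decrypt(encrypted, key, iv);           // Uint8Array, PKCS#7 padding intact
+ //   var plain = unpad(padded);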
+ /**
+ * The `Decrypter` class that manages decryption of AES
+ * data through `AsyncStream` objects and the `decrypt`
+ * function
+ *
+ * @param {Uint8Array} encrypted the encrypted bytes
+ * @param {Uint32Array} key the bytes of the decryption key
+ * @param {Uint32Array} initVector the initialization vector (IV) to
+ * @param {Function} done the function to run when done
+ * @class Decrypter
+ */
+
+
+ var Decrypter = /*#__PURE__*/function () {
+ function Decrypter(encrypted, key, initVector, done) {
+ var step = Decrypter.STEP;
+ var encrypted32 = new Int32Array(encrypted.buffer);
+ var decrypted = new Uint8Array(encrypted.byteLength);
+ var i = 0;
+ this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously
+
+ this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
+
+ for (i = step; i < encrypted32.length; i += step) {
+ initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
+ this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
+ } // invoke the done() callback when everything is finished
+
+
+ this.asyncStream_.push(function () {
+ // remove pkcs#7 padding from the decrypted bytes
+ done(null, unpad(decrypted));
+ });
+ }
+ /**
+ * a getter for step, the maximum number of bytes to process at one time
+ *
+ * @return {number} the value of step, 32000
+ */
+
+
+ var _proto = Decrypter.prototype;
+ /**
+ * @private
+ */
+
+ _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
+ return function () {
+ var bytes = decrypt(encrypted, key, initVector);
+ decrypted.set(bytes, encrypted.byteOffset);
+ };
+ };
+
+ createClass(Decrypter, null, [{
+ key: "STEP",
+ get: function get() {
+ // 4 * 8000;
+ return 32000;
+ }
+ }]);
+ return Decrypter;
+ }();
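+ // Illustrative sketch of the asynchronous path; `encrypted`, `key` and `iv`
+ // are assumed to be the same typed arrays used with decrypt() above:
+ //
+ //   new Decrypter(encrypted, key, iv, function (err, bytes) {
+ //     // err is always null here; bytes is the unpadded plaintext Uint8Array
+ //   });
+ //
+ // The work is split into Decrypter.STEP-sized chunks queued on an AsyncStream,
+ // so a large segment never blocks for its full decryption time.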
+
+ var win;
+
+ if (typeof window !== "undefined") {
+ win = window;
+ } else if (typeof commonjsGlobal !== "undefined") {
+ win = commonjsGlobal;
+ } else if (typeof self !== "undefined") {
+ win = self;
+ } else {
+ win = {};
+ }
+
+ var window_1 = win;
+
+ var isArrayBufferView = function isArrayBufferView(obj) {
+ if (typeof ArrayBuffer.isView === 'function') {
+ return ArrayBuffer.isView(obj);
+ }
+
+ return obj && obj.buffer instanceof ArrayBuffer;
+ };
+
+ var BigInt = window_1.BigInt || Number;
+ [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
+ /**
+ * Creates an object for sending to a web worker modifying properties that are TypedArrays
+ * into a new object with separated properties for the buffer, byteOffset, and byteLength.
+ *
+ * @param {Object} message
+ * Object of properties and values to send to the web worker
+ * @return {Object}
+ * Modified message with TypedArray values expanded
+ * @function createTransferableMessage
+ */
+
+
+ var createTransferableMessage = function createTransferableMessage(message) {
+ var transferable = {};
+ Object.keys(message).forEach(function (key) {
+ var value = message[key];
+
+ if (isArrayBufferView(value)) {
+ transferable[key] = {
+ bytes: value.buffer,
+ byteOffset: value.byteOffset,
+ byteLength: value.byteLength
+ };
+ } else {
+ transferable[key] = value;
+ }
+ });
+ return transferable;
+ };
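+ // Illustrative sketch: a typed-array property is flattened so its underlying
+ // ArrayBuffer can be listed as a transferable in postMessage:
+ //
+ //   createTransferableMessage({ source: 1, encrypted: new Uint8Array(8) });
+ //   // => { source: 1,
+ //   //      encrypted: { bytes: <ArrayBuffer 8>, byteOffset: 0, byteLength: 8 } }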
+ /* global self */
+
+ /**
+ * Our web worker interface so that things can talk to aes-decrypter
+ * that will be running in a web worker. the scope is passed to this by
+ * webworkify.
+ */
+
+
+ self.onmessage = function (event) {
+ var data = event.data;
+ var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
+ var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
+ var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
+ /* eslint-disable no-new, handle-callback-err */
+
+ new Decrypter(encrypted, key, iv, function (err, bytes) {
+ self.postMessage(createTransferableMessage({
+ source: data.source,
+ decrypted: bytes
+ }), [bytes.buffer]);
+ });
+ /* eslint-enable */
+ };
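+ // Illustrative sketch of the message shape this handler expects from the main
+ // thread; `encryptedBuffer`, `keyBuffer` and `ivBuffer` are hypothetical
+ // ArrayBuffers (the flattened form produced by createTransferableMessage):
+ //
+ //   worker.postMessage({
+ //     source: 1,
+ //     encrypted: { bytes: encryptedBuffer, byteOffset: 0, byteLength: 1024 },
+ //     key:       { bytes: keyBuffer,       byteOffset: 0, byteLength: 16 },
+ //     iv:        { bytes: ivBuffer,        byteOffset: 0, byteLength: 16 }
+ //   });
+ //
+ // The worker replies with { source: 1, decrypted: { bytes, byteOffset, byteLength } }
+ // once decryption finishes.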
+}));
+var Decrypter = factory(workerCode);
+/* rollup-plugin-worker-factory end for worker!/Users/bclifford/Code/vhs-release-test/src/decrypter-worker.js */
+
+/**
+ * Convert the properties of an HLS track into an audioTrackKind.
+ *
+ * @private
+ */
+
+var audioTrackKind_ = function audioTrackKind_(properties) {
+ var kind = properties["default"] ? 'main' : 'alternative';
+
+ if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
+ kind = 'main-desc';
+ }
+
+ return kind;
+};
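+// Illustrative sketch of the mapping this helper performs on parsed HLS
+// audio-track properties:
+//
+//   audioTrackKind_({ "default": true });   // 'main'
+//   audioTrackKind_({ "default": false });  // 'alternative'
+//   audioTrackKind_({
+//     "default": true,
+//     characteristics: 'public.accessibility.describes-video'
+//   });                                     // 'main-desc'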
+/**
+ * Pause provided segment loader and playlist loader if active
+ *
+ * @param {SegmentLoader} segmentLoader
+ * SegmentLoader to pause
+ * @param {Object} mediaType
+ * Active media type
+ * @function stopLoaders
+ */
+
+
+var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
+ segmentLoader.abort();
+ segmentLoader.pause();
+
+ if (mediaType && mediaType.activePlaylistLoader) {
+ mediaType.activePlaylistLoader.pause();
+ mediaType.activePlaylistLoader = null;
+ }
+};
+/**
+ * Start loading provided segment loader and playlist loader
+ *
+ * @param {PlaylistLoader} playlistLoader
+ * PlaylistLoader to start loading
+ * @param {Object} mediaType
+ * Active media type
+ * @function startLoaders
+ */
+
+
+var startLoaders = function startLoaders(playlistLoader, mediaType) {
+ // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
+ // playlist loader
+ mediaType.activePlaylistLoader = playlistLoader;
+ playlistLoader.load();
+};
+/**
+ * Returns a function to be called when the media group changes. It performs a
+ * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
+ * change of group is merely a rendition switch of the same content at another encoding,
+ * rather than a change of content, such as switching audio from English to Spanish.
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Handler for a non-destructive resync of SegmentLoader when the active media
+ * group changes.
+ * @function onGroupChanged
+ */
+
+
+var onGroupChanged = function onGroupChanged(type, settings) {
+ return function () {
+ var _settings$segmentLoad = settings.segmentLoaders,
+ segmentLoader = _settings$segmentLoad[type],
+ mainSegmentLoader = _settings$segmentLoad.main,
+ mediaType = settings.mediaTypes[type];
+ var activeTrack = mediaType.activeTrack();
+ var activeGroup = mediaType.getActiveGroup();
+ var previousActiveLoader = mediaType.activePlaylistLoader;
+ var lastGroup = mediaType.lastGroup_; // the group did not change, do nothing
+
+ if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
+ return;
+ }
+
+ mediaType.lastGroup_ = activeGroup;
+ mediaType.lastTrack_ = activeTrack;
+ stopLoaders(segmentLoader, mediaType);
+
+ if (!activeGroup || activeGroup.isMasterPlaylist) {
+ // there is no group active or active group is a main playlist and won't change
+ return;
+ }
+
+ if (!activeGroup.playlistLoader) {
+ if (previousActiveLoader) {
+ // The previous group had a playlist loader but the new active group does not
+ // this means we are switching from demuxed to muxed audio. In this case we want to
+ // do a destructive reset of the main segment loader and not restart the audio
+ // loaders.
+ mainSegmentLoader.resetEverything();
+ }
+
+ return;
+ } // Non-destructive resync
+
+
+ segmentLoader.resyncLoader();
+ startLoaders(activeGroup.playlistLoader, mediaType);
+ };
+};
+
+var onGroupChanging = function onGroupChanging(type, settings) {
+ return function () {
+ var segmentLoader = settings.segmentLoaders[type],
+ mediaType = settings.mediaTypes[type];
+ mediaType.lastGroup_ = null;
+ segmentLoader.abort();
+ segmentLoader.pause();
+ };
+};
+/**
+ * Returns a function to be called when the media track changes. It performs a
+ * destructive reset of the SegmentLoader to ensure we start loading as close to
+ * currentTime as possible.
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Handler for a destructive reset of SegmentLoader when the active media
+ * track changes.
+ * @function onTrackChanged
+ */
+
+
+var onTrackChanged = function onTrackChanged(type, settings) {
+ return function () {
+ var masterPlaylistLoader = settings.masterPlaylistLoader,
+ _settings$segmentLoad2 = settings.segmentLoaders,
+ segmentLoader = _settings$segmentLoad2[type],
+ mainSegmentLoader = _settings$segmentLoad2.main,
+ mediaType = settings.mediaTypes[type];
+ var activeTrack = mediaType.activeTrack();
+ var activeGroup = mediaType.getActiveGroup();
+ var previousActiveLoader = mediaType.activePlaylistLoader;
+ var lastTrack = mediaType.lastTrack_; // track did not change, do nothing
+
+ if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
+ return;
+ }
+
+ mediaType.lastGroup_ = activeGroup;
+ mediaType.lastTrack_ = activeTrack;
+ stopLoaders(segmentLoader, mediaType);
+
+ if (!activeGroup) {
+ // there is no group active so we do not want to restart loaders
+ return;
+ }
+
+ if (activeGroup.isMasterPlaylist) {
+ // track did not change, do nothing
+ if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
+ return;
+ }
+
+ var mpc = settings.vhs.masterPlaylistController_;
+ var newPlaylist = mpc.selectPlaylist(); // media will not change, do nothing
+
+ if (mpc.media() === newPlaylist) {
+ return;
+ }
+
+ mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
+ masterPlaylistLoader.pause();
+ mainSegmentLoader.resetEverything();
+ mpc.fastQualityChange_(newPlaylist);
+ return;
+ }
+
+ if (type === 'AUDIO') {
+ if (!activeGroup.playlistLoader) {
+ // when switching from demuxed audio/video to muxed audio/video (noted by no
+ // playlist loader for the audio group), we want to do a destructive reset of the
+ // main segment loader and not restart the audio loaders
+ mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
+ // it should be stopped
+
+ mainSegmentLoader.resetEverything();
+ return;
+ } // although the segment loader is an audio segment loader, call the setAudio
+ // function to ensure it is prepared to re-append the init segment (or handle other
+ // config changes)
+
+
+ segmentLoader.setAudio(true);
+ mainSegmentLoader.setAudio(false);
+ }
+
+ if (previousActiveLoader === activeGroup.playlistLoader) {
+ // Nothing has actually changed. This can happen because track change events can fire
+ // multiple times for a "single" change. One for enabling the new active track, and
+ // one for disabling the track that was active
+ startLoaders(activeGroup.playlistLoader, mediaType);
+ return;
+ }
+
+ if (segmentLoader.track) {
+ // For WebVTT, set the new text track in the segmentloader
+ segmentLoader.track(activeTrack);
+ } // destructive reset
+
+
+ segmentLoader.resetEverything();
+ startLoaders(activeGroup.playlistLoader, mediaType);
+ };
+};
+
+var onError = {
+ /**
+ * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
+ * an error.
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Error handler. Logs warning (or error if the playlist is blacklisted) to
+ * console and switches back to default audio track.
+ * @function onError.AUDIO
+ */
+ AUDIO: function AUDIO(type, settings) {
+ return function () {
+ var segmentLoader = settings.segmentLoaders[type],
+ mediaType = settings.mediaTypes[type],
+ blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
+ stopLoaders(segmentLoader, mediaType); // switch back to default audio track
+
+ var activeTrack = mediaType.activeTrack();
+ var activeGroup = mediaType.activeGroup();
+ var id = (activeGroup.filter(function (group) {
+ return group["default"];
+ })[0] || activeGroup[0]).id;
+ var defaultTrack = mediaType.tracks[id];
+
+ if (activeTrack === defaultTrack) {
+ // Default track encountered an error. All we can do now is blacklist the current
+ // rendition and hope another will switch audio groups
+ blacklistCurrentPlaylist({
+ message: 'Problem encountered loading the default audio track.'
+ });
+ return;
+ }
+
+ videojs.log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
+
+ for (var trackId in mediaType.tracks) {
+ mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
+ }
+
+ mediaType.onTrackChanged();
+ };
+ },
+
+ /**
+ * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
+ * an error.
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Error handler. Logs warning to console and disables the active subtitle track
+ * @function onError.SUBTITLES
+ */
+ SUBTITLES: function SUBTITLES(type, settings) {
+ return function () {
+ var segmentLoader = settings.segmentLoaders[type],
+ mediaType = settings.mediaTypes[type];
+ videojs.log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
+ stopLoaders(segmentLoader, mediaType);
+ var track = mediaType.activeTrack();
+
+ if (track) {
+ track.mode = 'disabled';
+ }
+
+ mediaType.onTrackChanged();
+ };
+ }
+};
+var setupListeners = {
+ /**
+ * Setup event listeners for audio playlist loader
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {PlaylistLoader|null} playlistLoader
+ * PlaylistLoader to register listeners on
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @function setupListeners.AUDIO
+ */
+ AUDIO: function AUDIO(type, playlistLoader, settings) {
+ if (!playlistLoader) {
+ // no playlist loader means audio will be muxed with the video
+ return;
+ }
+
+ var tech = settings.tech,
+ requestOptions = settings.requestOptions,
+ segmentLoader = settings.segmentLoaders[type];
+ playlistLoader.on('loadedmetadata', function () {
+ var media = playlistLoader.media();
+ segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
+ // permits, start downloading segments
+
+ if (!tech.paused() || media.endList && tech.preload() !== 'none') {
+ segmentLoader.load();
+ }
+ });
+ playlistLoader.on('loadedplaylist', function () {
+ segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
+
+ if (!tech.paused()) {
+ segmentLoader.load();
+ }
+ });
+ playlistLoader.on('error', onError[type](type, settings));
+ },
+
+ /**
+ * Setup event listeners for subtitle playlist loader
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {PlaylistLoader|null} playlistLoader
+ * PlaylistLoader to register listeners on
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @function setupListeners.SUBTITLES
+ */
+ SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
+ var tech = settings.tech,
+ requestOptions = settings.requestOptions,
+ segmentLoader = settings.segmentLoaders[type],
+ mediaType = settings.mediaTypes[type];
+ playlistLoader.on('loadedmetadata', function () {
+ var media = playlistLoader.media();
+ segmentLoader.playlist(media, requestOptions);
+ segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
+ // permits, start downloading segments
+
+ if (!tech.paused() || media.endList && tech.preload() !== 'none') {
+ segmentLoader.load();
+ }
+ });
+ playlistLoader.on('loadedplaylist', function () {
+ segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
+
+ if (!tech.paused()) {
+ segmentLoader.load();
+ }
+ });
+ playlistLoader.on('error', onError[type](type, settings));
+ }
+};
+var initialize = {
+ /**
+ * Setup PlaylistLoaders and AudioTracks for the audio groups
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @function initialize.AUDIO
+ */
+ 'AUDIO': function AUDIO(type, settings) {
+ var vhs = settings.vhs,
+ sourceType = settings.sourceType,
+ segmentLoader = settings.segmentLoaders[type],
+ requestOptions = settings.requestOptions,
+ mediaGroups = settings.master.mediaGroups,
+ _settings$mediaTypes$ = settings.mediaTypes[type],
+ groups = _settings$mediaTypes$.groups,
+ tracks = _settings$mediaTypes$.tracks,
+ logger_ = _settings$mediaTypes$.logger_,
+ masterPlaylistLoader = settings.masterPlaylistLoader;
+ var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none
+
+ if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
+ mediaGroups[type] = {
+ main: {
+ "default": {
+ "default": true
+ }
+ }
+ };
+
+ if (audioOnlyMaster) {
+ mediaGroups[type].main["default"].playlists = masterPlaylistLoader.master.playlists;
+ }
+ }
+
+ for (var groupId in mediaGroups[type]) {
+ if (!groups[groupId]) {
+ groups[groupId] = [];
+ }
+
+ for (var variantLabel in mediaGroups[type][groupId]) {
+ var properties = mediaGroups[type][groupId][variantLabel];
+ var playlistLoader = void 0;
+
+ if (audioOnlyMaster) {
+ logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
+ properties.isMasterPlaylist = true;
+ playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
+ // use the resolved media playlist object
+ } else if (sourceType === 'vhs-json' && properties.playlists) {
+ playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
+ } else if (properties.resolvedUri) {
+ playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists
+ // should we even have properties.playlists in this check.
+ } else if (properties.playlists && sourceType === 'dash') {
+ playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
+ } else {
+ // no resolvedUri means the audio is muxed with the video when using this
+ // audio track
+ playlistLoader = null;
+ }
+
+ properties = videojs.mergeOptions({
+ id: variantLabel,
+ playlistLoader: playlistLoader
+ }, properties);
+ setupListeners[type](type, properties.playlistLoader, settings);
+ groups[groupId].push(properties);
+
+ if (typeof tracks[variantLabel] === 'undefined') {
+ var track = new videojs.AudioTrack({
+ id: variantLabel,
+ kind: audioTrackKind_(properties),
+ enabled: false,
+ language: properties.language,
+ "default": properties["default"],
+ label: variantLabel
+ });
+ tracks[variantLabel] = track;
+ }
+ }
+ } // setup single error event handler for the segment loader
+
+
+ segmentLoader.on('error', onError[type](type, settings));
+ },
+
+ /**
+ * Setup PlaylistLoaders and TextTracks for the subtitle groups
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @function initialize.SUBTITLES
+ */
+ 'SUBTITLES': function SUBTITLES(type, settings) {
+ var tech = settings.tech,
+ vhs = settings.vhs,
+ sourceType = settings.sourceType,
+ segmentLoader = settings.segmentLoaders[type],
+ requestOptions = settings.requestOptions,
+ mediaGroups = settings.master.mediaGroups,
+ _settings$mediaTypes$2 = settings.mediaTypes[type],
+ groups = _settings$mediaTypes$2.groups,
+ tracks = _settings$mediaTypes$2.tracks,
+ masterPlaylistLoader = settings.masterPlaylistLoader;
+
+ for (var groupId in mediaGroups[type]) {
+ if (!groups[groupId]) {
+ groups[groupId] = [];
+ }
+
+ for (var variantLabel in mediaGroups[type][groupId]) {
+ if (mediaGroups[type][groupId][variantLabel].forced) {
+ // Subtitle playlists with the forced attribute are not selectable in Safari.
+ // According to Apple's HLS Authoring Specification:
+ // If content has forced subtitles and regular subtitles in a given language,
+ // the regular subtitles track in that language MUST contain both the forced
+ // subtitles and the regular subtitles for that language.
+ // Because of this requirement and that Safari does not add forced subtitles,
+ // forced subtitles are skipped here to maintain consistent experience across
+ // all platforms
+ continue;
+ }
+
+ var properties = mediaGroups[type][groupId][variantLabel];
+ var playlistLoader = void 0;
+
+ if (sourceType === 'hls') {
+ playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
+ } else if (sourceType === 'dash') {
+ var playlists = properties.playlists.filter(function (p) {
+ return p.excludeUntil !== Infinity;
+ });
+
+ if (!playlists.length) {
+ return;
+ }
+
+ playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
+ } else if (sourceType === 'vhs-json') {
+ playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
+ // as provided, otherwise use the resolved URI to load the playlist
+ properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
+ }
+
+ properties = videojs.mergeOptions({
+ id: variantLabel,
+ playlistLoader: playlistLoader
+ }, properties);
+ setupListeners[type](type, properties.playlistLoader, settings);
+ groups[groupId].push(properties);
+
+ if (typeof tracks[variantLabel] === 'undefined') {
+ var track = tech.addRemoteTextTrack({
+ id: variantLabel,
+ kind: 'subtitles',
+ "default": properties["default"] && properties.autoselect,
+ language: properties.language,
+ label: variantLabel
+ }, false).track;
+ tracks[variantLabel] = track;
+ }
+ }
+ } // setup single error event handler for the segment loader
+
+
+ segmentLoader.on('error', onError[type](type, settings));
+ },
+
+ /**
+ * Setup TextTracks for the closed-caption groups
+ *
+ * @param {String} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @function initialize['CLOSED-CAPTIONS']
+ */
+ 'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
+ var tech = settings.tech,
+ mediaGroups = settings.master.mediaGroups,
+ _settings$mediaTypes$3 = settings.mediaTypes[type],
+ groups = _settings$mediaTypes$3.groups,
+ tracks = _settings$mediaTypes$3.tracks;
+
+ for (var groupId in mediaGroups[type]) {
+ if (!groups[groupId]) {
+ groups[groupId] = [];
+ }
+
+ for (var variantLabel in mediaGroups[type][groupId]) {
+ var properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services
+
+ if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
+ continue;
+ }
+
+ var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
+ var newProps = {
+ label: variantLabel,
+ language: properties.language,
+ instreamId: properties.instreamId,
+ "default": properties["default"] && properties.autoselect
+ };
+
+ if (captionServices[newProps.instreamId]) {
+ newProps = videojs.mergeOptions(newProps, captionServices[newProps.instreamId]);
+ }
+
+ if (newProps["default"] === undefined) {
+ delete newProps["default"];
+ } // No PlaylistLoader is required for Closed-Captions because the captions are
+ // embedded within the video stream
+
+
+ groups[groupId].push(videojs.mergeOptions({
+ id: variantLabel
+ }, properties));
+
+ if (typeof tracks[variantLabel] === 'undefined') {
+ var track = tech.addRemoteTextTrack({
+ id: newProps.instreamId,
+ kind: 'captions',
+ "default": newProps["default"],
+ language: newProps.language,
+ label: newProps.label
+ }, false).track;
+ tracks[variantLabel] = track;
+ }
+ }
+ }
+ }
+};
+
+var groupMatch = function groupMatch(list, media) {
+ for (var i = 0; i < list.length; i++) {
+ if (playlistMatch(media, list[i])) {
+ return true;
+ }
+
+ if (list[i].playlists && groupMatch(list[i].playlists, media)) {
+ return true;
+ }
+ }
+
+ return false;
+};
+/**
+ * Returns a function used to get the active group of the provided type
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Function that returns the active media group for the provided type. Takes an
+ *          optional parameter {TextTrack} track. If no track is provided, a list of all
+ *          variants in the group is returned; otherwise, the variant corresponding to the
+ *          provided track is returned.
+ * @function activeGroup
+ */
+
+
+var activeGroup = function activeGroup(type, settings) {
+ return function (track) {
+ var masterPlaylistLoader = settings.masterPlaylistLoader,
+ groups = settings.mediaTypes[type].groups;
+ var media = masterPlaylistLoader.media();
+
+ if (!media) {
+ return null;
+ }
+
+ var variants = null; // set variants to the main media active group
+
+ if (media.attributes[type]) {
+ variants = groups[media.attributes[type]];
+ }
+
+ var groupKeys = Object.keys(groups);
+
+ if (!variants) {
+ // find the masterPlaylistLoader media
+ // that is in a media group if we are dealing
+ // with audio only
+ if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.master)) {
+ for (var i = 0; i < groupKeys.length; i++) {
+ var groupPropertyList = groups[groupKeys[i]];
+
+ if (groupMatch(groupPropertyList, media)) {
+ variants = groupPropertyList;
+ break;
+ }
+ } // use the main group if it exists
+
+ } else if (groups.main) {
+ variants = groups.main; // only one group, use that one
+ } else if (groupKeys.length === 1) {
+ variants = groups[groupKeys[0]];
+ }
+ }
+
+ if (typeof track === 'undefined') {
+ return variants;
+ }
+
+ if (track === null || !variants) {
+ // An active track was specified so a corresponding group is expected. track === null
+ // means no track is currently active so there is no corresponding group
+ return null;
+ }
+
+ return variants.filter(function (props) {
+ return props.id === track.id;
+ })[0] || null;
+ };
+};
+
+var activeTrack = {
+ /**
+ * Returns a function used to get the active track of type provided
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Function that returns the active media track for the provided type. Returns
+ * null if no track is active
+ * @function activeTrack.AUDIO
+ */
+ AUDIO: function AUDIO(type, settings) {
+ return function () {
+ var tracks = settings.mediaTypes[type].tracks;
+
+ for (var id in tracks) {
+ if (tracks[id].enabled) {
+ return tracks[id];
+ }
+ }
+
+ return null;
+ };
+ },
+
+ /**
+ * Returns a function used to get the active track of type provided
+ *
+ * @param {string} type
+ * MediaGroup type
+ * @param {Object} settings
+ * Object containing required information for media groups
+ * @return {Function}
+ * Function that returns the active media track for the provided type. Returns
+ * null if no track is active
+ * @function activeTrack.SUBTITLES
+ */
+ SUBTITLES: function SUBTITLES(type, settings) {
+ return function () {
+ var tracks = settings.mediaTypes[type].tracks;
+
+ for (var id in tracks) {
+ if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
+ return tracks[id];
+ }
+ }
+
+ return null;
+ };
+ }
+};
+
+var getActiveGroup = function getActiveGroup(type, _ref) {
+ var mediaTypes = _ref.mediaTypes;
+ return function () {
+ var activeTrack_ = mediaTypes[type].activeTrack();
+
+ if (!activeTrack_) {
+ return null;
+ }
+
+ return mediaTypes[type].activeGroup(activeTrack_);
+ };
+};
+/**
+ * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
+ * Closed-Captions) specified in the master manifest.
+ *
+ * @param {Object} settings
+ * Object containing required information for setting up the media groups
+ * @param {Tech} settings.tech
+ * The tech of the player
+ * @param {Object} settings.requestOptions
+ * XHR request options used by the segment loaders
+ * @param {PlaylistLoader} settings.masterPlaylistLoader
+ * PlaylistLoader for the master source
+ * @param {VhsHandler} settings.vhs
+ * VHS SourceHandler
+ * @param {Object} settings.master
+ * The parsed master manifest
+ * @param {Object} settings.mediaTypes
+ * Object to store the loaders, tracks, and utility methods for each media type
+ * @param {Function} settings.blacklistCurrentPlaylist
+ * Blacklists the current rendition and forces a rendition switch.
+ * @function setupMediaGroups
+ */
+
+
+var setupMediaGroups = function setupMediaGroups(settings) {
+ ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
+ initialize[type](type, settings);
+ });
+ var mediaTypes = settings.mediaTypes,
+ masterPlaylistLoader = settings.masterPlaylistLoader,
+ tech = settings.tech,
+ vhs = settings.vhs,
+ _settings$segmentLoad3 = settings.segmentLoaders,
+ audioSegmentLoader = _settings$segmentLoad3['AUDIO'],
+ mainSegmentLoader = _settings$segmentLoad3.main; // setup active group and track getters and change event handlers
+
+ ['AUDIO', 'SUBTITLES'].forEach(function (type) {
+ mediaTypes[type].activeGroup = activeGroup(type, settings);
+ mediaTypes[type].activeTrack = activeTrack[type](type, settings);
+ mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
+ mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
+ mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
+ mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
+ }); // DO NOT enable the default subtitle or caption track.
+ // DO enable the default audio track
+
+ var audioGroup = mediaTypes.AUDIO.activeGroup();
+
+ if (audioGroup) {
+ var groupId = (audioGroup.filter(function (group) {
+ return group["default"];
+ })[0] || audioGroup[0]).id;
+ mediaTypes.AUDIO.tracks[groupId].enabled = true;
+ mediaTypes.AUDIO.onGroupChanged();
+ mediaTypes.AUDIO.onTrackChanged();
+ var activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
+ // track is changed, but needs to be handled here since the track may not be considered
+ // changed on the first call to onTrackChanged
+
+ if (!activeAudioGroup.playlistLoader) {
+ // either audio is muxed with video or the stream is audio only
+ mainSegmentLoader.setAudio(true);
+ } else {
+ // audio is demuxed
+ mainSegmentLoader.setAudio(false);
+ audioSegmentLoader.setAudio(true);
+ }
+ }
+
+ masterPlaylistLoader.on('mediachange', function () {
+ ['AUDIO', 'SUBTITLES'].forEach(function (type) {
+ return mediaTypes[type].onGroupChanged();
+ });
+ });
+ masterPlaylistLoader.on('mediachanging', function () {
+ ['AUDIO', 'SUBTITLES'].forEach(function (type) {
+ return mediaTypes[type].onGroupChanging();
+ });
+ }); // custom audio track change event handler for usage event
+
+ var onAudioTrackChanged = function onAudioTrackChanged() {
+ mediaTypes.AUDIO.onTrackChanged();
+ tech.trigger({
+ type: 'usage',
+ name: 'vhs-audio-change'
+ });
+ tech.trigger({
+ type: 'usage',
+ name: 'hls-audio-change'
+ });
+ };
+
+ tech.audioTracks().addEventListener('change', onAudioTrackChanged);
+ tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
+ vhs.on('dispose', function () {
+ tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
+ tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
+ }); // clear existing audio tracks and add the ones we just created
+
+ tech.clearTracks('audio');
+
+ for (var id in mediaTypes.AUDIO.tracks) {
+ tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
+ }
+};
+/**
+ * Creates skeleton object used to store the loaders, tracks, and utility methods for each
+ * media type
+ *
+ * @return {Object}
+ * Object to store the loaders, tracks, and utility methods for each media type
+ * @function createMediaTypes
+ */
+
+
+var createMediaTypes = function createMediaTypes() {
+ var mediaTypes = {};
+ ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
+ mediaTypes[type] = {
+ groups: {},
+ tracks: {},
+ activePlaylistLoader: null,
+ activeGroup: noop,
+ activeTrack: noop,
+ getActiveGroup: noop,
+ onGroupChanged: noop,
+ onTrackChanged: noop,
+ lastTrack_: null,
+ logger_: logger("MediaGroups[" + type + "]")
+ };
+ });
+ return mediaTypes;
+};
+
+var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
+var Vhs$1; // SegmentLoader stats that need to have each loader's
+// values summed to calculate the final value
+
+var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];
+
+var sumLoaderStat = function sumLoaderStat(stat) {
+ return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
+};
+
+var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
+ var currentPlaylist = _ref.currentPlaylist,
+ buffered = _ref.buffered,
+ currentTime = _ref.currentTime,
+ nextPlaylist = _ref.nextPlaylist,
+ bufferLowWaterLine = _ref.bufferLowWaterLine,
+ bufferHighWaterLine = _ref.bufferHighWaterLine,
+ duration = _ref.duration,
+ experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
+ log = _ref.log; // we have no other playlist to switch to
+
+ if (!nextPlaylist) {
+ videojs.log.warn('We received no playlist to switch to. Please check your stream.');
+ return false;
+ }
+
+ var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id;
+
+ if (!currentPlaylist) {
+ log(sharedLogLine + " as current playlist is not set");
+ return true;
+ } // no need to switch if playlist is the same
+
+
+ if (nextPlaylist.id === currentPlaylist.id) {
+ return false;
+ } // determine if current time is in a buffered range.
+
+
+ var isBuffered = Boolean(findRange(buffered, currentTime).length); // If the playlist is live, then we want to not take low water line into account.
+ // This is because in LIVE, the player plays 3 segments from the end of the
+ // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
+ // in those segments, a viewer will never experience a rendition upswitch.
+
+ if (!currentPlaylist.endList) {
+ // For LLHLS live streams, don't switch renditions before playback has started, as it almost
+ // doubles the time to first playback.
+ if (!isBuffered && typeof currentPlaylist.partTargetDuration === 'number') {
+ log("not " + sharedLogLine + " as current playlist is live llhls, but currentTime isn't in buffered.");
+ return false;
+ }
+
+ log(sharedLogLine + " as current playlist is live");
+ return true;
+ }
+
+ var forwardBuffer = timeAheadOf(buffered, currentTime);
+ var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
+ // duration is below the max potential low water line
+
+ if (duration < maxBufferLowWaterLine) {
+ log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
+ return true;
+ }
+
+ var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
+ var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
+ // we can switch down
+
+ if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
+ var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";
+
+ if (experimentalBufferBasedABR) {
+ logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
+ }
+
+ log(logLine);
+ return true;
+ } // and if our buffer is higher than the low water line,
+ // we can switch up
+
+
+ if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
+ var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";
+
+ if (experimentalBufferBasedABR) {
+ _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
+ }
+
+ log(_logLine);
+ return true;
+ }
+
+ log("not " + sharedLogLine + " as no switching criteria met");
+ return false;
+};
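+// Illustrative sketch (hypothetical values) of how the water lines gate an
+// upswitch for a VOD source once enough forward buffer has been built:
+//
+//   shouldSwitchToMedia({
+//     currentPlaylist: { id: 'low', endList: true, attributes: { BANDWIDTH: 8e5 } },
+//     nextPlaylist: { id: 'high', attributes: { BANDWIDTH: 2e6 } },
+//     buffered: videojs.createTimeRanges([[0, 40]]),
+//     currentTime: 10,                 // 30s of forward buffer
+//     duration: 600,
+//     bufferLowWaterLine: 16,
+//     bufferHighWaterLine: 30,
+//     experimentalBufferBasedABR: false,
+//     log: console.log.bind(console)
+//   }); // true: forwardBuffer (30) >= bufferLowWaterLine (16)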
+/**
+ * The master playlist controller controls all interactions
+ * between playlists and segment loaders. At this time this mainly
+ * involves a master playlist and a series of audio playlists
+ * if they are available
+ *
+ * @class MasterPlaylistController
+ * @extends videojs.EventTarget
+ */
+
+
+var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
+ _inheritsLoose(MasterPlaylistController, _videojs$EventTarget);
+
+ function MasterPlaylistController(options) {
+ var _this;
+
+ _this = _videojs$EventTarget.call(this) || this;
+ var src = options.src,
+ handleManifestRedirects = options.handleManifestRedirects,
+ withCredentials = options.withCredentials,
+ tech = options.tech,
+ bandwidth = options.bandwidth,
+ externVhs = options.externVhs,
+ useCueTags = options.useCueTags,
+ blacklistDuration = options.blacklistDuration,
+ enableLowInitialPlaylist = options.enableLowInitialPlaylist,
+ sourceType = options.sourceType,
+ cacheEncryptionKeys = options.cacheEncryptionKeys,
+ experimentalBufferBasedABR = options.experimentalBufferBasedABR,
+ experimentalLeastPixelDiffSelector = options.experimentalLeastPixelDiffSelector,
+ captionServices = options.captionServices;
+
+ if (!src) {
+ throw new Error('A non-empty playlist URL or JSON manifest string is required');
+ }
+
+ var maxPlaylistRetries = options.maxPlaylistRetries;
+
+ if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
+ maxPlaylistRetries = Infinity;
+ }
+
+ Vhs$1 = externVhs;
+ _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
+ _this.experimentalLeastPixelDiffSelector = Boolean(experimentalLeastPixelDiffSelector);
+ _this.withCredentials = withCredentials;
+ _this.tech_ = tech;
+ _this.vhs_ = tech.vhs;
+ _this.sourceType_ = sourceType;
+ _this.useCueTags_ = useCueTags;
+ _this.blacklistDuration = blacklistDuration;
+ _this.maxPlaylistRetries = maxPlaylistRetries;
+ _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
+
+ if (_this.useCueTags_) {
+ _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
+ _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
+ }
+
+ _this.requestOptions_ = {
+ withCredentials: withCredentials,
+ handleManifestRedirects: handleManifestRedirects,
+ maxPlaylistRetries: maxPlaylistRetries,
+ timeout: null
+ };
+
+ _this.on('error', _this.pauseLoading);
+
+ _this.mediaTypes_ = createMediaTypes();
+ _this.mediaSource = new window$1.MediaSource();
+ _this.handleDurationChange_ = _this.handleDurationChange_.bind(_assertThisInitialized(_this));
+ _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(_assertThisInitialized(_this));
+ _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(_assertThisInitialized(_this));
+
+ _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player
+
+
+ _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);
+
+ _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
+ // everything, and the MediaSource should not be detached without a proper disposal
+
+
+ _this.seekable_ = videojs.createTimeRanges();
+ _this.hasPlayed_ = false;
+ _this.syncController_ = new SyncController(options);
+ _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
+ kind: 'metadata',
+ label: 'segment-metadata'
+ }, false).track;
+ _this.decrypter_ = new Decrypter();
+ _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
+ _this.inbandTextTracks_ = {};
+ _this.timelineChangeController_ = new TimelineChangeController();
+ var segmentLoaderSettings = {
+ vhs: _this.vhs_,
+ parse708captions: options.parse708captions,
+ useDtsForTimestampOffset: options.useDtsForTimestampOffset,
+ captionServices: captionServices,
+ mediaSource: _this.mediaSource,
+ currentTime: _this.tech_.currentTime.bind(_this.tech_),
+ seekable: function seekable() {
+ return _this.seekable();
+ },
+ seeking: function seeking() {
+ return _this.tech_.seeking();
+ },
+ duration: function duration() {
+ return _this.duration();
+ },
+ hasPlayed: function hasPlayed() {
+ return _this.hasPlayed_;
+ },
+ goalBufferLength: function goalBufferLength() {
+ return _this.goalBufferLength();
+ },
+ bandwidth: bandwidth,
+ syncController: _this.syncController_,
+ decrypter: _this.decrypter_,
+ sourceType: _this.sourceType_,
+ inbandTextTracks: _this.inbandTextTracks_,
+ cacheEncryptionKeys: cacheEncryptionKeys,
+ sourceUpdater: _this.sourceUpdater_,
+ timelineChangeController: _this.timelineChangeController_,
+ experimentalExactManifestTimings: options.experimentalExactManifestTimings
+ }; // The source type check not only determines whether a special DASH playlist loader
+ // should be used, but also covers the case where the provided src is a vhs-json
+ // manifest object (instead of a URL). In the case of vhs-json, the default
+ // PlaylistLoader should be used.
+
+ _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);
+
+ _this.setupMasterPlaylistLoaderListeners_(); // setup segment loaders
+ // combined audio/video or just video when alternate audio track is selected
+
+
+ _this.mainSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
+ segmentMetadataTrack: _this.segmentMetadataTrack_,
+ loaderType: 'main'
+ }), options); // alternate audio track
+
+ _this.audioSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
+ loaderType: 'audio'
+ }), options);
+ _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
+ loaderType: 'vtt',
+ featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks
+ }), options);
+
+ _this.setupSegmentLoaderListeners_();
+
+ if (_this.experimentalBufferBasedABR) {
+ _this.masterPlaylistLoader_.one('loadedplaylist', function () {
+ return _this.startABRTimer_();
+ });
+
+ _this.tech_.on('pause', function () {
+ return _this.stopABRTimer_();
+ });
+
+ _this.tech_.on('play', function () {
+ return _this.startABRTimer_();
+ });
+ } // Create SegmentLoader stat-getters
+ // mediaRequests_
+ // mediaRequestsAborted_
+ // mediaRequestsTimedout_
+ // mediaRequestsErrored_
+ // mediaTransferDuration_
+ // mediaBytesTransferred_
+ // mediaAppends_
+
+
+ loaderStats.forEach(function (stat) {
+ _this[stat + '_'] = sumLoaderStat.bind(_assertThisInitialized(_this), stat);
+ });
+ _this.logger_ = logger('MPC');
+ _this.triggeredFmp4Usage = false;
+
+ if (_this.tech_.preload() === 'none') {
+ _this.loadOnPlay_ = function () {
+ _this.loadOnPlay_ = null;
+
+ _this.masterPlaylistLoader_.load();
+ };
+
+ _this.tech_.one('play', _this.loadOnPlay_);
+ } else {
+ _this.masterPlaylistLoader_.load();
+ }
+
+ _this.timeToLoadedData__ = -1;
+ _this.mainAppendsToLoadedData__ = -1;
+ _this.audioAppendsToLoadedData__ = -1;
+ var event = _this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)
+
+ _this.tech_.one(event, function () {
+ var timeToLoadedDataStart = Date.now();
+
+ _this.tech_.one('loadeddata', function () {
+ _this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
+ _this.mainAppendsToLoadedData__ = _this.mainSegmentLoader_.mediaAppends;
+ _this.audioAppendsToLoadedData__ = _this.audioSegmentLoader_.mediaAppends;
+ });
+ });
+
+ return _this;
+ }
+
+ var _proto = MasterPlaylistController.prototype;
+
+ _proto.mainAppendsToLoadedData_ = function mainAppendsToLoadedData_() {
+ return this.mainAppendsToLoadedData__;
+ };
+
+ _proto.audioAppendsToLoadedData_ = function audioAppendsToLoadedData_() {
+ return this.audioAppendsToLoadedData__;
+ };
+
+ _proto.appendsToLoadedData_ = function appendsToLoadedData_() {
+ var main = this.mainAppendsToLoadedData_();
+ var audio = this.audioAppendsToLoadedData_();
+
+ if (main === -1 || audio === -1) {
+ return -1;
+ }
+
+ return main + audio;
+ };
+
+ _proto.timeToLoadedData_ = function timeToLoadedData_() {
+ return this.timeToLoadedData__;
+ }
+ /**
+ * Run selectPlaylist and switch to the new playlist if we should
+ *
+ * @private
+ *
+ */
+ ;
+
+ _proto.checkABR_ = function checkABR_() {
+ var nextPlaylist = this.selectPlaylist();
+
+ if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {
+ this.switchMedia_(nextPlaylist, 'abr');
+ }
+ };
+
+ _proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
+ var oldMedia = this.media();
+ var oldId = oldMedia && (oldMedia.id || oldMedia.uri);
+ var newId = playlist.id || playlist.uri;
+
+ if (oldId && oldId !== newId) {
+ this.logger_("switch media " + oldId + " -> " + newId + " from " + cause);
+ this.tech_.trigger({
+ type: 'usage',
+ name: "vhs-rendition-change-" + cause
+ });
+ }
+
+ this.masterPlaylistLoader_.media(playlist, delay);
+ }
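+ // Illustrative note (added comment, not part of the upstream bundle): switchMedia_ is the
+ // single funnel for rendition changes in this controller. The `cause` strings used in this
+ // file are 'abr', 'initial', 'fast-quality' and 'exclude', and a switch between two distinct
+ // playlists also surfaces as a usage event, e.g.:
+ //   mpc.switchMedia_(nextPlaylist, 'abr');  // logs "switch media <oldId> -> <newId> from abr"
+ //                                           // and triggers {type: 'usage', name: 'vhs-rendition-change-abr'}
+ // where `mpc` stands for a MasterPlaylistController instance (a name used here only for illustration).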
+ /**
+ * Start a timer that periodically calls checkABR_
+ *
+ * @private
+ */
+ ;
+
+ _proto.startABRTimer_ = function startABRTimer_() {
+ var _this2 = this;
+
+ this.stopABRTimer_();
+ this.abrTimer_ = window$1.setInterval(function () {
+ return _this2.checkABR_();
+ }, 250);
+ }
+ /**
+ * Stop the timer that periodically calls checkABR_
+ *
+ * @private
+ */
+ ;
+
+ _proto.stopABRTimer_ = function stopABRTimer_() {
+ // if we're scrubbing, we don't need to pause.
+ // This getter will be added to Video.js in version 7.11.
+ if (this.tech_.scrubbing && this.tech_.scrubbing()) {
+ return;
+ }
+
+ window$1.clearInterval(this.abrTimer_);
+ this.abrTimer_ = null;
+ }
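+ // Illustrative sketch (added comment, not upstream code): lifecycle of the experimental
+ // buffer-based ABR timer wired up in the constructor above:
+ //   mpc.startABRTimer_();   // polls checkABR_() every 250ms via window$1.setInterval
+ //   // each tick: selectPlaylist() -> shouldSwitchToMedia_() -> switchMedia_(playlist, 'abr')
+ //   mpc.stopABRTimer_();    // clears the interval (skipped while the tech reports scrubbing)
+ // `mpc` is shorthand for a MasterPlaylistController instance, used here only for illustration.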
+ /**
+ * Get a list of playlists for the currently selected audio playlist
+ *
+ * @return {Array} the array of audio playlists
+ */
+ ;
+
+ _proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
+ var master = this.master();
+ var defaultPlaylists = master && master.playlists || []; // if we don't have any audio groups then we can only
+ // assume that the audio tracks are contained in the master's
+ // playlist array, use that or an empty array.
+
+ if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
+ return defaultPlaylists;
+ }
+
+ var AUDIO = master.mediaGroups.AUDIO;
+ var groupKeys = Object.keys(AUDIO);
+ var track; // get the current active track
+
+ if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
+ track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from master if mediaTypes_ isn't setup yet
+ } else {
+ // default group is `main` or just the first group.
+ var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
+
+ for (var label in defaultGroup) {
+ if (defaultGroup[label]["default"]) {
+ track = {
+ label: label
+ };
+ break;
+ }
+ }
+ } // no active track, no playlists.
+
+
+ if (!track) {
+ return defaultPlaylists;
+ }
+
+ var playlists = []; // get all of the playlists that are possible for the
+ // active track.
+
+ for (var group in AUDIO) {
+ if (AUDIO[group][track.label]) {
+ var properties = AUDIO[group][track.label];
+
+ if (properties.playlists && properties.playlists.length) {
+ playlists.push.apply(playlists, properties.playlists);
+ } else if (properties.uri) {
+ playlists.push(properties);
+ } else if (master.playlists.length) {
+ // if an audio group does not have a uri
+ // see if we have main playlists that use it as a group.
+ // if we do then add those to the playlists list.
+ for (var i = 0; i < master.playlists.length; i++) {
+ var playlist = master.playlists[i];
+
+ if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
+ playlists.push(playlist);
+ }
+ }
+ }
+ }
+ }
+
+ if (!playlists.length) {
+ return defaultPlaylists;
+ }
+
+ return playlists;
+ }
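+ // Illustrative sketch (added comment): a hypothetical master.mediaGroups.AUDIO shape that the
+ // resolver above understands. Group and label names are made up for illustration only.
+ //   master.mediaGroups.AUDIO = {
+ //     'aac-lo': {
+ //       'English': { default: true, uri: 'eng/audio.m3u8' },       // -> pushed directly
+ //       'French': { playlists: [/* media playlist objects */] }    // -> playlists spread in
+ //     }
+ //   };
+ // Muxed group entries without a uri fall back to master.playlists entries whose
+ // attributes.AUDIO matches the group key.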
+ /**
+ * Register event handlers on the master playlist loader. A helper
+ * function for construction time.
+ *
+ * @private
+ */
+ ;
+
+ _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
+ var _this3 = this;
+
+ this.masterPlaylistLoader_.on('loadedmetadata', function () {
+ var media = _this3.masterPlaylistLoader_.media();
+
+ var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
+ // timeout the request.
+
+ if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
+ _this3.requestOptions_.timeout = 0;
+ } else {
+ _this3.requestOptions_.timeout = requestTimeout;
+ } // if this isn't a live video and preload permits, start
+ // downloading segments
+
+
+ if (media.endList && _this3.tech_.preload() !== 'none') {
+ _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
+
+ _this3.mainSegmentLoader_.load();
+ }
+
+ setupMediaGroups({
+ sourceType: _this3.sourceType_,
+ segmentLoaders: {
+ AUDIO: _this3.audioSegmentLoader_,
+ SUBTITLES: _this3.subtitleSegmentLoader_,
+ main: _this3.mainSegmentLoader_
+ },
+ tech: _this3.tech_,
+ requestOptions: _this3.requestOptions_,
+ masterPlaylistLoader: _this3.masterPlaylistLoader_,
+ vhs: _this3.vhs_,
+ master: _this3.master(),
+ mediaTypes: _this3.mediaTypes_,
+ blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
+ });
+
+ _this3.triggerPresenceUsage_(_this3.master(), media);
+
+ _this3.setupFirstPlay();
+
+ if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
+ _this3.trigger('selectedinitialmedia');
+ } else {
+ // We must wait for the active audio playlist loader to
+ // finish setting up before triggering this event so the
+ // representations API and EME setup is correct
+ _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
+ _this3.trigger('selectedinitialmedia');
+ });
+ }
+ });
+ this.masterPlaylistLoader_.on('loadedplaylist', function () {
+ if (_this3.loadOnPlay_) {
+ _this3.tech_.off('play', _this3.loadOnPlay_);
+ }
+
+ var updatedPlaylist = _this3.masterPlaylistLoader_.media();
+
+ if (!updatedPlaylist) {
+ // exclude any variants that are not supported by the browser before selecting
+ // an initial media as the playlist selectors do not consider browser support
+ _this3.excludeUnsupportedVariants_();
+
+ var selectedMedia;
+
+ if (_this3.enableLowInitialPlaylist) {
+ selectedMedia = _this3.selectInitialPlaylist();
+ }
+
+ if (!selectedMedia) {
+ selectedMedia = _this3.selectPlaylist();
+ }
+
+ if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
+ return;
+ }
+
+ _this3.initialMedia_ = selectedMedia;
+
+ _this3.switchMedia_(_this3.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
+ // fire again since the playlist will be requested. In the case of vhs-json
+ // (where the manifest object is provided as the source), when the media
+ // playlist's `segments` list is already available, a media playlist won't be
+ // requested, and loadedplaylist won't fire again, so the playlist handler must be
+ // called on its own here.
+
+
+ var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;
+
+ if (!haveJsonSource) {
+ return;
+ }
+
+ updatedPlaylist = _this3.initialMedia_;
+ }
+
+ _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
+ });
+ this.masterPlaylistLoader_.on('error', function () {
+ _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
+ });
+ this.masterPlaylistLoader_.on('mediachanging', function () {
+ _this3.mainSegmentLoader_.abort();
+
+ _this3.mainSegmentLoader_.pause();
+ });
+ this.masterPlaylistLoader_.on('mediachange', function () {
+ var media = _this3.masterPlaylistLoader_.media();
+
+ var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
+ // timeout the request.
+
+ if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
+ _this3.requestOptions_.timeout = 0;
+ } else {
+ _this3.requestOptions_.timeout = requestTimeout;
+ } // TODO: Create a new event on the PlaylistLoader that signals
+ // that the segments have changed in some way and use that to
+ // update the SegmentLoader instead of doing it twice here and
+ // on `loadedplaylist`
+
+
+ _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
+
+ _this3.mainSegmentLoader_.load();
+
+ _this3.tech_.trigger({
+ type: 'mediachange',
+ bubbles: true
+ });
+ });
+ this.masterPlaylistLoader_.on('playlistunchanged', function () {
+ var updatedPlaylist = _this3.masterPlaylistLoader_.media(); // ignore unchanged playlists that have already been
+ // excluded for not-changing. We likely just have a really slowly updating
+ // playlist.
+
+
+ if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
+ return;
+ }
+
+ var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);
+
+ if (playlistOutdated) {
+ // Playlist has stopped updating and we're stuck at its end. Try to
+ // blacklist it and switch to another playlist in the hope that that
+ // one is updating (and give the player a chance to re-adjust to the
+ // safe live point).
+ _this3.blacklistCurrentPlaylist({
+ message: 'Playlist no longer updating.',
+ reason: 'playlist-unchanged'
+ }); // useful for monitoring QoS
+
+
+ _this3.tech_.trigger('playliststuck');
+ }
+ });
+ this.masterPlaylistLoader_.on('renditiondisabled', function () {
+ _this3.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-rendition-disabled'
+ });
+
+ _this3.tech_.trigger({
+ type: 'usage',
+ name: 'hls-rendition-disabled'
+ });
+ });
+ this.masterPlaylistLoader_.on('renditionenabled', function () {
+ _this3.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-rendition-enabled'
+ });
+
+ _this3.tech_.trigger({
+ type: 'usage',
+ name: 'hls-rendition-enabled'
+ });
+ });
+ }
+ /**
+ * Given an updated media playlist (whether it was loaded for the first time, or
+ * refreshed for live playlists), update any relevant properties and state to reflect
+ * changes in the media that should be accounted for (e.g., cues and duration).
+ *
+ * @param {Object} updatedPlaylist the updated media playlist object
+ *
+ * @private
+ */
+ ;
+
+ _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
+ if (this.useCueTags_) {
+ this.updateAdCues_(updatedPlaylist);
+ } // TODO: Create a new event on the PlaylistLoader that signals
+ // that the segments have changed in some way and use that to
+ // update the SegmentLoader instead of doing it twice here and
+ // on `mediachange`
+
+
+ this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
+ this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
+ // as it is possible that it was temporarily stopped while waiting for
+ // a playlist (e.g., in case the playlist errored and we re-requested it).
+
+ if (!this.tech_.paused()) {
+ this.mainSegmentLoader_.load();
+
+ if (this.audioSegmentLoader_) {
+ this.audioSegmentLoader_.load();
+ }
+ }
+ }
+ /**
+ * A helper function for triggering presence usage events once per source
+ *
+ * @private
+ */
+ ;
+
+ _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
+ var mediaGroups = master.mediaGroups || {};
+ var defaultDemuxed = true;
+ var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
+
+ for (var mediaGroup in mediaGroups.AUDIO) {
+ for (var label in mediaGroups.AUDIO[mediaGroup]) {
+ var properties = mediaGroups.AUDIO[mediaGroup][label];
+
+ if (!properties.uri) {
+ defaultDemuxed = false;
+ }
+ }
+ }
+
+ if (defaultDemuxed) {
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-demuxed'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-demuxed'
+ });
+ }
+
+ if (Object.keys(mediaGroups.SUBTITLES).length) {
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-webvtt'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-webvtt'
+ });
+ }
+
+ if (Vhs$1.Playlist.isAes(media)) {
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-aes'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-aes'
+ });
+ }
+
+ if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-alternate-audio'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-alternate-audio'
+ });
+ }
+
+ if (this.useCueTags_) {
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-playlist-cue-tags'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-playlist-cue-tags'
+ });
+ }
+ };
+
+ _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
+ var currentPlaylist = this.masterPlaylistLoader_.media() || this.masterPlaylistLoader_.pendingMedia_;
+ var currentTime = this.tech_.currentTime();
+ var bufferLowWaterLine = this.bufferLowWaterLine();
+ var bufferHighWaterLine = this.bufferHighWaterLine();
+ var buffered = this.tech_.buffered();
+ return shouldSwitchToMedia({
+ buffered: buffered,
+ currentTime: currentTime,
+ currentPlaylist: currentPlaylist,
+ nextPlaylist: nextPlaylist,
+ bufferLowWaterLine: bufferLowWaterLine,
+ bufferHighWaterLine: bufferHighWaterLine,
+ duration: this.duration(),
+ experimentalBufferBasedABR: this.experimentalBufferBasedABR,
+ log: this.logger_
+ });
+ }
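+ // Added note: the actual decision is made by the shared shouldSwitchToMedia helper defined
+ // elsewhere in this bundle; this wrapper only collects the player-state inputs (buffered
+ // ranges, current time, current and candidate playlists, the buffer low/high water lines,
+ // duration and the experimentalBufferBasedABR flag) and forwards them, so the helper works
+ // purely from those values.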
+ /**
+ * Register event handlers on the segment loaders. A helper function
+ * for construction time.
+ *
+ * @private
+ */
+ ;
+
+ _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
+ var _this4 = this;
+
+ if (!this.experimentalBufferBasedABR) {
+ this.mainSegmentLoader_.on('bandwidthupdate', function () {
+ var nextPlaylist = _this4.selectPlaylist();
+
+ if (_this4.shouldSwitchToMedia_(nextPlaylist)) {
+ _this4.switchMedia_(nextPlaylist, 'bandwidthupdate');
+ }
+
+ _this4.tech_.trigger('bandwidthupdate');
+ });
+ this.mainSegmentLoader_.on('progress', function () {
+ _this4.trigger('progress');
+ });
+ }
+
+ this.mainSegmentLoader_.on('error', function () {
+ _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
+ });
+ this.mainSegmentLoader_.on('appenderror', function () {
+ _this4.error = _this4.mainSegmentLoader_.error_;
+
+ _this4.trigger('error');
+ });
+ this.mainSegmentLoader_.on('syncinfoupdate', function () {
+ _this4.onSyncInfoUpdate_();
+ });
+ this.mainSegmentLoader_.on('timestampoffset', function () {
+ _this4.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-timestamp-offset'
+ });
+
+ _this4.tech_.trigger({
+ type: 'usage',
+ name: 'hls-timestamp-offset'
+ });
+ });
+ this.audioSegmentLoader_.on('syncinfoupdate', function () {
+ _this4.onSyncInfoUpdate_();
+ });
+ this.audioSegmentLoader_.on('appenderror', function () {
+ _this4.error = _this4.audioSegmentLoader_.error_;
+
+ _this4.trigger('error');
+ });
+ this.mainSegmentLoader_.on('ended', function () {
+ _this4.logger_('main segment loader ended');
+
+ _this4.onEndOfStream();
+ });
+ this.mainSegmentLoader_.on('earlyabort', function (event) {
+ // never try to early abort with the new ABR algorithm
+ if (_this4.experimentalBufferBasedABR) {
+ return;
+ }
+
+ _this4.delegateLoaders_('all', ['abort']);
+
+ _this4.blacklistCurrentPlaylist({
+ message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
+ }, ABORT_EARLY_BLACKLIST_SECONDS);
+ });
+
+ var updateCodecs = function updateCodecs() {
+ if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
+ return _this4.tryToCreateSourceBuffers_();
+ }
+
+ var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
+
+
+ if (!codecs) {
+ return;
+ }
+
+ _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
+ };
+
+ this.mainSegmentLoader_.on('trackinfo', updateCodecs);
+ this.audioSegmentLoader_.on('trackinfo', updateCodecs);
+ this.mainSegmentLoader_.on('fmp4', function () {
+ if (!_this4.triggeredFmp4Usage) {
+ _this4.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-fmp4'
+ });
+
+ _this4.tech_.trigger({
+ type: 'usage',
+ name: 'hls-fmp4'
+ });
+
+ _this4.triggeredFmp4Usage = true;
+ }
+ });
+ this.audioSegmentLoader_.on('fmp4', function () {
+ if (!_this4.triggeredFmp4Usage) {
+ _this4.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-fmp4'
+ });
+
+ _this4.tech_.trigger({
+ type: 'usage',
+ name: 'hls-fmp4'
+ });
+
+ _this4.triggeredFmp4Usage = true;
+ }
+ });
+ this.audioSegmentLoader_.on('ended', function () {
+ _this4.logger_('audioSegmentLoader ended');
+
+ _this4.onEndOfStream();
+ });
+ };
+
+ _proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
+ return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded;
+ }
+ /**
+ * Call load on our SegmentLoaders
+ */
+ ;
+
+ _proto.load = function load() {
+ this.mainSegmentLoader_.load();
+
+ if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
+ this.audioSegmentLoader_.load();
+ }
+
+ if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
+ this.subtitleSegmentLoader_.load();
+ }
+ }
+ /**
+ * Re-tune playback quality level for the current player
+ * conditions without performing destructive actions, like
+ * removing already buffered content
+ *
+ * @private
+ * @deprecated
+ */
+ ;
+
+ _proto.smoothQualityChange_ = function smoothQualityChange_(media) {
+ if (media === void 0) {
+ media = this.selectPlaylist();
+ }
+
+ this.fastQualityChange_(media);
+ }
+ /**
+ * Re-tune playback quality level for the current player
+ * conditions. This method will perform destructive actions like removing
+ * already buffered content in order to readjust the currently active
+ * playlist quickly. This is good for manual quality changes
+ *
+ * @private
+ */
+ ;
+
+ _proto.fastQualityChange_ = function fastQualityChange_(media) {
+ var _this5 = this;
+
+ if (media === void 0) {
+ media = this.selectPlaylist();
+ }
+
+ if (media === this.masterPlaylistLoader_.media()) {
+ this.logger_('skipping fastQualityChange because new media is same as old');
+ return;
+ }
+
+ this.switchMedia_(media, 'fast-quality'); // Delete all buffered data to allow an immediate quality switch, then seek to give
+ // the browser a kick to remove any cached frames from the previous rendition (.04 seconds
+ // ahead is roughly the minimum that will accomplish this across a variety of content
+ // in IE and Edge, but seeking in place is sufficient on all other browsers)
+ // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
+ // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
+
+ this.mainSegmentLoader_.resetEverything(function () {
+ // Since this is not a typical seek, we avoid the seekTo method which can cause segments
+ // from the previously enabled rendition to load before the new playlist has finished loading
+ if (videojs.browser.IE_VERSION || videojs.browser.IS_EDGE) {
+ _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
+ } else {
+ _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
+ }
+ }); // don't need to reset audio as it is reset when media changes
+ }
+ /**
+ * Begin playback.
+ */
+ ;
+
+ _proto.play = function play() {
+ if (this.setupFirstPlay()) {
+ return;
+ }
+
+ if (this.tech_.ended()) {
+ this.tech_.setCurrentTime(0);
+ }
+
+ if (this.hasPlayed_) {
+ this.load();
+ }
+
+ var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
+ // seek forward to the live point
+
+ if (this.tech_.duration() === Infinity) {
+ if (this.tech_.currentTime() < seekable.start(0)) {
+ return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
+ }
+ }
+ }
+ /**
+ * Seek to the latest media position if this is a live video and the
+ * player and video are loaded and initialized.
+ */
+ ;
+
+ _proto.setupFirstPlay = function setupFirstPlay() {
+ var _this6 = this;
+
+ var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
+ // If 1) there is no active media
+ // 2) the player is paused
+ // 3) the first play has already been setup
+ // then exit early
+
+ if (!media || this.tech_.paused() || this.hasPlayed_) {
+ return false;
+ } // when the video is a live stream
+
+
+ if (!media.endList) {
+ var seekable = this.seekable();
+
+ if (!seekable.length) {
+ // without a seekable range, the player cannot seek to begin buffering at the live
+ // point
+ return false;
+ }
+
+ if (videojs.browser.IE_VERSION && this.tech_.readyState() === 0) {
+ // IE11 throws an InvalidStateError if you try to set currentTime while the
+ // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
+ this.tech_.one('loadedmetadata', function () {
+ _this6.trigger('firstplay');
+
+ _this6.tech_.setCurrentTime(seekable.end(0));
+
+ _this6.hasPlayed_ = true;
+ });
+ return false;
+ } // trigger firstplay to inform the source handler to ignore the next seek event
+
+
+ this.trigger('firstplay'); // seek to the live point
+
+ this.tech_.setCurrentTime(seekable.end(0));
+ }
+
+ this.hasPlayed_ = true; // we can begin loading now that everything is ready
+
+ this.load();
+ return true;
+ }
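+ // Illustrative summary (added comment) of the first-play decision above:
+ //   no media, tech paused, or hasPlayed_ already set   -> return false (nothing to do yet)
+ //   live (no endList) with an empty seekable range     -> return false (no live point to seek to)
+ //   live on IE11 with readyState 0                     -> defer the live seek until 'loadedmetadata'
+ //   live otherwise                                     -> trigger 'firstplay' and seek to seekable.end(0)
+ //   in the remaining cases                             -> mark hasPlayed_, call load() and return true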
+ /**
+ * handle the sourceopen event on the MediaSource
+ *
+ * @private
+ */
+ ;
+
+ _proto.handleSourceOpen_ = function handleSourceOpen_() {
+ // Only attempt to create the source buffer if none already exist.
+ // handleSourceOpen is also called when we are "re-opening" a source buffer
+ // after `endOfStream` has been called (in response to a seek for instance)
+ this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
+ // code in video.js but is required because play() must be invoked
+ // *after* the media source has opened.
+
+ if (this.tech_.autoplay()) {
+ var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
+ // on browsers which return a promise
+
+ if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
+ playPromise.then(null, function (e) {});
+ }
+ }
+
+ this.trigger('sourceopen');
+ }
+ /**
+ * handle the sourceended event on the MediaSource
+ *
+ * @private
+ */
+ ;
+
+ _proto.handleSourceEnded_ = function handleSourceEnded_() {
+ if (!this.inbandTextTracks_.metadataTrack_) {
+ return;
+ }
+
+ var cues = this.inbandTextTracks_.metadataTrack_.cues;
+
+ if (!cues || !cues.length) {
+ return;
+ }
+
+ var duration = this.duration();
+ cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
+ }
+ /**
+ * handle the durationchange event on the MediaSource
+ *
+ * @private
+ */
+ ;
+
+ _proto.handleDurationChange_ = function handleDurationChange_() {
+ this.tech_.trigger('durationchange');
+ }
+ /**
+ * Calls endOfStream on the media source when all active stream types have called
+ * endOfStream
+ *
+ * @param {string} streamType
+ * Stream type of the segment loader that called endOfStream
+ * @private
+ */
+ ;
+
+ _proto.onEndOfStream = function onEndOfStream() {
+ var isEndOfStream = this.mainSegmentLoader_.ended_;
+
+ if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
+ var mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is active
+
+ if (!mainMediaInfo || mainMediaInfo.hasVideo) {
+ // if we do not know if the main segment loader contains video yet or if we
+ // definitively know the main segment loader contains video, then we need to wait
+ // for both main and audio segment loaders to call endOfStream
+ isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
+ } else {
+ // otherwise just rely on the audio loader
+ isEndOfStream = this.audioSegmentLoader_.ended_;
+ }
+ }
+
+ if (!isEndOfStream) {
+ return;
+ }
+
+ this.stopABRTimer_();
+ this.sourceUpdater_.endOfStream();
+ }
+ /**
+ * Check if a playlist has stopped being updated
+ *
+ * @param {Object} playlist the media playlist object
+ * @return {boolean} whether the playlist has stopped being updated or not
+ */
+ ;
+
+ _proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
+ var seekable = this.seekable();
+
+ if (!seekable.length) {
+ // playlist doesn't have enough information to determine whether we are stuck
+ return false;
+ }
+
+ var expired = this.syncController_.getExpiredTime(playlist, this.duration());
+
+ if (expired === null) {
+ return false;
+ } // does not use the safe live end to calculate playlist end, since we
+ // don't want to say we are stuck while there is still content
+
+
+ var absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
+ var currentTime = this.tech_.currentTime();
+ var buffered = this.tech_.buffered();
+
+ if (!buffered.length) {
+ // return true if the playhead reached the absolute end of the playlist
+ return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
+ }
+
+ var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
+ // end of playlist
+
+ return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
+ }
+ /**
+ * Blacklists a playlist when an error occurs for a set amount of time
+ * making it unavailable for selection by the rendition selection algorithm
+ * and then forces a new playlist (rendition) selection.
+ *
+ * @param {Object=} error an optional error that may include the playlist
+ * to blacklist
+ * @param {number=} blacklistDuration an optional number of seconds to blacklist the
+ * playlist
+ */
+ ;
+
+ _proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
+ if (error === void 0) {
+ error = {};
+ } // If the `error` was generated by the playlist loader, it will contain
+ // the playlist we were trying to load (but failed) and that should be
+ // blacklisted instead of the currently selected playlist which is likely
+ // out-of-date in this scenario
+
+
+ var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
+ blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
+ // trying to load the master OR while we were disposing of the tech
+
+ if (!currentPlaylist) {
+ this.error = error;
+
+ if (this.mediaSource.readyState !== 'open') {
+ this.trigger('error');
+ } else {
+ this.sourceUpdater_.endOfStream('network');
+ }
+
+ return;
+ }
+
+ currentPlaylist.playlistErrors_++;
+ var playlists = this.masterPlaylistLoader_.master.playlists;
+ var enabledPlaylists = playlists.filter(isEnabled);
+ var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
+ // forever
+
+ if (playlists.length === 1 && blacklistDuration !== Infinity) {
+ videojs.log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
+ this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay
+
+ return this.masterPlaylistLoader_.load(isFinalRendition);
+ }
+
+ if (isFinalRendition) {
+ // Since we're on the final non-blacklisted playlist, and we're about to blacklist
+ // it, instead of erring the player or retrying this playlist, clear out the current
+ // blacklist. This allows other playlists to be attempted in case any have been
+ // fixed.
+ var reincluded = false;
+ playlists.forEach(function (playlist) {
+ // skip current playlist which is about to be blacklisted
+ if (playlist === currentPlaylist) {
+ return;
+ }
+
+ var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.
+
+ if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
+ reincluded = true;
+ delete playlist.excludeUntil;
+ }
+ });
+
+ if (reincluded) {
+ videojs.log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
+ // playlist. This is needed for users relying on the retryplaylist event to catch a
+ // case where the player might be stuck and looping through "dead" playlists.
+
+ this.tech_.trigger('retryplaylist');
+ }
+ } // Blacklist this playlist
+
+
+ var excludeUntil;
+
+ if (currentPlaylist.playlistErrors_ > this.maxPlaylistRetries) {
+ excludeUntil = Infinity;
+ } else {
+ excludeUntil = Date.now() + blacklistDuration * 1000;
+ }
+
+ currentPlaylist.excludeUntil = excludeUntil;
+
+ if (error.reason) {
+ currentPlaylist.lastExcludeReason_ = error.reason;
+ }
+
+ this.tech_.trigger('blacklistplaylist');
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-rendition-blacklisted'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-rendition-blacklisted'
+ }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
+ // Would be something like media().id !== currentPlaylist.id and we would need something
+ // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
+ // from loading a new playlist on any blacklist.
+ // Select a new playlist
+
+ var nextPlaylist = this.selectPlaylist();
+
+ if (!nextPlaylist) {
+ this.error = 'Playback cannot continue. No available working or supported playlists.';
+ this.trigger('error');
+ return;
+ }
+
+ var logFn = error.internal ? this.logger_ : videojs.log.warn;
+ var errorMessage = error.message ? ' ' + error.message : '';
+ logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders
+
+ if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
+ this.delegateLoaders_('audio', ['abort', 'pause']);
+ } // if subtitle group changed reset subtitle loaders
+
+
+ if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
+ this.delegateLoaders_('subtitle', ['abort', 'pause']);
+ }
+
+ this.delegateLoaders_('main', ['abort', 'pause']);
+ var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
+ var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last refresh is sooner than half targetDuration
+
+ return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
+ }
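+ // Illustrative arithmetic (added comment): the exclusion window computed above, with a
+ // hypothetical blacklistDuration of 300 seconds:
+ //   playlist.excludeUntil = Date.now() + 300 * 1000;   // normal error, eligible again later
+ //   playlist.excludeUntil = Infinity;                   // playlistErrors_ > maxPlaylistRetries
+ // The final-rendition and recently-refreshed cases pass a truthy delay flag into
+ // switchMedia_(nextPlaylist, 'exclude', ...) rather than switching immediately.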
+ /**
+ * Pause all segment/playlist loaders
+ */
+ ;
+
+ _proto.pauseLoading = function pauseLoading() {
+ this.delegateLoaders_('all', ['abort', 'pause']);
+ this.stopABRTimer_();
+ }
+ /**
+ * Call a set of functions in order on playlist loaders, segment loaders,
+ * or both types of loaders.
+ *
+ * @param {string} filter
+ * Filter loaders that should call fnNames using a string. Can be:
+ * * all - run on all loaders
+ * * audio - run on all audio loaders
+ * * subtitle - run on all subtitle loaders
+ * * main - run on the main/master loaders
+ *
+ * @param {Array|string} fnNames
+ * A string or array of function names to call.
+ */
+ ;
+
+ _proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
+ var _this7 = this;
+
+ var loaders = [];
+ var dontFilterPlaylist = filter === 'all';
+
+ if (dontFilterPlaylist || filter === 'main') {
+ loaders.push(this.masterPlaylistLoader_);
+ }
+
+ var mediaTypes = [];
+
+ if (dontFilterPlaylist || filter === 'audio') {
+ mediaTypes.push('AUDIO');
+ }
+
+ if (dontFilterPlaylist || filter === 'subtitle') {
+ mediaTypes.push('CLOSED-CAPTIONS');
+ mediaTypes.push('SUBTITLES');
+ }
+
+ mediaTypes.forEach(function (mediaType) {
+ var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;
+
+ if (loader) {
+ loaders.push(loader);
+ }
+ });
+ ['main', 'audio', 'subtitle'].forEach(function (name) {
+ var loader = _this7[name + "SegmentLoader_"];
+
+ if (loader && (filter === name || filter === 'all')) {
+ loaders.push(loader);
+ }
+ });
+ loaders.forEach(function (loader) {
+ return fnNames.forEach(function (fnName) {
+ if (typeof loader[fnName] === 'function') {
+ loader[fnName]();
+ }
+ });
+ });
+ }
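+ // Added note listing the call patterns used in this file (illustrative, mirroring the code above):
+ //   this.delegateLoaders_('all', ['abort', 'pause']);      // pauseLoading()
+ //   this.delegateLoaders_('all', ['abort']);               // 'earlyabort' handler
+ //   this.delegateLoaders_('audio', ['abort', 'pause']);    // audio group changed during exclusion
+ //   this.delegateLoaders_('subtitle', ['abort', 'pause']); // subtitle group changed
+ //   this.delegateLoaders_('main', ['abort', 'pause']);     // main loaders, after an exclusion
+ // Each listed function name is only invoked when the target loader actually implements it.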
+ /**
+ * set the current time on all segment loaders
+ *
+ * @param {TimeRange} currentTime the current time to set
+ * @return {TimeRange} the current time
+ */
+ ;
+
+ _proto.setCurrentTime = function setCurrentTime(currentTime) {
+ var buffered = findRange(this.tech_.buffered(), currentTime);
+
+ if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
+ // return immediately if the metadata is not ready yet
+ return 0;
+ } // it's clearly an edge case, but don't throw an error if asked to
+ // seek within an empty playlist
+
+
+ if (!this.masterPlaylistLoader_.media().segments) {
+ return 0;
+ } // if the seek location is already buffered, continue buffering as usual
+
+
+ if (buffered && buffered.length) {
+ return currentTime;
+ } // cancel outstanding requests so we begin buffering at the new
+ // location
+
+
+ this.mainSegmentLoader_.resetEverything();
+ this.mainSegmentLoader_.abort();
+
+ if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
+ this.audioSegmentLoader_.resetEverything();
+ this.audioSegmentLoader_.abort();
+ }
+
+ if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
+ this.subtitleSegmentLoader_.resetEverything();
+ this.subtitleSegmentLoader_.abort();
+ } // start segment loader loading in case they are paused
+
+
+ this.load();
+ }
+ /**
+ * get the current duration
+ *
+ * @return {TimeRange} the duration
+ */
+ ;
+
+ _proto.duration = function duration() {
+ if (!this.masterPlaylistLoader_) {
+ return 0;
+ }
+
+ var media = this.masterPlaylistLoader_.media();
+
+ if (!media) {
+ // no playlists loaded yet, so can't determine a duration
+ return 0;
+ } // Don't rely on the media source for duration in the case of a live playlist since
+ // setting the native MediaSource's duration to infinity ends up with consequences to
+ // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
+ //
+ // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
+ // however, few browsers have support for setLiveSeekableRange()
+ // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
+ //
+ // Until a time when the duration of the media source can be set to infinity, and a
+ // seekable range specified across browsers, just return Infinity.
+
+
+ if (!media.endList) {
+ return Infinity;
+ } // Since this is a VOD video, it is safe to rely on the media source's duration (if
+ // available). If it's not available, fall back to a playlist-calculated estimate.
+
+
+ if (this.mediaSource) {
+ return this.mediaSource.duration;
+ }
+
+ return Vhs$1.Playlist.duration(media);
+ }
+ /**
+ * check the seekable range
+ *
+ * @return {TimeRange} the seekable range
+ */
+ ;
+
+ _proto.seekable = function seekable() {
+ return this.seekable_;
+ };
+
+ _proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
+ var audioSeekable; // TODO check for creation of both source buffers before updating seekable
+ //
+ // A fix was made to this function where a check for
+ // this.sourceUpdater_.hasCreatedSourceBuffers
+ // was added to ensure that both source buffers were created before seekable was
+ // updated. However, it originally had a bug where it was checking for a true and
+ // returning early instead of checking for false. Setting it to check for false to
+ // return early though created other issues. A call to play() would check for seekable
+ // end without verifying that a seekable range was present. In addition, even checking
+ // for that didn't solve some issues, as handleFirstPlay is sometimes worked around
+ // due to a media update calling load on the segment loaders, skipping a seek to live,
+ // thereby starting live streams at the beginning of the stream rather than at the end.
+ //
+ // This conditional should be fixed to wait for the creation of two source buffers at
+ // the same time as the other sections of code are fixed to properly seek to live and
+ // not throw an error due to checking for a seekable end when no seekable range exists.
+ //
+ // For now, fall back to the older behavior, with the understanding that the seekable
+ // range may not be completely correct, leading to a suboptimal initial live point.
+
+ if (!this.masterPlaylistLoader_) {
+ return;
+ }
+
+ var media = this.masterPlaylistLoader_.media();
+
+ if (!media) {
+ return;
+ }
+
+ var expired = this.syncController_.getExpiredTime(media, this.duration());
+
+ if (expired === null) {
+ // not enough information to update seekable
+ return;
+ }
+
+ var master = this.masterPlaylistLoader_.master;
+ var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
+
+ if (mainSeekable.length === 0) {
+ return;
+ }
+
+ if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
+ media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
+ expired = this.syncController_.getExpiredTime(media, this.duration());
+
+ if (expired === null) {
+ return;
+ }
+
+ audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
+
+ if (audioSeekable.length === 0) {
+ return;
+ }
+ }
+
+ var oldEnd;
+ var oldStart;
+
+ if (this.seekable_ && this.seekable_.length) {
+ oldEnd = this.seekable_.end(0);
+ oldStart = this.seekable_.start(0);
+ }
+
+ if (!audioSeekable) {
+ // seekable has been calculated based on buffering video data so it
+ // can be returned directly
+ this.seekable_ = mainSeekable;
+ } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
+ // seekables are pretty far off, rely on main
+ this.seekable_ = mainSeekable;
+ } else {
+ this.seekable_ = videojs.createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
+ } // seekable is the same as last time
+
+
+ if (this.seekable_ && this.seekable_.length) {
+ if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
+ return;
+ }
+ }
+
+ this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
+ this.tech_.trigger('seekablechanged');
+ }
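+ // Illustrative arithmetic (added comment, hypothetical numbers): when both main and audio
+ // seekable ranges exist and overlap, the published range is their intersection, e.g.
+ //   mainSeekable  = [[10, 100]]
+ //   audioSeekable = [[12,  95]]
+ //   this.seekable_ = videojs.createTimeRanges([[12, 95]])   // max of the starts, min of the ends
+ // Disjoint ranges fall back to mainSeekable, and an unchanged range skips the
+ // 'seekablechanged' trigger.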
+ /**
+ * Update the player duration
+ */
+ ;
+
+ _proto.updateDuration = function updateDuration(isLive) {
+ if (this.updateDuration_) {
+ this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
+ this.updateDuration_ = null;
+ }
+
+ if (this.mediaSource.readyState !== 'open') {
+ this.updateDuration_ = this.updateDuration.bind(this, isLive);
+ this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
+ return;
+ }
+
+ if (isLive) {
+ var seekable = this.seekable();
+
+ if (!seekable.length) {
+ return;
+ } // Even in the case of a live playlist, the native MediaSource's duration should not
+ // be set to Infinity (even though this would be expected for a live playlist), since
+ // setting the native MediaSource's duration to infinity ends up with consequences to
+ // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
+ //
+ // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
+ // however, few browsers have support for setLiveSeekableRange()
+ // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
+ //
+ // Until a time when the duration of the media source can be set to infinity, and a
+ // seekable range specified across browsers, the duration should be greater than or
+ // equal to the last possible seekable value.
+ // MediaSource duration starts as NaN
+ // It is possible (and probable) that this case will never be reached for many
+ // sources, since the MediaSource reports duration as the highest value without
+ // accounting for timestamp offset. For example, if the timestamp offset is -100 and
+ // we buffered times 0 to 100 with real times of 100 to 200, even though current
+ // time will be between 0 and 100, the native media source may report the duration
+ // as 200. However, since we report duration separate from the media source (as
+ // Infinity), and as long as the native media source duration value is greater than
+ // our reported seekable range, seeks will work as expected. The large number as
+ // duration for live is actually a strategy used by some players to work around the
+ // issue of live seekable ranges cited above.
+
+
+ if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
+ this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
+ }
+
+ return;
+ }
+
+ var buffered = this.tech_.buffered();
+ var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());
+
+ if (buffered.length > 0) {
+ duration = Math.max(duration, buffered.end(buffered.length - 1));
+ }
+
+ if (this.mediaSource.duration !== duration) {
+ this.sourceUpdater_.setDuration(duration);
+ }
+ }
+ /**
+ * dispose of the MasterPlaylistController and everything
+ * that it controls
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ var _this8 = this;
+
+ this.trigger('dispose');
+ this.decrypter_.terminate();
+ this.masterPlaylistLoader_.dispose();
+ this.mainSegmentLoader_.dispose();
+
+ if (this.loadOnPlay_) {
+ this.tech_.off('play', this.loadOnPlay_);
+ }
+
+ ['AUDIO', 'SUBTITLES'].forEach(function (type) {
+ var groups = _this8.mediaTypes_[type].groups;
+
+ for (var id in groups) {
+ groups[id].forEach(function (group) {
+ if (group.playlistLoader) {
+ group.playlistLoader.dispose();
+ }
+ });
+ }
+ });
+ this.audioSegmentLoader_.dispose();
+ this.subtitleSegmentLoader_.dispose();
+ this.sourceUpdater_.dispose();
+ this.timelineChangeController_.dispose();
+ this.stopABRTimer_();
+
+ if (this.updateDuration_) {
+ this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
+ }
+
+ this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_); // remove the remaining MediaSource event listeners
+
+ this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
+ this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
+ this.off();
+ }
+ /**
+ * return the master playlist object if we have one
+ *
+ * @return {Object} the master playlist object that we parsed
+ */
+ ;
+
+ _proto.master = function master() {
+ return this.masterPlaylistLoader_.master;
+ }
+ /**
+ * return the currently selected playlist
+ *
+ * @return {Object} the currently selected playlist object that we parsed
+ */
+ ;
+
+ _proto.media = function media() {
+ // playlist loader will not return media if it has not been fully loaded
+ return this.masterPlaylistLoader_.media() || this.initialMedia_;
+ };
+
+ _proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
+ var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
+ var hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_(); // if we are not using an audio loader, then we have audio media info
+ // otherwise check on the segment loader.
+
+ var hasAudioMediaInfo = !usingAudioLoader ? true : !!this.audioSegmentLoader_.getCurrentMediaInfo_(); // one or both loaders have not loaded sufficiently to get codecs
+
+ if (!hasMainMediaInfo || !hasAudioMediaInfo) {
+ return false;
+ }
+
+ return true;
+ };
+
+ _proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
+ var _this9 = this;
+
+ var media = {
+ main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
+ audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
+ }; // set "main" media equal to video
+
+ media.video = media.main;
+ var playlistCodecs = codecsForPlaylist(this.master(), this.media());
+ var codecs = {};
+ var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
+
+ if (media.main.hasVideo) {
+ codecs.video = playlistCodecs.video || media.main.videoCodec || DEFAULT_VIDEO_CODEC;
+ }
+
+ if (media.main.isMuxed) {
+ codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || DEFAULT_AUDIO_CODEC);
+ }
+
+ if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
+ codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below
+
+ media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
+ } // no codecs, no playback.
+
+
+ if (!codecs.audio && !codecs.video) {
+ this.blacklistCurrentPlaylist({
+ playlist: this.media(),
+ message: 'Could not determine codecs for playlist.',
+ blacklistDuration: Infinity
+ });
+ return;
+ } // fmp4 relies on browser support, while ts relies on muxer support
+
+
+ var supportFunction = function supportFunction(isFmp4, codec) {
+ return isFmp4 ? browserSupportsCodec(codec) : muxerSupportsCodec(codec);
+ };
+
+ var unsupportedCodecs = {};
+ var unsupportedAudio;
+ ['video', 'audio'].forEach(function (type) {
+ if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
+ var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
+ unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
+ unsupportedCodecs[supporter].push(codecs[type]);
+
+ if (type === 'audio') {
+ unsupportedAudio = supporter;
+ }
+ }
+ });
+
+ if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
+ var audioGroup = this.media().attributes.AUDIO;
+ this.master().playlists.forEach(function (variant) {
+ var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;
+
+ if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
+ variant.excludeUntil = Infinity;
+ }
+ });
+ this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
+ } // if we have any unsupported codecs blacklist this playlist.
+
+
+ if (Object.keys(unsupportedCodecs).length) {
+ var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
+ if (acc) {
+ acc += ', ';
+ }
+
+ acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
+ return acc;
+ }, '') + '.';
+ this.blacklistCurrentPlaylist({
+ playlist: this.media(),
+ internal: true,
+ message: message,
+ blacklistDuration: Infinity
+ });
+ return;
+ } // check if codec switching is happening
+
+
+ if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
+ var switchMessages = [];
+ ['video', 'audio'].forEach(function (type) {
+ var newCodec = (parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
+ var oldCodec = (parseCodecs(codecs[type] || '')[0] || {}).type;
+
+ if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
+ switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
+ }
+ });
+
+ if (switchMessages.length) {
+ this.blacklistCurrentPlaylist({
+ playlist: this.media(),
+ message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
+ blacklistDuration: Infinity,
+ internal: true
+ });
+ return;
+ }
+ } // TODO: when using the muxer shouldn't we just return
+ // the codecs that the muxer outputs?
+
+
+ return codecs;
+ }
+ /**
+ * Create source buffers and exclude any incompatible renditions.
+ *
+ * @private
+ */
+ ;
+
+ _proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
+ // media source is not ready yet or sourceBuffers are already
+ // created.
+ if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
+ return;
+ }
+
+ if (!this.areMediaTypesKnown_()) {
+ return;
+ }
+
+ var codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
+
+ if (!codecs) {
+ return;
+ }
+
+ this.sourceUpdater_.createSourceBuffers(codecs);
+ var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
+ this.excludeIncompatibleVariants_(codecString);
+ }
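+ // Illustrative example (added comment, hypothetical codec strings): the codecString handed to
+ // excludeIncompatibleVariants_ above is just the detected codecs joined with a comma, e.g.
+ //   codecs = { video: 'avc1.4d400d', audio: 'mp4a.40.2' }
+ //   codecString === 'avc1.4d400d,mp4a.40.2'
+ // An audio-only source yields just 'mp4a.40.2', because filter(Boolean) drops the missing
+ // video entry.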
+ /**
+ * Excludes playlists with codecs that are unsupported by the muxer and browser.
+ */
+ ;
+
+ _proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
+ var _this10 = this;
+
+ var playlists = this.master().playlists;
+ var ids = []; // TODO: why don't we have a property to loop through all
+ // playlists? Why did we ever mix indexes and keys?
+
+ Object.keys(playlists).forEach(function (key) {
+ var variant = playlists[key]; // check if we already processed this playlist.
+
+ if (ids.indexOf(variant.id) !== -1) {
+ return;
+ }
+
+ ids.push(variant.id);
+ var codecs = codecsForPlaylist(_this10.master, variant);
+ var unsupported = [];
+
+ if (codecs.audio && !muxerSupportsCodec(codecs.audio) && !browserSupportsCodec(codecs.audio)) {
+ unsupported.push("audio codec " + codecs.audio);
+ }
+
+ if (codecs.video && !muxerSupportsCodec(codecs.video) && !browserSupportsCodec(codecs.video)) {
+ unsupported.push("video codec " + codecs.video);
+ }
+
+ if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
+ unsupported.push("text codec " + codecs.text);
+ }
+
+ if (unsupported.length) {
+ variant.excludeUntil = Infinity;
+
+ _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
+ }
+ });
+ }
+ /**
+ * Blacklist playlists that are known to be codec or
+ * stream-incompatible with the SourceBuffer configuration. For
+ * instance, Media Source Extensions would cause the video element to
+ * stall waiting for video data if you switched from a variant with
+ * video and audio to an audio-only one.
+ *
+ * @param {Object} media a media playlist compatible with the current
+ * set of SourceBuffers. Variants in the current master playlist that
+ * do not appear to have compatible codec or stream configurations
+ * will be excluded from the default playlist selection algorithm
+ * indefinitely.
+ * @private
+ */
+ ;
+
+ _proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
+ var _this11 = this;
+
+ var ids = [];
+ var playlists = this.master().playlists;
+ var codecs = unwrapCodecList(parseCodecs(codecString));
+ var codecCount_ = codecCount(codecs);
+ var videoDetails = codecs.video && parseCodecs(codecs.video)[0] || null;
+ var audioDetails = codecs.audio && parseCodecs(codecs.audio)[0] || null;
+ Object.keys(playlists).forEach(function (key) {
+ var variant = playlists[key]; // check if we already processed this playlist.
+ // or if it is already excluded forever.
+
+ if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
+ return;
+ }
+
+ ids.push(variant.id);
+ var blacklistReasons = []; // get codecs from the playlist for this variant
+
+ var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
+ var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
+ // variant is incompatible. Wait for mux.js to probe
+
+ if (!variantCodecs.audio && !variantCodecs.video) {
+ return;
+ } // TODO: we can support this by removing the
+ // old media source and creating a new one, but it will take some work.
+ // The number of streams cannot change
+
+
+ if (variantCodecCount !== codecCount_) {
+ blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
+ } // only exclude playlists by codec change, if codecs cannot switch
+ // during playback.
+
+
+ if (!_this11.sourceUpdater_.canChangeType()) {
+ var variantVideoDetails = variantCodecs.video && parseCodecs(variantCodecs.video)[0] || null;
+ var variantAudioDetails = variantCodecs.audio && parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change
+
+ if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
+ blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
+ } // the audio codec cannot change
+
+
+ if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
+ blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
+ }
+ }
+
+ if (blacklistReasons.length) {
+ variant.excludeUntil = Infinity;
+
+ _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
+ }
+ });
+ };
+
+ _proto.updateAdCues_ = function updateAdCues_(media) {
+ var offset = 0;
+ var seekable = this.seekable();
+
+ if (seekable.length) {
+ offset = seekable.start(0);
+ }
+
+ updateAdCues(media, this.cueTagsTrack_, offset);
+ }
+ /**
+ * Calculates the desired forward buffer length based on current time
+ *
+ * @return {number} Desired forward buffer length in seconds
+ */
+ ;
+
+ _proto.goalBufferLength = function goalBufferLength() {
+ var currentTime = this.tech_.currentTime();
+ var initial = Config.GOAL_BUFFER_LENGTH;
+ var rate = Config.GOAL_BUFFER_LENGTH_RATE;
+ var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
+ return Math.min(initial + currentTime * rate, max);
+ }
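+ // Worked example (added comment, hypothetical Config values): if GOAL_BUFFER_LENGTH were 30,
+ // GOAL_BUFFER_LENGTH_RATE 1 and MAX_GOAL_BUFFER_LENGTH 60, then:
+ //   currentTime = 10  -> min(30 + 10 * 1, 60) = 40 seconds of desired forward buffer
+ //   currentTime = 45  -> min(30 + 45 * 1, 60) = 60 seconds (capped at the max)
+ // The real values come from the Config object defined elsewhere in this bundle.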
+ /**
+ * Calculates the desired buffer low water line based on current time
+ *
+ * @return {number} Desired buffer low water line in seconds
+ */
+ ;
+
+ _proto.bufferLowWaterLine = function bufferLowWaterLine() {
+ var currentTime = this.tech_.currentTime();
+ var initial = Config.BUFFER_LOW_WATER_LINE;
+ var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
+ var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
+ var newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
+ return Math.min(initial + currentTime * rate, this.experimentalBufferBasedABR ? newMax : max);
+ };
+
+ _proto.bufferHighWaterLine = function bufferHighWaterLine() {
+ return Config.BUFFER_HIGH_WATER_LINE;
+ };
+
+ return MasterPlaylistController;
+}(videojs.EventTarget);
+/**
+ * Returns a function that acts as the Enable/disable playlist function.
+ *
+ * @param {PlaylistLoader} loader - The master playlist loader
+ * @param {string} playlistID - id of the playlist
+ * @param {Function} changePlaylistFn - A function to be called after a
+ * playlist's enabled-state has been changed. Will NOT be called if a
+ * playlist's enabled-state is unchanged
+ * @param {boolean=} enable - Value to set the playlist enabled-state to
+ * or if undefined returns the current enabled-state for the playlist
+ * @return {Function} Function for setting/getting enabled
+ */
+
+
+var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
+ return function (enable) {
+ var playlist = loader.master.playlists[playlistID];
+ var incompatible = isIncompatible(playlist);
+ var currentlyEnabled = isEnabled(playlist);
+
+ if (typeof enable === 'undefined') {
+ return currentlyEnabled;
+ }
+
+ if (enable) {
+ delete playlist.disabled;
+ } else {
+ playlist.disabled = true;
+ }
+
+ if (enable !== currentlyEnabled && !incompatible) {
+ // Ensure the outside world knows about our changes
+ changePlaylistFn();
+
+ if (enable) {
+ loader.trigger('renditionenabled');
+ } else {
+ loader.trigger('renditiondisabled');
+ }
+ }
+
+ return enable;
+ };
+};
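+// Illustrative usage (added comment): the function returned above acts as a combined
+// getter/setter and is assigned to `representation.enabled` in the Representation constructor below:
+//   representation.enabled();        // -> current enabled-state (boolean)
+//   representation.enabled(false);   // disables the playlist and triggers 'renditiondisabled'
+//   representation.enabled(true);    // re-enables it and triggers 'renditionenabled'
+// changePlaylistFn only runs when the state actually changes and the playlist is compatible.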
+/**
+ * The representation object encapsulates the publicly visible information
+ * in a media playlist along with a setter/getter-type function (enabled)
+ * for changing the enabled-state of a particular playlist entry
+ *
+ * @class Representation
+ */
+
+
+var Representation = function Representation(vhsHandler, playlist, id) {
+ var mpc = vhsHandler.masterPlaylistController_,
+ smoothQualityChange = vhsHandler.options_.smoothQualityChange; // Get a reference to a bound version of the quality change function
+
+ var changeType = smoothQualityChange ? 'smooth' : 'fast';
+ var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc); // some playlist attributes are optional
+
+ if (playlist.attributes) {
+ var resolution = playlist.attributes.RESOLUTION;
+ this.width = resolution && resolution.width;
+ this.height = resolution && resolution.height;
+ this.bandwidth = playlist.attributes.BANDWIDTH;
+ }
+
+ this.codecs = codecsForPlaylist(mpc.master(), playlist);
+ this.playlist = playlist; // The id is simply the ordinality of the media playlist
+ // within the master playlist
+
+ this.id = id; // Partially-apply the enableFunction to create a playlist-
+ // specific variant
+
+ this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
+};
+/**
+ * A mixin function that adds the `representations` api to an instance
+ * of the VhsHandler class
+ *
+ * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
+ * representation API into
+ */
+
+
+var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
+ // Add a single API-specific function to the VhsHandler instance
+ vhsHandler.representations = function () {
+ var master = vhsHandler.masterPlaylistController_.master();
+ var playlists = isAudioOnly(master) ? vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() : master.playlists;
+
+ if (!playlists) {
+ return [];
+ }
+
+ return playlists.filter(function (media) {
+ return !isIncompatible(media);
+ }).map(function (e, i) {
+ return new Representation(vhsHandler, e, e.id);
+ });
+ };
+};
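+// --- Illustrative sketch (not part of the upstream library): using the
+// `representations()` API added by renditionSelectionMixin above to restrict playback
+// to HD renditions. `vhsHandler` is assumed to be the VHS handler instance (for example
+// the object the deprecation getters further down expose as player.tech().vhs). Each
+// representation exposes width, height, bandwidth and an enabled() getter/setter, per
+// the Representation constructor above.
+
+function exampleRestrictToHdRenditions(vhsHandler) {
+  vhsHandler.representations().forEach(function (rep) {
+    // enabled(true/false) toggles the rendition; enabled() with no argument reads the state
+    rep.enabled(typeof rep.height === 'number' && rep.height >= 720);
+  });
+}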
+/**
+ * @file playback-watcher.js
+ *
+ * Playback starts, and now my watch begins. It shall not end until my death. I shall
+ * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
+ * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
+ * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
+ * my life and honor to the Playback Watch, for this Player and all the Players to come.
+ */
+
+
+var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
+/**
+ * @class PlaybackWatcher
+ */
+
+var PlaybackWatcher = /*#__PURE__*/function () {
+ /**
+   * Represents a PlaybackWatcher object.
+ *
+ * @class
+ * @param {Object} options an object that includes the tech and settings
+ */
+ function PlaybackWatcher(options) {
+ var _this = this;
+
+ this.masterPlaylistController_ = options.masterPlaylistController;
+ this.tech_ = options.tech;
+ this.seekable = options.seekable;
+ this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
+ this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
+ this.media = options.media;
+ this.consecutiveUpdates = 0;
+ this.lastRecordedTime = null;
+ this.timer_ = null;
+ this.checkCurrentTimeTimeout_ = null;
+ this.logger_ = logger('PlaybackWatcher');
+ this.logger_('initialize');
+
+ var playHandler = function playHandler() {
+ return _this.monitorCurrentTime_();
+ };
+
+ var canPlayHandler = function canPlayHandler() {
+ return _this.monitorCurrentTime_();
+ };
+
+ var waitingHandler = function waitingHandler() {
+ return _this.techWaiting_();
+ };
+
+ var cancelTimerHandler = function cancelTimerHandler() {
+ return _this.cancelTimer_();
+ };
+
+ var mpc = this.masterPlaylistController_;
+ var loaderTypes = ['main', 'subtitle', 'audio'];
+ var loaderChecks = {};
+ loaderTypes.forEach(function (type) {
+ loaderChecks[type] = {
+ reset: function reset() {
+ return _this.resetSegmentDownloads_(type);
+ },
+ updateend: function updateend() {
+ return _this.checkSegmentDownloads_(type);
+ }
+ };
+ mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
+      // isn't changing, we want to reset. We cannot assume that the new rendition
+      // will also be stalled until we see new appends.
+
+ mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
+      // This prevents one-segment playlists (single vtt or single segment content)
+      // from being detected as stalling, since the buffer will not change in those
+      // cases: it already covers the entire video duration.
+
+ _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
+ });
+ /**
+ * We check if a seek was into a gap through the following steps:
+ * 1. We get a seeking event and we do not get a seeked event. This means that
+ * a seek was attempted but not completed.
+ * 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
+ * removed everything from our buffer and appended a segment, and should be ready
+ * to check for gaps.
+ */
+
+ var setSeekingHandlers = function setSeekingHandlers(fn) {
+ ['main', 'audio'].forEach(function (type) {
+ mpc[type + "SegmentLoader_"][fn]('appended', _this.seekingAppendCheck_);
+ });
+ };
+
+ this.seekingAppendCheck_ = function () {
+ if (_this.fixesBadSeeks_()) {
+ _this.consecutiveUpdates = 0;
+ _this.lastRecordedTime = _this.tech_.currentTime();
+ setSeekingHandlers('off');
+ }
+ };
+
+ this.clearSeekingAppendCheck_ = function () {
+ return setSeekingHandlers('off');
+ };
+
+ this.watchForBadSeeking_ = function () {
+ _this.clearSeekingAppendCheck_();
+
+ setSeekingHandlers('on');
+ };
+
+ this.tech_.on('seeked', this.clearSeekingAppendCheck_);
+ this.tech_.on('seeking', this.watchForBadSeeking_);
+ this.tech_.on('waiting', waitingHandler);
+ this.tech_.on(timerCancelEvents, cancelTimerHandler);
+ this.tech_.on('canplay', canPlayHandler);
+ /*
+ An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
+ is surfaced in one of two ways:
+ 1) The `waiting` event is fired before the player has buffered content, making it impossible
+ to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
+ we can check if playback is stalled due to a gap, and skip the gap if necessary.
+     2) A source with a gap at the beginning of the stream is loaded programmatically while the player
+ is in a playing state. To catch this case, it's important that our one-time play listener is setup
+ even if the player is in a playing state
+ */
+
+ this.tech_.one('play', playHandler); // Define the dispose function to clean up our events
+
+ this.dispose = function () {
+ _this.clearSeekingAppendCheck_();
+
+ _this.logger_('dispose');
+
+ _this.tech_.off('waiting', waitingHandler);
+
+ _this.tech_.off(timerCancelEvents, cancelTimerHandler);
+
+ _this.tech_.off('canplay', canPlayHandler);
+
+ _this.tech_.off('play', playHandler);
+
+ _this.tech_.off('seeking', _this.watchForBadSeeking_);
+
+ _this.tech_.off('seeked', _this.clearSeekingAppendCheck_);
+
+ loaderTypes.forEach(function (type) {
+ mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
+ mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);
+
+ _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
+ });
+
+ if (_this.checkCurrentTimeTimeout_) {
+ window$1.clearTimeout(_this.checkCurrentTimeTimeout_);
+ }
+
+ _this.cancelTimer_();
+ };
+ }
+ /**
+ * Periodically check current time to see if playback stopped
+ *
+ * @private
+ */
+
+
+ var _proto = PlaybackWatcher.prototype;
+
+ _proto.monitorCurrentTime_ = function monitorCurrentTime_() {
+ this.checkCurrentTime_();
+
+ if (this.checkCurrentTimeTimeout_) {
+ window$1.clearTimeout(this.checkCurrentTimeTimeout_);
+ } // 42 = 24 fps // 250 is what Webkit uses // FF uses 15
+
+
+ this.checkCurrentTimeTimeout_ = window$1.setTimeout(this.monitorCurrentTime_.bind(this), 250);
+ }
+ /**
+ * Reset stalled download stats for a specific type of loader
+ *
+ * @param {string} type
+ * The segment loader type to check.
+ *
+ * @listens SegmentLoader#playlistupdate
+ * @listens Tech#seeking
+ * @listens Tech#seeked
+ */
+ ;
+
+ _proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
+ var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
+
+ if (this[type + "StalledDownloads_"] > 0) {
+ this.logger_("resetting possible stalled download count for " + type + " loader");
+ }
+
+ this[type + "StalledDownloads_"] = 0;
+ this[type + "Buffered_"] = loader.buffered_();
+ }
+ /**
+ * Checks on every segment `appendsdone` to see
+   * if segment appends are making progress. If they are not,
+   * and we are still downloading bytes, we blacklist the playlist.
+ *
+ * @param {string} type
+ * The segment loader type to check.
+ *
+ * @listens SegmentLoader#appendsdone
+ */
+ ;
+
+ _proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
+ var mpc = this.masterPlaylistController_;
+ var loader = mpc[type + "SegmentLoader_"];
+ var buffered = loader.buffered_();
+ var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
+ this[type + "Buffered_"] = buffered; // if another watcher is going to fix the issue or
+ // the buffered value for this loader changed
+ // appends are working
+
+ if (isBufferedDifferent) {
+ this.resetSegmentDownloads_(type);
+ return;
+ }
+
+ this[type + "StalledDownloads_"]++;
+ this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
+ playlistId: loader.playlist_ && loader.playlist_.id,
+ buffered: timeRangesToArray(buffered)
+ }); // after 10 possibly stalled appends with no reset, exclude
+
+ if (this[type + "StalledDownloads_"] < 10) {
+ return;
+ }
+
+ this.logger_(type + " loader stalled download exclusion");
+ this.resetSegmentDownloads_(type);
+ this.tech_.trigger({
+ type: 'usage',
+ name: "vhs-" + type + "-download-exclusion"
+ });
+
+ if (type === 'subtitle') {
+ return;
+ } // TODO: should we exclude audio tracks rather than main tracks
+ // when type is audio?
+
+
+ mpc.blacklistCurrentPlaylist({
+ message: "Excessive " + type + " segment downloading detected."
+ }, Infinity);
+ }
+ /**
+ * The purpose of this function is to emulate the "waiting" event on
+ * browsers that do not emit it when they are waiting for more
+ * data to continue playback
+ *
+ * @private
+ */
+ ;
+
+ _proto.checkCurrentTime_ = function checkCurrentTime_() {
+ if (this.tech_.paused() || this.tech_.seeking()) {
+ return;
+ }
+
+ var currentTime = this.tech_.currentTime();
+ var buffered = this.tech_.buffered();
+
+ if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
+ // If current time is at the end of the final buffered region, then any playback
+ // stall is most likely caused by buffering in a low bandwidth environment. The tech
+      // should fire a `waiting` event in this scenario, but browser and tech
+      // inconsistencies mean it may not. Calling `techWaiting_` here allows us to simulate
+ // responding to a native `waiting` event when the tech fails to emit one.
+ return this.techWaiting_();
+ }
+
+ if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
+ this.consecutiveUpdates++;
+ this.waiting_();
+ } else if (currentTime === this.lastRecordedTime) {
+ this.consecutiveUpdates++;
+ } else {
+ this.consecutiveUpdates = 0;
+ this.lastRecordedTime = currentTime;
+ }
+ }
+ /**
+ * Cancels any pending timers and resets the 'timeupdate' mechanism
+ * designed to detect that we are stalled
+ *
+ * @private
+ */
+ ;
+
+ _proto.cancelTimer_ = function cancelTimer_() {
+ this.consecutiveUpdates = 0;
+
+ if (this.timer_) {
+ this.logger_('cancelTimer_');
+ clearTimeout(this.timer_);
+ }
+
+ this.timer_ = null;
+ }
+ /**
+ * Fixes situations where there's a bad seek
+ *
+ * @return {boolean} whether an action was taken to fix the seek
+ * @private
+ */
+ ;
+
+ _proto.fixesBadSeeks_ = function fixesBadSeeks_() {
+ var seeking = this.tech_.seeking();
+
+ if (!seeking) {
+ return false;
+ } // TODO: It's possible that these seekable checks should be moved out of this function
+ // and into a function that runs on seekablechange. It's also possible that we only need
+ // afterSeekableWindow as the buffered check at the bottom is good enough to handle before
+ // seekable range.
+
+
+ var seekable = this.seekable();
+ var currentTime = this.tech_.currentTime();
+ var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
+ var seekTo;
+
+ if (isAfterSeekableRange) {
+ var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
+
+ seekTo = seekableEnd;
+ }
+
+ if (this.beforeSeekableWindow_(seekable, currentTime)) {
+ var seekableStart = seekable.start(0); // sync to the beginning of the live window
+ // provide a buffer of .1 seconds to handle rounding/imprecise numbers
+
+ seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
+ // happen in live with a 3 segment playlist), then don't use a time delta
+ seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
+ }
+
+ if (typeof seekTo !== 'undefined') {
+ this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
+ this.tech_.setCurrentTime(seekTo);
+ return true;
+ }
+
+ var sourceUpdater = this.masterPlaylistController_.sourceUpdater_;
+ var buffered = this.tech_.buffered();
+ var audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
+ var videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null;
+ var media = this.media(); // verify that at least two segment durations or one part duration have been
+ // appended before checking for a gap.
+
+    var minAppendedDuration = media.partTargetDuration ? media.partTargetDuration : (media.targetDuration - TIME_FUDGE_FACTOR) * 2;
+
+ var bufferedToCheck = [audioBuffered, videoBuffered];
+
+ for (var i = 0; i < bufferedToCheck.length; i++) {
+ // skip null buffered
+ if (!bufferedToCheck[i]) {
+ continue;
+ }
+
+ var timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if we are less than two video/audio segment durations or one part
+ // duration behind we haven't appended enough to call this a bad seek.
+
+ if (timeAhead < minAppendedDuration) {
+ return false;
+ }
+ }
+
+ var nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
+ // to seek over the gap
+
+ if (nextRange.length === 0) {
+ return false;
+ }
+
+ seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
+ this.logger_("Buffered region starts (" + nextRange.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
+ this.tech_.setCurrentTime(seekTo);
+ return true;
+ }
+ /**
+ * Handler for situations when we determine the player is waiting.
+ *
+ * @private
+ */
+ ;
+
+ _proto.waiting_ = function waiting_() {
+ if (this.techWaiting_()) {
+ return;
+ } // All tech waiting checks failed. Use last resort correction
+
+
+ var currentTime = this.tech_.currentTime();
+ var buffered = this.tech_.buffered();
+ var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
+ // region with no indication that anything is amiss (seen in Firefox). Seeking to
+ // currentTime is usually enough to kickstart the player. This checks that the player
+ // is currently within a buffered region before attempting a corrective seek.
+ // Chrome does not appear to continue `timeupdate` events after a `waiting` event
+ // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
+ // make sure there is ~3 seconds of forward buffer before taking any corrective action
+ // to avoid triggering an `unknownwaiting` event when the network is slow.
+
+ if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
+ this.cancelTimer_();
+ this.tech_.setCurrentTime(currentTime);
+ this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
+
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-unknown-waiting'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-unknown-waiting'
+ });
+ return;
+ }
+ }
+ /**
+ * Handler for situations when the tech fires a `waiting` event
+ *
+ * @return {boolean}
+   *         True if an action was taken (or none was needed) to correct the waiting.
+   *         False if no checks passed
+ * @private
+ */
+ ;
+
+ _proto.techWaiting_ = function techWaiting_() {
+ var seekable = this.seekable();
+ var currentTime = this.tech_.currentTime();
+
+ if (this.tech_.seeking() || this.timer_ !== null) {
+ // Tech is seeking or already waiting on another action, no action needed
+ return true;
+ }
+
+ if (this.beforeSeekableWindow_(seekable, currentTime)) {
+ var livePoint = seekable.end(seekable.length - 1);
+ this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
+ this.cancelTimer_();
+ this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS
+
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-live-resync'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-live-resync'
+ });
+ return true;
+ }
+
+ var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
+ var buffered = this.tech_.buffered();
+ var videoUnderflow = this.videoUnderflow_({
+ audioBuffered: sourceUpdater.audioBuffered(),
+ videoBuffered: sourceUpdater.videoBuffered(),
+ currentTime: currentTime
+ });
+
+ if (videoUnderflow) {
+ // Even though the video underflowed and was stuck in a gap, the audio overplayed
+ // the gap, leading currentTime into a buffered range. Seeking to currentTime
+ // allows the video to catch up to the audio position without losing any audio
+ // (only suffering ~3 seconds of frozen video and a pause in audio playback).
+ this.cancelTimer_();
+ this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS
+
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-video-underflow'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-video-underflow'
+ });
+ return true;
+ }
+
+ var nextRange = findNextRange(buffered, currentTime); // check for gap
+
+ if (nextRange.length > 0) {
+ var difference = nextRange.start(0) - currentTime;
+ this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
+ this.cancelTimer_();
+ this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
+ return true;
+ } // All checks failed. Returning false to indicate failure to correct waiting
+
+
+ return false;
+ };
+
+ _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
+ if (allowSeeksWithinUnsafeLiveWindow === void 0) {
+ allowSeeksWithinUnsafeLiveWindow = false;
+ }
+
+ if (!seekable.length) {
+ // we can't make a solid case if there's no seekable, default to false
+ return false;
+ }
+
+ var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
+ var isLive = !playlist.endList;
+
+ if (isLive && allowSeeksWithinUnsafeLiveWindow) {
+ allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
+ }
+
+ if (currentTime > allowedEnd) {
+ return true;
+ }
+
+ return false;
+ };
+
+ _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
+ if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
+ seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
+ return true;
+ }
+
+ return false;
+ };
+
+ _proto.videoUnderflow_ = function videoUnderflow_(_ref) {
+ var videoBuffered = _ref.videoBuffered,
+ audioBuffered = _ref.audioBuffered,
+ currentTime = _ref.currentTime; // audio only content will not have video underflow :)
+
+ if (!videoBuffered) {
+ return;
+ }
+
+ var gap; // find a gap in demuxed content.
+
+ if (videoBuffered.length && audioBuffered.length) {
+ // in Chrome audio will continue to play for ~3s when we run out of video
+ // so we have to check that the video buffer did have some buffer in the
+ // past.
+ var lastVideoRange = findRange(videoBuffered, currentTime - 3);
+ var videoRange = findRange(videoBuffered, currentTime);
+ var audioRange = findRange(audioBuffered, currentTime);
+
+ if (audioRange.length && !videoRange.length && lastVideoRange.length) {
+ gap = {
+ start: lastVideoRange.end(0),
+ end: audioRange.end(0)
+ };
+ } // find a gap in muxed content.
+
+ } else {
+ var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
+ // stuck in a gap due to video underflow.
+
+ if (!nextRange.length) {
+ gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
+ }
+ }
+
+ if (gap) {
+ this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
+ return true;
+ }
+
+ return false;
+ }
+ /**
+ * Timer callback. If playback still has not proceeded, then we seek
+ * to the start of the next buffered region.
+ *
+ * @private
+ */
+ ;
+
+ _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
+ var buffered = this.tech_.buffered();
+ var currentTime = this.tech_.currentTime();
+ var nextRange = findNextRange(buffered, currentTime);
+ this.cancelTimer_();
+
+ if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
+ return;
+ }
+
+ this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
+
+ this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-gap-skip'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-gap-skip'
+ });
+ };
+
+ _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
+ // At least in Chrome, if there is a gap in the video buffer, the audio will continue
+ // playing for ~3 seconds after the video gap starts. This is done to account for
+ // video buffer underflow/underrun (note that this is not done when there is audio
+ // buffer underflow/underrun -- in that case the video will stop as soon as it
+ // encounters the gap, as audio stalls are more noticeable/jarring to a user than
+ // video stalls). The player's time will reflect the playthrough of audio, so the
+ // time will appear as if we are in a buffered region, even if we are stuck in a
+ // "gap."
+ //
+ // Example:
+ // video buffer: 0 => 10.1, 10.2 => 20
+ // audio buffer: 0 => 20
+ // overall buffer: 0 => 10.1, 10.2 => 20
+ // current time: 13
+ //
+ // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
+ // however, the audio continued playing until it reached ~3 seconds past the gap
+ // (13 seconds), at which point it stops as well. Since current time is past the
+ // gap, findNextRange will return no ranges.
+ //
+ // To check for this issue, we see if there is a gap that starts somewhere within
+ // a 3 second range (3 seconds +/- 1 second) back from our current time.
+ var gaps = findGaps(buffered);
+
+ for (var i = 0; i < gaps.length; i++) {
+ var start = gaps.start(i);
+      var end = gaps.end(i); // gap starts no more than 4 seconds back
+
+ if (currentTime - start < 4 && currentTime - start > 2) {
+ return {
+ start: start,
+ end: end
+ };
+ }
+ }
+
+ return null;
+ };
+
+ return PlaybackWatcher;
+}();
+
+var defaultOptions = {
+ errorInterval: 30,
+ getSource: function getSource(next) {
+ var tech = this.tech({
+ IWillNotUseThisInPlugins: true
+ });
+ var sourceObj = tech.currentSource_ || this.currentSource();
+ return next(sourceObj);
+ }
+};
+/**
+ * Main entry point for the plugin
+ *
+ * @param {Player} player a reference to a videojs Player instance
+ * @param {Object} [options] an object with plugin options
+ * @private
+ */
+
+var initPlugin = function initPlugin(player, options) {
+ var lastCalled = 0;
+ var seekTo = 0;
+ var localOptions = videojs.mergeOptions(defaultOptions, options);
+ player.ready(function () {
+ player.trigger({
+ type: 'usage',
+ name: 'vhs-error-reload-initialized'
+ });
+ player.trigger({
+ type: 'usage',
+ name: 'hls-error-reload-initialized'
+ });
+ });
+ /**
+ * Player modifications to perform that must wait until `loadedmetadata`
+ * has been triggered
+ *
+ * @private
+ */
+
+ var loadedMetadataHandler = function loadedMetadataHandler() {
+ if (seekTo) {
+ player.currentTime(seekTo);
+ }
+ };
+ /**
+ * Set the source on the player element, play, and seek if necessary
+ *
+ * @param {Object} sourceObj An object specifying the source url and mime-type to play
+ * @private
+ */
+
+
+ var setSource = function setSource(sourceObj) {
+ if (sourceObj === null || sourceObj === undefined) {
+ return;
+ }
+
+ seekTo = player.duration() !== Infinity && player.currentTime() || 0;
+ player.one('loadedmetadata', loadedMetadataHandler);
+ player.src(sourceObj);
+ player.trigger({
+ type: 'usage',
+ name: 'vhs-error-reload'
+ });
+ player.trigger({
+ type: 'usage',
+ name: 'hls-error-reload'
+ });
+ player.play();
+ };
+ /**
+ * Attempt to get a source from either the built-in getSource function
+ * or a custom function provided via the options
+ *
+ * @private
+ */
+
+
+ var errorHandler = function errorHandler() {
+ // Do not attempt to reload the source if a source-reload occurred before
+ // 'errorInterval' time has elapsed since the last source-reload
+ if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
+ player.trigger({
+ type: 'usage',
+ name: 'vhs-error-reload-canceled'
+ });
+ player.trigger({
+ type: 'usage',
+ name: 'hls-error-reload-canceled'
+ });
+ return;
+ }
+
+ if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
+ videojs.log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
+ return;
+ }
+
+ lastCalled = Date.now();
+ return localOptions.getSource.call(player, setSource);
+ };
+ /**
+ * Unbind any event handlers that were bound by the plugin
+ *
+ * @private
+ */
+
+
+ var cleanupEvents = function cleanupEvents() {
+ player.off('loadedmetadata', loadedMetadataHandler);
+ player.off('error', errorHandler);
+ player.off('dispose', cleanupEvents);
+ };
+ /**
+ * Cleanup before re-initializing the plugin
+ *
+ * @param {Object} [newOptions] an object with plugin options
+ * @private
+ */
+
+
+ var reinitPlugin = function reinitPlugin(newOptions) {
+ cleanupEvents();
+ initPlugin(player, newOptions);
+ };
+
+ player.on('error', errorHandler);
+ player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup before
+ // initializing the plugin
+
+ player.reloadSourceOnError = reinitPlugin;
+};
+/**
+ * Reload the source when an error is detected as long as there
+ * wasn't an error previously within the last 30 seconds
+ *
+ * @param {Object} [options] an object with plugin options
+ */
+
+
+var reloadSourceOnError = function reloadSourceOnError(options) {
+ initPlugin(this, options);
+};
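+// --- Illustrative sketch (not part of the upstream library): configuring the
+// reloadSourceOnError plugin defined above. It is assumed to be registered on the
+// player under the same name (initPlugin above replaces player.reloadSourceOnError
+// with reinitPlugin). A custom getSource receives a callback and must invoke it with
+// the source object to reload; `fallbackSource` here is a hypothetical {src, type}
+// object supplied by the caller.
+
+function exampleSetupReloadSourceOnError(player, fallbackSource) {
+  player.reloadSourceOnError({
+    // do not attempt another reload within 10 seconds of the previous one
+    errorInterval: 10,
+    getSource: function (reload) {
+      reload(fallbackSource);
+    }
+  });
+}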
+
+var version$4 = "2.14.2";
+var version$3 = "6.0.1";
+var version$2 = "0.21.1";
+var version$1 = "4.7.1";
+var version = "3.1.3";
+var Vhs = {
+ PlaylistLoader: PlaylistLoader,
+ Playlist: Playlist,
+ utils: utils,
+ STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
+ INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
+ lastBandwidthSelector: lastBandwidthSelector,
+ movingAverageBandwidthSelector: movingAverageBandwidthSelector,
+ comparePlaylistBandwidth: comparePlaylistBandwidth,
+ comparePlaylistResolution: comparePlaylistResolution,
+ xhr: xhrFactory()
+}; // Define getter/setters for config properties
+
+Object.keys(Config).forEach(function (prop) {
+ Object.defineProperty(Vhs, prop, {
+ get: function get() {
+ videojs.log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
+ return Config[prop];
+ },
+ set: function set(value) {
+ videojs.log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
+
+ if (typeof value !== 'number' || value < 0) {
+ videojs.log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
+ return;
+ }
+
+ Config[prop] = value;
+ }
+ });
+});
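+// --- Illustrative sketch (not part of the upstream library): the getter/setters defined
+// above proxy Config properties onto the Vhs object, so tuning is done by plain
+// assignment. GOAL_BUFFER_LENGTH is assumed to be one of those properties because
+// goalBufferLength() above reads Config.GOAL_BUFFER_LENGTH; the setter warns and
+// ignores anything that is not a number >= 0.
+
+function exampleTuneGoalBufferLength(seconds) {
+  Vhs.GOAL_BUFFER_LENGTH = seconds; // logs an "UNSAFE" warning, then updates Config
+  return Vhs.GOAL_BUFFER_LENGTH;
+}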
+var LOCAL_STORAGE_KEY = 'videojs-vhs';
+/**
+ * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
+ *
+ * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
+ * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
+ * @function handleVhsMediaChange
+ */
+
+var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
+ var newPlaylist = playlistLoader.media();
+ var selectedIndex = -1;
+
+ for (var i = 0; i < qualityLevels.length; i++) {
+ if (qualityLevels[i].id === newPlaylist.id) {
+ selectedIndex = i;
+ break;
+ }
+ }
+
+ qualityLevels.selectedIndex_ = selectedIndex;
+ qualityLevels.trigger({
+ selectedIndex: selectedIndex,
+ type: 'change'
+ });
+};
+/**
+ * Adds quality levels to list once playlist metadata is available
+ *
+ * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
+ * @param {Object} vhs Vhs object to listen to for media events.
+ * @function handleVhsLoadedMetadata
+ */
+
+
+var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
+ vhs.representations().forEach(function (rep) {
+ qualityLevels.addQualityLevel(rep);
+ });
+ handleVhsMediaChange(qualityLevels, vhs.playlists);
+}; // HLS is a source handler, not a tech. Make sure attempts to use it
+// as one do not cause exceptions.
+
+
+Vhs.canPlaySource = function () {
+ return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
+};
+
+var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
+ if (!keySystemOptions) {
+ return keySystemOptions;
+ }
+
+ var codecs = {};
+
+ if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
+ codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));
+ }
+
+ if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
+ codecs.audio = audioPlaylist.attributes.CODECS;
+ }
+
+ var videoContentType = getMimeForCodec(codecs.video);
+ var audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist
+
+ var keySystemContentTypes = {};
+
+ for (var keySystem in keySystemOptions) {
+ keySystemContentTypes[keySystem] = {};
+
+ if (audioContentType) {
+ keySystemContentTypes[keySystem].audioContentType = audioContentType;
+ }
+
+ if (videoContentType) {
+ keySystemContentTypes[keySystem].videoContentType = videoContentType;
+ } // Default to using the video playlist's PSSH even though they may be different, as
+ // videojs-contrib-eme will only accept one in the options.
+ //
+    // This shouldn't be an issue for most cases as early initialization will handle all
+ // unique PSSH values, and if they aren't, then encrypted events should have the
+ // specific information needed for the unique license.
+
+
+ if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
+ keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
+ } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
+ // so we need to prevent overwriting the URL entirely
+
+
+ if (typeof keySystemOptions[keySystem] === 'string') {
+ keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
+ }
+ }
+
+ return videojs.mergeOptions(keySystemOptions, keySystemContentTypes);
+};
+/**
+ * @typedef {Object} KeySystems
+ *
+ * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
+ * Note: not all options are listed here.
+ *
+ * @property {Uint8Array} [pssh]
+ * Protection System Specific Header
+ */
+
+/**
+ * Goes through all the playlists and collects an array of KeySystems options objects
+ * containing each playlist's keySystems and their pssh values, if available.
+ *
+ * @param {Object[]} playlists
+ * The playlists to look through
+ * @param {string[]} keySystems
+ * The keySystems to collect pssh values for
+ *
+ * @return {KeySystems[]}
+ * An array of KeySystems objects containing available key systems and their
+ * pssh values
+ */
+
+
+var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
+ return playlists.reduce(function (keySystemsArr, playlist) {
+ if (!playlist.contentProtection) {
+ return keySystemsArr;
+ }
+
+ var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
+ var keySystemOptions = playlist.contentProtection[keySystem];
+
+ if (keySystemOptions && keySystemOptions.pssh) {
+ keySystemsObj[keySystem] = {
+ pssh: keySystemOptions.pssh
+ };
+ }
+
+ return keySystemsObj;
+ }, {});
+
+ if (Object.keys(keySystemsOptions).length) {
+ keySystemsArr.push(keySystemsOptions);
+ }
+
+ return keySystemsArr;
+ }, []);
+};
+/**
+ * Returns a promise that waits for the
+ * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
+ *
+ * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
+ * browsers.
+ *
+ * As per the above ticket, this is particularly important for Chrome, where, if
+ * unencrypted content is appended before encrypted content and the key session has not
+ * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
+ * during playback.
+ *
+ * @param {Object} player
+ * The player instance
+ * @param {Object[]} sourceKeySystems
+ * The key systems options from the player source
+ * @param {Object} [audioMedia]
+ * The active audio media playlist (optional)
+ * @param {Object[]} mainPlaylists
+ * The playlists found on the master playlist object
+ *
+ * @return {Object}
+ * Promise that resolves when the key session has been created
+ */
+
+
+var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
+ var player = _ref.player,
+ sourceKeySystems = _ref.sourceKeySystems,
+ audioMedia = _ref.audioMedia,
+ mainPlaylists = _ref.mainPlaylists;
+
+ if (!player.eme.initializeMediaKeys) {
+ return Promise.resolve();
+ } // TODO should all audio PSSH values be initialized for DRM?
+ //
+ // All unique video rendition pssh values are initialized for DRM, but here only
+ // the initial audio playlist license is initialized. In theory, an encrypted
+ // event should be fired if the user switches to an alternative audio playlist
+ // where a license is required, but this case hasn't yet been tested. In addition, there
+ // may be many alternate audio playlists unlikely to be used (e.g., multiple different
+ // languages).
+
+
+ var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
+ var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
+ var initializationFinishedPromises = [];
+ var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
+ // only place where it should not be deduped is for ms-prefixed APIs, but the early
+ // return for IE11 above, and the existence of modern EME APIs in addition to
+ // ms-prefixed APIs on Edge should prevent this from being a concern.
+ // initializeMediaKeys also won't use the webkit-prefixed APIs.
+
+ keySystemsOptionsArr.forEach(function (keySystemsOptions) {
+ keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
+ player.tech_.one('keysessioncreated', resolve);
+ }));
+ initializationFinishedPromises.push(new Promise(function (resolve, reject) {
+ player.eme.initializeMediaKeys({
+ keySystems: keySystemsOptions
+ }, function (err) {
+ if (err) {
+ reject(err);
+ return;
+ }
+
+ resolve();
+ });
+ }));
+ }); // The reasons Promise.race is chosen over Promise.any:
+ //
+ // * Promise.any is only available in Safari 14+.
+ // * None of these promises are expected to reject. If they do reject, it might be
+ // better here for the race to surface the rejection, rather than mask it by using
+ // Promise.any.
+
+ return Promise.race([// If a session was previously created, these will all finish resolving without
+ // creating a new session, otherwise it will take until the end of all license
+ // requests, which is why the key session check is used (to make setup much faster).
+ Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
+ Promise.race(keySessionCreatedPromises)]);
+};
+/**
+ * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
+ * there are keySystems on the source, sets up source options to prepare the source for
+ * eme.
+ *
+ * @param {Object} player
+ * The player instance
+ * @param {Object[]} sourceKeySystems
+ * The key systems options from the player source
+ * @param {Object} media
+ * The active media playlist
+ * @param {Object} [audioMedia]
+ * The active audio media playlist (optional)
+ *
+ * @return {boolean}
+ * Whether or not options were configured and EME is available
+ */
+
+
+var setupEmeOptions = function setupEmeOptions(_ref2) {
+ var player = _ref2.player,
+ sourceKeySystems = _ref2.sourceKeySystems,
+ media = _ref2.media,
+ audioMedia = _ref2.audioMedia;
+ var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
+
+ if (!sourceOptions) {
+ return false;
+ }
+
+ player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
+ // do nothing.
+
+ if (sourceOptions && !player.eme) {
+ videojs.log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
+ return false;
+ }
+
+ return true;
+};
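+// --- Illustrative sketch (not part of the upstream library): a DRM source whose
+// keySystems option is what emeKeySystems()/setupEmeOptions() above expand. The
+// string form (key-system id -> license URL) is the shape handled explicitly above;
+// the URLs and key-system id are placeholders, and videojs-contrib-eme must be present
+// on the player or setupEmeOptions() only logs a warning.
+
+function exampleLoadDrmSource(player) {
+  player.src({
+    src: 'https://example.com/master.m3u8',
+    type: 'application/x-mpegURL',
+    keySystems: {
+      'com.widevine.alpha': 'https://example.com/license'
+    }
+  });
+}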
+
+var getVhsLocalStorage = function getVhsLocalStorage() {
+ if (!window$1.localStorage) {
+ return null;
+ }
+
+ var storedObject = window$1.localStorage.getItem(LOCAL_STORAGE_KEY);
+
+ if (!storedObject) {
+ return null;
+ }
+
+ try {
+ return JSON.parse(storedObject);
+ } catch (e) {
+ // someone may have tampered with the value
+ return null;
+ }
+};
+
+var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
+ if (!window$1.localStorage) {
+ return false;
+ }
+
+ var objectToStore = getVhsLocalStorage();
+ objectToStore = objectToStore ? videojs.mergeOptions(objectToStore, options) : options;
+
+ try {
+ window$1.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
+ } catch (e) {
+ // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
+ // storage is set to 0).
+ // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
+ // No need to perform any operation.
+ return false;
+ }
+
+ return objectToStore;
+};
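+// --- Illustrative sketch (not part of the upstream library): opting in to the
+// 'videojs-vhs' localStorage persistence implemented by getVhsLocalStorage()/
+// updateVhsLocalStorage() above. useBandwidthFromLocalStorage is read from the source
+// object in setOptions_() further down, and {bandwidth, throughput} is written back on
+// every 'bandwidthupdate'. The URL is a placeholder.
+
+function exampleEnableBandwidthPersistence(player) {
+  player.src({
+    src: 'https://example.com/master.m3u8',
+    type: 'application/x-mpegURL',
+    useBandwidthFromLocalStorage: true
+  });
+}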
+/**
+ * Parses VHS-supported media types from data URIs. See
+ * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
+ * for information on data URIs.
+ *
+ * @param {string} dataUri
+ * The data URI
+ *
+ * @return {string|Object}
+ * The parsed object/string, or the original string if no supported media type
+ * was found
+ */
+
+
+var expandDataUri = function expandDataUri(dataUri) {
+ if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
+ return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
+ } // no known case for this data URI, return the string as-is
+
+
+ return dataUri;
+};
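+// --- Illustrative sketch (not part of the upstream library): expandDataUri() above only
+// special-cases the application/vnd.videojs.vhs+json media type, JSON.parsing everything
+// after the first comma; any other string is returned untouched. `manifestObject` is
+// assumed to be a VHS JSON manifest (its shape is not shown in this excerpt).
+
+function exampleExpandDataUri(manifestObject) {
+  var uri = 'data:application/vnd.videojs.vhs+json,' + JSON.stringify(manifestObject);
+  return expandDataUri(uri); // parsed back into an object; a plain URL would come back as-is
+}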
+/**
+ * Whether the browser has built-in HLS support.
+ */
+
+
+Vhs.supportsNativeHls = function () {
+ if (!document || !document.createElement) {
+ return false;
+ }
+
+ var video = document.createElement('video'); // native HLS is definitely not supported if HTML5 video isn't
+
+ if (!videojs.getTech('Html5').isSupported()) {
+ return false;
+ } // HLS manifests can go by many mime-types
+
+
+  var canPlay = [// Apple sanctioned
+ 'application/vnd.apple.mpegurl', // Apple sanctioned for backwards compatibility
+ 'audio/mpegurl', // Very common
+ 'audio/x-mpegurl', // Very common
+ 'application/x-mpegurl', // Included for completeness
+ 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
+ return canPlay.some(function (canItPlay) {
+ return /maybe|probably/i.test(video.canPlayType(canItPlay));
+ });
+}();
+
+Vhs.supportsNativeDash = function () {
+ if (!document || !document.createElement || !videojs.getTech('Html5').isSupported()) {
+ return false;
+ }
+
+ return /maybe|probably/i.test(document.createElement('video').canPlayType('application/dash+xml'));
+}();
+
+Vhs.supportsTypeNatively = function (type) {
+ if (type === 'hls') {
+ return Vhs.supportsNativeHls;
+ }
+
+ if (type === 'dash') {
+ return Vhs.supportsNativeDash;
+ }
+
+ return false;
+};
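+// --- Illustrative sketch (not part of the upstream library): forcing MSE playback even
+// where Vhs.supportsTypeNatively() above would report native support. The overrideNative
+// flag is the option checked in the VhsHandler constructor below; the html5.vhs options
+// path is assumed from videojs-http-streaming's documented configuration and is not
+// shown in this excerpt.
+
+function exampleCreatePlayerWithOverrideNative(elementId) {
+  return videojs(elementId, {
+    html5: {
+      vhs: {
+        overrideNative: !videojs.browser.IS_SAFARI
+      }
+    }
+  });
+}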
+/**
+ * HLS is a source handler, not a tech. Make sure attempts to use it
+ * as one do not cause exceptions.
+ */
+
+
+Vhs.isSupported = function () {
+ return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
+};
+
+var Component = videojs.getComponent('Component');
+/**
+ * The Vhs Handler object, where we orchestrate all of the parts
+ * of HLS to interact with video.js
+ *
+ * @class VhsHandler
+ * @extends videojs.Component
+ * @param {Object} source the source object
+ * @param {Tech} tech the parent tech object
+ * @param {Object} options optional and required options
+ */
+
+var VhsHandler = /*#__PURE__*/function (_Component) {
+ _inheritsLoose(VhsHandler, _Component);
+
+ function VhsHandler(source, tech, options) {
+ var _this;
+
+ _this = _Component.call(this, tech, videojs.mergeOptions(options.hls, options.vhs)) || this;
+
+ if (options.hls && Object.keys(options.hls).length) {
+ videojs.log.warn('Using hls options is deprecated. Please rename `hls` to `vhs` in your options object.');
+ } // if a tech level `initialBandwidth` option was passed
+ // use that over the VHS level `bandwidth` option
+
+
+ if (typeof options.initialBandwidth === 'number') {
+ _this.options_.bandwidth = options.initialBandwidth;
+ }
+
+    _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated, but set up a reference to HLS for
+ // backwards-compatibility
+
+ if (tech.options_ && tech.options_.playerId) {
+ var _player = videojs(tech.options_.playerId);
+
+ if (!_player.hasOwnProperty('hls')) {
+ Object.defineProperty(_player, 'hls', {
+ get: function get() {
+ videojs.log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
+ tech.trigger({
+ type: 'usage',
+ name: 'hls-player-access'
+ });
+ return _assertThisInitialized(_this);
+ },
+ configurable: true
+ });
+ }
+
+ if (!_player.hasOwnProperty('vhs')) {
+ Object.defineProperty(_player, 'vhs', {
+ get: function get() {
+ videojs.log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
+ tech.trigger({
+ type: 'usage',
+ name: 'vhs-player-access'
+ });
+ return _assertThisInitialized(_this);
+ },
+ configurable: true
+ });
+ }
+
+ if (!_player.hasOwnProperty('dash')) {
+ Object.defineProperty(_player, 'dash', {
+ get: function get() {
+ videojs.log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
+ return _assertThisInitialized(_this);
+ },
+ configurable: true
+ });
+ }
+
+ _this.player_ = _player;
+ }
+
+ _this.tech_ = tech;
+ _this.source_ = source;
+ _this.stats = {};
+ _this.ignoreNextSeekingEvent_ = false;
+
+ _this.setOptions_();
+
+ if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
+ tech.overrideNativeAudioTracks(true);
+ tech.overrideNativeVideoTracks(true);
+ } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
+ // overriding native HLS only works if audio tracks have been emulated
+ // error early if we're misconfigured
+ throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
+ } // listen for fullscreenchange events for this player so that we
+ // can adjust our quality selection quickly
+
+
+ _this.on(document, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
+ var fullscreenElement = document.fullscreenElement || document.webkitFullscreenElement || document.mozFullScreenElement || document.msFullscreenElement;
+
+ if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
+ _this.masterPlaylistController_.fastQualityChange_();
+ } else {
+ // When leaving fullscreen, since the in page pixel dimensions should be smaller
+ // than full screen, see if there should be a rendition switch down to preserve
+ // bandwidth.
+ _this.masterPlaylistController_.checkABR_();
+ }
+ });
+
+ _this.on(_this.tech_, 'seeking', function () {
+ if (this.ignoreNextSeekingEvent_) {
+ this.ignoreNextSeekingEvent_ = false;
+ return;
+ }
+
+ this.setCurrentTime(this.tech_.currentTime());
+ });
+
+ _this.on(_this.tech_, 'error', function () {
+ // verify that the error was real and we are loaded
+ // enough to have mpc loaded.
+ if (this.tech_.error() && this.masterPlaylistController_) {
+ this.masterPlaylistController_.pauseLoading();
+ }
+ });
+
+ _this.on(_this.tech_, 'play', _this.play);
+
+ return _this;
+ }
+
+ var _proto = VhsHandler.prototype;
+
+ _proto.setOptions_ = function setOptions_() {
+ var _this2 = this; // defaults
+
+
+ this.options_.withCredentials = this.options_.withCredentials || false;
+ this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
+ this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
+ this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
+ this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
+ this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
+ this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;
+ this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;
+ this.options_.customTagParsers = this.options_.customTagParsers || [];
+ this.options_.customTagMappers = this.options_.customTagMappers || [];
+ this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
+
+ if (typeof this.options_.blacklistDuration !== 'number') {
+ this.options_.blacklistDuration = 5 * 60;
+ }
+
+ if (typeof this.options_.bandwidth !== 'number') {
+ if (this.options_.useBandwidthFromLocalStorage) {
+ var storedObject = getVhsLocalStorage();
+
+ if (storedObject && storedObject.bandwidth) {
+ this.options_.bandwidth = storedObject.bandwidth;
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-bandwidth-from-local-storage'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-bandwidth-from-local-storage'
+ });
+ }
+
+ if (storedObject && storedObject.throughput) {
+ this.options_.throughput = storedObject.throughput;
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-throughput-from-local-storage'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-throughput-from-local-storage'
+ });
+ }
+ }
+ } // if bandwidth was not set by options or pulled from local storage, start playlist
+ // selection at a reasonable bandwidth
+
+
+ if (typeof this.options_.bandwidth !== 'number') {
+ this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
+ } // If the bandwidth number is unchanged from the initial setting
+ // then this takes precedence over the enableLowInitialPlaylist option
+
+
+ this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
+
+ ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS', 'useNetworkInformationApi', 'useDtsForTimestampOffset', 'experimentalExactManifestTimings', 'experimentalLeastPixelDiffSelector'].forEach(function (option) {
+ if (typeof _this2.source_[option] !== 'undefined') {
+ _this2.options_[option] = _this2.source_[option];
+ }
+ });
+ this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
+ this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
+ }
+ /**
+ * called when player.src gets called, handle a new source
+ *
+ * @param {Object} src the source object to handle
+ */
+ ;
+
+ _proto.src = function src(_src, type) {
+ var _this3 = this; // do nothing if the src is falsey
+
+
+ if (!_src) {
+ return;
+ }
+
+ this.setOptions_(); // add master playlist controller options
+
+ this.options_.src = expandDataUri(this.source_.src);
+ this.options_.tech = this.tech_;
+ this.options_.externVhs = Vhs;
+ this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
+
+ this.options_.seekTo = function (time) {
+ _this3.tech_.setCurrentTime(time);
+ };
+
+ if (this.options_.smoothQualityChange) {
+ videojs.log.warn('smoothQualityChange is deprecated and will be removed in the next major version');
+ }
+
+ this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
+ var playbackWatcherOptions = videojs.mergeOptions({
+ liveRangeSafeTimeDelta: SAFE_TIME_DELTA
+ }, this.options_, {
+ seekable: function seekable() {
+ return _this3.seekable();
+ },
+ media: function media() {
+ return _this3.masterPlaylistController_.media();
+ },
+ masterPlaylistController: this.masterPlaylistController_
+ });
+ this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
+ this.masterPlaylistController_.on('error', function () {
+ var player = videojs.players[_this3.tech_.options_.playerId];
+ var error = _this3.masterPlaylistController_.error;
+
+ if (typeof error === 'object' && !error.code) {
+ error.code = 3;
+ } else if (typeof error === 'string') {
+ error = {
+ message: error,
+ code: 3
+ };
+ }
+
+ player.error(error);
+ });
+ var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
+ // compatibility with < v2
+
+ this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
+ this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
+
+ this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
+ this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
+ // controller. Using a custom property for backwards compatibility
+ // with < v2
+
+ Object.defineProperties(this, {
+ selectPlaylist: {
+ get: function get() {
+ return this.masterPlaylistController_.selectPlaylist;
+ },
+ set: function set(selectPlaylist) {
+ this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
+ }
+ },
+ throughput: {
+ get: function get() {
+ return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
+ },
+ set: function set(throughput) {
+ this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
+ // for the cumulative average
+
+ this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
+ }
+ },
+ bandwidth: {
+ get: function get() {
+ var playerBandwidthEst = this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
+ var networkInformation = window$1.navigator.connection || window$1.navigator.mozConnection || window$1.navigator.webkitConnection;
+ var tenMbpsAsBitsPerSecond = 10e6;
+
+ if (this.options_.useNetworkInformationApi && networkInformation) {
+ // downlink returns Mbps
+ // https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlink
+ var networkInfoBandwidthEstBitsPerSec = networkInformation.downlink * 1000 * 1000; // downlink maxes out at 10 Mbps. In the event that both networkInformationApi and the player
+ // estimate a bandwidth greater than 10 Mbps, use the larger of the two estimates to ensure that
+ // high quality streams are not filtered out.
+
+ if (networkInfoBandwidthEstBitsPerSec >= tenMbpsAsBitsPerSecond && playerBandwidthEst >= tenMbpsAsBitsPerSecond) {
+ playerBandwidthEst = Math.max(playerBandwidthEst, networkInfoBandwidthEstBitsPerSec);
+ } else {
+ playerBandwidthEst = networkInfoBandwidthEstBitsPerSec;
+ }
+ }
+
+ return playerBandwidthEst;
+ },
+ set: function set(bandwidth) {
+ this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
+          // `count` is set to zero so that the current value of `rate` isn't included
+ // in the cumulative average
+
+ this.masterPlaylistController_.mainSegmentLoader_.throughput = {
+ rate: 0,
+ count: 0
+ };
+ }
+ },
+
+ /**
+       * `systemBandwidth` is a combination of two serial processes' bit-rates. The first
+ * is the network bitrate provided by `bandwidth` and the second is the bitrate of
+ * the entire process after that - decryption, transmuxing, and appending - provided
+ * by `throughput`.
+ *
+       * Since the two processes are serial, the overall system bandwidth is given by:
+ * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
+ */
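+      // Worked example for the formula above (values are hypothetical): with
+      // bandwidth = 8e6 bits/s and throughput = 24e6 bits/s,
+      // systemBandwidth = 1 / (1/8e6 + 1/24e6) = 6e6 bits/s.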
+ systemBandwidth: {
+ get: function get() {
+ var invBandwidth = 1 / (this.bandwidth || 1);
+ var invThroughput;
+
+ if (this.throughput > 0) {
+ invThroughput = 1 / this.throughput;
+ } else {
+ invThroughput = 0;
+ }
+
+ var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
+ return systemBitrate;
+ },
+ set: function set() {
+ videojs.log.error('The "systemBandwidth" property is read-only');
+ }
+ }
+ });
+
+ if (this.options_.bandwidth) {
+ this.bandwidth = this.options_.bandwidth;
+ }
+
+ if (this.options_.throughput) {
+ this.throughput = this.options_.throughput;
+ }
+
+ Object.defineProperties(this.stats, {
+ bandwidth: {
+ get: function get() {
+ return _this3.bandwidth || 0;
+ },
+ enumerable: true
+ },
+ mediaRequests: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaRequests_() || 0;
+ },
+ enumerable: true
+ },
+ mediaRequestsAborted: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
+ },
+ enumerable: true
+ },
+ mediaRequestsTimedout: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
+ },
+ enumerable: true
+ },
+ mediaRequestsErrored: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
+ },
+ enumerable: true
+ },
+ mediaTransferDuration: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
+ },
+ enumerable: true
+ },
+ mediaBytesTransferred: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
+ },
+ enumerable: true
+ },
+ mediaSecondsLoaded: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
+ },
+ enumerable: true
+ },
+ mediaAppends: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaAppends_() || 0;
+ },
+ enumerable: true
+ },
+ mainAppendsToLoadedData: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mainAppendsToLoadedData_() || 0;
+ },
+ enumerable: true
+ },
+ audioAppendsToLoadedData: {
+ get: function get() {
+ return _this3.masterPlaylistController_.audioAppendsToLoadedData_() || 0;
+ },
+ enumerable: true
+ },
+ appendsToLoadedData: {
+ get: function get() {
+ return _this3.masterPlaylistController_.appendsToLoadedData_() || 0;
+ },
+ enumerable: true
+ },
+ timeToLoadedData: {
+ get: function get() {
+ return _this3.masterPlaylistController_.timeToLoadedData_() || 0;
+ },
+ enumerable: true
+ },
+ buffered: {
+ get: function get() {
+ return timeRangesToArray(_this3.tech_.buffered());
+ },
+ enumerable: true
+ },
+ currentTime: {
+ get: function get() {
+ return _this3.tech_.currentTime();
+ },
+ enumerable: true
+ },
+ currentSource: {
+ get: function get() {
+ return _this3.tech_.currentSource_;
+ },
+ enumerable: true
+ },
+ currentTech: {
+ get: function get() {
+ return _this3.tech_.name_;
+ },
+ enumerable: true
+ },
+ duration: {
+ get: function get() {
+ return _this3.tech_.duration();
+ },
+ enumerable: true
+ },
+ master: {
+ get: function get() {
+ return _this3.playlists.master;
+ },
+ enumerable: true
+ },
+ playerDimensions: {
+ get: function get() {
+ return _this3.tech_.currentDimensions();
+ },
+ enumerable: true
+ },
+ seekable: {
+ get: function get() {
+ return timeRangesToArray(_this3.tech_.seekable());
+ },
+ enumerable: true
+ },
+ timestamp: {
+ get: function get() {
+ return Date.now();
+ },
+ enumerable: true
+ },
+ videoPlaybackQuality: {
+ get: function get() {
+ return _this3.tech_.getVideoPlaybackQuality();
+ },
+ enumerable: true
+ }
+ });
+ this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
+ this.tech_.on('bandwidthupdate', function () {
+ if (_this3.options_.useBandwidthFromLocalStorage) {
+ updateVhsLocalStorage({
+ bandwidth: _this3.bandwidth,
+ throughput: Math.round(_this3.throughput)
+ });
+ }
+ });
+ this.masterPlaylistController_.on('selectedinitialmedia', function () {
+ // Add the manual rendition mix-in to VhsHandler
+ renditionSelectionMixin(_this3);
+ });
+ this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
+ _this3.setupEme_();
+ }); // the bandwidth of the primary segment loader is our best
+ // estimate of overall bandwidth
+
+ this.on(this.masterPlaylistController_, 'progress', function () {
+ this.tech_.trigger('progress');
+ }); // In the live case, we need to ignore the very first `seeking` event since
+ // that will be the result of the seek-to-live behavior
+
+ this.on(this.masterPlaylistController_, 'firstplay', function () {
+ this.ignoreNextSeekingEvent_ = true;
+ });
+ this.setupQualityLevels_(); // do nothing if the tech has been disposed already
+ // this can occur if someone sets the src in player.ready(), for instance
+
+ if (!this.tech_.el()) {
+ return;
+ }
+
+ this.mediaSourceUrl_ = window$1.URL.createObjectURL(this.masterPlaylistController_.mediaSource);
+ this.tech_.src(this.mediaSourceUrl_);
+ };
+
+ _proto.createKeySessions_ = function createKeySessions_() {
+ var _this4 = this;
+
+ var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
+ this.logger_('waiting for EME key session creation');
+ waitForKeySessionCreation({
+ player: this.player_,
+ sourceKeySystems: this.source_.keySystems,
+ audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
+ mainPlaylists: this.playlists.master.playlists
+ }).then(function () {
+ _this4.logger_('created EME key session');
+
+ _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
+ })["catch"](function (err) {
+ _this4.logger_('error while creating EME key session', err);
+
+ _this4.player_.error({
+ message: 'Failed to initialize media keys for EME',
+ code: 3
+ });
+ });
+ };
+
+ _proto.handleWaitingForKey_ = function handleWaitingForKey_() {
+ // If waitingforkey is fired, it's possible that the data that's necessary to retrieve
+ // the key is in the manifest. While this should've happened on initial source load, it
+ // may happen again in live streams where the keys change, and the manifest info
+ // reflects the update.
+ //
+ // Because videojs-contrib-eme compares the PSSH data we send to that of PSSH data it's
+ // already requested keys for, we don't have to worry about this generating extraneous
+ // requests.
+ this.logger_('waitingforkey fired, attempting to create any new key sessions');
+ this.createKeySessions_();
+ }
+ /**
+ * If necessary and EME is available, sets up EME options and waits for key session
+ * creation.
+ *
+ * This function also updates the source updater so taht it can be used, as for some
+ * browsers, EME must be configured before content is appended (if appending unencrypted
+ * content before encrypted content).
+ */
+ ;
+
+ _proto.setupEme_ = function setupEme_() {
+ var _this5 = this;
+
+ var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
+ var didSetupEmeOptions = setupEmeOptions({
+ player: this.player_,
+ sourceKeySystems: this.source_.keySystems,
+ media: this.playlists.media(),
+ audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
+ });
+ this.player_.tech_.on('keystatuschange', function (e) {
+ if (e.status === 'output-restricted') {
+ _this5.masterPlaylistController_.blacklistCurrentPlaylist({
+ playlist: _this5.masterPlaylistController_.media(),
+ message: "DRM keystatus changed to " + e.status + ". Playlist will fail to play. Check for HDCP content.",
+ blacklistDuration: Infinity
+ });
+ }
+ });
+ this.handleWaitingForKey_ = this.handleWaitingForKey_.bind(this);
+ this.player_.tech_.on('waitingforkey', this.handleWaitingForKey_); // In IE11 this is too early to initialize media keys, and IE11 does not support
+ // promises.
+
+ if (videojs.browser.IE_VERSION === 11 || !didSetupEmeOptions) {
+ // If EME options were not set up, we've done all we could to initialize EME.
+ this.masterPlaylistController_.sourceUpdater_.initializedEme();
+ return;
+ }
+
+ this.createKeySessions_();
+ }
+ /**
+ * Initializes the quality levels and sets listeners to update them.
+ *
+ * @method setupQualityLevels_
+ * @private
+ */
+ ;
+
+ _proto.setupQualityLevels_ = function setupQualityLevels_() {
+ var _this6 = this;
+
+ var player = videojs.players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
+ // or qualityLevels_ listeners have already been setup, do nothing.
+
+ if (!player || !player.qualityLevels || this.qualityLevels_) {
+ return;
+ }
+
+ this.qualityLevels_ = player.qualityLevels();
+ this.masterPlaylistController_.on('selectedinitialmedia', function () {
+ handleVhsLoadedMetadata(_this6.qualityLevels_, _this6);
+ });
+ this.playlists.on('mediachange', function () {
+ handleVhsMediaChange(_this6.qualityLevels_, _this6.playlists);
+ });
+ }
+ /**
+ * return the version
+ */
+ ;
+
+ VhsHandler.version = function version$5() {
+ return {
+ '@videojs/http-streaming': version$4,
+ 'mux.js': version$3,
+ 'mpd-parser': version$2,
+ 'm3u8-parser': version$1,
+ 'aes-decrypter': version
+ };
+ }
+ /**
+ * return the version
+ */
+ ;
+
+ _proto.version = function version() {
+ return this.constructor.version();
+ };
+
+ _proto.canChangeType = function canChangeType() {
+ return SourceUpdater.canChangeType();
+ }
+ /**
+ * Begin playing the video.
+ */
+ ;
+
+ _proto.play = function play() {
+ this.masterPlaylistController_.play();
+ }
+ /**
+ * a wrapper around the function in MasterPlaylistController
+ */
+ ;
+
+ _proto.setCurrentTime = function setCurrentTime(currentTime) {
+ this.masterPlaylistController_.setCurrentTime(currentTime);
+ }
+ /**
+ * a wrapper around the function in MasterPlaylistController
+ */
+ ;
+
+ _proto.duration = function duration() {
+ return this.masterPlaylistController_.duration();
+ }
+ /**
+ * a wrapper around the function in MasterPlaylistController
+ */
+ ;
+
+ _proto.seekable = function seekable() {
+ return this.masterPlaylistController_.seekable();
+ }
+ /**
+ * Abort all outstanding work and cleanup.
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ if (this.playbackWatcher_) {
+ this.playbackWatcher_.dispose();
+ }
+
+ if (this.masterPlaylistController_) {
+ this.masterPlaylistController_.dispose();
+ }
+
+ if (this.qualityLevels_) {
+ this.qualityLevels_.dispose();
+ }
+
+ if (this.player_) {
+ delete this.player_.vhs;
+ delete this.player_.dash;
+ delete this.player_.hls;
+ }
+
+ if (this.tech_ && this.tech_.vhs) {
+ delete this.tech_.vhs;
+ } // don't check this.tech_.hls as it will log a deprecated warning
+
+
+ if (this.tech_) {
+ delete this.tech_.hls;
+ }
+
+ if (this.mediaSourceUrl_ && window$1.URL.revokeObjectURL) {
+ window$1.URL.revokeObjectURL(this.mediaSourceUrl_);
+ this.mediaSourceUrl_ = null;
+ }
+
+ if (this.tech_) {
+ this.tech_.off('waitingforkey', this.handleWaitingForKey_);
+ }
+
+ _Component.prototype.dispose.call(this);
+ };
+
+ _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
+ return getProgramTime({
+ playlist: this.masterPlaylistController_.media(),
+ time: time,
+ callback: callback
+ });
+ } // the player must be playing before calling this
+ ;
+
+ _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
+ if (pauseAfterSeek === void 0) {
+ pauseAfterSeek = true;
+ }
+
+ if (retryCount === void 0) {
+ retryCount = 2;
+ }
+
+ return seekToProgramTime({
+ programTime: programTime,
+ playlist: this.masterPlaylistController_.media(),
+ retryCount: retryCount,
+ pauseAfterSeek: pauseAfterSeek,
+ seekTo: this.options_.seekTo,
+ tech: this.options_.tech,
+ callback: callback
+ });
+ };
+
+ return VhsHandler;
+}(Component);
+/**
+ * The Source Handler object, which informs video.js what additional
+ * MIME types are supported and sets up playback. It is registered
+ * automatically to the appropriate tech based on the capabilities of
+ * the browser it is running in. It is not necessary to use or modify
+ * this object in normal usage.
+ */
+
+
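+// Usage sketch (an assumption based on the documented VHS options, not part of this bundle):
+// forcing VHS instead of native HLS playback is normally done through player options, e.g.
+//   videojs('player-id', {
+//     html5: { vhs: { overrideNative: true }, nativeAudioTracks: false, nativeVideoTracks: false }
+//   });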
+var VhsSourceHandler = {
+ name: 'videojs-http-streaming',
+ VERSION: version$4,
+ canHandleSource: function canHandleSource(srcObj, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ var localOptions = videojs.mergeOptions(videojs.options, options);
+ return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
+ },
+ handleSource: function handleSource(source, tech, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ var localOptions = videojs.mergeOptions(videojs.options, options);
+ tech.vhs = new VhsHandler(source, tech, localOptions);
+
+ if (!videojs.hasOwnProperty('hls')) {
+ Object.defineProperty(tech, 'hls', {
+ get: function get() {
+ videojs.log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
+ return tech.vhs;
+ },
+ configurable: true
+ });
+ }
+
+ tech.vhs.xhr = xhrFactory();
+ tech.vhs.src(source.src, source.type);
+ return tech.vhs;
+ },
+ canPlayType: function canPlayType(type, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ var _videojs$mergeOptions = videojs.mergeOptions(videojs.options, options),
+ _videojs$mergeOptions2 = _videojs$mergeOptions.vhs;
+
+ _videojs$mergeOptions2 = _videojs$mergeOptions2 === void 0 ? {} : _videojs$mergeOptions2;
+ var _videojs$mergeOptions3 = _videojs$mergeOptions2.overrideNative,
+ overrideNative = _videojs$mergeOptions3 === void 0 ? !videojs.browser.IS_ANY_SAFARI : _videojs$mergeOptions3,
+ _videojs$mergeOptions4 = _videojs$mergeOptions.hls;
+ _videojs$mergeOptions4 = _videojs$mergeOptions4 === void 0 ? {} : _videojs$mergeOptions4;
+ var _videojs$mergeOptions5 = _videojs$mergeOptions4.overrideNative,
+ legacyOverrideNative = _videojs$mergeOptions5 === void 0 ? false : _videojs$mergeOptions5;
+ var supportedType = simpleTypeFromSourceType(type);
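+ // e.g. 'application/x-mpegURL' maps to 'hls' and 'application/dash+xml' to 'dash'
+ // (mapping assumed from vhs-utils; it is not shown in this hunk).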
+ var canUseMsePlayback = supportedType && (!Vhs.supportsTypeNatively(supportedType) || legacyOverrideNative || overrideNative);
+ return canUseMsePlayback ? 'maybe' : '';
+ }
+};
+/**
+ * Check to see if the native MediaSource object exists and supports
+ * an MP4 container with both H.264 video and AAC-LC audio.
+ *
+ * @return {boolean} if native media sources are supported
+ */
+
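+// In the codec string below, 'avc1.4d400d' denotes H.264 Main Profile level 1.3 and
+// 'mp4a.40.2' denotes AAC-LC, so this checks a common H.264 + AAC MP4 combination.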
+var supportsNativeMediaSources = function supportsNativeMediaSources() {
+ return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
+}; // register source handlers with the appropriate techs
+
+
+if (supportsNativeMediaSources()) {
+ videojs.getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
+}
+
+videojs.VhsHandler = VhsHandler;
+Object.defineProperty(videojs, 'HlsHandler', {
+ get: function get() {
+ videojs.log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
+ return VhsHandler;
+ },
+ configurable: true
+});
+videojs.VhsSourceHandler = VhsSourceHandler;
+Object.defineProperty(videojs, 'HlsSourceHandler', {
+ get: function get() {
+ videojs.log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
+ return VhsSourceHandler;
+ },
+ configurable: true
+});
+videojs.Vhs = Vhs;
+Object.defineProperty(videojs, 'Hls', {
+ get: function get() {
+ videojs.log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
+ return Vhs;
+ },
+ configurable: true
+});
+
+if (!videojs.use) {
+ videojs.registerComponent('Hls', Vhs);
+ videojs.registerComponent('Vhs', Vhs);
+}
+
+videojs.options.vhs = videojs.options.vhs || {};
+videojs.options.hls = videojs.options.hls || {};
+
+if (!videojs.getPlugin || !videojs.getPlugin('reloadSourceOnError')) {
+ var registerPlugin = videojs.registerPlugin || videojs.plugin;
+ registerPlugin('reloadSourceOnError', reloadSourceOnError);
+}
+
+export default videojs;
diff --git a/frontend/src/static/lib/video-js/7.7.5/video.js b/frontend/src/static/lib/video-js/7.20.2/video.js
old mode 100755
new mode 100644
similarity index 54%
rename from frontend/src/static/lib/video-js/7.7.5/video.js
rename to frontend/src/static/lib/video-js/7.20.2/video.js
index 3cb599c..1ccf43a
--- a/frontend/src/static/lib/video-js/7.7.5/video.js
+++ b/frontend/src/static/lib/video-js/7.20.2/video.js
@@ -1,31 +1,159 @@
/**
* @license
- * Video.js 7.7.5
+ * Video.js 7.20.2
* Copyright Brightcove, Inc.
* Available under Apache License Version 2.0
- *
+ *
*
* Includes vtt.js
* Available under Apache License Version 2.0
- *
+ *
*/
(function (global, factory) {
- typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('global/window'), require('global/document')) :
- typeof define === 'function' && define.amd ? define(['global/window', 'global/document'], factory) :
- (global = global || self, global.videojs = factory(global.window, global.document));
-}(this, function (window$1, document) { 'use strict';
+ typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
+ typeof define === 'function' && define.amd ? define(factory) :
+ (global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.videojs = factory());
+}(this, (function () { 'use strict';
- window$1 = window$1 && window$1.hasOwnProperty('default') ? window$1['default'] : window$1;
- document = document && document.hasOwnProperty('default') ? document['default'] : document;
+ var version$5 = "7.20.2";
- var version = "7.7.5";
+ /**
+ * An Object that contains lifecycle hooks as keys which point to an array
+ * of functions that are run when a lifecycle is triggered
+ *
+ * @private
+ */
+ var hooks_ = {};
+ /**
+ * Get a list of hooks for a specific lifecycle
+ *
+ * @param {string} type
+ * the lifecycle to get hooks from
+ *
+ * @param {Function|Function[]} [fn]
+ * Optionally add a hook (or hooks) to the lifecycle that you are getting.
+ *
+ * @return {Array}
+ * an array of hooks, or an empty array if there are none.
+ */
+
+ var hooks = function hooks(type, fn) {
+ hooks_[type] = hooks_[type] || [];
+
+ if (fn) {
+ hooks_[type] = hooks_[type].concat(fn);
+ }
+
+ return hooks_[type];
+ };
+ /**
+ * Add a function hook to a specific videojs lifecycle.
+ *
+ * @param {string} type
+ * the lifecycle to hook the function to.
+ *
+ * @param {Function|Function[]}
+ * The function or array of functions to attach.
+ */
+
+
+ var hook = function hook(type, fn) {
+ hooks(type, fn);
+ };
+ /**
+ * Remove a hook from a specific videojs lifecycle.
+ *
+ * @param {string} type
+ * the lifecycle that the function was hooked to
+ *
+ * @param {Function} fn
+ * The hooked function to remove
+ *
+ * @return {boolean}
+ * `true` if the hook was removed, `false` if it was not found
+ */
+
+
+ var removeHook = function removeHook(type, fn) {
+ var index = hooks(type).indexOf(fn);
+
+ if (index <= -1) {
+ return false;
+ }
+
+ hooks_[type] = hooks_[type].slice();
+ hooks_[type].splice(index, 1);
+ return true;
+ };
+ /**
+ * Add a function hook that will only run once to a specific videojs lifecycle.
+ *
+ * @param {string} type
+ * the lifecycle to hook the function to.
+ *
+ * @param {Function|Function[]}
+ * The function or array of functions to attach.
+ */
+
+
+ var hookOnce = function hookOnce(type, fn) {
+ hooks(type, [].concat(fn).map(function (original) {
+ var wrapper = function wrapper() {
+ removeHook(type, wrapper);
+ return original.apply(void 0, arguments);
+ };
+
+ return wrapper;
+ }));
+ };
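+ // Usage sketch (assumed; these helpers are re-exposed later as videojs.hook,
+ // videojs.hookOnce and videojs.removeHook):
+ //   hook('beforesetup', function (videoEl, options) { return options; });
+ //   hookOnce('setup', function (player) { /* runs once, for the next player created */ });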
+
+ /**
+ * @file fullscreen-api.js
+ * @module fullscreen-api
+ * @private
+ */
+
+ /**
+ * Store the browser-specific methods for the fullscreen API.
+ *
+ * @type {Object}
+ * @see [Specification]{@link https://fullscreen.spec.whatwg.org}
+ * @see [Map Approach From Screenfull.js]{@link https://github.com/sindresorhus/screenfull.js}
+ */
+ var FullscreenApi = {
+ prefixed: true
+ }; // browser API methods
+
+ var apiMap = [['requestFullscreen', 'exitFullscreen', 'fullscreenElement', 'fullscreenEnabled', 'fullscreenchange', 'fullscreenerror', 'fullscreen'], // WebKit
+ ['webkitRequestFullscreen', 'webkitExitFullscreen', 'webkitFullscreenElement', 'webkitFullscreenEnabled', 'webkitfullscreenchange', 'webkitfullscreenerror', '-webkit-full-screen'], // Mozilla
+ ['mozRequestFullScreen', 'mozCancelFullScreen', 'mozFullScreenElement', 'mozFullScreenEnabled', 'mozfullscreenchange', 'mozfullscreenerror', '-moz-full-screen'], // Microsoft
+ ['msRequestFullscreen', 'msExitFullscreen', 'msFullscreenElement', 'msFullscreenEnabled', 'MSFullscreenChange', 'MSFullscreenError', '-ms-fullscreen']];
+ var specApi = apiMap[0];
+ var browserApi; // determine the supported set of functions
+
+ for (var i = 0; i < apiMap.length; i++) {
+ // check for exitFullscreen function
+ if (apiMap[i][1] in document) {
+ browserApi = apiMap[i];
+ break;
+ }
+ } // map the browser API names to the spec API names
+
+
+ if (browserApi) {
+ for (var _i = 0; _i < browserApi.length; _i++) {
+ FullscreenApi[specApi[_i]] = browserApi[_i];
+ }
+
+ FullscreenApi.prefixed = browserApi[0] !== specApi[0];
+ }
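+ // For example, in a WebKit-prefixed browser this yields
+ // FullscreenApi.requestFullscreen === 'webkitRequestFullscreen' and FullscreenApi.prefixed === true;
+ // in a spec-compliant browser the names map to themselves and `prefixed` becomes false.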
/**
* @file create-logger.js
* @module create-logger
*/
-
+ // This is the private tracking variable for the logging history.
var history = [];
/**
* Log messages to the console and history based on the type of message
@@ -60,19 +188,19 @@
// still be stored in history.
- if (!window$1.console) {
+ if (!window.console) {
return;
} // Was setting these once outside of this function, but containing them
// in the function makes it easier to test cases where console doesn't exist
// when the module is executed.
- var fn = window$1.console[type];
+ var fn = window.console[type];
if (!fn && type === 'debug') {
// Certain browsers don't have support for console.debug. For those, we
// should default to the closest comparable log.
- fn = window$1.console.info || window$1.console.log;
+ fn = window.console.info || window.console.log;
} // Bail out if there's no console or if this type is not allowed by the
// current logging level.
@@ -81,11 +209,11 @@
return;
}
- fn[Array.isArray(args) ? 'apply' : 'call'](window$1.console, args);
+ fn[Array.isArray(args) ? 'apply' : 'call'](window.console, args);
};
};
- function createLogger(name) {
+ function createLogger$1(name) {
// This is the private tracking variable for logging level.
var level = 'info'; // the curried logByType bound to the specific log and history
@@ -137,7 +265,7 @@
*/
log.createLogger = function (subname) {
- return createLogger(name + ': ' + subname);
+ return createLogger$1(name + ': ' + subname);
};
/**
* Enumeration of available logging levels, where the keys are the level names
@@ -310,8 +438,10 @@
* @file log.js
* @module log
*/
- var log = createLogger('VIDEOJS');
- var createLogger$1 = log.createLogger;
+ var log$1 = createLogger$1('VIDEOJS');
+ var createLogger = log$1.createLogger;
+
+ var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
function createCommonjsModule(fn, module) {
return module = { exports: {} }, fn(module, module.exports), module.exports;
@@ -369,7 +499,7 @@
* @return {Mixed}
* The new accumulated value.
*/
- var toString = Object.prototype.toString;
+ var toString$1 = Object.prototype.toString;
/**
* Get the keys of an Object
*
@@ -384,7 +514,7 @@
*/
var keys = function keys(object) {
- return isObject(object) ? Object.keys(object) : [];
+ return isObject$1(object) ? Object.keys(object) : [];
};
/**
* Array-like iteration for objects.
@@ -468,7 +598,7 @@
* @return {boolean}
*/
- function isObject(value) {
+ function isObject$1(value) {
return !!value && typeof value === 'object';
}
/**
@@ -480,13 +610,14 @@
*/
function isPlain(value) {
- return isObject(value) && toString.call(value) === '[object Object]' && value.constructor === Object;
+ return isObject$1(value) && toString$1.call(value) === '[object Object]' && value.constructor === Object;
}
/**
* @file computed-style.js
* @module computed-style
*/
+
/**
* A safe getComputedStyle.
*
@@ -503,20 +634,259 @@
*
* @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
*/
-
function computedStyle(el, prop) {
if (!el || !prop) {
return '';
}
- if (typeof window$1.getComputedStyle === 'function') {
- var computedStyleValue = window$1.getComputedStyle(el);
+ if (typeof window.getComputedStyle === 'function') {
+ var computedStyleValue;
+
+ try {
+ computedStyleValue = window.getComputedStyle(el);
+ } catch (e) {
+ return '';
+ }
+
return computedStyleValue ? computedStyleValue.getPropertyValue(prop) || computedStyleValue[prop] : '';
}
return '';
}
+ /**
+ * @file browser.js
+ * @module browser
+ */
+ var USER_AGENT = window.navigator && window.navigator.userAgent || '';
+ var webkitVersionMap = /AppleWebKit\/([\d.]+)/i.exec(USER_AGENT);
+ var appleWebkitVersion = webkitVersionMap ? parseFloat(webkitVersionMap.pop()) : null;
+ /**
+ * Whether or not this device is an iPod.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+ var IS_IPOD = /iPod/i.test(USER_AGENT);
+ /**
+ * The detected iOS version - or `null`.
+ *
+ * @static
+ * @const
+ * @type {string|null}
+ */
+
+ var IOS_VERSION = function () {
+ var match = USER_AGENT.match(/OS (\d+)_/i);
+
+ if (match && match[1]) {
+ return match[1];
+ }
+
+ return null;
+ }();
+ /**
+ * Whether or not this is an Android device.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+ var IS_ANDROID = /Android/i.test(USER_AGENT);
+ /**
+ * The detected Android version - or `null`.
+ *
+ * @static
+ * @const
+ * @type {number|string|null}
+ */
+
+ var ANDROID_VERSION = function () {
+ // This matches Android Major.Minor.Patch versions
+ // ANDROID_VERSION is Major.Minor as a Number; if Minor isn't available, only Major is returned
+ var match = USER_AGENT.match(/Android (\d+)(?:\.(\d+))?(?:\.(\d+))*/i);
+
+ if (!match) {
+ return null;
+ }
+
+ var major = match[1] && parseFloat(match[1]);
+ var minor = match[2] && parseFloat(match[2]);
+
+ if (major && minor) {
+ return parseFloat(match[1] + '.' + match[2]);
+ } else if (major) {
+ return major;
+ }
+
+ return null;
+ }();
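+ // e.g. a user agent containing "Android 7.1.2" yields 7.1, while "Android 11" yields 11.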
+ /**
+ * Whether or not this is a native Android browser.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+ var IS_NATIVE_ANDROID = IS_ANDROID && ANDROID_VERSION < 5 && appleWebkitVersion < 537;
+ /**
+ * Whether or not this is Mozilla Firefox.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+ var IS_FIREFOX = /Firefox/i.test(USER_AGENT);
+ /**
+ * Whether or not this is Microsoft Edge.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+ var IS_EDGE = /Edg/i.test(USER_AGENT);
+ /**
+ * Whether or not this is Google Chrome.
+ *
+ * This will also be `true` for Chrome on iOS, which will have different support
+ * as it is actually Safari under the hood.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+ var IS_CHROME = !IS_EDGE && (/Chrome/i.test(USER_AGENT) || /CriOS/i.test(USER_AGENT));
+ /**
+ * The detected Google Chrome version - or `null`.
+ *
+ * @static
+ * @const
+ * @type {number|null}
+ */
+
+ var CHROME_VERSION = function () {
+ var match = USER_AGENT.match(/(Chrome|CriOS)\/(\d+)/);
+
+ if (match && match[2]) {
+ return parseFloat(match[2]);
+ }
+
+ return null;
+ }();
+ /**
+ * The detected Internet Explorer version - or `null`.
+ *
+ * @static
+ * @const
+ * @type {number|null}
+ */
+
+ var IE_VERSION = function () {
+ var result = /MSIE\s(\d+)\.\d/.exec(USER_AGENT);
+ var version = result && parseFloat(result[1]);
+
+ if (!version && /Trident\/7.0/i.test(USER_AGENT) && /rv:11.0/.test(USER_AGENT)) {
+ // IE 11 has a different user agent string than other IE versions
+ version = 11.0;
+ }
+
+ return version;
+ }();
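+ // e.g. "MSIE 10.0" yields 10, while an IE11 user agent (Trident/7.0 with rv:11.0) yields 11.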
+ /**
+ * Whether or not this is desktop Safari.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+ var IS_SAFARI = /Safari/i.test(USER_AGENT) && !IS_CHROME && !IS_ANDROID && !IS_EDGE;
+ /**
+ * Whether or not this is a Windows machine.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+ var IS_WINDOWS = /Windows/i.test(USER_AGENT);
+ /**
+ * Whether or not this device is touch-enabled.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+ var TOUCH_ENABLED = Boolean(isReal() && ('ontouchstart' in window || window.navigator.maxTouchPoints || window.DocumentTouch && window.document instanceof window.DocumentTouch));
+ /**
+ * Whether or not this device is an iPad.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+ var IS_IPAD = /iPad/i.test(USER_AGENT) || IS_SAFARI && TOUCH_ENABLED && !/iPhone/i.test(USER_AGENT);
+ /**
+ * Whether or not this device is an iPhone.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+ // The Facebook app's UIWebView identifies as both an iPhone and iPad, so
+ // to identify iPhones, we need to exclude iPads.
+ // http://artsy.github.io/blog/2012/10/18/the-perils-of-ios-user-agent-sniffing/
+
+ var IS_IPHONE = /iPhone/i.test(USER_AGENT) && !IS_IPAD;
+ /**
+ * Whether or not this is an iOS device.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+ var IS_IOS = IS_IPHONE || IS_IPAD || IS_IPOD;
+ /**
+ * Whether or not this is any flavor of Safari - including iOS.
+ *
+ * @static
+ * @const
+ * @type {Boolean}
+ */
+
+ var IS_ANY_SAFARI = (IS_SAFARI || IS_IOS) && !IS_CHROME;
+
+ var browser = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ IS_IPOD: IS_IPOD,
+ IOS_VERSION: IOS_VERSION,
+ IS_ANDROID: IS_ANDROID,
+ ANDROID_VERSION: ANDROID_VERSION,
+ IS_NATIVE_ANDROID: IS_NATIVE_ANDROID,
+ IS_FIREFOX: IS_FIREFOX,
+ IS_EDGE: IS_EDGE,
+ IS_CHROME: IS_CHROME,
+ CHROME_VERSION: CHROME_VERSION,
+ IE_VERSION: IE_VERSION,
+ IS_SAFARI: IS_SAFARI,
+ IS_WINDOWS: IS_WINDOWS,
+ TOUCH_ENABLED: TOUCH_ENABLED,
+ IS_IPAD: IS_IPAD,
+ IS_IPHONE: IS_IPHONE,
+ IS_IOS: IS_IOS,
+ IS_ANY_SAFARI: IS_ANY_SAFARI
+ });
+
/**
* @file dom.js
* @module dom
@@ -586,7 +956,7 @@
function isReal() {
// Both document and window will never be undefined thanks to `global`.
- return document === window$1.document;
+ return document === window.document;
}
/**
* Determines, via duck typing, whether or not a value is a DOM element.
@@ -599,7 +969,7 @@
*/
function isEl(value) {
- return isObject(value) && value.nodeType === 1;
+ return isObject$1(value) && value.nodeType === 1;
}
/**
* Determines if the current DOM is embedded in an iframe.
@@ -613,7 +983,7 @@
// We need a try/catch here because Safari will throw errors when attempting
// to get either `parent` or `self`
try {
- return window$1.parent !== window$1.self;
+ return window.parent !== window.self;
} catch (x) {
return true;
}
@@ -683,12 +1053,12 @@
// same object, but that doesn't work so well.
if (propName.indexOf('aria-') !== -1 || propName === 'role' || propName === 'type') {
- log.warn('Setting attributes in the second argument of createEl()\n' + 'has been deprecated. Use the third argument instead.\n' + ("createEl(type, properties, attributes). Attempting to set " + propName + " to " + val + "."));
+ log$1.warn('Setting attributes in the second argument of createEl()\n' + 'has been deprecated. Use the third argument instead.\n' + ("createEl(type, properties, attributes). Attempting to set " + propName + " to " + val + "."));
el.setAttribute(propName, val); // Handle textContent since it's not supported everywhere and we have a
// method for it.
} else if (propName === 'textContent') {
textContent(el, val);
- } else if (el[propName] !== val) {
+ } else if (el[propName] !== val || propName === 'tabIndex') {
el[propName] = val;
}
});
@@ -803,6 +1173,12 @@
*/
function removeClass(element, classToRemove) {
+ // Protect in case the player gets disposed
+ if (!element) {
+ log$1.warn("removeClass was called with an element that doesn't exist");
+ return null;
+ }
+
if (element.classList) {
element.classList.remove(classToRemove);
} else {
@@ -1071,31 +1447,31 @@
*/
function findPosition(el) {
- var box;
-
- if (el.getBoundingClientRect && el.parentNode) {
- box = el.getBoundingClientRect();
- }
-
- if (!box) {
+ if (!el || el && !el.offsetParent) {
return {
left: 0,
- top: 0
+ top: 0,
+ width: 0,
+ height: 0
};
}
- var docEl = document.documentElement;
- var body = document.body;
- var clientLeft = docEl.clientLeft || body.clientLeft || 0;
- var scrollLeft = window$1.pageXOffset || body.scrollLeft;
- var left = box.left + scrollLeft - clientLeft;
- var clientTop = docEl.clientTop || body.clientTop || 0;
- var scrollTop = window$1.pageYOffset || body.scrollTop;
- var top = box.top + scrollTop - clientTop; // Android sometimes returns slightly off decimal values, so need to round
+ var width = el.offsetWidth;
+ var height = el.offsetHeight;
+ var left = 0;
+ var top = 0;
+
+ while (el.offsetParent && el !== document[FullscreenApi.fullscreenElement]) {
+ left += el.offsetLeft;
+ top += el.offsetTop;
+ el = el.offsetParent;
+ }
return {
- left: Math.round(left),
- top: Math.round(top)
+ left: left,
+ top: top,
+ width: width,
+ height: height
};
}
/**
@@ -1127,22 +1503,52 @@
*/
function getPointerPosition(el, event) {
- var position = {};
- var box = findPosition(el);
- var boxW = el.offsetWidth;
- var boxH = el.offsetHeight;
- var boxY = box.top;
- var boxX = box.left;
- var pageY = event.pageY;
- var pageX = event.pageX;
+ var translated = {
+ x: 0,
+ y: 0
+ };
- if (event.changedTouches) {
- pageX = event.changedTouches[0].pageX;
- pageY = event.changedTouches[0].pageY;
+ if (IS_IOS) {
+ var item = el;
+
+ while (item && item.nodeName.toLowerCase() !== 'html') {
+ var transform = computedStyle(item, 'transform');
+
+ if (/^matrix/.test(transform)) {
+ var values = transform.slice(7, -1).split(/,\s/).map(Number);
+ translated.x += values[4];
+ translated.y += values[5];
+ } else if (/^matrix3d/.test(transform)) {
+ var _values = transform.slice(9, -1).split(/,\s/).map(Number);
+
+ translated.x += _values[12];
+ translated.y += _values[13];
+ }
+
+ item = item.parentNode;
+ }
}
- position.y = Math.max(0, Math.min(1, (boxY - pageY + boxH) / boxH));
- position.x = Math.max(0, Math.min(1, (pageX - boxX) / boxW));
+ var position = {};
+ var boxTarget = findPosition(event.target);
+ var box = findPosition(el);
+ var boxW = box.width;
+ var boxH = box.height;
+ var offsetY = event.offsetY - (box.top - boxTarget.top);
+ var offsetX = event.offsetX - (box.left - boxTarget.left);
+
+ if (event.changedTouches) {
+ offsetX = event.changedTouches[0].pageX - box.left;
+ offsetY = event.changedTouches[0].pageY + box.top;
+
+ if (IS_IOS) {
+ offsetX -= translated.x;
+ offsetY -= translated.y;
+ }
+ }
+
+ position.y = 1 - Math.max(0, Math.min(1, offsetY / boxH));
+ position.x = Math.max(0, Math.min(1, offsetX / boxW));
return position;
}
/**
@@ -1156,7 +1562,7 @@
*/
function isTextNode(value) {
- return isObject(value) && value.nodeType === 3;
+ return isObject$1(value) && value.nodeType === 3;
}
/**
* Empties the contents of an element.
@@ -1361,6 +1767,7 @@
var $$ = createQuerier('querySelectorAll');
var Dom = /*#__PURE__*/Object.freeze({
+ __proto__: null,
isReal: isReal,
isEl: isEl,
isInFrame: isInFrame,
@@ -1398,14 +1805,13 @@
* @module setup
*/
var _windowLoaded = false;
- var videojs;
+ var videojs$1;
/**
* Set up any tags that have a data-setup `attribute` when the player is started.
*/
var autoSetup = function autoSetup() {
- // Protect against breakage in non-browser environments and check global autoSetup option.
- if (!isReal() || videojs.options.autoSetup === false) {
+ if (videojs$1.options.autoSetup === false) {
return;
}
@@ -1426,7 +1832,7 @@
if (options !== null) {
// Create new video.js instance.
- videojs(mediaEl);
+ videojs$1(mediaEl);
}
} // If getAttribute isn't defined, we need to wait for the DOM.
@@ -1453,11 +1859,16 @@
function autoSetupTimeout(wait, vjs) {
- if (vjs) {
- videojs = vjs;
+ // Protect against breakage in non-browser environments
+ if (!isReal()) {
+ return;
}
- window$1.setTimeout(autoSetup, wait);
+ if (vjs) {
+ videojs$1 = vjs;
+ }
+
+ window.setTimeout(autoSetup, wait);
}
/**
* Used to set the internal tracking of window loaded state to true.
@@ -1468,7 +1879,7 @@
function setWindowLoaded() {
_windowLoaded = true;
- window$1.removeEventListener('load', setWindowLoaded);
+ window.removeEventListener('load', setWindowLoaded);
}
if (isReal()) {
@@ -1483,7 +1894,7 @@
*
* @listens load
*/
- window$1.addEventListener('load', setWindowLoaded);
+ window.addEventListener('load', setWindowLoaded);
}
}
@@ -1491,6 +1902,7 @@
* @file stylesheet.js
* @module stylesheet
*/
+
/**
* Create a DOM syle element given a className for it.
*
@@ -1500,7 +1912,6 @@
* @return {Element}
* The element that was created.
*/
-
var createStyleElement = function createStyleElement(className) {
var style = document.createElement('style');
style.className = className;
@@ -1559,12 +1970,10 @@
*/
var FakeWeakMap;
- if (!window$1.WeakMap) {
- FakeWeakMap =
- /*#__PURE__*/
- function () {
+ if (!window.WeakMap) {
+ FakeWeakMap = /*#__PURE__*/function () {
function FakeWeakMap() {
- this.vdata = 'vdata' + Math.floor(window$1.performance && window$1.performance.now() || Date.now());
+ this.vdata = 'vdata' + Math.floor(window.performance && window.performance.now() || Date.now());
this.data = {};
}
@@ -1590,7 +1999,7 @@
// return undefined explicitly as that's the contract for this method
- log('We have no data for this element', key);
+ log$1('We have no data for this element', key);
return undefined;
};
@@ -1623,7 +2032,7 @@
*/
- var DomData = window$1.WeakMap ? new WeakMap() : new FakeWeakMap();
+ var DomData = window.WeakMap ? new WeakMap() : new FakeWeakMap();
/**
* @file events.js. An Event System (John Resig - Secrets of a JS Ninja http://jsninja.com/)
@@ -1727,8 +2136,8 @@
// with the Javascript Ninja code. So we're just overriding all events now.
- if (!event || !event.isPropagationStopped) {
- var old = event || window$1.event;
+ if (!event || !event.isPropagationStopped || !event.isImmediatePropagationStopped) {
+ var old = event || window.event;
event = {}; // Clone the old object so that we can modify the values event = {};
// IE8 Doesn't like when you mess with native event properties
// Firefox returns false for event.hasOwnProperty('type') and other props
@@ -1739,7 +2148,8 @@
// Safari 6.0.3 warns you if you try to copy deprecated layerX/Y
// Chrome warns you if you try to copy deprecated keyboardEvent.keyLocation
// and webkitMovementX/Y
- if (key !== 'layerX' && key !== 'layerY' && key !== 'keyLocation' && key !== 'webkitMovementX' && key !== 'webkitMovementY') {
+ // Lighthouse complains if Event.path is copied
+ if (key !== 'layerX' && key !== 'layerY' && key !== 'keyLocation' && key !== 'webkitMovementX' && key !== 'webkitMovementY' && key !== 'path') {
// Chrome 32+ warns if you try to copy deprecated returnValue, but
// we still want to if preventDefault isn't supported (IE8).
if (!(key === 'returnValue' && old.preventDefault)) {
@@ -1835,8 +2245,8 @@
_supportsPassive = true;
}
});
- window$1.addEventListener('test', null, opts);
- window$1.removeEventListener('test', null, opts);
+ window.addEventListener('test', null, opts);
+ window.removeEventListener('test', null, opts);
} catch (e) {// disregard
}
}
@@ -1912,7 +2322,7 @@
try {
handlersCopy[m].call(elem, event, hash);
} catch (e) {
- log.error(e);
+ log$1.error(e);
}
}
}
@@ -2130,6 +2540,7 @@
}
var Events = /*#__PURE__*/Object.freeze({
+ __proto__: null,
fixEvent: fixEvent,
on: on,
off: off,
@@ -2196,10 +2607,10 @@
*/
var throttle = function throttle(fn, wait) {
- var last = window$1.performance.now();
+ var last = window.performance.now();
var throttled = function throttled() {
- var now = window$1.performance.now();
+ var now = window.performance.now();
if (now - last >= wait) {
fn.apply(void 0, arguments);
@@ -2238,7 +2649,7 @@
var debounce = function debounce(func, wait, immediate, context) {
if (context === void 0) {
- context = window$1;
+ context = window;
}
var timeout;
@@ -2289,7 +2700,7 @@
* @class EventTarget
*/
- var EventTarget = function EventTarget() {};
+ var EventTarget$2 = function EventTarget() {};
/**
* A Custom DOM event.
*
@@ -2321,7 +2732,7 @@
*/
- EventTarget.prototype.allowedEvents_ = {};
+ EventTarget$2.prototype.allowedEvents_ = {};
/**
* Adds an `event listener` to an instance of an `EventTarget`. An `event listener` is a
* function that will get called when an event with a certain name gets triggered.
@@ -2333,7 +2744,7 @@
* The function to call with `EventTarget`s
*/
- EventTarget.prototype.on = function (type, fn) {
+ EventTarget$2.prototype.on = function (type, fn) {
// Remove the addEventListener alias before calling Events.on
// so we don't get into an infinite type loop
var ael = this.addEventListener;
@@ -2352,7 +2763,7 @@
*/
- EventTarget.prototype.addEventListener = EventTarget.prototype.on;
+ EventTarget$2.prototype.addEventListener = EventTarget$2.prototype.on;
/**
* Removes an `event listener` for a specific event from an instance of `EventTarget`.
* This makes it so that the `event listener` will no longer get called when the
@@ -2365,7 +2776,7 @@
* The function to remove.
*/
- EventTarget.prototype.off = function (type, fn) {
+ EventTarget$2.prototype.off = function (type, fn) {
off(this, type, fn);
};
/**
@@ -2377,7 +2788,7 @@
*/
- EventTarget.prototype.removeEventListener = EventTarget.prototype.off;
+ EventTarget$2.prototype.removeEventListener = EventTarget$2.prototype.off;
/**
* This function will add an `event listener` that gets triggered only once. After the
* first trigger it will get removed. This is like adding an `event listener`
@@ -2390,7 +2801,7 @@
* The function to be called once for each event name.
*/
- EventTarget.prototype.one = function (type, fn) {
+ EventTarget$2.prototype.one = function (type, fn) {
// Remove the addEventListener aliasing Events.on
// so we don't get into an infinite type loop
var ael = this.addEventListener;
@@ -2401,7 +2812,7 @@
this.addEventListener = ael;
};
- EventTarget.prototype.any = function (type, fn) {
+ EventTarget$2.prototype.any = function (type, fn) {
// Remove the addEventListener aliasing Events.on
// so we don't get into an infinite type loop
var ael = this.addEventListener;
@@ -2429,7 +2840,7 @@
*/
- EventTarget.prototype.trigger = function (event) {
+ EventTarget$2.prototype.trigger = function (event) {
var type = event.type || event; // deprecation
// In a future version we should default target to `this`
// similar to how we default the target to `elem` in
@@ -2459,10 +2870,10 @@
*/
- EventTarget.prototype.dispatchEvent = EventTarget.prototype.trigger;
+ EventTarget$2.prototype.dispatchEvent = EventTarget$2.prototype.trigger;
var EVENT_MAP;
- EventTarget.prototype.queueTrigger = function (event) {
+ EventTarget$2.prototype.queueTrigger = function (event) {
var _this = this;
// only set up EVENT_MAP if it'll be used
@@ -2480,8 +2891,8 @@
var oldTimeout = map.get(type);
map["delete"](type);
- window$1.clearTimeout(oldTimeout);
- var timeout = window$1.setTimeout(function () {
+ window.clearTimeout(oldTimeout);
+ var timeout = window.setTimeout(function () {
// if we cleared out all timeouts for the current target, delete its map
if (map.size === 0) {
map = null;
@@ -2497,6 +2908,26 @@
* @file mixins/evented.js
* @module evented
*/
+
+ var objName = function objName(obj) {
+ if (typeof obj.name === 'function') {
+ return obj.name();
+ }
+
+ if (typeof obj.name === 'string') {
+ return obj.name;
+ }
+
+ if (obj.name_) {
+ return obj.name_;
+ }
+
+ if (obj.constructor && obj.constructor.name) {
+ return obj.constructor.name;
+ }
+
+ return typeof obj;
+ };
/**
* Returns whether or not an object has had the evented mixin applied.
*
@@ -2507,8 +2938,9 @@
* Whether or not the object appears to be evented.
*/
+
var isEvented = function isEvented(object) {
- return object instanceof EventTarget || !!object.eventBusEl_ && ['on', 'one', 'off', 'trigger'].every(function (k) {
+ return object instanceof EventTarget$2 || !!object.eventBusEl_ && ['on', 'one', 'off', 'trigger'].every(function (k) {
return typeof object[k] === 'function';
});
};
@@ -2560,12 +2992,18 @@
*
* @param {Object} target
* The object to test.
+ *
+ * @param {Object} obj
+ * The evented object we are validating for
+ *
+ * @param {string} fnName
+ * The name of the evented mixin function that called this.
*/
- var validateTarget = function validateTarget(target) {
- if (!target.nodeName && !isEvented(target)) {
- throw new Error('Invalid target; must be a DOM node or evented object.');
+ var validateTarget = function validateTarget(target, obj, fnName) {
+ if (!target || !target.nodeName && !isEvented(target)) {
+ throw new Error("Invalid target for " + objName(obj) + "#" + fnName + "; must be a DOM node or evented object.");
}
};
/**
@@ -2577,12 +3015,18 @@
*
* @param {string|Array} type
* The type to test.
+ *
+ * @param {Object} obj
+ * The evented object we are validating for
+ *
+ * @param {string} fnName
+ * The name of the evented mixin function that called this.
*/
- var validateEventType = function validateEventType(type) {
+ var validateEventType = function validateEventType(type, obj, fnName) {
if (!isValidEventType(type)) {
- throw new Error('Invalid event type; must be a non-empty string or array.');
+ throw new Error("Invalid event type for " + objName(obj) + "#" + fnName + "; must be a non-empty string or array.");
}
};
/**
@@ -2594,12 +3038,18 @@
*
* @param {Function} listener
* The listener to test.
+ *
+ * @param {Object} obj
+ * The evented object we are validating for
+ *
+ * @param {string} fnName
+ * The name of the evented mixin function that called this.
*/
- var validateListener = function validateListener(listener) {
+ var validateListener = function validateListener(listener, obj, fnName) {
if (typeof listener !== 'function') {
- throw new Error('Invalid listener; must be a function.');
+ throw new Error("Invalid listener for " + objName(obj) + "#" + fnName + "; must be a function.");
}
};
/**
@@ -2614,12 +3064,15 @@
* @param {Array} args
* An array of arguments passed to `on()` or `one()`.
*
+ * @param {string} fnName
+ * The name of the evented mixin function that called this.
+ *
* @return {Object}
* An object containing useful values for `on()` or `one()` calls.
*/
- var normalizeListenArgs = function normalizeListenArgs(self, args) {
+ var normalizeListenArgs = function normalizeListenArgs(self, args, fnName) {
// If the number of arguments is less than 3, the target is always the
// evented object itself.
var isTargetingSelf = args.length < 3 || args[0] === self || args[0] === self.eventBusEl_;
@@ -2643,9 +3096,9 @@
listener = args[2];
}
- validateTarget(target);
- validateEventType(type);
- validateListener(listener);
+ validateTarget(target, self, fnName);
+ validateEventType(type, self, fnName);
+ validateListener(listener, self, fnName);
listener = bind(self, listener);
return {
isTargetingSelf: isTargetingSelf,
@@ -2674,7 +3127,7 @@
var listen = function listen(target, method, type, listener) {
- validateTarget(target);
+ validateTarget(target, target, method);
if (target.nodeName) {
Events[method](target, type, listener);
@@ -2721,7 +3174,7 @@
args[_key] = arguments[_key];
}
- var _normalizeListenArgs = normalizeListenArgs(this, args),
+ var _normalizeListenArgs = normalizeListenArgs(this, args, 'on'),
isTargetingSelf = _normalizeListenArgs.isTargetingSelf,
target = _normalizeListenArgs.target,
type = _normalizeListenArgs.type,
@@ -2783,7 +3236,7 @@
args[_key2] = arguments[_key2];
}
- var _normalizeListenArgs2 = normalizeListenArgs(this, args),
+ var _normalizeListenArgs2 = normalizeListenArgs(this, args, 'one'),
isTargetingSelf = _normalizeListenArgs2.isTargetingSelf,
target = _normalizeListenArgs2.target,
type = _normalizeListenArgs2.type,
@@ -2845,7 +3298,7 @@
args[_key4] = arguments[_key4];
}
- var _normalizeListenArgs3 = normalizeListenArgs(this, args),
+ var _normalizeListenArgs3 = normalizeListenArgs(this, args, 'any'),
isTargetingSelf = _normalizeListenArgs3.isTargetingSelf,
target = _normalizeListenArgs3.target,
type = _normalizeListenArgs3.type,
@@ -2899,9 +3352,9 @@
var target = targetOrType;
var type = typeOrListener; // Fail fast and in a meaningful way!
- validateTarget(target);
- validateEventType(type);
- validateListener(listener); // Ensure there's at least a guid, even if the function hasn't been used
+ validateTarget(target, this, 'off');
+ validateEventType(type, this, 'off');
+ validateListener(listener, this, 'off'); // Ensure there's at least a guid, even if the function hasn't been used
listener = bind(this, listener); // Remove the dispose listener on this evented object, which was given
// the same guid as the event listener in on().
@@ -2931,6 +3384,19 @@
* Whether or not the default behavior was prevented.
*/
trigger: function trigger$1(event, hash) {
+ validateTarget(this.eventBusEl_, this, 'trigger');
+ var type = event && typeof event !== 'string' ? event.type : event;
+
+ if (!isValidEventType(type)) {
+ var error = "Invalid event type for " + objName(this) + "#trigger; " + 'must be a non-empty string or object with a type key that has a non-empty value.';
+
+ if (event) {
+ (this.log || log$1).error(error);
+ } else {
+ throw new Error(error);
+ }
+ }
+
return trigger(this.eventBusEl_, event, hash);
}
};
@@ -2983,7 +3449,12 @@
target.on('dispose', function () {
target.off();
- window$1.setTimeout(function () {
+ [target, target.el_, target.eventBusEl_].forEach(function (val) {
+ if (val && DomData.has(val)) {
+ DomData["delete"](val);
+ }
+ });
+ window.setTimeout(function () {
target.eventBusEl_ = null;
}, 0);
});
@@ -3134,7 +3605,7 @@
* The string with an uppercased first letter
*/
- var toTitleCase = function toTitleCase(string) {
+ var toTitleCase$1 = function toTitleCase(string) {
if (typeof string !== 'string') {
return string;
}
@@ -3157,7 +3628,7 @@
*/
var titleCaseEquals = function titleCaseEquals(str1, str2) {
- return toTitleCase(str1) === toTitleCase(str2);
+ return toTitleCase$1(str1) === toTitleCase$1(str2);
};
/**
@@ -3182,7 +3653,7 @@
* A new object that is the merged result of all sources.
*/
- function mergeOptions() {
+ function mergeOptions$3() {
var result = {};
for (var _len = arguments.length, sources = new Array(_len), _key = 0; _key < _len; _key++) {
@@ -3204,12 +3675,273 @@
result[key] = {};
}
- result[key] = mergeOptions(result[key], value);
+ result[key] = mergeOptions$3(result[key], value);
});
});
return result;
}
+ var MapSham = /*#__PURE__*/function () {
+ function MapSham() {
+ this.map_ = {};
+ }
+
+ var _proto = MapSham.prototype;
+
+ _proto.has = function has(key) {
+ return key in this.map_;
+ };
+
+ _proto["delete"] = function _delete(key) {
+ var has = this.has(key);
+ delete this.map_[key];
+ return has;
+ };
+
+ _proto.set = function set(key, value) {
+ this.map_[key] = value;
+ return this;
+ };
+
+ _proto.forEach = function forEach(callback, thisArg) {
+ for (var key in this.map_) {
+ callback.call(thisArg, this.map_[key], key, this);
+ }
+ };
+
+ return MapSham;
+ }();
+
+ var Map$1 = window.Map ? window.Map : MapSham;
+
+ var SetSham = /*#__PURE__*/function () {
+ function SetSham() {
+ this.set_ = {};
+ }
+
+ var _proto = SetSham.prototype;
+
+ _proto.has = function has(key) {
+ return key in this.set_;
+ };
+
+ _proto["delete"] = function _delete(key) {
+ var has = this.has(key);
+ delete this.set_[key];
+ return has;
+ };
+
+ _proto.add = function add(key) {
+ this.set_[key] = 1;
+ return this;
+ };
+
+ _proto.forEach = function forEach(callback, thisArg) {
+ for (var key in this.set_) {
+ callback.call(thisArg, key, key, this);
+ }
+ };
+
+ return SetSham;
+ }();
+
+ var Set$1 = window.Set ? window.Set : SetSham;
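+ // These shams cover only the subset of the Map/Set API the player uses (has, delete,
+ // set/add, forEach); they back the timer, interval and rAF bookkeeping below when the
+ // native constructors are unavailable.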
+
+ var keycode = createCommonjsModule(function (module, exports) {
+ // Source: http://jsfiddle.net/vWx8V/
+ // http://stackoverflow.com/questions/5603195/full-list-of-javascript-keycodes
+
+ /**
+ * Convenience method returns corresponding value for given keyName or keyCode.
+ *
+ * @param {Mixed} keyCode {Number} or keyName {String}
+ * @return {Mixed}
+ * @api public
+ */
+ function keyCode(searchInput) {
+ // Keyboard Events
+ if (searchInput && 'object' === typeof searchInput) {
+ var hasKeyCode = searchInput.which || searchInput.keyCode || searchInput.charCode;
+ if (hasKeyCode) searchInput = hasKeyCode;
+ } // Numbers
+
+
+ if ('number' === typeof searchInput) return names[searchInput]; // Everything else (cast to string)
+
+ var search = String(searchInput); // check codes
+
+ var foundNamedKey = codes[search.toLowerCase()];
+ if (foundNamedKey) return foundNamedKey; // check aliases
+
+ var foundNamedKey = aliases[search.toLowerCase()];
+ if (foundNamedKey) return foundNamedKey; // weird character?
+
+ if (search.length === 1) return search.charCodeAt(0);
+ return undefined;
+ }
+ /**
+ * Compares a keyboard event with a given keyCode or keyName.
+ *
+ * @param {Event} event Keyboard event that should be tested
+ * @param {Mixed} keyCode {Number} or keyName {String}
+ * @return {Boolean}
+ * @api public
+ */
+
+
+ keyCode.isEventKey = function isEventKey(event, nameOrCode) {
+ if (event && 'object' === typeof event) {
+ var keyCode = event.which || event.keyCode || event.charCode;
+
+ if (keyCode === null || keyCode === undefined) {
+ return false;
+ }
+
+ if (typeof nameOrCode === 'string') {
+ // check codes
+ var foundNamedKey = codes[nameOrCode.toLowerCase()];
+
+ if (foundNamedKey) {
+ return foundNamedKey === keyCode;
+ } // check aliases
+
+
+ var foundNamedKey = aliases[nameOrCode.toLowerCase()];
+
+ if (foundNamedKey) {
+ return foundNamedKey === keyCode;
+ }
+ } else if (typeof nameOrCode === 'number') {
+ return nameOrCode === keyCode;
+ }
+
+ return false;
+ }
+ };
+
+ exports = module.exports = keyCode;
+ /**
+ * Get by name
+ *
+ * exports.code['enter'] // => 13
+ */
+
+ var codes = exports.code = exports.codes = {
+ 'backspace': 8,
+ 'tab': 9,
+ 'enter': 13,
+ 'shift': 16,
+ 'ctrl': 17,
+ 'alt': 18,
+ 'pause/break': 19,
+ 'caps lock': 20,
+ 'esc': 27,
+ 'space': 32,
+ 'page up': 33,
+ 'page down': 34,
+ 'end': 35,
+ 'home': 36,
+ 'left': 37,
+ 'up': 38,
+ 'right': 39,
+ 'down': 40,
+ 'insert': 45,
+ 'delete': 46,
+ 'command': 91,
+ 'left command': 91,
+ 'right command': 93,
+ 'numpad *': 106,
+ 'numpad +': 107,
+ 'numpad -': 109,
+ 'numpad .': 110,
+ 'numpad /': 111,
+ 'num lock': 144,
+ 'scroll lock': 145,
+ 'my computer': 182,
+ 'my calculator': 183,
+ ';': 186,
+ '=': 187,
+ ',': 188,
+ '-': 189,
+ '.': 190,
+ '/': 191,
+ '`': 192,
+ '[': 219,
+ '\\': 220,
+ ']': 221,
+ "'": 222
+ }; // Helper aliases
+
+ var aliases = exports.aliases = {
+ 'windows': 91,
+ '⇧': 16,
+ '⌥': 18,
+ '⌃': 17,
+ '⌘': 91,
+ 'ctl': 17,
+ 'control': 17,
+ 'option': 18,
+ 'pause': 19,
+ 'break': 19,
+ 'caps': 20,
+ 'return': 13,
+ 'escape': 27,
+ 'spc': 32,
+ 'spacebar': 32,
+ 'pgup': 33,
+ 'pgdn': 34,
+ 'ins': 45,
+ 'del': 46,
+ 'cmd': 91
+ };
+ /*!
+ * Programmatically add the following
+ */
+ // lower case chars
+
+ for (i = 97; i < 123; i++) {
+ codes[String.fromCharCode(i)] = i - 32;
+ } // numbers
+
+
+ for (var i = 48; i < 58; i++) {
+ codes[i - 48] = i;
+ } // function keys
+
+
+ for (i = 1; i < 13; i++) {
+ codes['f' + i] = i + 111;
+ } // numpad keys
+
+
+ for (i = 0; i < 10; i++) {
+ codes['numpad ' + i] = i + 96;
+ }
+ /**
+ * Get by code
+ *
+ * exports.name[13] // => 'Enter'
+ */
+
+
+ var names = exports.names = exports.title = {}; // title for backward compat
+ // Create reverse mapping
+
+ for (i in codes) {
+ names[codes[i]] = i;
+ } // Add aliases
+
+
+ for (var alias in aliases) {
+ codes[alias] = aliases[alias];
+ }
+ });
+ keycode.code;
+ keycode.codes;
+ keycode.aliases;
+ keycode.names;
+ keycode.title;
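+ // Usage, grounded in the module above:
+ //   keycode(13) === 'enter', keycode('enter') === 13, and
+ //   keycode.isEventKey(event, 'space') tests a keyboard event against a named key.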
+
/**
* Player Component - Base class for all UI objects
*
@@ -3224,9 +3956,7 @@
* Components can also use methods from {@link EventTarget}
*/
- var Component =
- /*#__PURE__*/
- function () {
+ var Component$1 = /*#__PURE__*/function () {
/**
* A callback that is called when a component is ready. Does not have any
+ * parameters and any callback value will be ignored.
@@ -3242,17 +3972,22 @@
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
- * The key/value store of player options.
+ * The key/value store of component options.
*
* @param {Object[]} [options.children]
+ * An array of children objects to initialize this component with. Children objects have
* a name property that will be used if more than one component of the same type needs to be
* added.
*
+ * @param {string} [options.className]
+ * A class or space separated list of classes to add to the component
+ *
* @param {Component~ReadyCallback} [ready]
* Function that gets called when the `Component` is ready.
*/
function Component(player, options, ready) {
+ var _this = this;
+
// The component might be the player itself and we can't pass `this` to super
if (!player && this.play) {
this.player_ = player = this; // eslint-disable-line
@@ -3264,9 +3999,9 @@
this.parentComponent_ = null; // Make a copy of prototype.options_ to protect against overriding defaults
- this.options_ = mergeOptions({}, this.options_); // Updated options with supplied options
+ this.options_ = mergeOptions$3({}, this.options_); // Updated options with supplied options
- options = this.options_ = mergeOptions(this.options_, options); // Get ID from options or options element if one is supplied
+ options = this.options_ = mergeOptions$3(this.options_, options); // Get ID from options or options element if one is supplied
this.id_ = options.id || options.el && options.el.id; // If there was no ID from the options, generate one
@@ -3282,6 +4017,12 @@
this.el_ = options.el;
} else if (options.createEl !== false) {
this.el_ = this.createEl();
+ }
+
+ if (options.className && this.el_) {
+ options.className.split(' ').forEach(function (c) {
+ return _this.addClass(c);
+ });
+ } // if evented is anything except false, we want to mix in evented
@@ -3290,61 +4031,28 @@
evented(this, {
eventBusKey: this.el_ ? 'el_' : null
});
+ this.handleLanguagechange = this.handleLanguagechange.bind(this);
+ this.on(this.player_, 'languagechange', this.handleLanguagechange);
}
stateful(this, this.constructor.defaultState);
this.children_ = [];
this.childIndex_ = {};
this.childNameIndex_ = {};
- var SetSham;
-
- if (!window$1.Set) {
- SetSham =
- /*#__PURE__*/
- function () {
- function SetSham() {
- this.set_ = {};
- }
-
- var _proto2 = SetSham.prototype;
-
- _proto2.has = function has(key) {
- return key in this.set_;
- };
-
- _proto2["delete"] = function _delete(key) {
- var has = this.has(key);
- delete this.set_[key];
- return has;
- };
-
- _proto2.add = function add(key) {
- this.set_[key] = 1;
- return this;
- };
-
- _proto2.forEach = function forEach(callback, thisArg) {
- for (var key in this.set_) {
- callback.call(thisArg, key, key, this);
- }
- };
-
- return SetSham;
- }();
- }
-
- this.setTimeoutIds_ = window$1.Set ? new Set() : new SetSham();
- this.setIntervalIds_ = window$1.Set ? new Set() : new SetSham();
- this.rafIds_ = window$1.Set ? new Set() : new SetSham();
+ this.setTimeoutIds_ = new Set$1();
+ this.setIntervalIds_ = new Set$1();
+ this.rafIds_ = new Set$1();
+ this.namedRafs_ = new Map$1();
this.clearingTimersOnDispose_ = false; // Add any child components in options
if (options.initChildren !== false) {
this.initChildren();
- }
-
- this.ready(ready); // Don't want to trigger ready here or it will before init is actually
+ } // Don't want to trigger ready here or it will go before init is actually
// finished for all children that run this constructor
+
+ this.ready(ready);
+
if (options.reportTouchActivity !== false) {
this.enableTouchActivity();
}
@@ -3353,16 +4061,27 @@
* Dispose of the `Component` and all child components.
*
* @fires Component#dispose
+ *
+ * @param {Object} options
+ * @param {Element} options.restoreEl element with which to replace the player element
*/
var _proto = Component.prototype;
- _proto.dispose = function dispose() {
+ _proto.dispose = function dispose(options) {
+ if (options === void 0) {
+ options = {};
+ }
+
// Bail out if the component has already been disposed.
if (this.isDisposed_) {
return;
}
+
+ if (this.readyQueue_) {
+ this.readyQueue_.length = 0;
+ }
/**
* Triggered when a `Component` is disposed.
*
@@ -3398,11 +4117,11 @@
if (this.el_) {
// Remove element from DOM
if (this.el_.parentNode) {
- this.el_.parentNode.removeChild(this.el_);
- }
-
- if (DomData.has(this.el_)) {
- DomData["delete"](this.el_);
+ if (options.restoreEl) {
+ this.el_.parentNode.replaceChild(options.restoreEl, this.el_);
+ } else {
+ this.el_.parentNode.removeChild(this.el_);
+ }
}
this.el_ = null;
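A minimal sketch of the new restoreEl option to dispose(), usable on any Component instance; the placeholder element and someComponent name below are illustrative:

    // swap a fallback element into the component's place instead of just removing it
    var fallback = document.createElement('div');
    fallback.className = 'video-placeholder';
    // parentNode.replaceChild(fallback, el_) is used instead of removeChild(el_)
    someComponent.dispose({ restoreEl: fallback });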
@@ -3451,7 +4170,7 @@
return this.options_;
}
- this.options_ = mergeOptions(this.options_, obj);
+ this.options_ = mergeOptions$3(this.options_, obj);
return this.options_;
}
/**
@@ -3558,6 +4277,14 @@
return localizedString;
}
+ /**
+ * Handles language change for the player in components. Should be overridden by sub-components.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.handleLanguagechange = function handleLanguagechange() {}
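A sketch of how a sub-component might override this hook, assuming a hypothetical MyLabel component:

    var Component = videojs.getComponent('Component');
    var MyLabel = videojs.extend(Component, {
      handleLanguagechange: function handleLanguagechange() {
        // re-localize displayed text when the player's language changes
        this.el().textContent = this.localize('Live');
      }
    });
    videojs.registerComponent('MyLabel', MyLabel);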
/**
* Return the `Component`s DOM element. This is where children get inserted.
* This will usually be the same as the element returned in {@link Component#el}.
@@ -3636,6 +4363,43 @@
return this.childNameIndex_[name];
}
+ /**
+ * Returns the descendant `Component` following the given
+ * descendant `names`. For instance ['foo', 'bar', 'baz'] would
+ * try to get 'foo' on the current component, 'bar' on the 'foo'
+ * component and 'baz' on the 'bar' component and return undefined
+ * if any of those don't exist.
+ *
+ * @param {...string[]|...string} names
+ * The name of the child `Component` to get.
+ *
+ * @return {Component|undefined}
+ * The descendant `Component` following the given descendant
+ * `names` or undefined.
+ */
+ ;
+
+ _proto.getDescendant = function getDescendant() {
+ for (var _len = arguments.length, names = new Array(_len), _key = 0; _key < _len; _key++) {
+ names[_key] = arguments[_key];
+ }
+
+ // flatten array argument into the main array
+ names = names.reduce(function (acc, n) {
+ return acc.concat(n);
+ }, []);
+ var currentChild = this;
+
+ for (var i = 0; i < names.length; i++) {
+ currentChild = currentChild.getChild(names[i]);
+
+ if (!currentChild || !currentChild.getChild) {
+ return;
+ }
+ }
+
+ return currentChild;
+ }
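For example, walking from the player down to a nested control (component names as used by the stock control bar):

    // both forms are equivalent; undefined is returned if any link in the chain is missing
    player.getDescendant('ControlBar', 'PlayToggle');
    player.getDescendant(['ControlBar', 'PlayToggle']);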
/**
* Add a child `Component` inside the current `Component`.
*
@@ -3669,7 +4433,7 @@
var componentName; // If child is a string, create component with options
if (typeof child === 'string') {
- componentName = toTitleCase(child);
+ componentName = toTitleCase$1(child);
var componentClassName = options.componentClass || componentName; // Set name through options
options.name = componentName; // Create a new object & element for this controls set
@@ -3707,7 +4471,7 @@
// name function of the component
- componentName = componentName || component.name && toTitleCase(component.name());
+ componentName = componentName || component.name && toTitleCase$1(component.name());
if (componentName) {
this.childNameIndex_[componentName] = component;
@@ -3720,8 +4484,13 @@
// If inserting before a component, insert before that component's element
var refNode = null;
- if (this.children_[index + 1] && this.children_[index + 1].el_) {
- refNode = this.children_[index + 1].el_;
+ if (this.children_[index + 1]) {
+ // Most children are components, but the video tech is an HTML element
+ if (this.children_[index + 1].el_) {
+ refNode = this.children_[index + 1].el_;
+ } else if (isEl(this.children_[index + 1])) {
+ refNode = this.children_[index + 1];
+ }
}
this.contentEl().insertBefore(component.el(), refNode);
@@ -3764,7 +4533,7 @@
component.parentComponent_ = null;
this.childIndex_[component.id()] = null;
- this.childNameIndex_[toTitleCase(component.name())] = null;
+ this.childNameIndex_[toTitleCase$1(component.name())] = null;
this.childNameIndex_[toLowerCase(component.name())] = null;
var compEl = component.el();
@@ -3778,7 +4547,7 @@
;
_proto.initChildren = function initChildren() {
- var _this = this;
+ var _this2 = this;
var children = this.options_.children;
@@ -3811,15 +4580,15 @@
// reach back into the player for options later.
- opts.playerOptions = _this.options_.playerOptions; // Create and add the child component.
+ opts.playerOptions = _this2.options_.playerOptions; // Create and add the child component.
// Add a direct reference to the child by name on the parent instance.
// If two of the same component are used, different names should be supplied
// for each
- var newChild = _this.addChild(name, opts);
+ var newChild = _this2.addChild(name, opts);
if (newChild) {
- _this[name] = newChild;
+ _this2[name] = newChild;
}
}; // Allow for an array of children details to be passed in the options
@@ -3849,7 +4618,7 @@
if (typeof child === 'string') {
name = child;
- opts = children[name] || _this.options_[name] || {};
+ opts = children[name] || _this2.options_[name] || {};
} else {
name = child.name;
opts = child;
@@ -3863,7 +4632,7 @@
// we have to make sure that child.name isn't in the techOrder since
// techs are registered as Components but aren't compatible
// See https://github.com/videojs/video.js/issues/2772
- var c = Component.getComponent(child.opts.componentClass || toTitleCase(child.name));
+ var c = Component.getComponent(child.opts.componentClass || toTitleCase$1(child.name));
return c && !Tech.isTech(c);
}).forEach(handleAdd);
}
@@ -4267,7 +5036,7 @@
// TODO: handle display:none and no dimension style using px
- return parseInt(this.el_['offset' + toTitleCase(widthOrHeight)], 10);
+ return parseInt(this.el_['offset' + toTitleCase$1(widthOrHeight)], 10);
}
/**
* Get the computed width or the height of the component's element.
@@ -4297,7 +5066,7 @@
// This code also runs wherever getComputedStyle doesn't exist.
if (computedWidthOrHeight === 0 || isNaN(computedWidthOrHeight)) {
- var rule = "offset" + toTitleCase(widthOrHeight);
+ var rule = "offset" + toTitleCase$1(widthOrHeight);
computedWidthOrHeight = this.el_[rule];
}
@@ -4387,8 +5156,11 @@
_proto.handleKeyDown = function handleKeyDown(event) {
if (this.player_) {
// We only stop propagation here because we want unhandled events to fall
- // back to the browser.
- event.stopPropagation();
+ // back to the browser. Exclude Tab for focus trapping.
+ if (!keycode.isEventKey(event, 'Tab')) {
+ event.stopPropagation();
+ }
+
this.player_.handleKeyDown(event);
}
}
@@ -4441,7 +5213,7 @@
pageY: event.touches[0].pageY
}; // Record start time so we can detect a tap vs. "touch and hold"
- touchStart = window$1.performance.now(); // Reset couldBeTap tracking
+ touchStart = window.performance.now(); // Reset couldBeTap tracking
couldBeTap = true;
}
@@ -4477,7 +5249,7 @@
if (couldBeTap === true) {
// Measure how long the touch lasted
- var touchTime = window$1.performance.now() - touchStart; // Make sure the touch was less than the threshold to be considered a tap
+ var touchTime = window.performance.now() - touchStart; // Make sure the touch was less than the threshold to be considered a tap
if (touchTime < touchTimeThreshold) {
// Don't let browser turn this into a click
@@ -4586,16 +5358,16 @@
;
_proto.setTimeout = function setTimeout(fn, timeout) {
- var _this2 = this;
+ var _this3 = this;
// declare as variables so they are properly available in timeout function
// eslint-disable-next-line
var timeoutId;
fn = bind(this, fn);
this.clearTimersOnDispose_();
- timeoutId = window$1.setTimeout(function () {
- if (_this2.setTimeoutIds_.has(timeoutId)) {
- _this2.setTimeoutIds_["delete"](timeoutId);
+ timeoutId = window.setTimeout(function () {
+ if (_this3.setTimeoutIds_.has(timeoutId)) {
+ _this3.setTimeoutIds_["delete"](timeoutId);
}
fn();
@@ -4623,7 +5395,7 @@
_proto.clearTimeout = function clearTimeout(timeoutId) {
if (this.setTimeoutIds_.has(timeoutId)) {
this.setTimeoutIds_["delete"](timeoutId);
- window$1.clearTimeout(timeoutId);
+ window.clearTimeout(timeoutId);
}
return timeoutId;
@@ -4653,7 +5425,7 @@
_proto.setInterval = function setInterval(fn, interval) {
fn = bind(this, fn);
this.clearTimersOnDispose_();
- var intervalId = window$1.setInterval(fn, interval);
+ var intervalId = window.setInterval(fn, interval);
this.setIntervalIds_.add(intervalId);
return intervalId;
}
@@ -4677,7 +5449,7 @@
_proto.clearInterval = function clearInterval(intervalId) {
if (this.setIntervalIds_.has(intervalId)) {
this.setIntervalIds_["delete"](intervalId);
- window$1.clearInterval(intervalId);
+ window.clearInterval(intervalId);
}
return intervalId;
@@ -4710,7 +5482,7 @@
;
_proto.requestAnimationFrame = function requestAnimationFrame(fn) {
- var _this3 = this;
+ var _this4 = this;
// Fall back to using a timer.
if (!this.supportsRaf_) {
@@ -4722,9 +5494,9 @@
var id;
fn = bind(this, fn);
- id = window$1.requestAnimationFrame(function () {
- if (_this3.rafIds_.has(id)) {
- _this3.rafIds_["delete"](id);
+ id = window.requestAnimationFrame(function () {
+ if (_this4.rafIds_.has(id)) {
+ _this4.rafIds_["delete"](id);
}
fn();
@@ -4732,6 +5504,55 @@
this.rafIds_.add(id);
return id;
}
+ /**
+ * Request an animation frame, but only one named animation
+ * frame will be queued. Another will never be added until
+ * the previous one finishes.
+ *
+ * @param {string} name
+ * The name to give this requestAnimationFrame
+ *
+ * @param {Component~GenericCallback} fn
+ * A function that will be bound to this component and executed just
+ * before the browser's next repaint.
+ */
+ ;
+
+ _proto.requestNamedAnimationFrame = function requestNamedAnimationFrame(name, fn) {
+ var _this5 = this;
+
+ if (this.namedRafs_.has(name)) {
+ return;
+ }
+
+ this.clearTimersOnDispose_();
+ fn = bind(this, fn);
+ var id = this.requestAnimationFrame(function () {
+ fn();
+
+ if (_this5.namedRafs_.has(name)) {
+ _this5.namedRafs_["delete"](name);
+ }
+ });
+ this.namedRafs_.set(name, id);
+ return name;
+ }
+ /**
+ * Cancels a current named animation frame if it exists.
+ *
+ * @param {string} name
+ * The name of the requestAnimationFrame to cancel.
+ */
+ ;
+
+ _proto.cancelNamedAnimationFrame = function cancelNamedAnimationFrame(name) {
+ if (!this.namedRafs_.has(name)) {
+ return;
+ }
+
+ this.cancelAnimationFrame(this.namedRafs_.get(name));
+ this.namedRafs_["delete"](name);
+ }
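A sketch of using the named variant inside a component method to coalesce repeated update requests; the 'update-progress' name and update() routine are arbitrary:

    // only one 'update-progress' frame can be pending at a time; extra requests are ignored
    this.requestNamedAnimationFrame('update-progress', function () {
      this.update();
    });

    // cancel it if, for example, the component is hidden before the frame fires
    this.cancelNamedAnimationFrame('update-progress');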
/**
* Cancels a queued callback passed to {@link Component#requestAnimationFrame}
* (rAF).
@@ -4758,7 +5579,7 @@
if (this.rafIds_.has(id)) {
this.rafIds_["delete"](id);
- window$1.cancelAnimationFrame(id);
+ window.cancelAnimationFrame(id);
}
return id;
@@ -4776,7 +5597,7 @@
;
_proto.clearTimersOnDispose_ = function clearTimersOnDispose_() {
- var _this4 = this;
+ var _this6 = this;
if (this.clearingTimersOnDispose_) {
return;
@@ -4784,13 +5605,18 @@
this.clearingTimersOnDispose_ = true;
this.one('dispose', function () {
- [['rafIds_', 'cancelAnimationFrame'], ['setTimeoutIds_', 'clearTimeout'], ['setIntervalIds_', 'clearInterval']].forEach(function (_ref) {
+ [['namedRafs_', 'cancelNamedAnimationFrame'], ['rafIds_', 'cancelAnimationFrame'], ['setTimeoutIds_', 'clearTimeout'], ['setIntervalIds_', 'clearInterval']].forEach(function (_ref) {
var idName = _ref[0],
cancelName = _ref[1];
- _this4[idName].forEach(_this4[cancelName], _this4);
+ // for a `Set`, the key will actually be the value again,
+ // i.e. forEach((val, val) => ...), but for Maps we want to use
+ // the key.
+ _this6[idName].forEach(function (val, key) {
+ return _this6[cancelName](key);
+ });
});
- _this4.clearingTimersOnDispose_ = false;
+ _this6.clearingTimersOnDispose_ = false;
});
}
/**
@@ -4836,7 +5662,7 @@
throw new Error("Illegal component, \"" + name + "\"; " + reason + ".");
}
- name = toTitleCase(name);
+ name = toTitleCase$1(name);
if (!Component.components_) {
Component.components_ = {};
@@ -4870,11 +5696,6 @@
*
* @return {Component}
* The `Component` that got registered under the given name.
- *
- * @deprecated In `videojs` 6 this will not return `Component`s that were not
- * registered using {@link Component.registerComponent}. Currently we
- * check the global `videojs` object for a `Component` name and
- * return that if it exists.
*/
;
@@ -4898,8 +5719,8 @@
*/
- Component.prototype.supportsRaf_ = typeof window$1.requestAnimationFrame === 'function' && typeof window$1.cancelAnimationFrame === 'function';
- Component.registerComponent('Component', Component);
+ Component$1.prototype.supportsRaf_ = typeof window.requestAnimationFrame === 'function' && typeof window.cancelAnimationFrame === 'function';
+ Component$1.registerComponent('Component', Component$1);
function _assertThisInitialized(self) {
if (self === void 0) {
@@ -4919,238 +5740,6 @@
var inheritsLoose = _inheritsLoose;
- /**
- * @file browser.js
- * @module browser
- */
- var USER_AGENT = window$1.navigator && window$1.navigator.userAgent || '';
- var webkitVersionMap = /AppleWebKit\/([\d.]+)/i.exec(USER_AGENT);
- var appleWebkitVersion = webkitVersionMap ? parseFloat(webkitVersionMap.pop()) : null;
- /**
- * Whether or not this device is an iPod.
- *
- * @static
- * @const
- * @type {Boolean}
- */
-
- var IS_IPOD = /iPod/i.test(USER_AGENT);
- /**
- * The detected iOS version - or `null`.
- *
- * @static
- * @const
- * @type {string|null}
- */
-
- var IOS_VERSION = function () {
- var match = USER_AGENT.match(/OS (\d+)_/i);
-
- if (match && match[1]) {
- return match[1];
- }
-
- return null;
- }();
- /**
- * Whether or not this is an Android device.
- *
- * @static
- * @const
- * @type {Boolean}
- */
-
- var IS_ANDROID = /Android/i.test(USER_AGENT);
- /**
- * The detected Android version - or `null`.
- *
- * @static
- * @const
- * @type {number|string|null}
- */
-
- var ANDROID_VERSION = function () {
- // This matches Android Major.Minor.Patch versions
- // ANDROID_VERSION is Major.Minor as a Number, if Minor isn't available, then only Major is returned
- var match = USER_AGENT.match(/Android (\d+)(?:\.(\d+))?(?:\.(\d+))*/i);
-
- if (!match) {
- return null;
- }
-
- var major = match[1] && parseFloat(match[1]);
- var minor = match[2] && parseFloat(match[2]);
-
- if (major && minor) {
- return parseFloat(match[1] + '.' + match[2]);
- } else if (major) {
- return major;
- }
-
- return null;
- }();
- /**
- * Whether or not this is a native Android browser.
- *
- * @static
- * @const
- * @type {Boolean}
- */
-
- var IS_NATIVE_ANDROID = IS_ANDROID && ANDROID_VERSION < 5 && appleWebkitVersion < 537;
- /**
- * Whether or not this is Mozilla Firefox.
- *
- * @static
- * @const
- * @type {Boolean}
- */
-
- var IS_FIREFOX = /Firefox/i.test(USER_AGENT);
- /**
- * Whether or not this is Microsoft Edge.
- *
- * @static
- * @const
- * @type {Boolean}
- */
-
- var IS_EDGE = /Edge/i.test(USER_AGENT);
- /**
- * Whether or not this is Google Chrome.
- *
- * This will also be `true` for Chrome on iOS, which will have different support
- * as it is actually Safari under the hood.
- *
- * @static
- * @const
- * @type {Boolean}
- */
-
- var IS_CHROME = !IS_EDGE && (/Chrome/i.test(USER_AGENT) || /CriOS/i.test(USER_AGENT));
- /**
- * The detected Google Chrome version - or `null`.
- *
- * @static
- * @const
- * @type {number|null}
- */
-
- var CHROME_VERSION = function () {
- var match = USER_AGENT.match(/(Chrome|CriOS)\/(\d+)/);
-
- if (match && match[2]) {
- return parseFloat(match[2]);
- }
-
- return null;
- }();
- /**
- * The detected Internet Explorer version - or `null`.
- *
- * @static
- * @const
- * @type {number|null}
- */
-
- var IE_VERSION = function () {
- var result = /MSIE\s(\d+)\.\d/.exec(USER_AGENT);
- var version = result && parseFloat(result[1]);
-
- if (!version && /Trident\/7.0/i.test(USER_AGENT) && /rv:11.0/.test(USER_AGENT)) {
- // IE 11 has a different user agent string than other IE versions
- version = 11.0;
- }
-
- return version;
- }();
- /**
- * Whether or not this is desktop Safari.
- *
- * @static
- * @const
- * @type {Boolean}
- */
-
- var IS_SAFARI = /Safari/i.test(USER_AGENT) && !IS_CHROME && !IS_ANDROID && !IS_EDGE;
- /**
- * Whether or not this is a Windows machine.
- *
- * @static
- * @const
- * @type {Boolean}
- */
-
- var IS_WINDOWS = /Windows/i.test(USER_AGENT);
- /**
- * Whether or not this device is touch-enabled.
- *
- * @static
- * @const
- * @type {Boolean}
- */
-
- var TOUCH_ENABLED = isReal() && ('ontouchstart' in window$1 || window$1.navigator.maxTouchPoints || window$1.DocumentTouch && window$1.document instanceof window$1.DocumentTouch);
- /**
- * Whether or not this device is an iPad.
- *
- * @static
- * @const
- * @type {Boolean}
- */
-
- var IS_IPAD = /iPad/i.test(USER_AGENT) || IS_SAFARI && TOUCH_ENABLED && !/iPhone/i.test(USER_AGENT);
- /**
- * Whether or not this device is an iPhone.
- *
- * @static
- * @const
- * @type {Boolean}
- */
- // The Facebook app's UIWebView identifies as both an iPhone and iPad, so
- // to identify iPhones, we need to exclude iPads.
- // http://artsy.github.io/blog/2012/10/18/the-perils-of-ios-user-agent-sniffing/
-
- var IS_IPHONE = /iPhone/i.test(USER_AGENT) && !IS_IPAD;
- /**
- * Whether or not this is an iOS device.
- *
- * @static
- * @const
- * @type {Boolean}
- */
-
- var IS_IOS = IS_IPHONE || IS_IPAD || IS_IPOD;
- /**
- * Whether or not this is any flavor of Safari - including iOS.
- *
- * @static
- * @const
- * @type {Boolean}
- */
-
- var IS_ANY_SAFARI = (IS_SAFARI || IS_IOS) && !IS_CHROME;
-
- var browser = /*#__PURE__*/Object.freeze({
- IS_IPOD: IS_IPOD,
- IOS_VERSION: IOS_VERSION,
- IS_ANDROID: IS_ANDROID,
- ANDROID_VERSION: ANDROID_VERSION,
- IS_NATIVE_ANDROID: IS_NATIVE_ANDROID,
- IS_FIREFOX: IS_FIREFOX,
- IS_EDGE: IS_EDGE,
- IS_CHROME: IS_CHROME,
- CHROME_VERSION: CHROME_VERSION,
- IE_VERSION: IE_VERSION,
- IS_SAFARI: IS_SAFARI,
- IS_WINDOWS: IS_WINDOWS,
- TOUCH_ENABLED: TOUCH_ENABLED,
- IS_IPAD: IS_IPAD,
- IS_IPHONE: IS_IPHONE,
- IS_IOS: IS_IOS,
- IS_ANY_SAFARI: IS_ANY_SAFARI
- });
-
/**
* @file time-ranges.js
* @module time-ranges
@@ -5249,8 +5838,10 @@
function createTimeRangesObj(ranges) {
+ var timeRangesObj;
+
if (ranges === undefined || ranges.length === 0) {
- return {
+ timeRangesObj = {
length: 0,
start: function start() {
throw new Error('This TimeRanges object is empty');
@@ -5259,13 +5850,21 @@
throw new Error('This TimeRanges object is empty');
}
};
+ } else {
+ timeRangesObj = {
+ length: ranges.length,
+ start: getRange.bind(null, 'start', 0, ranges),
+ end: getRange.bind(null, 'end', 1, ranges)
+ };
}
- return {
- length: ranges.length,
- start: getRange.bind(null, 'start', 0, ranges),
- end: getRange.bind(null, 'end', 1, ranges)
- };
+ if (window.Symbol && window.Symbol.iterator) {
+ timeRangesObj[window.Symbol.iterator] = function () {
+ return (ranges || []).values();
+ };
+ }
+
+ return timeRangesObj;
}
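For illustration, an emulated TimeRanges object built with videojs.createTimeRanges keeps the index-based API and, where Symbol.iterator is available, also exposes the backing [start, end] pairs:

    var ranges = videojs.createTimeRanges([[0, 10], [15, 20]]);

    ranges.length;   // 2
    ranges.start(1); // 15
    ranges.end(1);   // 20

    // iterate the underlying pairs directly (browsers with Symbol.iterator)
    for (var pair of ranges) {
      console.log(pair[0], pair[1]);
    }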
/**
* Create a `TimeRange` object which mimics an
@@ -5335,47 +5934,6 @@
return bufferedDuration / duration;
}
- /**
- * @file fullscreen-api.js
- * @module fullscreen-api
- * @private
- */
- /**
- * Store the browser-specific methods for the fullscreen API.
- *
- * @type {Object}
- * @see [Specification]{@link https://fullscreen.spec.whatwg.org}
- * @see [Map Approach From Screenfull.js]{@link https://github.com/sindresorhus/screenfull.js}
- */
-
- var FullscreenApi = {
- prefixed: true
- }; // browser API methods
-
- var apiMap = [['requestFullscreen', 'exitFullscreen', 'fullscreenElement', 'fullscreenEnabled', 'fullscreenchange', 'fullscreenerror', 'fullscreen'], // WebKit
- ['webkitRequestFullscreen', 'webkitExitFullscreen', 'webkitFullscreenElement', 'webkitFullscreenEnabled', 'webkitfullscreenchange', 'webkitfullscreenerror', '-webkit-full-screen'], // Mozilla
- ['mozRequestFullScreen', 'mozCancelFullScreen', 'mozFullScreenElement', 'mozFullScreenEnabled', 'mozfullscreenchange', 'mozfullscreenerror', '-moz-full-screen'], // Microsoft
- ['msRequestFullscreen', 'msExitFullscreen', 'msFullscreenElement', 'msFullscreenEnabled', 'MSFullscreenChange', 'MSFullscreenError', '-ms-fullscreen']];
- var specApi = apiMap[0];
- var browserApi; // determine the supported set of functions
-
- for (var i = 0; i < apiMap.length; i++) {
- // check for exitFullscreen function
- if (apiMap[i][1] in document) {
- browserApi = apiMap[i];
- break;
- }
- } // map the browser API names to the spec API names
-
-
- if (browserApi) {
- for (var _i = 0; _i < browserApi.length; _i++) {
- FullscreenApi[specApi[_i]] = browserApi[_i];
- }
-
- FullscreenApi.prefixed = browserApi[0] !== specApi[0];
- }
-
/**
* @file media-error.js
*/
@@ -5409,7 +5967,7 @@
} else if (typeof value === 'string') {
// default code is zero, so this is a custom error
this.message = value;
- } else if (isObject(value)) {
+ } else if (isObject$1(value)) {
// We assign the `code` property manually because native `MediaError` objects
// do not expose it as an own/enumerable property of the object.
if (typeof value.code === 'number') {
@@ -5631,202 +6189,6 @@
trackToJson_: trackToJson_
};
- var keycode = createCommonjsModule(function (module, exports) {
- // Source: http://jsfiddle.net/vWx8V/
- // http://stackoverflow.com/questions/5603195/full-list-of-javascript-keycodes
-
- /**
- * Conenience method returns corresponding value for given keyName or keyCode.
- *
- * @param {Mixed} keyCode {Number} or keyName {String}
- * @return {Mixed}
- * @api public
- */
- function keyCode(searchInput) {
- // Keyboard Events
- if (searchInput && 'object' === typeof searchInput) {
- var hasKeyCode = searchInput.which || searchInput.keyCode || searchInput.charCode;
- if (hasKeyCode) searchInput = hasKeyCode;
- } // Numbers
-
-
- if ('number' === typeof searchInput) return names[searchInput]; // Everything else (cast to string)
-
- var search = String(searchInput); // check codes
-
- var foundNamedKey = codes[search.toLowerCase()];
- if (foundNamedKey) return foundNamedKey; // check aliases
-
- var foundNamedKey = aliases[search.toLowerCase()];
- if (foundNamedKey) return foundNamedKey; // weird character?
-
- if (search.length === 1) return search.charCodeAt(0);
- return undefined;
- }
- /**
- * Compares a keyboard event with a given keyCode or keyName.
- *
- * @param {Event} event Keyboard event that should be tested
- * @param {Mixed} keyCode {Number} or keyName {String}
- * @return {Boolean}
- * @api public
- */
-
-
- keyCode.isEventKey = function isEventKey(event, nameOrCode) {
- if (event && 'object' === typeof event) {
- var keyCode = event.which || event.keyCode || event.charCode;
-
- if (keyCode === null || keyCode === undefined) {
- return false;
- }
-
- if (typeof nameOrCode === 'string') {
- // check codes
- var foundNamedKey = codes[nameOrCode.toLowerCase()];
-
- if (foundNamedKey) {
- return foundNamedKey === keyCode;
- } // check aliases
-
-
- var foundNamedKey = aliases[nameOrCode.toLowerCase()];
-
- if (foundNamedKey) {
- return foundNamedKey === keyCode;
- }
- } else if (typeof nameOrCode === 'number') {
- return nameOrCode === keyCode;
- }
-
- return false;
- }
- };
-
- exports = module.exports = keyCode;
- /**
- * Get by name
- *
- * exports.code['enter'] // => 13
- */
-
- var codes = exports.code = exports.codes = {
- 'backspace': 8,
- 'tab': 9,
- 'enter': 13,
- 'shift': 16,
- 'ctrl': 17,
- 'alt': 18,
- 'pause/break': 19,
- 'caps lock': 20,
- 'esc': 27,
- 'space': 32,
- 'page up': 33,
- 'page down': 34,
- 'end': 35,
- 'home': 36,
- 'left': 37,
- 'up': 38,
- 'right': 39,
- 'down': 40,
- 'insert': 45,
- 'delete': 46,
- 'command': 91,
- 'left command': 91,
- 'right command': 93,
- 'numpad *': 106,
- 'numpad +': 107,
- 'numpad -': 109,
- 'numpad .': 110,
- 'numpad /': 111,
- 'num lock': 144,
- 'scroll lock': 145,
- 'my computer': 182,
- 'my calculator': 183,
- ';': 186,
- '=': 187,
- ',': 188,
- '-': 189,
- '.': 190,
- '/': 191,
- '`': 192,
- '[': 219,
- '\\': 220,
- ']': 221,
- "'": 222 // Helper aliases
-
- };
- var aliases = exports.aliases = {
- 'windows': 91,
- '⇧': 16,
- '⌥': 18,
- '⌃': 17,
- '⌘': 91,
- 'ctl': 17,
- 'control': 17,
- 'option': 18,
- 'pause': 19,
- 'break': 19,
- 'caps': 20,
- 'return': 13,
- 'escape': 27,
- 'spc': 32,
- 'spacebar': 32,
- 'pgup': 33,
- 'pgdn': 34,
- 'ins': 45,
- 'del': 46,
- 'cmd': 91
- /*!
- * Programatically add the following
- */
- // lower case chars
-
- };
-
- for (i = 97; i < 123; i++) {
- codes[String.fromCharCode(i)] = i - 32;
- } // numbers
-
-
- for (var i = 48; i < 58; i++) {
- codes[i - 48] = i;
- } // function keys
-
-
- for (i = 1; i < 13; i++) {
- codes['f' + i] = i + 111;
- } // numpad keys
-
-
- for (i = 0; i < 10; i++) {
- codes['numpad ' + i] = i + 96;
- }
- /**
- * Get by code
- *
- * exports.name[13] // => 'Enter'
- */
-
-
- var names = exports.names = exports.title = {}; // title for backward compat
- // Create reverse mapping
-
- for (i in codes) {
- names[codes[i]] = i;
- } // Add aliases
-
-
- for (var alias in aliases) {
- codes[alias] = aliases[alias];
- }
- });
- var keycode_1 = keycode.code;
- var keycode_2 = keycode.codes;
- var keycode_3 = keycode.aliases;
- var keycode_4 = keycode.names;
- var keycode_5 = keycode.title;
-
var MODAL_CLASS_NAME = 'vjs-modal-dialog';
/**
* The `ModalDialog` displays over the video and its controls, which blocks
@@ -5838,9 +6200,7 @@
* @extends Component
*/
- var ModalDialog =
- /*#__PURE__*/
- function (_Component) {
+ var ModalDialog = /*#__PURE__*/function (_Component) {
inheritsLoose(ModalDialog, _Component);
/**
@@ -5883,6 +6243,15 @@
var _this;
_this = _Component.call(this, player, options) || this;
+
+ _this.handleKeyDown_ = function (e) {
+ return _this.handleKeyDown(e);
+ };
+
+ _this.close_ = function (e) {
+ return _this.close(e);
+ };
+
_this.opened_ = _this.hasBeenOpened_ = _this.hasBeenFilled_ = false;
_this.closeable(!_this.options_.uncloseable);
@@ -6012,7 +6381,7 @@
player.pause();
}
- this.on('keydown', this.handleKeyDown); // Hide controls and note if they were enabled.
+ this.on('keydown', this.handleKeyDown_); // Hide controls and note if they were enabled.
this.hadControls_ = player.controls();
player.controls(false);
@@ -6077,7 +6446,7 @@
player.play();
}
- this.off('keydown', this.handleKeyDown);
+ this.off('keydown', this.handleKeyDown_);
if (this.hadControls_) {
player.controls(true);
@@ -6124,12 +6493,12 @@
controlText: 'Close Modal Dialog'
});
this.contentEl_ = temp;
- this.on(close, 'close', this.close);
+ this.on(close, 'close', this.close_);
} // If this is being made uncloseable and has a close button, remove it.
if (!closeable && close) {
- this.off(close, 'close', this.close);
+ this.off(close, 'close', this.close_);
this.removeChild(close);
close.dispose();
}
@@ -6333,12 +6702,12 @@
_proto.focusableEls_ = function focusableEls_() {
var allChildren = this.el_.querySelectorAll('*');
return Array.prototype.filter.call(allChildren, function (child) {
- return (child instanceof window$1.HTMLAnchorElement || child instanceof window$1.HTMLAreaElement) && child.hasAttribute('href') || (child instanceof window$1.HTMLInputElement || child instanceof window$1.HTMLSelectElement || child instanceof window$1.HTMLTextAreaElement || child instanceof window$1.HTMLButtonElement) && !child.hasAttribute('disabled') || child instanceof window$1.HTMLIFrameElement || child instanceof window$1.HTMLObjectElement || child instanceof window$1.HTMLEmbedElement || child.hasAttribute('tabindex') && child.getAttribute('tabindex') !== -1 || child.hasAttribute('contenteditable');
+ return (child instanceof window.HTMLAnchorElement || child instanceof window.HTMLAreaElement) && child.hasAttribute('href') || (child instanceof window.HTMLInputElement || child instanceof window.HTMLSelectElement || child instanceof window.HTMLTextAreaElement || child instanceof window.HTMLButtonElement) && !child.hasAttribute('disabled') || child instanceof window.HTMLIFrameElement || child instanceof window.HTMLObjectElement || child instanceof window.HTMLEmbedElement || child.hasAttribute('tabindex') && child.getAttribute('tabindex') !== -1 || child.hasAttribute('contenteditable');
});
};
return ModalDialog;
- }(Component);
+ }(Component$1);
/**
* Default options for `ModalDialog` default options.
*
@@ -6351,7 +6720,7 @@
pauseOnOpen: true,
temporary: true
};
- Component.registerComponent('ModalDialog', ModalDialog);
+ Component$1.registerComponent('ModalDialog', ModalDialog);
/**
* Common functionality between {@link TextTrackList}, {@link AudioTrackList}, and
@@ -6360,9 +6729,7 @@
* @extends EventTarget
*/
- var TrackList =
- /*#__PURE__*/
- function (_EventTarget) {
+ var TrackList = /*#__PURE__*/function (_EventTarget) {
inheritsLoose(TrackList, _EventTarget);
/**
@@ -6414,6 +6781,8 @@
var _proto = TrackList.prototype;
_proto.addTrack = function addTrack(track) {
+ var _this2 = this;
+
var index = this.tracks_.length;
if (!('' + index in this)) {
@@ -6442,6 +6811,27 @@
target: this
});
}
+ /**
+ * Triggered when a track label is changed.
+ *
+ * @event TrackList#labelchange
+ * @type {EventTarget~Event}
+ * @property {Track} track
+ * A reference to the track whose label was changed.
+ */
+
+
+ track.labelchange_ = function () {
+ _this2.trigger({
+ track: track,
+ type: 'labelchange',
+ target: _this2
+ });
+ };
+
+ if (isEvented(track)) {
+ track.addEventListener('labelchange', track.labelchange_);
+ }
}
/**
* Remove a {@link Track} from the `TrackList`
@@ -6514,7 +6904,7 @@
};
return TrackList;
- }(EventTarget);
+ }(EventTarget$2);
/**
* Triggered when a different track is selected/enabled.
*
@@ -6533,7 +6923,8 @@
TrackList.prototype.allowedEvents_ = {
change: 'change',
addtrack: 'addtrack',
- removetrack: 'removetrack'
+ removetrack: 'removetrack',
+ labelchange: 'labelchange'
}; // emulate attribute EventHandler support to allow for feature detection
for (var event in TrackList.prototype.allowedEvents_) {
@@ -6553,7 +6944,7 @@
* @private
*/
- var disableOthers = function disableOthers(list, track) {
+ var disableOthers$1 = function disableOthers(list, track) {
for (var i = 0; i < list.length; i++) {
if (!Object.keys(list[i]).length || track.id === list[i].id) {
continue;
@@ -6571,9 +6962,7 @@
*/
- var AudioTrackList =
- /*#__PURE__*/
- function (_TrackList) {
+ var AudioTrackList = /*#__PURE__*/function (_TrackList) {
inheritsLoose(AudioTrackList, _TrackList);
/**
@@ -6593,7 +6982,7 @@
// sorted from last index to first index
for (var i = tracks.length - 1; i >= 0; i--) {
if (tracks[i].enabled) {
- disableOthers(tracks, tracks[i]);
+ disableOthers$1(tracks, tracks[i]);
break;
}
}
@@ -6618,7 +7007,7 @@
var _this2 = this;
if (track.enabled) {
- disableOthers(this, track);
+ disableOthers$1(this, track);
}
_TrackList.prototype.addTrack.call(this, track); // native tracks don't have this
@@ -6637,7 +7026,7 @@
}
_this2.changing_ = true;
- disableOthers(_this2, track);
+ disableOthers$1(_this2, track);
_this2.changing_ = false;
_this2.trigger('change');
@@ -6675,7 +7064,7 @@
* @private
*/
- var disableOthers$1 = function disableOthers(list, track) {
+ var disableOthers = function disableOthers(list, track) {
for (var i = 0; i < list.length; i++) {
if (!Object.keys(list[i]).length || track.id === list[i].id) {
continue;
@@ -6693,9 +7082,7 @@
*/
- var VideoTrackList =
- /*#__PURE__*/
- function (_TrackList) {
+ var VideoTrackList = /*#__PURE__*/function (_TrackList) {
inheritsLoose(VideoTrackList, _TrackList);
/**
@@ -6715,7 +7102,7 @@
// sorted from last index to first index
for (var i = tracks.length - 1; i >= 0; i--) {
if (tracks[i].selected) {
- disableOthers$1(tracks, tracks[i]);
+ disableOthers(tracks, tracks[i]);
break;
}
}
@@ -6757,7 +7144,7 @@
var _this2 = this;
if (track.selected) {
- disableOthers$1(this, track);
+ disableOthers(this, track);
}
_TrackList.prototype.addTrack.call(this, track); // native tracks don't have this
@@ -6773,7 +7160,7 @@
}
_this2.changing_ = true;
- disableOthers$1(_this2, track);
+ disableOthers(_this2, track);
_this2.changing_ = false;
_this2.trigger('change');
@@ -6806,9 +7193,7 @@
* @extends TrackList
*/
- var TextTrackList =
- /*#__PURE__*/
- function (_TrackList) {
+ var TextTrackList = /*#__PURE__*/function (_TrackList) {
inheritsLoose(TextTrackList, _TrackList);
function TextTrackList() {
@@ -6880,9 +7265,7 @@
/**
* The current list of {@link HtmlTrackElement}s.
*/
- var HtmlTrackElementList =
- /*#__PURE__*/
- function () {
+ var HtmlTrackElementList = /*#__PURE__*/function () {
/**
* Create an instance of this class.
*
@@ -7023,9 +7406,7 @@
*
* @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttrackcuelist}
*/
- var TextTrackCueList =
- /*#__PURE__*/
- function () {
+ var TextTrackCueList = /*#__PURE__*/function () {
/**
* Create an instance of this class.
*
@@ -7189,9 +7570,7 @@
* @abstract
*/
- var Track =
- /*#__PURE__*/
- function (_EventTarget) {
+ var Track = /*#__PURE__*/function (_EventTarget) {
inheritsLoose(Track, _EventTarget);
/**
@@ -7225,9 +7604,9 @@
var trackProps = {
id: options.id || 'vjs_track_' + newGUID(),
kind: options.kind || '',
- label: options.label || '',
language: options.language || ''
};
+ var label = options.label || '';
/**
* @memberof Track
* @member {string} id
@@ -7246,15 +7625,6 @@
* @readonly
*/
- /**
- * @memberof Track
- * @member {string} label
- * The label of this track. Cannot be changed after creation.
- * @instance
- *
- * @readonly
- */
-
/**
* @memberof Track
* @member {string} language
@@ -7277,17 +7647,47 @@
for (var key in trackProps) {
_loop(key);
}
+ /**
+ * @memberof Track
+ * @member {string} label
+ * The label of this track. Cannot be changed after creation.
+ * @instance
+ *
+ * @fires Track#labelchange
+ */
+
+ Object.defineProperty(assertThisInitialized(_this), 'label', {
+ get: function get() {
+ return label;
+ },
+ set: function set(newLabel) {
+ if (newLabel !== label) {
+ label = newLabel;
+ /**
+ * An event that fires when label changes on this track.
+ *
+ * > Note: This is not part of the spec!
+ *
+ * @event Track#labelchange
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('labelchange');
+ }
+ }
+ });
return _this;
}
return Track;
- }(EventTarget);
+ }(EventTarget$2);
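Since label is now writable and fires a non-spec labelchange event, a sketch of observing it on a remote text track (the track options are illustrative):

    var track = player.addRemoteTextTrack({ kind: 'subtitles', srclang: 'en', label: 'English' }, true).track;

    track.addEventListener('labelchange', function () {
      console.log('label is now', track.label);
    });

    track.label = 'English (CC)'; // triggers 'labelchange'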
/**
* @file url.js
* @module url
*/
+
/**
* @typedef {Object} url:URLObject
*
@@ -7323,35 +7723,20 @@
* @return {url:URLObject}
* An object of url details
*/
-
var parseUrl = function parseUrl(url) {
+ // This entire method can be replaced with URL once we are able to drop IE11
var props = ['protocol', 'hostname', 'port', 'pathname', 'search', 'hash', 'host']; // add the url to an anchor and let the browser parse the URL
var a = document.createElement('a');
- a.href = url; // IE8 (and 9?) Fix
- // ie8 doesn't parse the URL correctly until the anchor is actually
- // added to the body, and an innerHTML is needed to trigger the parsing
-
- var addToBody = a.host === '' && a.protocol !== 'file:';
- var div;
-
- if (addToBody) {
- div = document.createElement('div');
- div.innerHTML = " ";
- a = div.firstChild; // prevent the div from affecting layout
-
- div.setAttribute('style', 'display:none; position:absolute;');
- document.body.appendChild(div);
- } // Copy the specific URL properties to a new object
- // This is also needed for IE8 because the anchor loses its
+ a.href = url; // Copy the specific URL properties to a new object
+ // This is also needed for IE because the anchor loses its
// properties when it's removed from the dom
-
var details = {};
for (var i = 0; i < props.length; i++) {
details[props[i]] = a[props[i]];
- } // IE9 adds the port to the host property unlike everyone else. If
+ } // IE adds the port to the host property unlike everyone else. If
// a port identifier is added for standard ports, strip it.
@@ -7364,11 +7749,13 @@
}
if (!details.protocol) {
- details.protocol = window$1.location.protocol;
+ details.protocol = window.location.protocol;
}
+ /* istanbul ignore if */
- if (addToBody) {
- document.body.removeChild(div);
+
+ if (!details.host) {
+ details.host = window.location.host;
}
return details;
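For illustration, a sketch of the parsed pieces, assuming the helper is reached through the videojs.url export:

    var parts = videojs.url.parseUrl('https://example.com:8080/media/clip.mp4?x=1#t=10');
    // parts.protocol === 'https:'
    // parts.host     === 'example.com:8080'
    // parts.pathname === '/media/clip.mp4'
    // parts.search   === '?x=1'
    // parts.hash     === '#t=10'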
@@ -7390,9 +7777,10 @@
// Check if absolute URL
if (!url.match(/^https?:\/\//)) {
// Convert to absolute URL. Flash hosted off-site needs an absolute URL.
- var div = document.createElement('div');
- div.innerHTML = "x ";
- url = div.firstChild.href;
+ // add the url to an anchor and let the browser parse the URL
+ var a = document.createElement('a');
+ a.href = url;
+ url = a.href;
}
return url;
@@ -7444,7 +7832,7 @@
var isCrossOrigin = function isCrossOrigin(url, winLoc) {
if (winLoc === void 0) {
- winLoc = window$1.location;
+ winLoc = window.location;
}
var urlInfo = parseUrl(url); // IE8 protocol relative urls will return ':' for protocol
@@ -7457,21 +7845,100 @@
};
var Url = /*#__PURE__*/Object.freeze({
+ __proto__: null,
parseUrl: parseUrl,
getAbsoluteURL: getAbsoluteURL,
getFileExtension: getFileExtension,
isCrossOrigin: isCrossOrigin
});
+ var win;
+
+ if (typeof window !== "undefined") {
+ win = window;
+ } else if (typeof commonjsGlobal !== "undefined") {
+ win = commonjsGlobal;
+ } else if (typeof self !== "undefined") {
+ win = self;
+ } else {
+ win = {};
+ }
+
+ var window_1 = win;
+
var isFunction_1 = isFunction;
- var toString$1 = Object.prototype.toString;
+ var toString = Object.prototype.toString;
function isFunction(fn) {
- var string = toString$1.call(fn);
+ if (!fn) {
+ return false;
+ }
+
+ var string = toString.call(fn);
return string === '[object Function]' || typeof fn === 'function' && string !== '[object RegExp]' || typeof window !== 'undefined' && ( // IE8 and below
fn === window.setTimeout || fn === window.alert || fn === window.confirm || fn === window.prompt);
}
+ var httpResponseHandler = function httpResponseHandler(callback, decodeResponseBody) {
+ if (decodeResponseBody === void 0) {
+ decodeResponseBody = false;
+ }
+
+ return function (err, response, responseBody) {
+ // if the XHR failed, return that error
+ if (err) {
+ callback(err);
+ return;
+ } // if the HTTP status code is 4xx or 5xx, the request also failed
+
+
+ if (response.statusCode >= 400 && response.statusCode <= 599) {
+ var cause = responseBody;
+
+ if (decodeResponseBody) {
+ if (window_1.TextDecoder) {
+ var charset = getCharset(response.headers && response.headers['content-type']);
+
+ try {
+ cause = new TextDecoder(charset).decode(responseBody);
+ } catch (e) {}
+ } else {
+ cause = String.fromCharCode.apply(null, new Uint8Array(responseBody));
+ }
+ }
+
+ callback({
+ cause: cause
+ });
+ return;
+ } // otherwise, request succeeded
+
+
+ callback(null, responseBody);
+ };
+ };
+
+ function getCharset(contentTypeHeader) {
+ if (contentTypeHeader === void 0) {
+ contentTypeHeader = '';
+ }
+
+ return contentTypeHeader.toLowerCase().split(';').reduce(function (charset, contentType) {
+ var _contentType$split = contentType.split('='),
+ type = _contentType$split[0],
+ value = _contentType$split[1];
+
+ if (type.trim() === 'charset') {
+ return value.trim();
+ }
+
+ return charset;
+ }, 'utf-8');
+ }
+
+ var httpHandler = httpResponseHandler;
+
+ createXHR.httpHandler = httpHandler;
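A sketch of wrapping an XHR callback with the new response handler, assuming it is reachable as videojs.xhr.httpHandler in the packaged build:

    videojs.xhr({ uri: '/captions.vtt' }, videojs.xhr.httpHandler(function (err, body) {
      if (err) {
        // for 4xx/5xx responses err.cause carries the (optionally decoded) response body
        return videojs.log.error(err);
      }
      videojs.log('loaded', body.length, 'characters');
    }, true)); // true: decode the response body as text using the content-type charset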
/**
* @license
* slightly modified parse-headers 2.0.2
@@ -7480,7 +7947,6 @@
*
*/
-
var parseHeaders = function parseHeaders(headers) {
var result = {};
@@ -7504,11 +7970,11 @@
return result;
};
- var xhr = createXHR; // Allow use of default import syntax in TypeScript
+ var lib = createXHR; // Allow use of default import syntax in TypeScript
var default_1 = createXHR;
- createXHR.XMLHttpRequest = window$1.XMLHttpRequest || noop;
- createXHR.XDomainRequest = "withCredentials" in new createXHR.XMLHttpRequest() ? createXHR.XMLHttpRequest : window$1.XDomainRequest;
+ createXHR.XMLHttpRequest = window_1.XMLHttpRequest || noop$1;
+ createXHR.XDomainRequest = "withCredentials" in new createXHR.XMLHttpRequest() ? createXHR.XMLHttpRequest : window_1.XDomainRequest;
forEachArray(["get", "put", "post", "patch", "head", "delete"], function (method) {
createXHR[method === "delete" ? "del" : method] = function (uri, options, callback) {
options = initParams(uri, options, callback);
@@ -7759,8 +8225,8 @@
return null;
}
- function noop() {}
- xhr["default"] = default_1;
+ function noop$1() {}
+ lib["default"] = default_1;
/**
* Takes a webvtt file contents and parses it into cues
@@ -7775,7 +8241,7 @@
*/
var parseCues = function parseCues(srcContent, track) {
- var parser = new window$1.WebVTT.Parser(window$1, window$1.vttjs, window$1.WebVTT.StringDecoder());
+ var parser = new window.WebVTT.Parser(window, window.vttjs, window.WebVTT.StringDecoder());
var errors = [];
parser.oncue = function (cue) {
@@ -7796,16 +8262,16 @@
parser.parse(srcContent);
if (errors.length > 0) {
- if (window$1.console && window$1.console.groupCollapsed) {
- window$1.console.groupCollapsed("Text Track parsing errors for " + track.src);
+ if (window.console && window.console.groupCollapsed) {
+ window.console.groupCollapsed("Text Track parsing errors for " + track.src);
}
errors.forEach(function (error) {
- return log.error(error);
+ return log$1.error(error);
});
- if (window$1.console && window$1.console.groupEnd) {
- window$1.console.groupEnd();
+ if (window.console && window.console.groupEnd) {
+ window.console.groupEnd();
}
}
@@ -7834,21 +8300,27 @@
opts.cors = crossOrigin;
}
- xhr(opts, bind(this, function (err, response, responseBody) {
+ var withCredentials = track.tech_.crossOrigin() === 'use-credentials';
+
+ if (withCredentials) {
+ opts.withCredentials = withCredentials;
+ }
+
+ lib(opts, bind(this, function (err, response, responseBody) {
if (err) {
- return log.error(err, response);
+ return log$1.error(err, response);
}
track.loaded_ = true; // Make sure that vttjs has loaded, otherwise, wait till it finished loading
// NOTE: this is only used for the alt/video.novtt.js build
- if (typeof window$1.WebVTT !== 'function') {
+ if (typeof window.WebVTT !== 'function') {
if (track.tech_) {
// to prevent use before define eslint error, we define loadHandler
// as a let here
track.tech_.any(['vttjsloaded', 'vttjserror'], function (event) {
if (event.type === 'vttjserror') {
- log.error("vttjs failed to load, stopping trying to process " + track.src);
+ log$1.error("vttjs failed to load, stopping trying to process " + track.src);
return;
}
@@ -7868,9 +8340,7 @@
*/
- var TextTrack =
- /*#__PURE__*/
- function (_Track) {
+ var TextTrack = /*#__PURE__*/function (_Track) {
inheritsLoose(TextTrack, _Track);
/**
@@ -7918,7 +8388,7 @@
throw new Error('A tech was not provided.');
}
- var settings = mergeOptions(options, {
+ var settings = mergeOptions$3(options, {
kind: TextTrackKind[options.kind] || 'subtitles',
language: options.language || options.srclang || ''
});
@@ -7937,23 +8407,38 @@
var cues = new TextTrackCueList(_this.cues_);
var activeCues = new TextTrackCueList(_this.activeCues_);
var changed = false;
- var timeupdateHandler = bind(assertThisInitialized(_this), function () {
- // Accessing this.activeCues for the side-effects of updating itself
+ _this.timeupdateHandler = bind(assertThisInitialized(_this), function () {
+ if (this.tech_.isDisposed()) {
+ return;
+ }
+
+ if (!this.tech_.isReady_) {
+ this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);
+ return;
+ } // Accessing this.activeCues for the side-effects of updating itself
// due to its nature as a getter function. Do not remove or cues will
// stop updating!
// Use the setter to prevent deletion from uglify (pure_getters rule)
+
+
this.activeCues = this.activeCues;
if (changed) {
this.trigger('cuechange');
changed = false;
}
+
+ this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);
});
+ var disposeHandler = function disposeHandler() {
+ _this.stopTracking();
+ };
+
+ _this.tech_.one('dispose', disposeHandler);
+
if (mode !== 'disabled') {
- _this.tech_.ready(function () {
- _this.tech_.on('timeupdate', timeupdateHandler);
- }, true);
+ _this.startTracking();
}
Object.defineProperties(assertThisInitialized(_this), {
@@ -7987,12 +8472,14 @@
return mode;
},
set: function set(newMode) {
- var _this2 = this;
-
if (!TextTrackMode[newMode]) {
return;
}
+ if (mode === newMode) {
+ return;
+ }
+
mode = newMode;
if (!this.preload_ && mode !== 'disabled' && this.cues.length === 0) {
@@ -8000,12 +8487,10 @@
loadTrack(this.src, this);
}
+ this.stopTracking();
+
if (mode !== 'disabled') {
- this.tech_.ready(function () {
- _this2.tech_.on('timeupdate', timeupdateHandler);
- }, true);
- } else {
- this.tech_.off('timeupdate', timeupdateHandler);
+ this.startTracking();
}
/**
* An event that fires when mode changes on this track. This allows
@@ -8099,7 +8584,7 @@
_this.loaded_ = true;
}
- if (_this.preload_ || default_ || settings.kind !== 'subtitles' && settings.kind !== 'captions') {
+ if (_this.preload_ || settings.kind !== 'subtitles' && settings.kind !== 'captions') {
loadTrack(_this.src, assertThisInitialized(_this));
}
} else {
@@ -8108,21 +8593,32 @@
return _this;
}
+
+ var _proto = TextTrack.prototype;
+
+ _proto.startTracking = function startTracking() {
+ this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);
+ };
+
+ _proto.stopTracking = function stopTracking() {
+ if (this.rvf_) {
+ this.tech_.cancelVideoFrameCallback(this.rvf_);
+ this.rvf_ = undefined;
+ }
+ }
/**
* Add a cue to the internal list of cues.
*
* @param {TextTrack~Cue} cue
* The cue to add to our internal list
*/
-
-
- var _proto = TextTrack.prototype;
+ ;
_proto.addCue = function addCue(originalCue) {
var cue = originalCue;
- if (window$1.vttjs && !(originalCue instanceof window$1.vttjs.VTTCue)) {
- cue = new window$1.vttjs.VTTCue(originalCue.startTime, originalCue.endTime, originalCue.text);
+ if (window.vttjs && !(originalCue instanceof window.vttjs.VTTCue)) {
+ cue = new window.vttjs.VTTCue(originalCue.startTime, originalCue.endTime, originalCue.text);
for (var prop in originalCue) {
if (!(prop in cue)) {
@@ -8187,9 +8683,7 @@
* @extends Track
*/
- var AudioTrack =
- /*#__PURE__*/
- function (_Track) {
+ var AudioTrack = /*#__PURE__*/function (_Track) {
inheritsLoose(AudioTrack, _Track);
/**
@@ -8221,7 +8715,7 @@
options = {};
}
- var settings = mergeOptions(options, {
+ var settings = mergeOptions$3(options, {
kind: AudioTrackKind[options.kind] || ''
});
_this = _Track.call(this, settings) || this;
@@ -8282,9 +8776,7 @@
* @extends Track
*/
- var VideoTrack =
- /*#__PURE__*/
- function (_Track) {
+ var VideoTrack = /*#__PURE__*/function (_Track) {
inheritsLoose(VideoTrack, _Track);
/**
@@ -8315,7 +8807,7 @@
options = {};
}
- var settings = mergeOptions(options, {
+ var settings = mergeOptions$3(options, {
kind: VideoTrackKind[options.kind] || ''
});
_this = _Track.call(this, settings) || this;
@@ -8385,9 +8877,7 @@
* @extends EventTarget
*/
- var HTMLTrackElement =
- /*#__PURE__*/
- function (_EventTarget) {
+ var HTMLTrackElement = /*#__PURE__*/function (_EventTarget) {
inheritsLoose(HTMLTrackElement, _EventTarget);
/**
@@ -8416,7 +8906,7 @@
*
* @param {string} [options.srclang='']
* A valid two character language code. An alternative, but deprioritized
- * vesion of `options.language`
+ * version of `options.language`
*
* @param {string} [options.src]
* A url to TextTrack cues.
@@ -8483,7 +8973,7 @@
}
return HTMLTrackElement;
- }(EventTarget);
+ }(EventTarget$2);
HTMLTrackElement.prototype.allowedEvents_ = {
load: 'load'
@@ -8542,6 +9032,23 @@
NORMAL.names = Object.keys(NORMAL);
ALL.names = [].concat(REMOTE.names).concat(NORMAL.names);
+ var minDoc = {};
+
+ var topLevel = typeof commonjsGlobal !== 'undefined' ? commonjsGlobal : typeof window !== 'undefined' ? window : {};
+ var doccy;
+
+ if (typeof document !== 'undefined') {
+ doccy = document;
+ } else {
+ doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'];
+
+ if (!doccy) {
+ doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'] = minDoc;
+ }
+ }
+
+ var document_1 = doccy;
+
/**
* Copyright 2013 vtt.js Contributors
*
@@ -8670,9 +9177,8 @@
},
// Accept a setting if its a valid percentage.
percent: function percent(k, v) {
- var m;
- if (m = v.match(/^([\d]{1,3})(\.[\d]*)?%$/)) {
+ if (v.match(/^([\d]{1,3})(\.[\d]*)?%$/)) {
v = parseFloat(v);
if (v >= 0 && v <= 100) {
@@ -8837,9 +9343,11 @@
skipWhitespace();
consumeCueSettings(input, cue);
- }
+ } // When evaluating this file as part of a Webpack bundle for server
+ // side rendering, `document` is an empty object.
- var TEXTAREA_ELEMENT = document.createElement("textarea");
+
+ var TEXTAREA_ELEMENT = document_1.createElement && document_1.createElement("textarea");
var TAG_NAME = {
c: "span",
i: "i",
@@ -9481,8 +9989,8 @@
styleBox.move(bestPosition.toCSSCompatValues(containerBox));
}
- function WebVTT$1() {} // Nothing
- // Helper to allow strings to be decoded instead of the default binary utf8 data.
+ function WebVTT$1() {// Nothing
+ } // Helper to allow strings to be decoded instead of the default binary utf8 data.
WebVTT$1.StringDecoder = function () {
@@ -10374,30 +10882,30 @@
VTTCue: vttcue,
VTTRegion: vttregion
};
- window$1.vttjs = vttjs;
- window$1.WebVTT = vttjs.WebVTT;
+ window_1.vttjs = vttjs;
+ window_1.WebVTT = vttjs.WebVTT;
var cueShim = vttjs.VTTCue;
var regionShim = vttjs.VTTRegion;
- var nativeVTTCue = window$1.VTTCue;
- var nativeVTTRegion = window$1.VTTRegion;
+ var nativeVTTCue = window_1.VTTCue;
+ var nativeVTTRegion = window_1.VTTRegion;
vttjs.shim = function () {
- window$1.VTTCue = cueShim;
- window$1.VTTRegion = regionShim;
+ window_1.VTTCue = cueShim;
+ window_1.VTTRegion = regionShim;
};
vttjs.restore = function () {
- window$1.VTTCue = nativeVTTCue;
- window$1.VTTRegion = nativeVTTRegion;
+ window_1.VTTCue = nativeVTTCue;
+ window_1.VTTRegion = nativeVTTRegion;
};
- if (!window$1.VTTCue) {
+ if (!window_1.VTTCue) {
vttjs.shim();
}
});
- var browserIndex_1 = browserIndex.WebVTT;
- var browserIndex_2 = browserIndex.VTTCue;
- var browserIndex_3 = browserIndex.VTTRegion;
+ browserIndex.WebVTT;
+ browserIndex.VTTCue;
+ browserIndex.VTTRegion;
/**
* An Object containing a structure like: `{src: 'url', type: 'mimetype'}` or string
@@ -10461,15 +10969,13 @@
}
/**
* This is the base class for media playback technology controllers, such as
- * {@link Flash} and {@link HTML5}
+ * {@link HTML5}
*
* @extends Component
*/
- var Tech =
- /*#__PURE__*/
- function (_Component) {
+ var Tech = /*#__PURE__*/function (_Component) {
inheritsLoose(Tech, _Component);
/**
@@ -10495,7 +11001,29 @@
// we don't want the tech to report user activity automatically.
// This is done manually in addControlsListeners
options.reportTouchActivity = false;
- _this = _Component.call(this, null, options, ready) || this; // keep track of whether the current source has played at all to
+ _this = _Component.call(this, null, options, ready) || this;
+
+ _this.onDurationChange_ = function (e) {
+ return _this.onDurationChange(e);
+ };
+
+ _this.trackProgress_ = function (e) {
+ return _this.trackProgress(e);
+ };
+
+ _this.trackCurrentTime_ = function (e) {
+ return _this.trackCurrentTime(e);
+ };
+
+ _this.stopTrackingCurrentTime_ = function (e) {
+ return _this.stopTrackingCurrentTime(e);
+ };
+
+ _this.disposeSourceHandler_ = function (e) {
+ return _this.disposeSourceHandler(e);
+ };
+
+ _this.queuedHanders_ = new Set(); // keep track of whether the current source has played at all to
// implement a very limited played()
_this.hasStarted_ = false;
@@ -10514,11 +11042,11 @@
if (options && options[props.getterName]) {
_this[props.privateName] = options[props.getterName];
}
- }); // Manually track progress in cases where the browser/flash player doesn't report it.
+ }); // Manually track progress in cases where the browser/tech doesn't report it.
if (!_this.featuresProgressEvents) {
_this.manualProgressOn();
- } // Manually track timeupdates in cases where the browser/flash player doesn't report it.
+ } // Manually track timeupdates in cases where the browser/tech doesn't report it.
if (!_this.featuresTimeupdateEvents) {
@@ -10606,10 +11134,10 @@
;
_proto.manualProgressOn = function manualProgressOn() {
- this.on('durationchange', this.onDurationChange);
+ this.on('durationchange', this.onDurationChange_);
this.manualProgress = true; // Trigger progress watching when a source begins loading
- this.one('ready', this.trackProgress);
+ this.one('ready', this.trackProgress_);
}
/**
* Turn off the polyfill for `progress` events that was created in
@@ -10620,7 +11148,7 @@
_proto.manualProgressOff = function manualProgressOff() {
this.manualProgress = false;
this.stopTrackingProgress();
- this.off('durationchange', this.onDurationChange);
+ this.off('durationchange', this.onDurationChange_);
}
/**
* This is used to trigger a `progress` event when the buffered percent changes. It
@@ -10718,8 +11246,8 @@
_proto.manualTimeUpdatesOn = function manualTimeUpdatesOn() {
this.manualTimeUpdates = true;
- this.on('play', this.trackCurrentTime);
- this.on('pause', this.stopTrackingCurrentTime);
+ this.on('play', this.trackCurrentTime_);
+ this.on('pause', this.stopTrackingCurrentTime_);
}
/**
* Turn off the polyfill for `timeupdate` events that was created in
@@ -10730,8 +11258,8 @@
_proto.manualTimeUpdatesOff = function manualTimeUpdatesOff() {
this.manualTimeUpdates = false;
this.stopTrackingCurrentTime();
- this.off('play', this.trackCurrentTime);
- this.off('pause', this.stopTrackingCurrentTime);
+ this.off('play', this.trackCurrentTime_);
+ this.off('pause', this.stopTrackingCurrentTime_);
}
/**
* Sets up an interval function to track current time and trigger `timeupdate` every
@@ -10856,6 +11384,27 @@
;
_proto.reset = function reset() {}
+ /**
+ * Get the value of `crossOrigin` from the tech.
+ *
+ * @abstract
+ *
+ * @see {Html5#crossOrigin}
+ */
+ ;
+
+ _proto.crossOrigin = function crossOrigin() {}
+ /**
+ * Set the value of `crossOrigin` on the tech.
+ *
+ * @abstract
+ *
+ * @param {string} crossOrigin the crossOrigin value
+ * @see {Html5#setCrossOrigin}
+ */
+ ;
+
+ _proto.setCrossOrigin = function setCrossOrigin() {}
/**
* Get or set an error on the Tech.
*
@@ -10894,6 +11443,36 @@
return createTimeRanges();
}
+ /**
+ * Start playback
+ *
+ * @abstract
+ *
+ * @see {Html5#play}
+ */
+ ;
+
+ _proto.play = function play() {}
+ /**
+ * Set whether we are scrubbing or not
+ *
+ * @abstract
+ *
+ * @see {Html5#setScrubbing}
+ */
+ ;
+
+ _proto.setScrubbing = function setScrubbing() {}
+ /**
+ * Get whether we are scrubbing or not
+ *
+ * @abstract
+ *
+ * @see {Html5#scrubbing}
+ */
+ ;
+
+ _proto.scrubbing = function scrubbing() {}
/**
* Causes a manual time update to occur if {@link Tech#manualTimeUpdatesOn} was
* previously called.
@@ -10982,7 +11561,7 @@
_proto.addWebVttScript_ = function addWebVttScript_() {
var _this5 = this;
- if (window$1.WebVTT) {
+ if (window.WebVTT) {
return;
} // Initially, Tech.el_ is a child of a dummy-div wait until the Component system
// signals that the Tech is ready at which point Tech.el_ is part of the DOM
@@ -11029,7 +11608,7 @@
}); // but have not loaded yet and we set it to true before the inject so that
// we don't overwrite the injected window.WebVTT if it loads right away
- window$1.WebVTT = true;
+ window.WebVTT = true;
this.el().parentNode.appendChild(script);
} else {
this.ready(this.addWebVttScript_);
@@ -11141,7 +11720,7 @@
;
_proto.createRemoteTextTrack = function createRemoteTextTrack(options) {
- var track = mergeOptions(options, {
+ var track = mergeOptions$3(options, {
tech: this
});
return new REMOTE.remoteTextEl.TrackClass(track);
@@ -11179,7 +11758,7 @@
if (manualCleanup !== true && manualCleanup !== false) {
// deprecation warning
- log.warn('Calling addRemoteTextTrack without explicitly setting the "manualCleanup" parameter to `true` is deprecated and default to `false` in future version of video.js');
+ log$1.warn('Calling addRemoteTextTrack without explicitly setting the "manualCleanup" parameter to `true` is deprecated and default to `false` in future version of video.js');
manualCleanup = true;
} // store HTMLTrackElement and TextTrack to remote list
@@ -11244,12 +11823,73 @@
;
_proto.requestPictureInPicture = function requestPictureInPicture() {
- var PromiseClass = this.options_.Promise || window$1.Promise;
+ var PromiseClass = this.options_.Promise || window.Promise;
if (PromiseClass) {
return PromiseClass.reject();
}
}
+ /**
+ * A method to check for the value of the 'disablePictureInPicture' property.
+ * Defaults to true, as it should be considered disabled if the tech does not support pip
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.disablePictureInPicture = function disablePictureInPicture() {
+ return true;
+ }
+ /**
+ * A method to set or unset the 'disablePictureInPicture' property.
+ *
+ * @abstract
+ */
+ ;
+
+ _proto.setDisablePictureInPicture = function setDisablePictureInPicture() {}
+ /**
+ * A fallback implementation of requestVideoFrameCallback using requestAnimationFrame
+ *
+ * @param {function} cb
+ * @return {number} request id
+ */
+ ;
+
+ _proto.requestVideoFrameCallback = function requestVideoFrameCallback(cb) {
+ var _this8 = this;
+
+ var id = newGUID();
+
+ if (this.paused()) {
+ this.queuedHanders_.add(id);
+ this.one('playing', function () {
+ if (_this8.queuedHanders_.has(id)) {
+ _this8.queuedHanders_["delete"](id);
+
+ cb();
+ }
+ });
+ } else {
+ this.requestNamedAnimationFrame(id, cb);
+ }
+
+ return id;
+ }
+ /**
+ * A fallback implementation of cancelVideoFrameCallback
+ *
+ * @param {number} id id of callback to be cancelled
+ */
+ ;
+
+ _proto.cancelVideoFrameCallback = function cancelVideoFrameCallback(id) {
+ if (this.queuedHanders_.has(id)) {
+ this.queuedHanders_["delete"](id);
+ } else {
+ this.cancelNamedAnimationFrame(id);
+ }
+ }
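// Editor's note (not part of the diff): a minimal sketch of how the fallback
// above behaves. While the tech is paused the callback is queued until the next
// 'playing' event; otherwise it runs on a named animation frame. The element id
// is hypothetical; tech(true) is the same accessor used elsewhere in this diff.
var rvfcPlayer = videojs('my-video');
var rvfcTech = rvfcPlayer.tech(true);
var rvfcId = rvfcTech.requestVideoFrameCallback(function () {
  console.log('frame rendered near', rvfcPlayer.currentTime());
});
// later, if the callback has not fired yet, it can be dequeued/cancelled
rvfcTech.cancelVideoFrameCallback(rvfcId);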
/**
* A method to set a poster from a `Tech`.
*
@@ -11391,7 +12031,7 @@
throw new Error('Techs must have a static canPlaySource method on them');
}
- name = toTitleCase(name);
+ name = toTitleCase$1(name);
Tech.techs_[name] = tech;
Tech.techs_[toLowerCase(name)] = tech;
@@ -11422,16 +12062,16 @@
return Tech.techs_[name];
}
- name = toTitleCase(name);
+ name = toTitleCase$1(name);
- if (window$1 && window$1.videojs && window$1.videojs[name]) {
- log.warn("The " + name + " tech was added to the videojs object when it should be registered using videojs.registerTech(name, tech)");
- return window$1.videojs[name];
+ if (window && window.videojs && window.videojs[name]) {
+ log$1.warn("The " + name + " tech was added to the videojs object when it should be registered using videojs.registerTech(name, tech)");
+ return window.videojs[name];
}
};
return Tech;
- }(Component);
+ }(Component$1);
/**
* Get the {@link VideoTrackList}
*
@@ -11577,6 +12217,14 @@
*/
Tech.prototype.featuresNativeTextTracks = false;
+ /**
+ * Boolean indicating whether the `Tech` supports `requestVideoFrameCallback`.
+ *
+ * @type {boolean}
+ * @default
+ */
+
+ Tech.prototype.featuresVideoFrameCallback = false;
/**
* A functional mixin for techs that want to use the Source Handler pattern.
* Source handlers are scripts for handling specific formats.
@@ -11748,20 +12396,20 @@
if (_Tech.nativeSourceHandler) {
sh = _Tech.nativeSourceHandler;
} else {
- log.error('No source handler found for the current source.');
+ log$1.error('No source handler found for the current source.');
}
} // Dispose any existing source handler
this.disposeSourceHandler();
- this.off('dispose', this.disposeSourceHandler);
+ this.off('dispose', this.disposeSourceHandler_);
if (sh !== _Tech.nativeSourceHandler) {
this.currentSource_ = source;
}
this.sourceHandler_ = sh.handleSource(source, this, this.options_);
- this.one('dispose', this.disposeSourceHandler);
+ this.one('dispose', this.disposeSourceHandler_);
};
/**
* Clean up any existing SourceHandlers and listeners when the Tech is disposed.
@@ -11794,7 +12442,7 @@
// Tech that can be registered as a Component.
- Component.registerComponent('Tech', Tech);
+ Component$1.registerComponent('Tech', Tech);
Tech.registerTech('Tech', Tech);
/**
* A list of techs that should be added to techOrder on Players
@@ -11956,7 +12604,7 @@
arg = null;
}
- var callMethod = 'call' + toTitleCase(method);
+ var callMethod = 'call' + toTitleCase$1(method);
var middlewareValue = middleware.reduce(middlewareIterator(callMethod), arg);
var terminated = middlewareValue === TERMINATOR; // deprecated. The `null` return value should instead return TERMINATOR to
// prevent confusion if a techs method actually returns null.
@@ -11979,7 +12627,8 @@
played: 1,
paused: 1,
seekable: 1,
- volume: 1
+ volume: 1,
+ ended: 1
};
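// Editor's note (not part of the diff): with `ended` added to the allowed
// getters above, a video.js middleware can now mediate it like the other
// getters. A hedged sketch using the public videojs.use() API; the source type
// and pass-through body are purely illustrative:
videojs.use('video/mp4', function (player) {
  return {
    // getters receive the tech's value and may return a transformed one
    ended: function (isEnded) {
      return isEnded;
    }
  };
});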
/**
* Enumeration of allowed setters where the keys are method names.
@@ -12134,7 +12783,7 @@
/**
* Mimetypes
*
- * @see http://hul.harvard.edu/ois/////systems/wax/wax-public-help/mimetypes.htm
+ * @see https://www.iana.org/assignments/media-types/media-types.xhtml
* @typedef Mimetypes~Kind
* @enum
*/
@@ -12149,8 +12798,12 @@
m4a: 'audio/mp4',
mp3: 'audio/mpeg',
aac: 'audio/aac',
+ caf: 'audio/x-caf',
+ flac: 'audio/flac',
oga: 'audio/ogg',
+ wav: 'audio/wav',
m3u8: 'application/x-mpegURL',
+ mpd: 'application/dash+xml',
jpg: 'image/jpeg',
jpeg: 'image/jpeg',
gif: 'image/gif',
@@ -12251,7 +12904,7 @@
if (Array.isArray(srcobj)) {
newsrc = newsrc.concat(srcobj);
- } else if (isObject(srcobj)) {
+ } else if (isObject$1(srcobj)) {
newsrc.push(srcobj);
}
});
@@ -12261,7 +12914,7 @@
src = [fixSource({
src: src
})];
- } else if (isObject(src) && typeof src.src === 'string' && src.src && src.src.trim()) {
+ } else if (isObject$1(src) && typeof src.src === 'string' && src.src && src.src.trim()) {
// src is already valid
src = [fixSource(src)];
} else {
@@ -12300,9 +12953,7 @@
* @extends Component
*/
- var MediaLoader =
- /*#__PURE__*/
- function (_Component) {
+ var MediaLoader = /*#__PURE__*/function (_Component) {
inheritsLoose(MediaLoader, _Component);
/**
@@ -12321,7 +12972,7 @@
var _this;
// MediaLoader has no element
- var options_ = mergeOptions({
+ var options_ = mergeOptions$3({
createEl: false
}, options);
_this = _Component.call(this, player, options_, ready) || this; // If there are no sources when the player is initialized,
@@ -12329,12 +12980,12 @@
if (!options.playerOptions.sources || options.playerOptions.sources.length === 0) {
for (var i = 0, j = options.playerOptions.techOrder; i < j.length; i++) {
- var techName = toTitleCase(j[i]);
+ var techName = toTitleCase$1(j[i]);
var tech = Tech.getTech(techName); // Support old behavior of techs being registered as components.
// Remove once that deprecated behavior is removed.
if (!techName) {
- tech = Component.getComponent(techName);
+ tech = Component$1.getComponent(techName);
} // Check if the browser supports this technology
@@ -12344,7 +12995,7 @@
}
}
} else {
- // Loop through playback technologies (HTML5, Flash) and check for support.
+ // Loop through playback technologies (e.g. HTML5) and check for support.
// Then load the best source.
// A few assumptions here:
// All playback technologies respect preload false.
@@ -12355,9 +13006,9 @@
}
return MediaLoader;
- }(Component);
+ }(Component$1);
- Component.registerComponent('MediaLoader', MediaLoader);
+ Component$1.registerComponent('MediaLoader', MediaLoader);
/**
* Component which is clickable or keyboard actionable, but is not a
@@ -12366,9 +13017,7 @@
* @extends Component
*/
- var ClickableComponent =
- /*#__PURE__*/
- function (_Component) {
+ var ClickableComponent = /*#__PURE__*/function (_Component) {
inheritsLoose(ClickableComponent, _Component);
/**
@@ -12378,16 +13027,43 @@
* The `Player` that this class should be attached to.
*
* @param {Object} [options]
- * The key/value store of player options.
+ * The key/value store of component options.
*
* @param {function} [options.clickHandler]
* The function to call when the button is clicked / activated
+ *
+ * @param {string} [options.controlText]
+ * The text to set on the button
+ *
+ * @param {string} [options.className]
+ * A class or space separated list of classes to add the component
+ *
*/
function ClickableComponent(player, options) {
var _this;
_this = _Component.call(this, player, options) || this;
+ if (_this.options_.controlText) {
+ _this.controlText(_this.options_.controlText);
+ }
+
+ _this.handleMouseOver_ = function (e) {
+ return _this.handleMouseOver(e);
+ };
+
+ _this.handleMouseOut_ = function (e) {
+ return _this.handleMouseOut(e);
+ };
+
+ _this.handleClick_ = function (e) {
+ return _this.handleClick(e);
+ };
+
+ _this.handleKeyDown_ = function (e) {
+ return _this.handleKeyDown(e);
+ };
+
_this.emitTapEvents();
_this.enable();
@@ -12413,7 +13089,7 @@
var _proto = ClickableComponent.prototype;
- _proto.createEl = function createEl(tag, props, attributes) {
+ _proto.createEl = function createEl$1(tag, props, attributes) {
if (tag === void 0) {
tag = 'div';
}
@@ -12427,13 +13103,12 @@
}
props = assign({
- innerHTML: '<span aria-hidden="true" class="vjs-icon-placeholder"></span>',
className: this.buildCSSClass(),
tabIndex: 0
}, props);
if (tag === 'button') {
- log.error("Creating a ClickableComponent with an HTML element of " + tag + " is not supported; use a Button instead.");
+ log$1.error("Creating a ClickableComponent with an HTML element of " + tag + " is not supported; use a Button instead.");
} // Add ARIA attributes for clickable element which is not a native HTML button
@@ -12441,9 +13116,12 @@
role: 'button'
}, attributes);
this.tabIndex_ = props.tabIndex;
-
- var el = _Component.prototype.createEl.call(this, tag, props, attributes);
-
+ var el = createEl(tag, props, attributes);
+ el.appendChild(createEl('span', {
+ className: 'vjs-icon-placeholder'
+ }, {
+ 'aria-hidden': true
+ }));
this.createControlTextEl(el);
return el;
};
@@ -12507,7 +13185,7 @@
this.controlText_ = text;
textContent(this.controlTextEl_, localizedText);
- if (!this.nonIconControl) {
+ if (!this.nonIconControl && !this.player_.options_.noUITitleAttributes) {
// Set title attribute if only an icon is shown
el.setAttribute('title', localizedText);
}
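// Editor's note (not part of the diff): the extra check above lets integrators
// suppress title-attribute tooltips on icon-only controls. A hedged setup
// sketch with a hypothetical element id:
var quietPlayer = videojs('my-video', {
  noUITitleAttributes: true // control text stays for screen readers, no title=""
});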
@@ -12538,8 +13216,8 @@
this.el_.setAttribute('tabIndex', this.tabIndex_);
}
- this.on(['tap', 'click'], this.handleClick);
- this.on('keydown', this.handleKeyDown);
+ this.on(['tap', 'click'], this.handleClick_);
+ this.on('keydown', this.handleKeyDown_);
}
}
/**
@@ -12556,10 +13234,20 @@
this.el_.removeAttribute('tabIndex');
}
- this.off('mouseover', this.handleMouseOver);
- this.off('mouseout', this.handleMouseOut);
- this.off(['tap', 'click'], this.handleClick);
- this.off('keydown', this.handleKeyDown);
+ this.off('mouseover', this.handleMouseOver_);
+ this.off('mouseout', this.handleMouseOut_);
+ this.off(['tap', 'click'], this.handleClick_);
+ this.off('keydown', this.handleKeyDown_);
+ }
+ /**
+ * Handles language change in ClickableComponent for the player in components
+ *
+ *
+ */
+ ;
+
+ _proto.handleLanguagechange = function handleLanguagechange() {
+ this.controlText(this.controlText_);
}
/**
* Event handler that is called when a `ClickableComponent` receives a
@@ -12607,9 +13295,9 @@
};
return ClickableComponent;
- }(Component);
+ }(Component$1);
- Component.registerComponent('ClickableComponent', ClickableComponent);
+ Component$1.registerComponent('ClickableComponent', ClickableComponent);
/**
* A `ClickableComponent` that handles showing the poster image for the player.
@@ -12617,9 +13305,7 @@
* @extends ClickableComponent
*/
- var PosterImage =
- /*#__PURE__*/
- function (_ClickableComponent) {
+ var PosterImage = /*#__PURE__*/function (_ClickableComponent) {
inheritsLoose(PosterImage, _ClickableComponent);
/**
@@ -12638,7 +13324,11 @@
_this.update();
- player.on('posterchange', bind(assertThisInitialized(_this), _this.update));
+ _this.update_ = function (e) {
+ return _this.update(e);
+ };
+
+ player.on('posterchange', _this.update_);
return _this;
}
/**
@@ -12649,7 +13339,7 @@
var _proto = PosterImage.prototype;
_proto.dispose = function dispose() {
- this.player().off('posterchange', this.update);
+ this.player().off('posterchange', this.update_);
_ClickableComponent.prototype.dispose.call(this);
}
@@ -12727,7 +13417,12 @@
return;
}
- if (this.player_.tech(true)) {
+ var sourceIsEncrypted = this.player_.usingPlugin('eme') && this.player_.eme.sessions && this.player_.eme.sessions.length > 0;
+
+ if (this.player_.tech(true) && // We've observed a bug in IE and Edge when playing back DRM content where
+ // calling .focus() on the video element causes the video to go black,
+ // so we avoid it in that specific case
+ !((IE_VERSION || IS_EDGE) && sourceIsEncrypted)) {
this.player_.tech(true).focus();
}
@@ -12741,7 +13436,7 @@
return PosterImage;
}(ClickableComponent);
- Component.registerComponent('PosterImage', PosterImage);
+ Component$1.registerComponent('PosterImage', PosterImage);
var darkGray = '#222';
var lightGray = '#ccc';
@@ -12816,9 +13511,7 @@
*/
- var TextTrackDisplay =
- /*#__PURE__*/
- function (_Component) {
+ var TextTrackDisplay = /*#__PURE__*/function (_Component) {
inheritsLoose(TextTrackDisplay, _Component);
/**
@@ -12837,10 +13530,18 @@
var _this;
_this = _Component.call(this, player, options, ready) || this;
- var updateDisplayHandler = bind(assertThisInitialized(_this), _this.updateDisplay);
- player.on('loadstart', bind(assertThisInitialized(_this), _this.toggleDisplay));
+
+ var updateDisplayHandler = function updateDisplayHandler(e) {
+ return _this.updateDisplay(e);
+ };
+
+ player.on('loadstart', function (e) {
+ return _this.toggleDisplay(e);
+ });
player.on('texttrackchange', updateDisplayHandler);
- player.on('loadedmetadata', bind(assertThisInitialized(_this), _this.preselectTrack)); // This used to be called during player init, but was causing an error
+ player.on('loadedmetadata', function (e) {
+ return _this.preselectTrack(e);
+ }); // This used to be called during player init, but was causing an error
// if a track should show by default and the display hadn't loaded yet.
// Should probably be moved to an external track loader when we support
// tracks that don't need a display.
@@ -12853,9 +13554,9 @@
player.on('fullscreenchange', updateDisplayHandler);
player.on('playerresize', updateDisplayHandler);
- window$1.addEventListener('orientationchange', updateDisplayHandler);
+ window.addEventListener('orientationchange', updateDisplayHandler);
player.on('dispose', function () {
- return window$1.removeEventListener('orientationchange', updateDisplayHandler);
+ return window.removeEventListener('orientationchange', updateDisplayHandler);
});
var tracks = this.options_.playerOptions.tracks || [];
@@ -12956,6 +13657,7 @@
return _Component.prototype.createEl.call(this, 'div', {
className: 'vjs-text-track-display'
}, {
+ 'translate': 'yes',
'aria-live': 'off',
'aria-atomic': 'true'
});
@@ -12966,8 +13668,8 @@
;
_proto.clearDisplay = function clearDisplay() {
- if (typeof window$1.WebVTT === 'function') {
- window$1.WebVTT.processCues(window$1, [], this.el_);
+ if (typeof window.WebVTT === 'function') {
+ window.WebVTT.processCues(window, [], this.el_);
}
}
/**
@@ -13093,11 +13795,10 @@
}
if (overrides.fontPercent && overrides.fontPercent !== 1) {
- var fontSize = window$1.parseFloat(cueDiv.style.fontSize);
+ var fontSize = window.parseFloat(cueDiv.style.fontSize);
cueDiv.style.fontSize = fontSize * overrides.fontPercent + 'px';
cueDiv.style.height = 'auto';
cueDiv.style.top = 'auto';
- cueDiv.style.bottom = '2px';
}
if (overrides.fontFamily && overrides.fontFamily !== 'default') {
@@ -13122,7 +13823,7 @@
tracks = [tracks];
}
- if (typeof window$1.WebVTT !== 'function' || tracks.every(function (track) {
+ if (typeof window.WebVTT !== 'function' || tracks.every(function (track) {
return !track.activeCues;
})) {
return;
@@ -13139,7 +13840,7 @@
} // removes all cues before it processes new ones
- window$1.WebVTT.processCues(window$1, cues, this.el_); // add unique class to each language text track & add settings styling if necessary
+ window.WebVTT.processCues(window, cues, this.el_); // add unique class to each language text track & add settings styling if necessary
for (var _i2 = 0; _i2 < tracks.length; ++_i2) {
var _track2 = tracks[_i2];
@@ -13148,6 +13849,10 @@
var cueEl = _track2.activeCues[_j].displayState;
addClass(cueEl, 'vjs-text-track-cue');
addClass(cueEl, 'vjs-text-track-cue-' + (_track2.language ? _track2.language : _i2));
+
+ if (_track2.language) {
+ setAttribute(cueEl, 'lang', _track2.language);
+ }
}
if (this.player_.textTrackSettings) {
@@ -13157,9 +13862,9 @@
};
return TextTrackDisplay;
- }(Component);
+ }(Component$1);
- Component.registerComponent('TextTrackDisplay', TextTrackDisplay);
+ Component$1.registerComponent('TextTrackDisplay', TextTrackDisplay);
/**
* A loading spinner for use during waiting/loading events.
@@ -13167,9 +13872,7 @@
* @extends Component
*/
- var LoadingSpinner =
- /*#__PURE__*/
- function (_Component) {
+ var LoadingSpinner = /*#__PURE__*/function (_Component) {
inheritsLoose(LoadingSpinner, _Component);
function LoadingSpinner() {
@@ -13189,7 +13892,7 @@
var playerType = this.localize(isAudio ? 'Audio Player' : 'Video Player');
var controlText = createEl('span', {
className: 'vjs-control-text',
- innerHTML: this.localize('{1} is loading.', [playerType])
+ textContent: this.localize('{1} is loading.', [playerType])
});
var el = _Component.prototype.createEl.call(this, 'div', {
@@ -13202,9 +13905,9 @@
};
return LoadingSpinner;
- }(Component);
+ }(Component$1);
- Component.registerComponent('LoadingSpinner', LoadingSpinner);
+ Component$1.registerComponent('LoadingSpinner', LoadingSpinner);
/**
* Base class for all buttons.
@@ -13212,9 +13915,7 @@
* @extends ClickableComponent
*/
- var Button =
- /*#__PURE__*/
- function (_ClickableComponent) {
+ var Button = /*#__PURE__*/function (_ClickableComponent) {
inheritsLoose(Button, _ClickableComponent);
function Button() {
@@ -13239,7 +13940,7 @@
* @return {Element}
* The element that gets created.
*/
- _proto.createEl = function createEl(tag, props, attributes) {
+ _proto.createEl = function createEl$1(tag, props, attributes) {
if (props === void 0) {
props = {};
}
@@ -13250,7 +13951,6 @@
tag = 'button';
props = assign({
- innerHTML: '<span aria-hidden="true" class="vjs-icon-placeholder"></span>',
className: this.buildCSSClass()
}, props); // Add attributes for button element
@@ -13258,7 +13958,14 @@
// Necessary since the default button type is "submit"
type: 'button'
}, attributes);
- var el = Component.prototype.createEl.call(this, tag, props, attributes);
+
+ var el = createEl(tag, props, attributes);
+
+ el.appendChild(createEl('span', {
+ className: 'vjs-icon-placeholder'
+ }, {
+ 'aria-hidden': true
+ }));
this.createControlTextEl(el);
return el;
}
@@ -13286,9 +13993,9 @@
}
var className = this.constructor.name;
- log.warn("Adding an actionable (user controllable) child to a Button (" + className + ") is not supported; use a ClickableComponent instead."); // Avoid the error message generated by ClickableComponent's addChild method
+ log$1.warn("Adding an actionable (user controllable) child to a Button (" + className + ") is not supported; use a ClickableComponent instead."); // Avoid the error message generated by ClickableComponent's addChild method
- return Component.prototype.addChild.call(this, child, options);
+ return Component$1.prototype.addChild.call(this, child, options);
}
/**
* Enable the `Button` element so that it can be activated or clicked. Use this with
@@ -13341,7 +14048,7 @@
return Button;
}(ClickableComponent);
- Component.registerComponent('Button', Button);
+ Component$1.registerComponent('Button', Button);
/**
* The initial play button that shows before the video has played. The hiding of the
@@ -13350,9 +14057,7 @@
* @extends Button
*/
- var BigPlayButton =
- /*#__PURE__*/
- function (_Button) {
+ var BigPlayButton = /*#__PURE__*/function (_Button) {
inheritsLoose(BigPlayButton, _Button);
function BigPlayButton(player, options) {
@@ -13361,7 +14066,9 @@
_this = _Button.call(this, player, options) || this;
_this.mouseused_ = false;
- _this.on('mousedown', _this.handleMouseDown);
+ _this.on('mousedown', function (e) {
+ return _this.handleMouseDown(e);
+ });
return _this;
}
@@ -13448,7 +14155,7 @@
BigPlayButton.prototype.controlText_ = 'Play Video';
- Component.registerComponent('BigPlayButton', BigPlayButton);
+ Component$1.registerComponent('BigPlayButton', BigPlayButton);
/**
* The `CloseButton` is a `{@link Button}` that fires a `close` event when
@@ -13457,9 +14164,7 @@
* @extends Button
*/
- var CloseButton =
- /*#__PURE__*/
- function (_Button) {
+ var CloseButton = /*#__PURE__*/function (_Button) {
inheritsLoose(CloseButton, _Button);
/**
@@ -13552,7 +14257,7 @@
return CloseButton;
}(Button);
- Component.registerComponent('CloseButton', CloseButton);
+ Component$1.registerComponent('CloseButton', CloseButton);
/**
* Button to toggle between play and pause.
@@ -13560,9 +14265,7 @@
* @extends Button
*/
- var PlayToggle =
- /*#__PURE__*/
- function (_Button) {
+ var PlayToggle = /*#__PURE__*/function (_Button) {
inheritsLoose(PlayToggle, _Button);
/**
@@ -13585,12 +14288,18 @@
options.replay = options.replay === undefined || options.replay;
- _this.on(player, 'play', _this.handlePlay);
+ _this.on(player, 'play', function (e) {
+ return _this.handlePlay(e);
+ });
- _this.on(player, 'pause', _this.handlePause);
+ _this.on(player, 'pause', function (e) {
+ return _this.handlePause(e);
+ });
if (options.replay) {
- _this.on(player, 'ended', _this.handleEnded);
+ _this.on(player, 'ended', function (e) {
+ return _this.handleEnded(e);
+ });
}
return _this;
@@ -13623,7 +14332,7 @@
_proto.handleClick = function handleClick(event) {
if (this.player_.paused()) {
- this.player_.play();
+ silencePromise(this.player_.play());
} else {
this.player_.pause();
}
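// Editor's note (not part of the diff): silencePromise() guards against the
// unhandled rejection that play() can produce under autoplay restrictions.
// Roughly equivalent caller-side handling, as a hedged sketch:
var toggledPlayer = videojs('my-video');
var playPromise = toggledPlayer.play();
if (playPromise !== undefined && typeof playPromise.then === 'function') {
  playPromise.then(null, function () {
    // playback was prevented (e.g. autoplay policy); ignore it, as silencePromise does
  });
}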
@@ -13692,12 +14401,16 @@
;
_proto.handleEnded = function handleEnded(event) {
+ var _this2 = this;
+
this.removeClass('vjs-playing');
this.addClass('vjs-ended'); // change the button text to "Replay"
this.controlText('Replay'); // on the next seek remove the replay button
- this.one(this.player_, 'seeked', this.handleSeeked);
+ this.one(this.player_, 'seeked', function (e) {
+ return _this2.handleSeeked(e);
+ });
};
return PlayToggle;
@@ -13711,7 +14424,7 @@
PlayToggle.prototype.controlText_ = 'Play';
- Component.registerComponent('PlayToggle', PlayToggle);
+ Component$1.registerComponent('PlayToggle', PlayToggle);
/**
* @file format-time.js
@@ -13812,9 +14525,7 @@
* @extends Component
*/
- var TimeDisplay =
- /*#__PURE__*/
- function (_Component) {
+ var TimeDisplay = /*#__PURE__*/function (_Component) {
inheritsLoose(TimeDisplay, _Component);
/**
@@ -13831,7 +14542,9 @@
_this = _Component.call(this, player, options) || this;
- _this.on(player, ['timeupdate', 'ended'], _this.updateContent);
+ _this.on(player, ['timeupdate', 'ended'], function (e) {
+ return _this.updateContent(e);
+ });
_this.updateTextNode_();
@@ -13851,10 +14564,16 @@
var className = this.buildCSSClass();
var el = _Component.prototype.createEl.call(this, 'div', {
- className: className + " vjs-time-control vjs-control",
- innerHTML: "" + this.localize(this.labelText_) + "\xA0 "
+ className: className + " vjs-time-control vjs-control"
});
+ var span = createEl('span', {
+ className: 'vjs-control-text',
+ textContent: this.localize(this.labelText_) + "\xA0"
+ }, {
+ role: 'presentation'
+ });
+ el.appendChild(span);
this.contentEl_ = createEl('span', {
className: className + "-display"
}, {
@@ -13899,12 +14618,18 @@
}
this.formattedTime_ = time;
- this.requestAnimationFrame(function () {
+ this.requestNamedAnimationFrame('TimeDisplay#updateTextNode_', function () {
if (!_this2.contentEl_) {
return;
}
var oldNode = _this2.textNode_;
+
+ if (oldNode && _this2.contentEl_.firstChild !== oldNode) {
+ oldNode = null;
+ log$1.warn('TimeDisplay#updateTextnode_: Prevented replacement of text node element since it was no longer a child of this node. Appending a new node instead.');
+ }
+
_this2.textNode_ = document.createTextNode(_this2.formattedTime_);
if (!_this2.textNode_) {
@@ -13932,7 +14657,7 @@
_proto.updateContent = function updateContent(event) {};
return TimeDisplay;
- }(Component);
+ }(Component$1);
/**
* The text that is added to the `TimeDisplay` for screen reader users.
*
@@ -13952,7 +14677,7 @@
*/
TimeDisplay.prototype.controlText_ = 'Time';
- Component.registerComponent('TimeDisplay', TimeDisplay);
+ Component$1.registerComponent('TimeDisplay', TimeDisplay);
/**
* Displays the current time
@@ -13960,9 +14685,7 @@
* @extends Component
*/
- var CurrentTimeDisplay =
- /*#__PURE__*/
- function (_TimeDisplay) {
+ var CurrentTimeDisplay = /*#__PURE__*/function (_TimeDisplay) {
inheritsLoose(CurrentTimeDisplay, _TimeDisplay);
function CurrentTimeDisplay() {
@@ -14024,7 +14747,7 @@
*/
CurrentTimeDisplay.prototype.controlText_ = 'Current Time';
- Component.registerComponent('CurrentTimeDisplay', CurrentTimeDisplay);
+ Component$1.registerComponent('CurrentTimeDisplay', CurrentTimeDisplay);
/**
* Displays the duration
@@ -14032,9 +14755,7 @@
* @extends Component
*/
- var DurationDisplay =
- /*#__PURE__*/
- function (_TimeDisplay) {
+ var DurationDisplay = /*#__PURE__*/function (_TimeDisplay) {
inheritsLoose(DurationDisplay, _TimeDisplay);
/**
@@ -14049,21 +14770,26 @@
function DurationDisplay(player, options) {
var _this;
- _this = _TimeDisplay.call(this, player, options) || this; // we do not want to/need to throttle duration changes,
+ _this = _TimeDisplay.call(this, player, options) || this;
+
+ var updateContent = function updateContent(e) {
+ return _this.updateContent(e);
+ }; // we do not want to/need to throttle duration changes,
// as they should always display the changed duration as
// it has changed
- _this.on(player, 'durationchange', _this.updateContent); // Listen to loadstart because the player duration is reset when a new media element is loaded,
+
+ _this.on(player, 'durationchange', updateContent); // Listen to loadstart because the player duration is reset when a new media element is loaded,
// but the durationchange on the user agent will not fire.
// @see [Spec]{@link https://www.w3.org/TR/2011/WD-html5-20110113/video.html#media-element-load-algorithm}
- _this.on(player, 'loadstart', _this.updateContent); // Also listen for timeupdate (in the parent) and loadedmetadata because removing those
+ _this.on(player, 'loadstart', updateContent); // Also listen for timeupdate (in the parent) and loadedmetadata because removing those
// listeners could have broken dependent applications/libraries. These
// can likely be removed for 7.0.
- _this.on(player, 'loadedmetadata', _this.updateContent);
+ _this.on(player, 'loadedmetadata', updateContent);
return _this;
}
@@ -14119,7 +14845,7 @@
*/
DurationDisplay.prototype.controlText_ = 'Duration';
- Component.registerComponent('DurationDisplay', DurationDisplay);
+ Component$1.registerComponent('DurationDisplay', DurationDisplay);
/**
* The separator between the current time and duration.
@@ -14128,9 +14854,7 @@
* @extends Component
*/
- var TimeDivider =
- /*#__PURE__*/
- function (_Component) {
+ var TimeDivider = /*#__PURE__*/function (_Component) {
inheritsLoose(TimeDivider, _Component);
function TimeDivider() {
@@ -14146,21 +14870,30 @@
* The element that was created.
*/
_proto.createEl = function createEl() {
- return _Component.prototype.createEl.call(this, 'div', {
- className: 'vjs-time-control vjs-time-divider',
- innerHTML: '<div><span>/</span></div>'
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-time-control vjs-time-divider'
}, {
// this element and its contents can be hidden from assistive techs since
// it is made extraneous by the announcement of the control text
// for the current time and duration displays
'aria-hidden': true
});
+
+ var div = _Component.prototype.createEl.call(this, 'div');
+
+ var span = _Component.prototype.createEl.call(this, 'span', {
+ textContent: '/'
+ });
+
+ div.appendChild(span);
+ el.appendChild(div);
+ return el;
};
return TimeDivider;
- }(Component);
+ }(Component$1);
- Component.registerComponent('TimeDivider', TimeDivider);
+ Component$1.registerComponent('TimeDivider', TimeDivider);
/**
* Displays the time left in the video
@@ -14168,9 +14901,7 @@
* @extends Component
*/
- var RemainingTimeDisplay =
- /*#__PURE__*/
- function (_TimeDisplay) {
+ var RemainingTimeDisplay = /*#__PURE__*/function (_TimeDisplay) {
inheritsLoose(RemainingTimeDisplay, _TimeDisplay);
/**
@@ -14187,7 +14918,9 @@
_this = _TimeDisplay.call(this, player, options) || this;
- _this.on(player, 'durationchange', _this.updateContent);
+ _this.on(player, 'durationchange', function (e) {
+ return _this.updateContent(e);
+ });
return _this;
}
@@ -14215,9 +14948,12 @@
_proto.createEl = function createEl$1() {
var el = _TimeDisplay.prototype.createEl.call(this);
- el.insertBefore(createEl('span', {}, {
- 'aria-hidden': true
- }, '-'), this.contentEl_);
+ if (this.options_.displayNegative !== false) {
+ el.insertBefore(createEl('span', {}, {
+ 'aria-hidden': true
+ }, '-'), this.contentEl_);
+ }
+
return el;
}
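// Editor's note (not part of the diff): the new displayNegative option above
// controls the leading "-" span. A hedged sketch of passing it through the
// standard controlBar child options (element id hypothetical):
var remainingPlayer = videojs('my-video', {
  controlBar: {
    remainingTimeDisplay: {
      displayNegative: false // render "1:23" rather than "-1:23"
    }
  }
});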
/**
@@ -14271,7 +15007,7 @@
*/
RemainingTimeDisplay.prototype.controlText_ = 'Remaining Time';
- Component.registerComponent('RemainingTimeDisplay', RemainingTimeDisplay);
+ Component$1.registerComponent('RemainingTimeDisplay', RemainingTimeDisplay);
/**
* Displays the live indicator when duration is Infinity.
@@ -14279,9 +15015,7 @@
* @extends Component
*/
- var LiveDisplay =
- /*#__PURE__*/
- function (_Component) {
+ var LiveDisplay = /*#__PURE__*/function (_Component) {
inheritsLoose(LiveDisplay, _Component);
/**
@@ -14300,7 +15034,9 @@
_this.updateShowing();
- _this.on(_this.player(), 'durationchange', _this.updateShowing);
+ _this.on(_this.player(), 'durationchange', function (e) {
+ return _this.updateShowing(e);
+ });
return _this;
}
@@ -14320,11 +15056,15 @@
});
this.contentEl_ = createEl('div', {
- className: 'vjs-live-display',
- innerHTML: "" + this.localize('Stream Type') + "\xA0 " + this.localize('LIVE')
+ className: 'vjs-live-display'
}, {
'aria-live': 'off'
});
+ this.contentEl_.appendChild(createEl('span', {
+ className: 'vjs-control-text',
+ textContent: this.localize('Stream Type') + "\xA0"
+ }));
+ this.contentEl_.appendChild(document.createTextNode(this.localize('LIVE')));
el.appendChild(this.contentEl_);
return el;
};
@@ -14354,9 +15094,9 @@
};
return LiveDisplay;
- }(Component);
+ }(Component$1);
- Component.registerComponent('LiveDisplay', LiveDisplay);
+ Component$1.registerComponent('LiveDisplay', LiveDisplay);
/**
* Displays the live indicator when duration is Infinity.
@@ -14364,9 +15104,7 @@
* @extends Component
*/
- var SeekToLive =
- /*#__PURE__*/
- function (_Button) {
+ var SeekToLive = /*#__PURE__*/function (_Button) {
inheritsLoose(SeekToLive, _Button);
/**
@@ -14386,7 +15124,11 @@
_this.updateLiveEdgeStatus();
if (_this.player_.liveTracker) {
- _this.on(_this.player_.liveTracker, 'liveedgechange', _this.updateLiveEdgeStatus);
+ _this.updateLiveEdgeStatusHandler_ = function (e) {
+ return _this.updateLiveEdgeStatus(e);
+ };
+
+ _this.on(_this.player_.liveTracker, 'liveedgechange', _this.updateLiveEdgeStatusHandler_);
}
return _this;
@@ -14408,7 +15150,7 @@
this.textEl_ = createEl('span', {
className: 'vjs-seek-to-live-text',
- innerHTML: this.localize('LIVE')
+ textContent: this.localize('LIVE')
}, {
'aria-hidden': 'true'
});
@@ -14421,7 +15163,7 @@
*/
;
- _proto.updateLiveEdgeStatus = function updateLiveEdgeStatus(e) {
+ _proto.updateLiveEdgeStatus = function updateLiveEdgeStatus() {
// default to live edge
if (!this.player_.liveTracker || this.player_.liveTracker.atLiveEdge()) {
this.setAttribute('aria-disabled', true);
@@ -14450,7 +15192,7 @@
_proto.dispose = function dispose() {
if (this.player_.liveTracker) {
- this.off(this.player_.liveTracker, 'liveedgechange', this.updateLiveEdgeStatus);
+ this.off(this.player_.liveTracker, 'liveedgechange', this.updateLiveEdgeStatusHandler_);
}
this.textEl_ = null;
@@ -14462,7 +15204,7 @@
}(Button);
SeekToLive.prototype.controlText_ = 'Seek to live, currently playing live';
- Component.registerComponent('SeekToLive', SeekToLive);
+ Component$1.registerComponent('SeekToLive', SeekToLive);
/**
* Keep a number between a min and a max value
@@ -14490,9 +15232,7 @@
* @extends Component
*/
- var Slider =
- /*#__PURE__*/
- function (_Component) {
+ var Slider = /*#__PURE__*/function (_Component) {
inheritsLoose(Slider, _Component);
/**
@@ -14507,7 +15247,32 @@
function Slider(player, options) {
var _this;
- _this = _Component.call(this, player, options) || this; // Set property names to bar to match with the child Slider class is looking for
+ _this = _Component.call(this, player, options) || this;
+
+ _this.handleMouseDown_ = function (e) {
+ return _this.handleMouseDown(e);
+ };
+
+ _this.handleMouseUp_ = function (e) {
+ return _this.handleMouseUp(e);
+ };
+
+ _this.handleKeyDown_ = function (e) {
+ return _this.handleKeyDown(e);
+ };
+
+ _this.handleClick_ = function (e) {
+ return _this.handleClick(e);
+ };
+
+ _this.handleMouseMove_ = function (e) {
+ return _this.handleMouseMove(e);
+ };
+
+ _this.update_ = function (e) {
+ return _this.update(e);
+ }; // Set property names to bar to match with the child Slider class is looking for
+
_this.bar = _this.getChild(_this.options_.barName); // Set a horizontal or vertical class on the slider depending on the slider type
@@ -14540,10 +15305,10 @@
return;
}
- this.on('mousedown', this.handleMouseDown);
- this.on('touchstart', this.handleMouseDown);
- this.on('keydown', this.handleKeyDown);
- this.on('click', this.handleClick); // TODO: deprecated, controlsvisible does not seem to be fired
+ this.on('mousedown', this.handleMouseDown_);
+ this.on('touchstart', this.handleMouseDown_);
+ this.on('keydown', this.handleKeyDown_);
+ this.on('click', this.handleClick_); // TODO: deprecated, controlsvisible does not seem to be fired
this.on(this.player_, 'controlsvisible', this.update);
@@ -14566,15 +15331,15 @@
}
var doc = this.bar.el_.ownerDocument;
- this.off('mousedown', this.handleMouseDown);
- this.off('touchstart', this.handleMouseDown);
- this.off('keydown', this.handleKeyDown);
- this.off('click', this.handleClick);
- this.off(this.player_, 'controlsvisible', this.update);
- this.off(doc, 'mousemove', this.handleMouseMove);
- this.off(doc, 'mouseup', this.handleMouseUp);
- this.off(doc, 'touchmove', this.handleMouseMove);
- this.off(doc, 'touchend', this.handleMouseUp);
+ this.off('mousedown', this.handleMouseDown_);
+ this.off('touchstart', this.handleMouseDown_);
+ this.off('keydown', this.handleKeyDown_);
+ this.off('click', this.handleClick_);
+ this.off(this.player_, 'controlsvisible', this.update_);
+ this.off(doc, 'mousemove', this.handleMouseMove_);
+ this.off(doc, 'mouseup', this.handleMouseUp_);
+ this.off(doc, 'touchmove', this.handleMouseMove_);
+ this.off(doc, 'touchend', this.handleMouseUp_);
this.removeAttribute('tabindex');
this.addClass('disabled');
@@ -14661,11 +15426,11 @@
*/
this.trigger('slideractive');
- this.on(doc, 'mousemove', this.handleMouseMove);
- this.on(doc, 'mouseup', this.handleMouseUp);
- this.on(doc, 'touchmove', this.handleMouseMove);
- this.on(doc, 'touchend', this.handleMouseUp);
- this.handleMouseMove(event);
+ this.on(doc, 'mousemove', this.handleMouseMove_);
+ this.on(doc, 'mouseup', this.handleMouseUp_);
+ this.on(doc, 'touchmove', this.handleMouseMove_);
+ this.on(doc, 'touchend', this.handleMouseUp_);
+ this.handleMouseMove(event, true);
}
/**
* Handle the `mousemove`, `touchmove`, and `mousedown` events on this `Slider`.
@@ -14676,6 +15441,7 @@
* @param {EventTarget~Event} event
* `mousedown`, `mousemove`, `touchstart`, or `touchmove` event that triggered
* this function
+ * @param {boolean} mouseDown this is a flag that should be set to true if `handleMouseMove` is called directly. It allows us to skip things that should not happen if coming from mouse down but should happen on regular mouse move handler. Defaults to false.
*
* @listens mousemove
* @listens touchmove
@@ -14707,10 +15473,10 @@
*/
this.trigger('sliderinactive');
- this.off(doc, 'mousemove', this.handleMouseMove);
- this.off(doc, 'mouseup', this.handleMouseUp);
- this.off(doc, 'touchmove', this.handleMouseMove);
- this.off(doc, 'touchend', this.handleMouseUp);
+ this.off(doc, 'mousemove', this.handleMouseMove_);
+ this.off(doc, 'mouseup', this.handleMouseUp_);
+ this.off(doc, 'touchmove', this.handleMouseMove_);
+ this.off(doc, 'touchend', this.handleMouseUp_);
this.update();
}
/**
@@ -14742,7 +15508,7 @@
}
this.progress_ = progress;
- this.requestAnimationFrame(function () {
+ this.requestNamedAnimationFrame('Slider#update', function () {
// Set the new bar width or height
var sizeKey = _this2.vertical() ? 'height' : 'width'; // Convert to a percentage for css value
@@ -14852,9 +15618,9 @@
};
return Slider;
- }(Component);
+ }(Component$1);
- Component.registerComponent('Slider', Slider);
+ Component$1.registerComponent('Slider', Slider);
var percentify = function percentify(time, end) {
return clamp(time / end * 100, 0, 100).toFixed(2) + '%';
@@ -14866,9 +15632,7 @@
*/
- var LoadProgressBar =
- /*#__PURE__*/
- function (_Component) {
+ var LoadProgressBar = /*#__PURE__*/function (_Component) {
inheritsLoose(LoadProgressBar, _Component);
/**
@@ -14886,7 +15650,9 @@
_this = _Component.call(this, player, options) || this;
_this.partEls_ = [];
- _this.on(player, 'progress', _this.update);
+ _this.on(player, 'progress', function (e) {
+ return _this.update(e);
+ });
return _this;
}
@@ -14942,7 +15708,7 @@
_proto.update = function update(event) {
var _this2 = this;
- this.requestAnimationFrame(function () {
+ this.requestNamedAnimationFrame('LoadProgressBar#update', function () {
var liveTracker = _this2.player_.liveTracker;
var buffered = _this2.player_.buffered();
@@ -14995,9 +15761,9 @@
};
return LoadProgressBar;
- }(Component);
+ }(Component$1);
- Component.registerComponent('LoadProgressBar', LoadProgressBar);
+ Component$1.registerComponent('LoadProgressBar', LoadProgressBar);
/**
* Time tooltips display a time above the progress bar.
@@ -15005,9 +15771,7 @@
* @extends Component
*/
- var TimeTooltip =
- /*#__PURE__*/
- function (_Component) {
+ var TimeTooltip = /*#__PURE__*/function (_Component) {
inheritsLoose(TimeTooltip, _Component);
/**
@@ -15056,7 +15820,7 @@
;
_proto.update = function update(seekBarRect, seekBarPoint, content) {
- var tooltipRect = getBoundingClientRect(this.el_);
+ var tooltipRect = findPosition(this.el_);
var playerRect = getBoundingClientRect(this.player_.el());
var seekBarPointPx = seekBarRect.width * seekBarPoint; // do nothing if either rect isn't available
// for example, if the player isn't in the DOM for testing
@@ -15093,8 +15857,13 @@
pullTooltipBy = 0;
} else if (pullTooltipBy > tooltipRect.width) {
pullTooltipBy = tooltipRect.width;
- }
+ } // prevent small width fluctuations within 0.4px from
+ // changing the value below.
+ // This really helps for live to prevent the play
+ // progress time tooltip from jittering
+
+ pullTooltipBy = Math.round(pullTooltipBy);
this.el_.style.right = "-" + pullTooltipBy + "px";
this.write(content);
}
@@ -15131,12 +15900,7 @@
_proto.updateTime = function updateTime(seekBarRect, seekBarPoint, time, cb) {
var _this2 = this;
- // If there is an existing rAF ID, cancel it so we don't over-queue.
- if (this.rafId_) {
- this.cancelAnimationFrame(this.rafId_);
- }
-
- this.rafId_ = this.requestAnimationFrame(function () {
+ this.requestNamedAnimationFrame('TimeTooltip#updateTime', function () {
var content;
var duration = _this2.player_.duration();
@@ -15159,9 +15923,9 @@
};
return TimeTooltip;
- }(Component);
+ }(Component$1);
- Component.registerComponent('TimeTooltip', TimeTooltip);
+ Component$1.registerComponent('TimeTooltip', TimeTooltip);
/**
* Used by {@link SeekBar} to display media playback progress as part of the
@@ -15170,9 +15934,7 @@
* @extends Component
*/
- var PlayProgressBar =
- /*#__PURE__*/
- function (_Component) {
+ var PlayProgressBar = /*#__PURE__*/function (_Component) {
inheritsLoose(PlayProgressBar, _Component);
/**
@@ -15233,7 +15995,7 @@
};
return PlayProgressBar;
- }(Component);
+ }(Component$1);
/**
* Default options for {@link PlayProgressBar}.
*
@@ -15250,7 +16012,7 @@
PlayProgressBar.prototype.options_.children.push('timeTooltip');
}
- Component.registerComponent('PlayProgressBar', PlayProgressBar);
+ Component$1.registerComponent('PlayProgressBar', PlayProgressBar);
/**
* The {@link MouseTimeDisplay} component tracks mouse movement over the
@@ -15261,9 +16023,7 @@
* @extends Component
*/
- var MouseTimeDisplay =
- /*#__PURE__*/
- function (_Component) {
+ var MouseTimeDisplay = /*#__PURE__*/function (_Component) {
inheritsLoose(MouseTimeDisplay, _Component);
/**
@@ -15320,7 +16080,7 @@
};
return MouseTimeDisplay;
- }(Component);
+ }(Component$1);
/**
* Default options for `MouseTimeDisplay`
*
@@ -15332,7 +16092,7 @@
MouseTimeDisplay.prototype.options_ = {
children: ['timeTooltip']
};
- Component.registerComponent('MouseTimeDisplay', MouseTimeDisplay);
+ Component$1.registerComponent('MouseTimeDisplay', MouseTimeDisplay);
var STEP_SECONDS = 5; // The multiplier of STEP_SECONDS that PgUp/PgDown move the timeline.
@@ -15344,9 +16104,7 @@
* @extends Slider
*/
- var SeekBar =
- /*#__PURE__*/
- function (_Slider) {
+ var SeekBar = /*#__PURE__*/function (_Slider) {
inheritsLoose(SeekBar, _Slider);
/**
@@ -15377,6 +16135,8 @@
var _proto = SeekBar.prototype;
_proto.setEventHandlers_ = function setEventHandlers_() {
+ var _this2 = this;
+
this.update_ = bind(this, this.update);
this.update = throttle(this.update_, UPDATE_REFRESH_INTERVAL);
this.on(this.player_, ['ended', 'durationchange', 'timeupdate'], this.update);
@@ -15388,8 +16148,17 @@
this.updateInterval = null;
- this.on(this.player_, ['playing'], this.enableInterval_);
- this.on(this.player_, ['ended', 'pause', 'waiting'], this.disableInterval_); // we don't need to update the play progress if the document is hidden,
+
+ this.enableIntervalHandler_ = function (e) {
+ return _this2.enableInterval_(e);
+ };
+
+ this.disableIntervalHandler_ = function (e) {
+ return _this2.disableInterval_(e);
+ };
+
+ this.on(this.player_, ['playing'], this.enableIntervalHandler_);
+ this.on(this.player_, ['ended', 'pause', 'waiting'], this.disableIntervalHandler_); // we don't need to update the play progress if the document is hidden,
// also, this causes the CPU to spike and eventually crash the page on IE11.
if ('hidden' in document && 'visibilityState' in document) {
@@ -15398,10 +16167,15 @@
};
_proto.toggleVisibility_ = function toggleVisibility_(e) {
- if (document.hidden) {
+ if (document.visibilityState === 'hidden') {
+ this.cancelNamedAnimationFrame('SeekBar#update');
+ this.cancelNamedAnimationFrame('Slider#update');
this.disableInterval_(e);
} else {
- this.enableInterval_(); // we just switched back to the page and someone may be looking, so, update ASAP
+ if (!this.player_.ended() && !this.player_.paused()) {
+ this.enableInterval_();
+ } // we just switched back to the page and someone may be looking, so, update ASAP
+
this.update();
}
@@ -15416,7 +16190,7 @@
};
_proto.disableInterval_ = function disableInterval_(e) {
- if (this.player_.liveTracker && this.player_.liveTracker.isLive() && e.type !== 'ended') {
+ if (this.player_.liveTracker && this.player_.liveTracker.isLive() && e && e.type !== 'ended') {
return;
}
@@ -15457,42 +16231,63 @@
;
_proto.update = function update(event) {
- var _this2 = this;
+ var _this3 = this;
+
+ // ignore updates while the tab is hidden
+ if (document.visibilityState === 'hidden') {
+ return;
+ }
var percent = _Slider.prototype.update.call(this);
- this.requestAnimationFrame(function () {
- var currentTime = _this2.player_.ended() ? _this2.player_.duration() : _this2.getCurrentTime_();
- var liveTracker = _this2.player_.liveTracker;
+ this.requestNamedAnimationFrame('SeekBar#update', function () {
+ var currentTime = _this3.player_.ended() ? _this3.player_.duration() : _this3.getCurrentTime_();
+ var liveTracker = _this3.player_.liveTracker;
- var duration = _this2.player_.duration();
+ var duration = _this3.player_.duration();
if (liveTracker && liveTracker.isLive()) {
- duration = _this2.player_.liveTracker.liveCurrentTime();
+ duration = _this3.player_.liveTracker.liveCurrentTime();
}
- if (_this2.percent_ !== percent) {
+ if (_this3.percent_ !== percent) {
// machine readable value of progress bar (percentage complete)
- _this2.el_.setAttribute('aria-valuenow', (percent * 100).toFixed(2));
+ _this3.el_.setAttribute('aria-valuenow', (percent * 100).toFixed(2));
- _this2.percent_ = percent;
+ _this3.percent_ = percent;
}
- if (_this2.currentTime_ !== currentTime || _this2.duration_ !== duration) {
+ if (_this3.currentTime_ !== currentTime || _this3.duration_ !== duration) {
// human readable value of progress bar (time complete)
- _this2.el_.setAttribute('aria-valuetext', _this2.localize('progress bar timing: currentTime={1} duration={2}', [formatTime(currentTime, duration), formatTime(duration, duration)], '{1} of {2}'));
+ _this3.el_.setAttribute('aria-valuetext', _this3.localize('progress bar timing: currentTime={1} duration={2}', [formatTime(currentTime, duration), formatTime(duration, duration)], '{1} of {2}'));
- _this2.currentTime_ = currentTime;
- _this2.duration_ = duration;
+ _this3.currentTime_ = currentTime;
+ _this3.duration_ = duration;
} // update the progress bar time tooltip with the current time
- if (_this2.bar) {
- _this2.bar.update(getBoundingClientRect(_this2.el()), _this2.getProgress());
+ if (_this3.bar) {
+ _this3.bar.update(getBoundingClientRect(_this3.el()), _this3.getProgress());
}
});
return percent;
}
+ /**
+ * Prevent liveThreshold from causing seeks to seem like they
+ * are not happening from a user perspective.
+ *
+ * @param {number} ct
+ * current time to seek to
+ */
+ ;
+
+ _proto.userSeek_ = function userSeek_(ct) {
+ if (this.player_.liveTracker && this.player_.liveTracker.isLive()) {
+ this.player_.liveTracker.nextSeekedFromUser();
+ }
+
+ this.player_.currentTime(ct);
+ }
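// Editor's note (not part of the diff): code outside the seek bar that seeks a
// live stream can mirror userSeek_ above so the live tracker treats the jump as
// user-initiated rather than drift. Names are taken from calls in this diff:
function seekAsUser(player, seconds) {
  if (player.liveTracker && player.liveTracker.isLive()) {
    player.liveTracker.nextSeekedFromUser();
  }
  player.currentTime(seconds);
}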
/**
* Get the value of current time but allows for smooth scrubbing,
* when player can't keep up.
@@ -15549,7 +16344,6 @@
event.stopPropagation();
- this.player_.scrubbing(true);
this.videoWasPlaying = !this.player_.paused();
this.player_.pause();
@@ -15560,16 +16354,25 @@
*
* @param {EventTarget~Event} event
* The `mousemove` event that caused this to run.
+ * @param {boolean} mouseDown this is a flag that should be set to true if `handleMouseMove` is called directly. It allows us to skip things that should not happen if coming from mouse down but should happen on regular mouse move handler. Defaults to false
*
* @listens mousemove
*/
;
- _proto.handleMouseMove = function handleMouseMove(event) {
+ _proto.handleMouseMove = function handleMouseMove(event, mouseDown) {
+ if (mouseDown === void 0) {
+ mouseDown = false;
+ }
+
if (!isSingleLeftClick(event)) {
return;
}
+ if (!mouseDown && !this.player_.scrubbing()) {
+ this.player_.scrubbing(true);
+ }
+
var newTime;
var distance = this.calculateDistance(event);
var liveTracker = this.player_.liveTracker;
@@ -15581,6 +16384,11 @@
newTime = newTime - 0.1;
}
} else {
+ if (distance >= 0.99) {
+ liveTracker.seekToLiveEdge();
+ return;
+ }
+
var seekableStart = liveTracker.seekableStart();
var seekableEnd = liveTracker.liveCurrentTime();
newTime = seekableStart + distance * liveTracker.liveWindow(); // Don't let video end while scrubbing.
@@ -15604,7 +16412,7 @@
} // Set new time (tell player to seek to new time)
- this.player_.currentTime(newTime);
+ this.userSeek_(newTime);
};
_proto.enable = function enable() {
@@ -15677,7 +16485,7 @@
;
_proto.stepForward = function stepForward() {
- this.player_.currentTime(this.player_.currentTime() + STEP_SECONDS);
+ this.userSeek_(this.player_.currentTime() + STEP_SECONDS);
}
/**
* Move more quickly rewind for keyboard-only users
@@ -15685,7 +16493,7 @@
;
_proto.stepBack = function stepBack() {
- this.player_.currentTime(this.player_.currentTime() - STEP_SECONDS);
+ this.userSeek_(this.player_.currentTime() - STEP_SECONDS);
}
/**
* Toggles the playback state of the player
@@ -15723,6 +16531,8 @@
;
_proto.handleKeyDown = function handleKeyDown(event) {
+ var liveTracker = this.player_.liveTracker;
+
if (keycode.isEventKey(event, 'Space') || keycode.isEventKey(event, 'Enter')) {
event.preventDefault();
event.stopPropagation();
@@ -15730,30 +16540,59 @@
} else if (keycode.isEventKey(event, 'Home')) {
event.preventDefault();
event.stopPropagation();
- this.player_.currentTime(0);
+ this.userSeek_(0);
} else if (keycode.isEventKey(event, 'End')) {
event.preventDefault();
event.stopPropagation();
- this.player_.currentTime(this.player_.duration());
+
+ if (liveTracker && liveTracker.isLive()) {
+ this.userSeek_(liveTracker.liveCurrentTime());
+ } else {
+ this.userSeek_(this.player_.duration());
+ }
} else if (/^[0-9]$/.test(keycode(event))) {
event.preventDefault();
event.stopPropagation();
var gotoFraction = (keycode.codes[keycode(event)] - keycode.codes['0']) * 10.0 / 100.0;
- this.player_.currentTime(this.player_.duration() * gotoFraction);
+
+ if (liveTracker && liveTracker.isLive()) {
+ this.userSeek_(liveTracker.seekableStart() + liveTracker.liveWindow() * gotoFraction);
+ } else {
+ this.userSeek_(this.player_.duration() * gotoFraction);
+ }
} else if (keycode.isEventKey(event, 'PgDn')) {
event.preventDefault();
event.stopPropagation();
- this.player_.currentTime(this.player_.currentTime() - STEP_SECONDS * PAGE_KEY_MULTIPLIER);
+ this.userSeek_(this.player_.currentTime() - STEP_SECONDS * PAGE_KEY_MULTIPLIER);
} else if (keycode.isEventKey(event, 'PgUp')) {
event.preventDefault();
event.stopPropagation();
- this.player_.currentTime(this.player_.currentTime() + STEP_SECONDS * PAGE_KEY_MULTIPLIER);
+ this.userSeek_(this.player_.currentTime() + STEP_SECONDS * PAGE_KEY_MULTIPLIER);
} else {
// Pass keydown handling up for unsupported keys
_Slider.prototype.handleKeyDown.call(this, event);
}
};
+ _proto.dispose = function dispose() {
+ this.disableInterval_();
+ this.off(this.player_, ['ended', 'durationchange', 'timeupdate'], this.update);
+
+ if (this.player_.liveTracker) {
+ this.off(this.player_.liveTracker, 'liveedgechange', this.update);
+ }
+
+ this.off(this.player_, ['playing'], this.enableIntervalHandler_);
+ this.off(this.player_, ['ended', 'pause', 'waiting'], this.disableIntervalHandler_); // we don't need to update the play progress if the document is hidden,
+ // also, this causes the CPU to spike and eventually crash the page on IE11.
+
+ if ('hidden' in document && 'visibilityState' in document) {
+ this.off(document, 'visibilitychange', this.toggleVisibility_);
+ }
+
+ _Slider.prototype.dispose.call(this);
+ };
+
return SeekBar;
}(Slider);
/**
@@ -15773,7 +16612,7 @@
SeekBar.prototype.options_.children.splice(1, 0, 'mouseTimeDisplay');
}
- Component.registerComponent('SeekBar', SeekBar);
+ Component$1.registerComponent('SeekBar', SeekBar);
/**
* The Progress Control component contains the seek bar, load progress,
@@ -15782,9 +16621,7 @@
* @extends Component
*/
- var ProgressControl =
- /*#__PURE__*/
- function (_Component) {
+ var ProgressControl = /*#__PURE__*/function (_Component) {
inheritsLoose(ProgressControl, _Component);
/**
@@ -15803,6 +16640,14 @@
_this.handleMouseMove = throttle(bind(assertThisInitialized(_this), _this.handleMouseMove), UPDATE_REFRESH_INTERVAL);
_this.throttledHandleMouseSeek = throttle(bind(assertThisInitialized(_this), _this.handleMouseSeek), UPDATE_REFRESH_INTERVAL);
+ _this.handleMouseUpHandler_ = function (e) {
+ return _this.handleMouseUp(e);
+ };
+
+ _this.handleMouseDownHandler_ = function (e) {
+ return _this.handleMouseDown(e);
+ };
+
_this.enable();
return _this;
@@ -15848,12 +16693,12 @@
}
var seekBarEl = seekBar.el();
- var seekBarRect = getBoundingClientRect(seekBarEl);
+ var seekBarRect = findPosition(seekBarEl);
var seekBarPoint = getPointerPosition(seekBarEl, event).x; // The default skin has a gap on either side of the `SeekBar`. This means
// that it's possible to trigger this behavior outside the boundaries of
// the `SeekBar`. This ensures we stay within it at all times.
- seekBarPoint = clamp(0, 1, seekBarPoint);
+ seekBarPoint = clamp(seekBarPoint, 0, 1);
if (mouseTimeDisplay) {
mouseTimeDisplay.update(seekBarRect, seekBarPoint);
@@ -15917,11 +16762,20 @@
return;
}
- this.off(['mousedown', 'touchstart'], this.handleMouseDown);
+ this.off(['mousedown', 'touchstart'], this.handleMouseDownHandler_);
this.off(this.el_, 'mousemove', this.handleMouseMove);
- this.handleMouseUp();
+ this.removeListenersAddedOnMousedownAndTouchstart();
this.addClass('disabled');
- this.enabled_ = false;
+ this.enabled_ = false; // Restore normal playback state if controls are disabled while scrubbing
+
+ if (this.player_.scrubbing()) {
+ var seekBar = this.getChild('seekBar');
+ this.player_.scrubbing(false);
+
+ if (seekBar.videoWasPlaying) {
+ silencePromise(this.player_.play());
+ }
+ }
}
/**
* Enable all controls on the progress control and its children
@@ -15937,11 +16791,23 @@
return;
}
- this.on(['mousedown', 'touchstart'], this.handleMouseDown);
+ this.on(['mousedown', 'touchstart'], this.handleMouseDownHandler_);
this.on(this.el_, 'mousemove', this.handleMouseMove);
this.removeClass('disabled');
this.enabled_ = true;
}
+ /**
+ * Cleanup listeners after the user finishes interacting with the progress controls
+ */
+ ;
+
+ _proto.removeListenersAddedOnMousedownAndTouchstart = function removeListenersAddedOnMousedownAndTouchstart() {
+ var doc = this.el_.ownerDocument;
+ this.off(doc, 'mousemove', this.throttledHandleMouseSeek);
+ this.off(doc, 'touchmove', this.throttledHandleMouseSeek);
+ this.off(doc, 'mouseup', this.handleMouseUpHandler_);
+ this.off(doc, 'touchend', this.handleMouseUpHandler_);
+ }
/**
* Handle `mousedown` or `touchstart` events on the `ProgressControl`.
*
@@ -15963,8 +16829,8 @@
this.on(doc, 'mousemove', this.throttledHandleMouseSeek);
this.on(doc, 'touchmove', this.throttledHandleMouseSeek);
- this.on(doc, 'mouseup', this.handleMouseUp);
- this.on(doc, 'touchend', this.handleMouseUp);
+ this.on(doc, 'mouseup', this.handleMouseUpHandler_);
+ this.on(doc, 'touchend', this.handleMouseUpHandler_);
}
/**
* Handle `mouseup` or `touchend` events on the `ProgressControl`.
@@ -15978,21 +16844,17 @@
;
_proto.handleMouseUp = function handleMouseUp(event) {
- var doc = this.el_.ownerDocument;
var seekBar = this.getChild('seekBar');
if (seekBar) {
seekBar.handleMouseUp(event);
}
- this.off(doc, 'mousemove', this.throttledHandleMouseSeek);
- this.off(doc, 'touchmove', this.throttledHandleMouseSeek);
- this.off(doc, 'mouseup', this.handleMouseUp);
- this.off(doc, 'touchend', this.handleMouseUp);
+ this.removeListenersAddedOnMousedownAndTouchstart();
};
return ProgressControl;
- }(Component);
+ }(Component$1);
/**
* Default options for `ProgressControl`
*
@@ -16004,7 +16866,7 @@
ProgressControl.prototype.options_ = {
children: ['seekBar']
};
- Component.registerComponent('ProgressControl', ProgressControl);
+ Component$1.registerComponent('ProgressControl', ProgressControl);
/**
* Toggle Picture-in-Picture mode
@@ -16012,9 +16874,7 @@
* @extends Button
*/
- var PictureInPictureToggle =
- /*#__PURE__*/
- function (_Button) {
+ var PictureInPictureToggle = /*#__PURE__*/function (_Button) {
inheritsLoose(PictureInPictureToggle, _Button);
/**
@@ -16034,14 +16894,31 @@
_this = _Button.call(this, player, options) || this;
- _this.on(player, ['enterpictureinpicture', 'leavepictureinpicture'], _this.handlePictureInPictureChange); // TODO: Activate button on player loadedmetadata event.
- // TODO: Deactivate button on player emptied event.
- // TODO: Deactivate button if disablepictureinpicture attribute is present.
+ _this.on(player, ['enterpictureinpicture', 'leavepictureinpicture'], function (e) {
+ return _this.handlePictureInPictureChange(e);
+ });
+
+ _this.on(player, ['disablepictureinpicturechanged', 'loadedmetadata'], function (e) {
+ return _this.handlePictureInPictureEnabledChange(e);
+ });
+
+ _this.on(player, ['loadedmetadata', 'audioonlymodechange', 'audiopostermodechange'], function () {
+ // This audio detection will not detect HLS or DASH audio-only streams because there was no reliable way to detect them at the time
+ var isSourceAudio = player.currentType().substring(0, 5) === 'audio';
+
+ if (isSourceAudio || player.audioPosterMode() || player.audioOnlyMode()) {
+ if (player.isInPictureInPicture()) {
+ player.exitPictureInPicture();
+ }
+
+ _this.hide();
+ } else {
+ _this.show();
+ }
+ }); // TODO: Deactivate button on player emptied event.
- if (!document.pictureInPictureEnabled) {
- _this.disable();
- }
+ _this.disable();
return _this;
}
@@ -16058,6 +16935,19 @@
_proto.buildCSSClass = function buildCSSClass() {
return "vjs-picture-in-picture-control " + _Button.prototype.buildCSSClass.call(this);
}
+ /**
+ * Enables or disables button based on document.pictureInPictureEnabled property value
+ * or on value returned by player.disablePictureInPicture() method.
+ */
+ ;
+
+ _proto.handlePictureInPictureEnabledChange = function handlePictureInPictureEnabledChange() {
+ if (document.pictureInPictureEnabled && this.player_.disablePictureInPicture() === false) {
+ this.enable();
+ } else {
+ this.disable();
+ }
+ }
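// Editor's note (not part of the diff): the toggle now starts disabled and
// re-enables itself when document.pictureInPictureEnabled is true and
// player.disablePictureInPicture() is false. If the player in this build also
// proxies the tech's setter (the 'disablepictureinpicturechanged' listener
// above suggests it does), availability could be toggled as in this sketch:
var pipPlayer = videojs('my-video');
pipPlayer.ready(function () {
  pipPlayer.disablePictureInPicture(true);  // keeps the PiP button disabled
  pipPlayer.disablePictureInPicture(false); // re-enables it where PiP is supported
});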
/**
* Handles enterpictureinpicture and leavepictureinpicture on the player and change control text accordingly.
*
@@ -16076,6 +16966,8 @@
} else {
this.controlText('Picture-in-Picture');
}
+
+ this.handlePictureInPictureEnabledChange();
}
/**
* This gets called when an `PictureInPictureToggle` is "clicked". See
@@ -16109,7 +17001,7 @@
PictureInPictureToggle.prototype.controlText_ = 'Picture-in-Picture';
- Component.registerComponent('PictureInPictureToggle', PictureInPictureToggle);
+ Component$1.registerComponent('PictureInPictureToggle', PictureInPictureToggle);
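For orientation, a minimal usage sketch (editorial, not part of the diff) of the rule the new handlePictureInPictureEnabledChange applies; the player id below is hypothetical:

    var player = videojs('example-player'); // hypothetical element id
    // The toggle is usable only when the browser exposes PiP support and the
    // current source has not opted out via disablePictureInPicture.
    var pipUsable = document.pictureInPictureEnabled && player.disablePictureInPicture() === false;
    console.log(pipUsable ? 'PiP toggle enabled' : 'PiP toggle disabled');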
/**
* Toggle fullscreen video
@@ -16117,9 +17009,7 @@
* @extends Button
*/
- var FullscreenToggle =
- /*#__PURE__*/
- function (_Button) {
+ var FullscreenToggle = /*#__PURE__*/function (_Button) {
inheritsLoose(FullscreenToggle, _Button);
/**
@@ -16136,7 +17026,9 @@
_this = _Button.call(this, player, options) || this;
- _this.on(player, 'fullscreenchange', _this.handleFullscreenChange);
+ _this.on(player, 'fullscreenchange', function (e) {
+ return _this.handleFullscreenChange(e);
+ });
if (document[player.fsApi_.fullscreenEnabled] === false) {
_this.disable();
@@ -16207,7 +17099,7 @@
FullscreenToggle.prototype.controlText_ = 'Fullscreen';
- Component.registerComponent('FullscreenToggle', FullscreenToggle);
+ Component$1.registerComponent('FullscreenToggle', FullscreenToggle);
/**
* Check if volume control is supported and if it isn't hide the
@@ -16242,9 +17134,7 @@
* @extends Component
*/
- var VolumeLevel =
- /*#__PURE__*/
- function (_Component) {
+ var VolumeLevel = /*#__PURE__*/function (_Component) {
inheritsLoose(VolumeLevel, _Component);
function VolumeLevel() {
@@ -16260,16 +17150,251 @@
* The element that was created.
*/
_proto.createEl = function createEl() {
- return _Component.prototype.createEl.call(this, 'div', {
- className: 'vjs-volume-level',
- innerHTML: ' '
+ var el = _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-volume-level'
});
+
+ el.appendChild(_Component.prototype.createEl.call(this, 'span', {
+ className: 'vjs-control-text'
+ }));
+ return el;
};
return VolumeLevel;
- }(Component);
+ }(Component$1);
- Component.registerComponent('VolumeLevel', VolumeLevel);
+ Component$1.registerComponent('VolumeLevel', VolumeLevel);
+
+ /**
+ * Volume level tooltips display the volume level above or alongside the volume bar.
+ *
+ * @extends Component
+ */
+
+ var VolumeLevelTooltip = /*#__PURE__*/function (_Component) {
+ inheritsLoose(VolumeLevelTooltip, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The {@link Player} that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function VolumeLevelTooltip(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.update = throttle(bind(assertThisInitialized(_this), _this.update), UPDATE_REFRESH_INTERVAL);
+ return _this;
+ }
+ /**
+ * Create the volume tooltip DOM element
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = VolumeLevelTooltip.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-volume-tooltip'
+ }, {
+ 'aria-hidden': 'true'
+ });
+ }
+ /**
+ * Updates the position of the tooltip relative to the `VolumeBar` and
+ * its content text.
+ *
+ * @param {Object} rangeBarRect
+ * The `ClientRect` for the {@link VolumeBar} element.
+ *
+ * @param {number} rangeBarPoint
+ * A number from 0 to 1, representing a horizontal/vertical reference point
+ * from the left edge of the {@link VolumeBar}
+ *
+ * @param {boolean} vertical
+ * Refers to the Volume control position
+ * in the control bar {@link VolumeControl}
+ *
+ */
+ ;
+
+ _proto.update = function update(rangeBarRect, rangeBarPoint, vertical, content) {
+ if (!vertical) {
+ var tooltipRect = getBoundingClientRect(this.el_);
+ var playerRect = getBoundingClientRect(this.player_.el());
+ var volumeBarPointPx = rangeBarRect.width * rangeBarPoint;
+
+ if (!playerRect || !tooltipRect) {
+ return;
+ }
+
+ var spaceLeftOfPoint = rangeBarRect.left - playerRect.left + volumeBarPointPx;
+ var spaceRightOfPoint = rangeBarRect.width - volumeBarPointPx + (playerRect.right - rangeBarRect.right);
+ var pullTooltipBy = tooltipRect.width / 2;
+
+ if (spaceLeftOfPoint < pullTooltipBy) {
+ pullTooltipBy += pullTooltipBy - spaceLeftOfPoint;
+ } else if (spaceRightOfPoint < pullTooltipBy) {
+ pullTooltipBy = spaceRightOfPoint;
+ }
+
+ if (pullTooltipBy < 0) {
+ pullTooltipBy = 0;
+ } else if (pullTooltipBy > tooltipRect.width) {
+ pullTooltipBy = tooltipRect.width;
+ }
+
+ this.el_.style.right = "-" + pullTooltipBy + "px";
+ }
+
+ this.write(content + "%");
+ }
+ /**
+ * Write the volume to the tooltip DOM element.
+ *
+ * @param {string} content
+ * The formatted volume for the tooltip.
+ */
+ ;
+
+ _proto.write = function write(content) {
+ textContent(this.el_, content);
+ }
+ /**
+ * Updates the position of the volume tooltip relative to the `VolumeBar`.
+ *
+ * @param {Object} rangeBarRect
+ * The `ClientRect` for the {@link VolumeBar} element.
+ *
+ * @param {number} rangeBarPoint
+ * A number from 0 to 1, representing a horizontal/vertical reference point
+ * from the left edge of the {@link VolumeBar}
+ *
+ * @param {boolean} vertical
+ * Refers to the Volume control position
+ * in the control bar {@link VolumeControl}
+ *
+ * @param {number} volume
+ * The volume level to update the tooltip to
+ *
+ * @param {Function} cb
+ * A function that will be called during the request animation frame
+ * for tooltips that need to do additional animations from the default
+ */
+ ;
+
+ _proto.updateVolume = function updateVolume(rangeBarRect, rangeBarPoint, vertical, volume, cb) {
+ var _this2 = this;
+
+ this.requestNamedAnimationFrame('VolumeLevelTooltip#updateVolume', function () {
+ _this2.update(rangeBarRect, rangeBarPoint, vertical, volume.toFixed(0));
+
+ if (cb) {
+ cb();
+ }
+ });
+ };
+
+ return VolumeLevelTooltip;
+ }(Component$1);
+
+ Component$1.registerComponent('VolumeLevelTooltip', VolumeLevelTooltip);
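A standalone sketch (editorial, not part of the diff) of the horizontal clamping performed in VolumeLevelTooltip#update above, with hypothetical pixel values:

    function pullBy(tooltipWidth, spaceLeftOfPoint, spaceRightOfPoint) {
      var pull = tooltipWidth / 2;
      if (spaceLeftOfPoint < pull) {
        pull += pull - spaceLeftOfPoint; // not enough room on the left: shift further right
      } else if (spaceRightOfPoint < pull) {
        pull = spaceRightOfPoint; // not enough room on the right: shift left
      }
      return Math.min(Math.max(pull, 0), tooltipWidth);
    }
    // A 40px-wide tooltip with only 5px of player to its left is pulled by 35px,
    // i.e. style.right = "-35px", which keeps it inside the player.
    console.log(pullBy(40, 5, 300)); // 35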
+
+ /**
+ * The {@link MouseVolumeLevelDisplay} component tracks mouse movement over the
+ * {@link VolumeControl}. It displays an indicator and a {@link VolumeLevelTooltip}
+ * indicating the volume level which is represented by a given point in the
+ * {@link VolumeBar}.
+ *
+ * @extends Component
+ */
+
+ var MouseVolumeLevelDisplay = /*#__PURE__*/function (_Component) {
+ inheritsLoose(MouseVolumeLevelDisplay, _Component);
+
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The {@link Player} that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ */
+ function MouseVolumeLevelDisplay(player, options) {
+ var _this;
+
+ _this = _Component.call(this, player, options) || this;
+ _this.update = throttle(bind(assertThisInitialized(_this), _this.update), UPDATE_REFRESH_INTERVAL);
+ return _this;
+ }
+ /**
+ * Create the DOM element for this class.
+ *
+ * @return {Element}
+ * The element that was created.
+ */
+
+
+ var _proto = MouseVolumeLevelDisplay.prototype;
+
+ _proto.createEl = function createEl() {
+ return _Component.prototype.createEl.call(this, 'div', {
+ className: 'vjs-mouse-display'
+ });
+ }
+ /**
+ * Enqueues updates to its own DOM as well as the DOM of its
+ * {@link VolumeLevelTooltip} child.
+ *
+ * @param {Object} rangeBarRect
+ * The `ClientRect` for the {@link VolumeBar} element.
+ *
+ * @param {number} rangeBarPoint
+ * A number from 0 to 1, representing a horizontal/vertical reference point
+ * from the left edge of the {@link VolumeBar}
+ *
+ * @param {boolean} vertical
+ * Refers to the Volume control position
+ * in the control bar {@link VolumeControl}
+ *
+ */
+ ;
+
+ _proto.update = function update(rangeBarRect, rangeBarPoint, vertical) {
+ var _this2 = this;
+
+ var volume = 100 * rangeBarPoint;
+ this.getChild('volumeLevelTooltip').updateVolume(rangeBarRect, rangeBarPoint, vertical, volume, function () {
+ if (vertical) {
+ _this2.el_.style.bottom = rangeBarRect.height * rangeBarPoint + "px";
+ } else {
+ _this2.el_.style.left = rangeBarRect.width * rangeBarPoint + "px";
+ }
+ });
+ };
+
+ return MouseVolumeLevelDisplay;
+ }(Component$1);
+ /**
+ * Default options for `MouseVolumeLevelDisplay`
+ *
+ * @type {Object}
+ * @private
+ */
+
+
+ MouseVolumeLevelDisplay.prototype.options_ = {
+ children: ['volumeLevelTooltip']
+ };
+ Component$1.registerComponent('MouseVolumeLevelDisplay', MouseVolumeLevelDisplay);
/**
* The bar that contains the volume level and can be clicked on to adjust the level
@@ -16277,9 +17402,7 @@
* @extends Slider
*/
- var VolumeBar =
- /*#__PURE__*/
- function (_Slider) {
+ var VolumeBar = /*#__PURE__*/function (_Slider) {
inheritsLoose(VolumeBar, _Slider);
/**
@@ -16296,9 +17419,13 @@
_this = _Slider.call(this, player, options) || this;
- _this.on('slideractive', _this.updateLastVolume_);
+ _this.on('slideractive', function (e) {
+ return _this.updateLastVolume_(e);
+ });
- _this.on(player, 'volumechange', _this.updateARIAAttributes);
+ _this.on(player, 'volumechange', function (e) {
+ return _this.updateARIAAttributes(e);
+ });
player.ready(function () {
return _this.updateARIAAttributes();
@@ -16351,6 +17478,21 @@
;
_proto.handleMouseMove = function handleMouseMove(event) {
+ var mouseVolumeLevelDisplay = this.getChild('mouseVolumeLevelDisplay');
+
+ if (mouseVolumeLevelDisplay) {
+ var volumeBarEl = this.el();
+ var volumeBarRect = getBoundingClientRect(volumeBarEl);
+ var vertical = this.vertical();
+ var volumeBarPoint = getPointerPosition(volumeBarEl, event);
+ volumeBarPoint = vertical ? volumeBarPoint.y : volumeBarPoint.x; // The default skin has a gap on either side of the `VolumeBar`. This means
+ // that it's possible to trigger this behavior outside the boundaries of
+ // the `VolumeBar`. This ensures we stay within it at all times.
+
+ volumeBarPoint = clamp(volumeBarPoint, 0, 1);
+ mouseVolumeLevelDisplay.update(volumeBarRect, volumeBarPoint, vertical);
+ }
+
if (!isSingleLeftClick(event)) {
return;
}
@@ -16460,15 +17602,20 @@
VolumeBar.prototype.options_ = {
children: ['volumeLevel'],
barName: 'volumeLevel'
- };
+ }; // MouseVolumeLevelDisplay tooltip should not be added to a player on mobile devices
+
+ if (!IS_IOS && !IS_ANDROID) {
+ VolumeBar.prototype.options_.children.splice(0, 0, 'mouseVolumeLevelDisplay');
+ }
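If the hover tooltip is unwanted on desktop as well, a hedged sketch of opting out through nested child options (assuming the usual video.js convention that a falsy child option removes that child; the player id is hypothetical):

    videojs('example-player', {
      controlBar: {
        volumePanel: {
          volumeControl: {
            volumeBar: { mouseVolumeLevelDisplay: false }
          }
        }
      }
    });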
/**
* Call the update event for this Slider when this event happens on the player.
*
* @type {string}
*/
+
VolumeBar.prototype.playerEvent = 'volumechange';
- Component.registerComponent('VolumeBar', VolumeBar);
+ Component$1.registerComponent('VolumeBar', VolumeBar);
/**
* The component for controlling the volume level
@@ -16476,9 +17623,7 @@
* @extends Component
*/
- var VolumeControl =
- /*#__PURE__*/
- function (_Component) {
+ var VolumeControl = /*#__PURE__*/function (_Component) {
inheritsLoose(VolumeControl, _Component);
/**
@@ -16510,9 +17655,21 @@
checkVolumeSupport(assertThisInitialized(_this), player);
_this.throttledHandleMouseMove = throttle(bind(assertThisInitialized(_this), _this.handleMouseMove), UPDATE_REFRESH_INTERVAL);
- _this.on('mousedown', _this.handleMouseDown);
+ _this.handleMouseUpHandler_ = function (e) {
+ return _this.handleMouseUp(e);
+ };
- _this.on('touchstart', _this.handleMouseDown); // while the slider is active (the mouse has been pressed down and
+ _this.on('mousedown', function (e) {
+ return _this.handleMouseDown(e);
+ });
+
+ _this.on('touchstart', function (e) {
+ return _this.handleMouseDown(e);
+ });
+
+ _this.on('mousemove', function (e) {
+ return _this.handleMouseMove(e);
+ }); // while the slider is active (the mouse has been pressed down and
// is dragging) or in focus we do not want to hide the VolumeBar
@@ -16570,8 +17727,8 @@
var doc = this.el_.ownerDocument;
this.on(doc, 'mousemove', this.throttledHandleMouseMove);
this.on(doc, 'touchmove', this.throttledHandleMouseMove);
- this.on(doc, 'mouseup', this.handleMouseUp);
- this.on(doc, 'touchend', this.handleMouseUp);
+ this.on(doc, 'mouseup', this.handleMouseUpHandler_);
+ this.on(doc, 'touchend', this.handleMouseUpHandler_);
}
/**
* Handle `mouseup` or `touchend` events on the `VolumeControl`.
@@ -16588,8 +17745,8 @@
var doc = this.el_.ownerDocument;
this.off(doc, 'mousemove', this.throttledHandleMouseMove);
this.off(doc, 'touchmove', this.throttledHandleMouseMove);
- this.off(doc, 'mouseup', this.handleMouseUp);
- this.off(doc, 'touchend', this.handleMouseUp);
+ this.off(doc, 'mouseup', this.handleMouseUpHandler_);
+ this.off(doc, 'touchend', this.handleMouseUpHandler_);
}
/**
* Handle `mousedown` or `touchstart` events on the `VolumeControl`.
@@ -16607,7 +17764,7 @@
};
return VolumeControl;
- }(Component);
+ }(Component$1);
/**
* Default options for the `VolumeControl`
*
@@ -16619,7 +17776,7 @@
VolumeControl.prototype.options_ = {
children: ['volumeBar']
};
- Component.registerComponent('VolumeControl', VolumeControl);
+ Component$1.registerComponent('VolumeControl', VolumeControl);
/**
* Check if muting volume is supported and if it isn't hide the mute toggle
@@ -16654,9 +17811,7 @@
* @extends Button
*/
- var MuteToggle =
- /*#__PURE__*/
- function (_Button) {
+ var MuteToggle = /*#__PURE__*/function (_Button) {
inheritsLoose(MuteToggle, _Button);
/**
@@ -16675,7 +17830,9 @@
checkMuteSupport(assertThisInitialized(_this), player);
- _this.on(player, ['loadstart', 'volumechange'], _this.update);
+ _this.on(player, ['loadstart', 'volumechange'], function (e) {
+ return _this.update(e);
+ });
return _this;
}
@@ -16801,7 +17958,7 @@
MuteToggle.prototype.controlText_ = 'Mute';
- Component.registerComponent('MuteToggle', MuteToggle);
+ Component$1.registerComponent('MuteToggle', MuteToggle);
/**
* A Component to contain the MuteToggle and VolumeControl so that
@@ -16810,9 +17967,7 @@
* @extends Component
*/
- var VolumePanel =
- /*#__PURE__*/
- function (_Component) {
+ var VolumePanel = /*#__PURE__*/function (_Component) {
inheritsLoose(VolumePanel, _Component);
/**
@@ -16844,19 +17999,35 @@
options.volumeControl.vertical = !options.inline;
}
- _this = _Component.call(this, player, options) || this;
+ _this = _Component.call(this, player, options) || this; // this handler is used by mouse handler methods below
- _this.on(player, ['loadstart'], _this.volumePanelState_);
+ _this.handleKeyPressHandler_ = function (e) {
+ return _this.handleKeyPress(e);
+ };
- _this.on(_this.muteToggle, 'keyup', _this.handleKeyPress);
+ _this.on(player, ['loadstart'], function (e) {
+ return _this.volumePanelState_(e);
+ });
- _this.on(_this.volumeControl, 'keyup', _this.handleVolumeControlKeyUp);
+ _this.on(_this.muteToggle, 'keyup', function (e) {
+ return _this.handleKeyPress(e);
+ });
- _this.on('keydown', _this.handleKeyPress);
+ _this.on(_this.volumeControl, 'keyup', function (e) {
+ return _this.handleVolumeControlKeyUp(e);
+ });
- _this.on('mouseover', _this.handleMouseOver);
+ _this.on('keydown', function (e) {
+ return _this.handleKeyPress(e);
+ });
- _this.on('mouseout', _this.handleMouseOut); // while the slider is active (the mouse has been pressed down and
+ _this.on('mouseover', function (e) {
+ return _this.handleMouseOver(e);
+ });
+
+ _this.on('mouseout', function (e) {
+ return _this.handleMouseOut(e);
+ }); // while the slider is active (the mouse has been pressed down and
// is dragging) we do not want to hide the VolumeBar
@@ -16971,7 +18142,7 @@
_proto.handleMouseOver = function handleMouseOver(event) {
this.addClass('vjs-hover');
- on(document, 'keyup', bind(this, this.handleKeyPress));
+ on(document, 'keyup', this.handleKeyPressHandler_);
}
/**
* This gets called when a `VolumePanel` gains hover via a `mouseout` event.
@@ -16987,7 +18158,7 @@
_proto.handleMouseOut = function handleMouseOut(event) {
this.removeClass('vjs-hover');
- off(document, 'keyup', bind(this, this.handleKeyPress));
+ off(document, 'keyup', this.handleKeyPressHandler_);
}
/**
* Handles `keyup` event on the document or `keydown` event on the `VolumePanel`,
@@ -17007,7 +18178,7 @@
};
return VolumePanel;
- }(Component);
+ }(Component$1);
/**
* Default options for the `VolumeControl`
*
@@ -17019,7 +18190,7 @@
VolumePanel.prototype.options_ = {
children: ['muteToggle', 'volumeControl']
};
- Component.registerComponent('VolumePanel', VolumePanel);
+ Component$1.registerComponent('VolumePanel', VolumePanel);
/**
* The Menu component is used to build popup menus, including subtitle and
@@ -17028,9 +18199,7 @@
* @extends Component
*/
- var Menu =
- /*#__PURE__*/
- function (_Component) {
+ var Menu = /*#__PURE__*/function (_Component) {
inheritsLoose(Menu, _Component);
/**
@@ -17054,11 +18223,19 @@
_this.focusedChild_ = -1;
- _this.on('keydown', _this.handleKeyDown); // All the menu item instances share the same blur handler provided by the menu container.
+ _this.on('keydown', function (e) {
+ return _this.handleKeyDown(e);
+ }); // All the menu item instances share the same blur handler provided by the menu container.
- _this.boundHandleBlur_ = bind(assertThisInitialized(_this), _this.handleBlur);
- _this.boundHandleTapClick_ = bind(assertThisInitialized(_this), _this.handleTapClick);
+ _this.boundHandleBlur_ = function (e) {
+ return _this.handleBlur(e);
+ };
+
+ _this.boundHandleTapClick_ = function (e) {
+ return _this.handleTapClick(e);
+ };
+
return _this;
}
/**
@@ -17073,7 +18250,7 @@
var _proto = Menu.prototype;
_proto.addEventListenerForItem = function addEventListenerForItem(component) {
- if (!(component instanceof Component)) {
+ if (!(component instanceof Component$1)) {
return;
}
@@ -17090,7 +18267,7 @@
;
_proto.removeEventListenerForItem = function removeEventListenerForItem(component) {
- if (!(component instanceof Component)) {
+ if (!(component instanceof Component$1)) {
return;
}
@@ -17292,7 +18469,7 @@
}
var children = this.children().slice();
- var haveTitle = children.length && children[0].className && /vjs-menu-title/.test(children[0].className);
+ var haveTitle = children.length && children[0].hasClass('vjs-menu-title');
if (haveTitle) {
children.shift();
@@ -17311,9 +18488,9 @@
};
return Menu;
- }(Component);
+ }(Component$1);
- Component.registerComponent('Menu', Menu);
+ Component$1.registerComponent('Menu', Menu);
/**
* A `MenuButton` class for any popup {@link Menu}.
@@ -17321,9 +18498,7 @@
* @extends Component
*/
- var MenuButton =
- /*#__PURE__*/
- function (_Component) {
+ var MenuButton = /*#__PURE__*/function (_Component) {
inheritsLoose(MenuButton, _Component);
/**
@@ -17361,23 +18536,37 @@
_this.enabled_ = true;
- _this.on(_this.menuButton_, 'tap', _this.handleClick);
+ var handleClick = function handleClick(e) {
+ return _this.handleClick(e);
+ };
- _this.on(_this.menuButton_, 'click', _this.handleClick);
+ _this.handleMenuKeyUp_ = function (e) {
+ return _this.handleMenuKeyUp(e);
+ };
- _this.on(_this.menuButton_, 'keydown', _this.handleKeyDown);
+ _this.on(_this.menuButton_, 'tap', handleClick);
+
+ _this.on(_this.menuButton_, 'click', handleClick);
+
+ _this.on(_this.menuButton_, 'keydown', function (e) {
+ return _this.handleKeyDown(e);
+ });
_this.on(_this.menuButton_, 'mouseenter', function () {
_this.addClass('vjs-hover');
_this.menu.show();
- on(document, 'keyup', bind(assertThisInitialized(_this), _this.handleMenuKeyUp));
+ on(document, 'keyup', _this.handleMenuKeyUp_);
});
- _this.on('mouseleave', _this.handleMouseLeave);
+ _this.on('mouseleave', function (e) {
+ return _this.handleMouseLeave(e);
+ });
- _this.on('keydown', _this.handleSubmenuKeyDown);
+ _this.on('keydown', function (e) {
+ return _this.handleSubmenuKeyDown(e);
+ });
return _this;
}
@@ -17410,8 +18599,10 @@
if (this.items && this.items.length <= this.hideThreshold_) {
this.hide();
+ this.menu.contentEl_.removeAttribute('role');
} else {
this.show();
+ this.menu.contentEl_.setAttribute('role', 'menu');
}
}
/**
@@ -17440,11 +18631,10 @@
if (this.options_.title) {
var titleEl = createEl('li', {
className: 'vjs-menu-title',
- innerHTML: toTitleCase(this.options_.title),
+ textContent: toTitleCase$1(this.options_.title),
tabIndex: -1
});
- this.hideThreshold_ += 1;
- var titleComponent = new Component(this.player_, {
+ var titleComponent = new Component$1(this.player_, {
el: titleEl
});
menu.addItem(titleComponent);
@@ -17587,7 +18777,7 @@
_proto.handleMouseLeave = function handleMouseLeave(event) {
this.removeClass('vjs-hover');
- off(document, 'keyup', bind(this, this.handleMenuKeyUp));
+ off(document, 'keyup', this.handleMenuKeyUp_);
}
/**
* Set the focus to the actual button, not to this element
@@ -17750,9 +18940,9 @@
};
return MenuButton;
- }(Component);
+ }(Component$1);
- Component.registerComponent('MenuButton', MenuButton);
+ Component$1.registerComponent('MenuButton', MenuButton);
/**
* The base class for buttons that toggle specific track types (e.g. subtitles).
@@ -17760,9 +18950,7 @@
* @extends MenuButton
*/
- var TrackButton =
- /*#__PURE__*/
- function (_MenuButton) {
+ var TrackButton = /*#__PURE__*/function (_MenuButton) {
inheritsLoose(TrackButton, _MenuButton);
/**
@@ -17791,12 +18979,14 @@
var updateHandler = bind(assertThisInitialized(_this), _this.update);
tracks.addEventListener('removetrack', updateHandler);
tracks.addEventListener('addtrack', updateHandler);
+ tracks.addEventListener('labelchange', updateHandler);
_this.player_.on('ready', updateHandler);
_this.player_.on('dispose', function () {
tracks.removeEventListener('removetrack', updateHandler);
tracks.removeEventListener('addtrack', updateHandler);
+ tracks.removeEventListener('labelchange', updateHandler);
});
return _this;
@@ -17805,7 +18995,7 @@
return TrackButton;
}(MenuButton);
- Component.registerComponent('TrackButton', TrackButton);
+ Component$1.registerComponent('TrackButton', TrackButton);
/**
* @file menu-keys.js
@@ -17826,9 +19016,7 @@
* @extends ClickableComponent
*/
- var MenuItem =
- /*#__PURE__*/
- function (_ClickableComponent) {
+ var MenuItem = /*#__PURE__*/function (_ClickableComponent) {
inheritsLoose(MenuItem, _ClickableComponent);
/**
@@ -17882,14 +19070,21 @@
var _proto = MenuItem.prototype;
- _proto.createEl = function createEl(type, props, attrs) {
+ _proto.createEl = function createEl$1(type, props, attrs) {
// The control is textual, not just an icon
this.nonIconControl = true;
- return _ClickableComponent.prototype.createEl.call(this, 'li', assign({
+
+ var el = _ClickableComponent.prototype.createEl.call(this, 'li', assign({
className: 'vjs-menu-item',
- innerHTML: "",
tabIndex: -1
- }, props), attrs);
+ }, props), attrs); // swap icon with menu item text.
+
+
+ el.replaceChild(createEl('span', {
+ className: 'vjs-menu-item-text',
+ textContent: this.localize(this.options_.label)
+ }), el.querySelector('.vjs-icon-placeholder'));
+ return el;
}
/**
* Ignore keys which are used by the menu, but pass any other ones up. See
@@ -17956,7 +19151,7 @@
return MenuItem;
}(ClickableComponent);
- Component.registerComponent('MenuItem', MenuItem);
+ Component$1.registerComponent('MenuItem', MenuItem);
/**
* The specific menu item type for selecting a language within a text track kind
@@ -17964,9 +19159,7 @@
* @extends MenuItem
*/
- var TextTrackMenuItem =
- /*#__PURE__*/
- function (_MenuItem) {
+ var TextTrackMenuItem = /*#__PURE__*/function (_MenuItem) {
inheritsLoose(TextTrackMenuItem, _MenuItem);
/**
@@ -18028,10 +19221,10 @@
var event;
_this.on(['tap', 'click'], function () {
- if (typeof window$1.Event !== 'object') {
+ if (typeof window.Event !== 'object') {
// Android 2.3 throws an Illegal Constructor error for window.Event
try {
- event = new window$1.Event('change');
+ event = new window.Event('change');
} catch (err) {// continue regardless of error
}
}
@@ -18141,7 +19334,7 @@
return TextTrackMenuItem;
}(MenuItem);
- Component.registerComponent('TextTrackMenuItem', TextTrackMenuItem);
+ Component$1.registerComponent('TextTrackMenuItem', TextTrackMenuItem);
/**
* A special menu item for turning of a specific type of text track
@@ -18149,9 +19342,7 @@
* @extends TextTrackMenuItem
*/
- var OffTextTrackMenuItem =
- /*#__PURE__*/
- function (_TextTrackMenuItem) {
+ var OffTextTrackMenuItem = /*#__PURE__*/function (_TextTrackMenuItem) {
inheritsLoose(OffTextTrackMenuItem, _TextTrackMenuItem);
/**
@@ -18246,7 +19437,7 @@
return OffTextTrackMenuItem;
}(TextTrackMenuItem);
- Component.registerComponent('OffTextTrackMenuItem', OffTextTrackMenuItem);
+ Component$1.registerComponent('OffTextTrackMenuItem', OffTextTrackMenuItem);
/**
* The base class for buttons that toggle specific text track types (e.g. subtitles)
@@ -18254,9 +19445,7 @@
* @extends MenuButton
*/
- var TextTrackButton =
- /*#__PURE__*/
- function (_TrackButton) {
+ var TextTrackButton = /*#__PURE__*/function (_TrackButton) {
inheritsLoose(TextTrackButton, _TrackButton);
/**
@@ -18343,7 +19532,7 @@
return TextTrackButton;
}(TrackButton);
- Component.registerComponent('TextTrackButton', TextTrackButton);
+ Component$1.registerComponent('TextTrackButton', TextTrackButton);
/**
* The chapter track menu item
@@ -18351,9 +19540,7 @@
* @extends MenuItem
*/
- var ChaptersTrackMenuItem =
- /*#__PURE__*/
- function (_MenuItem) {
+ var ChaptersTrackMenuItem = /*#__PURE__*/function (_MenuItem) {
inheritsLoose(ChaptersTrackMenuItem, _MenuItem);
/**
@@ -18379,7 +19566,6 @@
_this = _MenuItem.call(this, player, options) || this;
_this.track = track;
_this.cue = cue;
- track.addEventListener('cuechange', bind(assertThisInitialized(_this), _this.update));
return _this;
}
/**
@@ -18401,29 +19587,12 @@
_MenuItem.prototype.handleClick.call(this);
this.player_.currentTime(this.cue.startTime);
- this.update(this.cue.startTime);
- }
- /**
- * Update chapter menu item
- *
- * @param {EventTarget~Event} [event]
- * The `cuechange` event that caused this function to run.
- *
- * @listens TextTrack#cuechange
- */
- ;
-
- _proto.update = function update(event) {
- var cue = this.cue;
- var currentTime = this.player_.currentTime(); // vjs.log(currentTime, cue.startTime);
-
- this.selected(cue.startTime <= currentTime && currentTime < cue.endTime);
};
return ChaptersTrackMenuItem;
}(MenuItem);
- Component.registerComponent('ChaptersTrackMenuItem', ChaptersTrackMenuItem);
+ Component$1.registerComponent('ChaptersTrackMenuItem', ChaptersTrackMenuItem);
/**
* The button component for toggling and selecting chapters
@@ -18433,9 +19602,7 @@
* @extends TextTrackButton
*/
- var ChaptersButton =
- /*#__PURE__*/
- function (_TextTrackButton) {
+ var ChaptersButton = /*#__PURE__*/function (_TextTrackButton) {
inheritsLoose(ChaptersButton, _TextTrackButton);
/**
@@ -18451,7 +19618,17 @@
* The function to call when this function is ready.
*/
function ChaptersButton(player, options, ready) {
- return _TextTrackButton.call(this, player, options, ready) || this;
+ var _this;
+
+ _this = _TextTrackButton.call(this, player, options, ready) || this;
+
+ _this.selectCurrentItem_ = function () {
+ _this.items.forEach(function (item) {
+ item.selected(_this.track_.activeCues[0] === item.cue);
+ });
+ };
+
+ return _this;
}
/**
* Builds the default DOM `className`.
@@ -18483,11 +19660,20 @@
;
_proto.update = function update(event) {
- if (!this.track_ || event && (event.type === 'addtrack' || event.type === 'removetrack')) {
- this.setTrack(this.findChaptersTrack());
+ if (event && event.track && event.track.kind !== 'chapters') {
+ return;
}
- _TextTrackButton.prototype.update.call(this);
+ var track = this.findChaptersTrack();
+
+ if (track !== this.track_) {
+ this.setTrack(track);
+
+ _TextTrackButton.prototype.update.call(this);
+ } else if (!this.items || track && track.cues && track.cues.length !== this.items.length) {
+ // Update the menu initially or if the number of cues has changed since set
+ _TextTrackButton.prototype.update.call(this);
+ }
}
/**
* Set the currently selected track for the chapters button.
@@ -18515,6 +19701,7 @@
remoteTextTrackEl.removeEventListener('load', this.updateHandler_);
}
+ this.track_.removeEventListener('cuechange', this.selectCurrentItem_);
this.track_ = null;
}
@@ -18528,6 +19715,8 @@
if (_remoteTextTrackEl) {
_remoteTextTrackEl.addEventListener('load', this.updateHandler_);
}
+
+ this.track_.addEventListener('cuechange', this.selectCurrentItem_);
}
}
/**
@@ -18564,7 +19753,7 @@
return this.track_.label;
}
- return this.localize(toTitleCase(this.kind_));
+ return this.localize(toTitleCase$1(this.kind_));
}
/**
* Create menu from chapter track
@@ -18630,7 +19819,7 @@
*/
ChaptersButton.prototype.controlText_ = 'Chapters';
- Component.registerComponent('ChaptersButton', ChaptersButton);
+ Component$1.registerComponent('ChaptersButton', ChaptersButton);
/**
* The button component for toggling and selecting descriptions
@@ -18638,9 +19827,7 @@
* @extends TextTrackButton
*/
- var DescriptionsButton =
- /*#__PURE__*/
- function (_TextTrackButton) {
+ var DescriptionsButton = /*#__PURE__*/function (_TextTrackButton) {
inheritsLoose(DescriptionsButton, _TextTrackButton);
/**
@@ -18736,7 +19923,7 @@
*/
DescriptionsButton.prototype.controlText_ = 'Descriptions';
- Component.registerComponent('DescriptionsButton', DescriptionsButton);
+ Component$1.registerComponent('DescriptionsButton', DescriptionsButton);
/**
* The button component for toggling and selecting subtitles
@@ -18744,9 +19931,7 @@
* @extends TextTrackButton
*/
- var SubtitlesButton =
- /*#__PURE__*/
- function (_TextTrackButton) {
+ var SubtitlesButton = /*#__PURE__*/function (_TextTrackButton) {
inheritsLoose(SubtitlesButton, _TextTrackButton);
/**
@@ -18801,7 +19986,7 @@
*/
SubtitlesButton.prototype.controlText_ = 'Subtitles';
- Component.registerComponent('SubtitlesButton', SubtitlesButton);
+ Component$1.registerComponent('SubtitlesButton', SubtitlesButton);
/**
* The menu item for caption track settings menu
@@ -18809,9 +19994,7 @@
* @extends TextTrackMenuItem
*/
- var CaptionSettingsMenuItem =
- /*#__PURE__*/
- function (_TextTrackMenuItem) {
+ var CaptionSettingsMenuItem = /*#__PURE__*/function (_TextTrackMenuItem) {
inheritsLoose(CaptionSettingsMenuItem, _TextTrackMenuItem);
/**
@@ -18867,7 +20050,7 @@
return CaptionSettingsMenuItem;
}(TextTrackMenuItem);
- Component.registerComponent('CaptionSettingsMenuItem', CaptionSettingsMenuItem);
+ Component$1.registerComponent('CaptionSettingsMenuItem', CaptionSettingsMenuItem);
/**
* The button component for toggling and selecting captions
@@ -18875,9 +20058,7 @@
* @extends TextTrackButton
*/
- var CaptionsButton =
- /*#__PURE__*/
- function (_TextTrackButton) {
+ var CaptionsButton = /*#__PURE__*/function (_TextTrackButton) {
inheritsLoose(CaptionsButton, _TextTrackButton);
/**
@@ -18952,7 +20133,7 @@
*/
CaptionsButton.prototype.controlText_ = 'Captions';
- Component.registerComponent('CaptionsButton', CaptionsButton);
+ Component$1.registerComponent('CaptionsButton', CaptionsButton);
/**
* SubsCapsMenuItem has an [cc] icon to distinguish captions from subtitles
@@ -18961,9 +20142,7 @@
* @extends TextTrackMenuItem
*/
- var SubsCapsMenuItem =
- /*#__PURE__*/
- function (_TextTrackMenuItem) {
+ var SubsCapsMenuItem = /*#__PURE__*/function (_TextTrackMenuItem) {
inheritsLoose(SubsCapsMenuItem, _TextTrackMenuItem);
function SubsCapsMenuItem() {
@@ -18972,26 +20151,32 @@
var _proto = SubsCapsMenuItem.prototype;
- _proto.createEl = function createEl(type, props, attrs) {
- var innerHTML = "';
-
- var el = _TextTrackMenuItem.prototype.createEl.call(this, type, assign({
- innerHTML: innerHTML
- }, props), attrs);
-
return el;
};
return SubsCapsMenuItem;
}(TextTrackMenuItem);
- Component.registerComponent('SubsCapsMenuItem', SubsCapsMenuItem);
+ Component$1.registerComponent('SubsCapsMenuItem', SubsCapsMenuItem);
/**
* The button component for toggling and selecting captions and/or subtitles
@@ -18999,9 +20184,7 @@
* @extends TextTrackButton
*/
- var SubsCapsButton =
- /*#__PURE__*/
- function (_TextTrackButton) {
+ var SubsCapsButton = /*#__PURE__*/function (_TextTrackButton) {
inheritsLoose(SubsCapsButton, _TextTrackButton);
function SubsCapsButton(player, options) {
@@ -19020,7 +20203,7 @@
_this.label_ = 'captions';
}
- _this.menuButton_.controlText(toTitleCase(_this.label_));
+ _this.menuButton_.controlText(toTitleCase$1(_this.label_));
return _this;
}
@@ -19083,7 +20266,7 @@
*/
SubsCapsButton.prototype.controlText_ = 'Subtitles';
- Component.registerComponent('SubsCapsButton', SubsCapsButton);
+ Component$1.registerComponent('SubsCapsButton', SubsCapsButton);
/**
* An {@link AudioTrack} {@link MenuItem}
@@ -19091,9 +20274,7 @@
* @extends MenuItem
*/
- var AudioTrackMenuItem =
- /*#__PURE__*/
- function (_MenuItem) {
+ var AudioTrackMenuItem = /*#__PURE__*/function (_MenuItem) {
inheritsLoose(AudioTrackMenuItem, _MenuItem);
/**
@@ -19137,19 +20318,23 @@
var _proto = AudioTrackMenuItem.prototype;
- _proto.createEl = function createEl(type, props, attrs) {
- var innerHTML = "';
-
- var el = _MenuItem.prototype.createEl.call(this, type, assign({
- innerHTML: innerHTML
- }, props), attrs);
-
return el;
}
/**
@@ -19166,13 +20351,24 @@
;
_proto.handleClick = function handleClick(event) {
- var tracks = this.player_.audioTracks();
+ _MenuItem.prototype.handleClick.call(this, event); // the audio track list will automatically toggle other tracks
+ // off for us.
- _MenuItem.prototype.handleClick.call(this, event);
- for (var i = 0; i < tracks.length; i++) {
- var track = tracks[i];
- track.enabled = track === this.track;
+ this.track.enabled = true; // when native audio tracks are used, we want to make sure that other tracks are turned off
+
+ if (this.player_.tech_.featuresNativeAudioTracks) {
+ var tracks = this.player_.audioTracks();
+
+ for (var i = 0; i < tracks.length; i++) {
+ var track = tracks[i]; // skip the current track since we enabled it above
+
+ if (track === this.track) {
+ continue;
+ }
+
+ track.enabled = track === this.track;
+ }
}
}
/**
@@ -19192,7 +20388,7 @@
return AudioTrackMenuItem;
}(MenuItem);
- Component.registerComponent('AudioTrackMenuItem', AudioTrackMenuItem);
+ Component$1.registerComponent('AudioTrackMenuItem', AudioTrackMenuItem);
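A brief sketch (editorial, not part of the diff) of the behaviour the new handleClick relies on: for emulated track lists, enabling one AudioTrack makes the AudioTrackList disable the others, so the explicit loop above is only needed when featuresNativeAudioTracks is true. The language code is illustrative:

    var tracks = player.audioTracks();
    for (var i = 0; i < tracks.length; i++) {
      if (tracks[i].language === 'en') {
        tracks[i].enabled = true; // the emulated AudioTrackList turns the other tracks off itself
      }
    }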
/**
* The base class for buttons that toggle specific {@link AudioTrack} types.
@@ -19200,9 +20396,7 @@
* @extends TrackButton
*/
- var AudioTrackButton =
- /*#__PURE__*/
- function (_TrackButton) {
+ var AudioTrackButton = /*#__PURE__*/function (_TrackButton) {
inheritsLoose(AudioTrackButton, _TrackButton);
/**
@@ -19284,7 +20478,7 @@
AudioTrackButton.prototype.controlText_ = 'Audio Track';
- Component.registerComponent('AudioTrackButton', AudioTrackButton);
+ Component$1.registerComponent('AudioTrackButton', AudioTrackButton);
/**
* The specific menu item type for selecting a playback rate.
@@ -19292,9 +20486,7 @@
* @extends MenuItem
*/
- var PlaybackRateMenuItem =
- /*#__PURE__*/
- function (_MenuItem) {
+ var PlaybackRateMenuItem = /*#__PURE__*/function (_MenuItem) {
inheritsLoose(PlaybackRateMenuItem, _MenuItem);
/**
@@ -19313,14 +20505,16 @@
var rate = parseFloat(label, 10); // Modify options for parent MenuItem class's init.
options.label = label;
- options.selected = rate === 1;
+ options.selected = rate === player.playbackRate();
options.selectable = true;
options.multiSelectable = false;
_this = _MenuItem.call(this, player, options) || this;
_this.label = label;
_this.rate = rate;
- _this.on(player, 'ratechange', _this.update);
+ _this.on(player, 'ratechange', function (e) {
+ return _this.update(e);
+ });
return _this;
}
@@ -19369,7 +20563,7 @@
PlaybackRateMenuItem.prototype.contentElType = 'button';
- Component.registerComponent('PlaybackRateMenuItem', PlaybackRateMenuItem);
+ Component$1.registerComponent('PlaybackRateMenuItem', PlaybackRateMenuItem);
/**
* The component for controlling the playback rate.
@@ -19377,9 +20571,7 @@
* @extends MenuButton
*/
- var PlaybackRateMenuButton =
- /*#__PURE__*/
- function (_MenuButton) {
+ var PlaybackRateMenuButton = /*#__PURE__*/function (_MenuButton) {
inheritsLoose(PlaybackRateMenuButton, _MenuButton);
/**
@@ -19396,13 +20588,23 @@
_this = _MenuButton.call(this, player, options) || this;
+ _this.menuButton_.el_.setAttribute('aria-describedby', _this.labelElId_);
+
_this.updateVisibility();
_this.updateLabel();
- _this.on(player, 'loadstart', _this.updateVisibility);
+ _this.on(player, 'loadstart', function (e) {
+ return _this.updateVisibility(e);
+ });
- _this.on(player, 'ratechange', _this.updateLabel);
+ _this.on(player, 'ratechange', function (e) {
+ return _this.updateLabel(e);
+ });
+
+ _this.on(player, 'playbackrateschange', function (e) {
+ return _this.handlePlaybackRateschange(e);
+ });
return _this;
}
@@ -19419,9 +20621,11 @@
_proto.createEl = function createEl$1() {
var el = _MenuButton.prototype.createEl.call(this);
+ this.labelElId_ = 'vjs-playback-rate-value-label-' + this.id_;
this.labelEl_ = createEl('div', {
className: 'vjs-playback-rate-value',
- innerHTML: '1x'
+ id: this.labelElId_,
+ textContent: '1x'
});
el.appendChild(this.labelEl_);
return el;
@@ -19448,26 +20652,22 @@
return "vjs-playback-rate " + _MenuButton.prototype.buildWrapperCSSClass.call(this);
}
/**
- * Create the playback rate menu
+ * Create the list of menu items. Specific to each subclass.
*
- * @return {Menu}
- * Menu object populated with {@link PlaybackRateMenuItem}s
*/
;
- _proto.createMenu = function createMenu() {
- var menu = new Menu(this.player());
+ _proto.createItems = function createItems() {
var rates = this.playbackRates();
+ var items = [];
- if (rates) {
- for (var i = rates.length - 1; i >= 0; i--) {
- menu.addChild(new PlaybackRateMenuItem(this.player(), {
- rate: rates[i] + 'x'
- }));
- }
+ for (var i = rates.length - 1; i >= 0; i--) {
+ items.push(new PlaybackRateMenuItem(this.player(), {
+ rate: rates[i] + 'x'
+ }));
}
- return menu;
+ return items;
}
/**
* Updates ARIA accessibility attributes
@@ -19494,18 +20694,21 @@
_proto.handleClick = function handleClick(event) {
// select next rate option
var currentRate = this.player().playbackRate();
- var rates = this.playbackRates(); // this will select first one if the last one currently selected
+ var rates = this.playbackRates();
+ var currentIndex = rates.indexOf(currentRate); // this gets the next rate and will select the first one if the last one is currently selected
- var newRate = rates[0];
+ var newIndex = (currentIndex + 1) % rates.length;
+ this.player().playbackRate(rates[newIndex]);
+ }
+ /**
+ * On playbackrateschange, update the menu to account for the new items.
+ *
+ * @listens Player#playbackrateschange
+ */
+ ;
- for (var i = 0; i < rates.length; i++) {
- if (rates[i] > currentRate) {
- newRate = rates[i];
- break;
- }
- }
-
- this.player().playbackRate(newRate);
+ _proto.handlePlaybackRateschange = function handlePlaybackRateschange(event) {
+ this.update();
}
/**
* Get possible playback rates
@@ -19516,7 +20719,8 @@
;
_proto.playbackRates = function playbackRates() {
- return this.options_.playbackRates || this.options_.playerOptions && this.options_.playerOptions.playbackRates;
+ var player = this.player();
+ return player.playbackRates && player.playbackRates() || [];
}
/**
* Get whether playback rates is supported by the tech
@@ -19559,7 +20763,7 @@
_proto.updateLabel = function updateLabel(event) {
if (this.playbackRateSupported()) {
- this.labelEl_.innerHTML = this.player().playbackRate() + 'x';
+ this.labelEl_.textContent = this.player().playbackRate() + 'x';
}
};
@@ -19574,7 +20778,7 @@
PlaybackRateMenuButton.prototype.controlText_ = 'Playback Rate';
- Component.registerComponent('PlaybackRateMenuButton', PlaybackRateMenuButton);
+ Component$1.registerComponent('PlaybackRateMenuButton', PlaybackRateMenuButton);
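A condensed sketch (editorial, not part of the diff) of the new cycling behaviour: handleClick now advances through player.playbackRates() with a modulo so the last rate wraps back to the first. The player id and rate list are illustrative:

    var player = videojs('example-player', { playbackRates: [0.5, 1, 1.5, 2] });
    function nextRate() {
      var rates = player.playbackRates();
      var i = rates.indexOf(player.playbackRate());
      return rates[(i + 1) % rates.length]; // at 2x this wraps back to 0.5x
    }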
/**
* Just an empty spacer element that can be used as an append point for plugins, etc.
@@ -19583,9 +20787,7 @@
* @extends Component
*/
- var Spacer =
- /*#__PURE__*/
- function (_Component) {
+ var Spacer = /*#__PURE__*/function (_Component) {
inheritsLoose(Spacer, _Component);
function Spacer() {
@@ -19611,16 +20813,30 @@
*/
;
- _proto.createEl = function createEl() {
- return _Component.prototype.createEl.call(this, 'div', {
- className: this.buildCSSClass()
- });
+ _proto.createEl = function createEl(tag, props, attributes) {
+ if (tag === void 0) {
+ tag = 'div';
+ }
+
+ if (props === void 0) {
+ props = {};
+ }
+
+ if (attributes === void 0) {
+ attributes = {};
+ }
+
+ if (!props.className) {
+ props.className = this.buildCSSClass();
+ }
+
+ return _Component.prototype.createEl.call(this, tag, props, attributes);
};
return Spacer;
- }(Component);
+ }(Component$1);
- Component.registerComponent('Spacer', Spacer);
+ Component$1.registerComponent('Spacer', Spacer);
/**
* Spacer specifically meant to be used as an insertion point for new plugins, etc.
@@ -19628,9 +20844,7 @@
* @extends Spacer
*/
- var CustomControlSpacer =
- /*#__PURE__*/
- function (_Spacer) {
+ var CustomControlSpacer = /*#__PURE__*/function (_Spacer) {
inheritsLoose(CustomControlSpacer, _Spacer);
function CustomControlSpacer() {
@@ -19657,20 +20871,18 @@
;
_proto.createEl = function createEl() {
- var el = _Spacer.prototype.createEl.call(this, {
- className: this.buildCSSClass()
- }); // No-flex/table-cell mode requires there be some content
- // in the cell to fill the remaining space of the table.
-
-
- el.innerHTML = "\xA0";
- return el;
+ return _Spacer.prototype.createEl.call(this, 'div', {
+ className: this.buildCSSClass(),
+ // No-flex/table-cell mode requires there be some content
+ // in the cell to fill the remaining space of the table.
+ textContent: "\xA0"
+ });
};
return CustomControlSpacer;
}(Spacer);
- Component.registerComponent('CustomControlSpacer', CustomControlSpacer);
+ Component$1.registerComponent('CustomControlSpacer', CustomControlSpacer);
/**
* Container of main controls.
@@ -19678,9 +20890,7 @@
* @extends Component
*/
- var ControlBar =
- /*#__PURE__*/
- function (_Component) {
+ var ControlBar = /*#__PURE__*/function (_Component) {
inheritsLoose(ControlBar, _Component);
function ControlBar() {
@@ -19703,7 +20913,7 @@
};
return ControlBar;
- }(Component);
+ }(Component$1);
/**
* Default options for `ControlBar`
*
@@ -19720,7 +20930,7 @@
ControlBar.prototype.options_.children.splice(ControlBar.prototype.options_.children.length - 1, 0, 'pictureInPictureToggle');
}
- Component.registerComponent('ControlBar', ControlBar);
+ Component$1.registerComponent('ControlBar', ControlBar);
/**
* A display that indicates an error has occurred. This means that the video
@@ -19729,9 +20939,7 @@
* @extends ModalDialog
*/
- var ErrorDisplay =
- /*#__PURE__*/
- function (_ModalDialog) {
+ var ErrorDisplay = /*#__PURE__*/function (_ModalDialog) {
inheritsLoose(ErrorDisplay, _ModalDialog);
/**
@@ -19748,7 +20956,9 @@
_this = _ModalDialog.call(this, player, options) || this;
- _this.on(player, 'error', _this.open);
+ _this.on(player, 'error', function (e) {
+ return _this.open(e);
+ });
return _this;
}
@@ -19795,9 +21005,9 @@
temporary: false,
uncloseable: true
});
- Component.registerComponent('ErrorDisplay', ErrorDisplay);
+ Component$1.registerComponent('ErrorDisplay', ErrorDisplay);
- var LOCAL_STORAGE_KEY = 'vjs-text-track-settings';
+ var LOCAL_STORAGE_KEY$1 = 'vjs-text-track-settings';
var COLOR_BLACK = ['#000', 'Black'];
var COLOR_BLUE = ['#00F', 'Blue'];
var COLOR_CYAN = ['#0FF', 'Cyan'];
@@ -19966,9 +21176,7 @@
*/
- var TextTrackSettings =
- /*#__PURE__*/
- function (_ModalDialog) {
+ var TextTrackSettings = /*#__PURE__*/function (_ModalDialog) {
inheritsLoose(TextTrackSettings, _ModalDialog);
/**
@@ -19985,7 +21193,7 @@
options.temporary = false;
_this = _ModalDialog.call(this, player, options) || this;
- _this.updateDisplay = bind(assertThisInitialized(_this), _this.updateDisplay); // fill the modal and pretend we have opened it
+ _this.updateDisplay = _this.updateDisplay.bind(assertThisInitialized(_this)); // fill the modal and pretend we have opened it
_this.fill();
@@ -20231,9 +21439,9 @@
var values;
try {
- values = JSON.parse(window$1.localStorage.getItem(LOCAL_STORAGE_KEY));
+ values = JSON.parse(window.localStorage.getItem(LOCAL_STORAGE_KEY$1));
} catch (err) {
- log.warn(err);
+ log$1.warn(err);
}
if (values) {
@@ -20254,12 +21462,12 @@
try {
if (Object.keys(values).length) {
- window$1.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(values));
+ window.localStorage.setItem(LOCAL_STORAGE_KEY$1, JSON.stringify(values));
} else {
- window$1.localStorage.removeItem(LOCAL_STORAGE_KEY);
+ window.localStorage.removeItem(LOCAL_STORAGE_KEY$1);
}
} catch (err) {
- log.warn(err);
+ log$1.warn(err);
}
}
/**
@@ -20297,7 +21505,7 @@
return TextTrackSettings;
}(ModalDialog);
- Component.registerComponent('TextTrackSettings', TextTrackSettings);
+ Component$1.registerComponent('TextTrackSettings', TextTrackSettings);
/**
* A Resize Manager. It is in charge of triggering `playerresize` on the player in the right conditions.
@@ -20316,9 +21524,7 @@
* @extends Component
*/
- var ResizeManager =
- /*#__PURE__*/
- function (_Component) {
+ var ResizeManager = /*#__PURE__*/function (_Component) {
inheritsLoose(ResizeManager, _Component);
/**
@@ -20337,19 +21543,19 @@
function ResizeManager(player, options) {
var _this;
- var RESIZE_OBSERVER_AVAILABLE = options.ResizeObserver || window$1.ResizeObserver; // if `null` was passed, we want to disable the ResizeObserver
+ var RESIZE_OBSERVER_AVAILABLE = options.ResizeObserver || window.ResizeObserver; // if `null` was passed, we want to disable the ResizeObserver
if (options.ResizeObserver === null) {
RESIZE_OBSERVER_AVAILABLE = false;
} // Only create an element when ResizeObserver isn't available
- var options_ = mergeOptions({
+ var options_ = mergeOptions$3({
createEl: !RESIZE_OBSERVER_AVAILABLE,
reportTouchActivity: false
}, options);
_this = _Component.call(this, player, options_) || this;
- _this.ResizeObserver = options.ResizeObserver || window$1.ResizeObserver;
+ _this.ResizeObserver = options.ResizeObserver || window.ResizeObserver;
_this.loadListener_ = null;
_this.resizeObserver_ = null;
_this.debouncedHandler_ = debounce(function () {
@@ -20391,7 +21597,8 @@
_proto.createEl = function createEl() {
return _Component.prototype.createEl.call(this, 'iframe', {
className: 'vjs-resize-manager',
- tabIndex: -1
+ tabIndex: -1,
+ title: this.localize('No content')
}, {
'aria-hidden': 'true'
});
@@ -20449,56 +21656,102 @@
};
return ResizeManager;
- }(Component);
+ }(Component$1);
- Component.registerComponent('ResizeManager', ResizeManager);
+ Component$1.registerComponent('ResizeManager', ResizeManager);
+
+ var defaults = {
+ trackingThreshold: 20,
+ liveTolerance: 15
+ };
+ /*
+ track when we are at the live edge, and other helpers for live playback */
/**
- * Computes the median of an array.
- *
- * @param {number[]} arr
- * Input array of numbers.
- *
- * @return {number}
- * Median value.
+ * A class for checking live current time and determining when the player
+ * is at or behind the live edge.
*/
- var median = function median(arr) {
- var mid = Math.floor(arr.length / 2);
- var sortedList = [].concat(arr).sort(function (a, b) {
- return a - b;
- });
- return arr.length % 2 !== 0 ? sortedList[mid] : (sortedList[mid - 1] + sortedList[mid]) / 2;
- };
- /* track when we are at the live edge, and other helpers for live playback */
-
- var LiveTracker =
- /*#__PURE__*/
- function (_Component) {
+ var LiveTracker = /*#__PURE__*/function (_Component) {
inheritsLoose(LiveTracker, _Component);
+ /**
+ * Creates an instance of this class.
+ *
+ * @param {Player} player
+ * The `Player` that this class should be attached to.
+ *
+ * @param {Object} [options]
+ * The key/value store of player options.
+ *
+ * @param {number} [options.trackingThreshold=20]
+ * Number of seconds of live window (seekableEnd - seekableStart) that
+ * media needs to have before the liveui will be shown.
+ *
+ * @param {number} [options.liveTolerance=15]
+ * Number of seconds behind live that we have to be
+ * before we will be considered non-live. Note that this will only
+ * be used when playing at the live edge. This allows large seekable end
+ * changes to not affect whether we are live or not.
+ */
function LiveTracker(player, options) {
var _this;
// LiveTracker does not need an element
- var options_ = mergeOptions({
+ var options_ = mergeOptions$3(defaults, options, {
createEl: false
- }, options);
+ });
_this = _Component.call(this, player, options_) || this;
+ _this.handleVisibilityChange_ = function (e) {
+ return _this.handleVisibilityChange(e);
+ };
+
+ _this.trackLiveHandler_ = function () {
+ return _this.trackLive_();
+ };
+
+ _this.handlePlay_ = function (e) {
+ return _this.handlePlay(e);
+ };
+
+ _this.handleFirstTimeupdate_ = function (e) {
+ return _this.handleFirstTimeupdate(e);
+ };
+
+ _this.handleSeeked_ = function (e) {
+ return _this.handleSeeked(e);
+ };
+
+ _this.seekToLiveEdge_ = function (e) {
+ return _this.seekToLiveEdge(e);
+ };
+
_this.reset_();
- _this.on(_this.player_, 'durationchange', _this.handleDurationchange); // we don't need to track live playback if the document is hidden,
+ _this.on(_this.player_, 'durationchange', function (e) {
+ return _this.handleDurationchange(e);
+ }); // we should try to toggle tracking on canplay as native playback engines, like Safari
+ // may not have the proper values for things like seekableEnd until then
+
+
+ _this.on(_this.player_, 'canplay', function () {
+ return _this.toggleTracking();
+ }); // we don't need to track live playback if the document is hidden,
// also, tracking when the document is hidden can
// cause the CPU to spike and eventually crash the page on IE11.
if (IE_VERSION && 'hidden' in document && 'visibilityState' in document) {
- _this.on(document, 'visibilitychange', _this.handleVisibilityChange);
+ _this.on(document, 'visibilitychange', _this.handleVisibilityChange_);
}
return _this;
}
+ /**
+ * toggle tracking based on document visibility
+ */
+
var _proto = LiveTracker.prototype;
@@ -20512,67 +21765,41 @@
} else {
this.startTracking();
}
- };
-
- _proto.isBehind_ = function isBehind_() {
- // don't report that we are behind until a timeupdate has been seen
- if (!this.timeupdateSeen_) {
- return false;
- }
-
- var liveCurrentTime = this.liveCurrentTime();
- var currentTime = this.player_.currentTime(); // the live edge window is the amount of seconds away from live
- // that a player can be, but still be considered live.
- // we add 0.07 because the live tracking happens every 30ms
- // and we want some wiggle room for short segment live playback
-
- var liveEdgeWindow = this.seekableIncrement_ * 2 + 0.07; // on Android liveCurrentTime can bee Infinity, because seekableEnd
- // can be Infinity, so we handle that case.
-
- return liveCurrentTime !== Infinity && liveCurrentTime - liveEdgeWindow >= currentTime;
- } // all the functionality for tracking when seek end changes
- // and for tracking how far past seek end we should be
+ }
+ /**
+ * all the functionality for tracking when seek end changes
+ * and for tracking how far past seek end we should be
+ */
;
_proto.trackLive_ = function trackLive_() {
- this.pastSeekEnd_ = this.pastSeekEnd_;
var seekable = this.player_.seekable(); // skip undefined seekable
if (!seekable || !seekable.length) {
return;
}
- var newSeekEnd = this.seekableEnd(); // we can only tell if we are behind live, when seekable changes
- // once we detect that seekable has changed we check the new seek
- // end against current time, with a fudge value of half a second.
+ var newTime = Number(window.performance.now().toFixed(4));
+ var deltaTime = this.lastTime_ === -1 ? 0 : (newTime - this.lastTime_) / 1000;
+ this.lastTime_ = newTime;
+ this.pastSeekEnd_ = this.pastSeekEnd() + deltaTime;
+ var liveCurrentTime = this.liveCurrentTime();
+ var currentTime = this.player_.currentTime(); // we are behind live if any are true
+ // 1. the player is paused
+ // 2. the user seeked to a location 2 seconds away from live
+ // 3. the difference between live and current time is greater than
+ //    liveTolerance, which defaults to 15s
- if (newSeekEnd !== this.lastSeekEnd_) {
- if (this.lastSeekEnd_) {
- // we try to get the best fit value for the seeking increment
- // variable from the last 12 values.
- this.seekableIncrementList_ = this.seekableIncrementList_.slice(-11);
- this.seekableIncrementList_.push(Math.abs(newSeekEnd - this.lastSeekEnd_));
+ var isBehind = this.player_.paused() || this.seekedBehindLive_ || Math.abs(liveCurrentTime - currentTime) > this.options_.liveTolerance; // we cannot be behind if
+ // 1. we have not seen a timeupdate yet
+ // 2. liveCurrentTime is Infinity, which happens on Android and Native Safari
- if (this.seekableIncrementList_.length > 3) {
- this.seekableIncrement_ = median(this.seekableIncrementList_);
- }
- }
-
- this.pastSeekEnd_ = 0;
- this.lastSeekEnd_ = newSeekEnd;
- this.trigger('seekableendchange');
- } // we should reset pastSeekEnd when the value
- // is much higher than seeking increment.
-
-
- if (this.pastSeekEnd() > this.seekableIncrement_ * 1.5) {
- this.pastSeekEnd_ = 0;
- } else {
- this.pastSeekEnd_ = this.pastSeekEnd() + 0.03;
+ if (!this.timeupdateSeen_ || liveCurrentTime === Infinity) {
+ isBehind = false;
}
- if (this.isBehind_() !== this.behindLiveEdge()) {
- this.behindLiveEdge_ = this.isBehind_();
+ if (isBehind !== this.behindLiveEdge_) {
+ this.behindLiveEdge_ = isBehind;
this.trigger('liveedgechange');
}
}
@@ -20583,9 +21810,22 @@
;
_proto.handleDurationchange = function handleDurationchange() {
- if (this.player_.duration() === Infinity) {
+ this.toggleTracking();
+ }
+ /**
+ * start/stop tracking
+ */
+ ;
+
+ _proto.toggleTracking = function toggleTracking() {
+ if (this.player_.duration() === Infinity && this.liveWindow() >= this.options_.trackingThreshold) {
+ if (this.player_.options_.liveui) {
+ this.player_.addClass('vjs-liveui');
+ }
+
this.startTracking();
} else {
+ this.player_.removeClass('vjs-liveui');
this.stopTracking();
}
}
@@ -20595,8 +21835,6 @@
;
_proto.startTracking = function startTracking() {
- var _this2 = this;
-
if (this.isTracking()) {
return;
} // If we haven't seen a timeupdate, we need to check whether playback
@@ -20608,26 +21846,47 @@
this.timeupdateSeen_ = this.player_.hasStarted();
}
- this.trackingInterval_ = this.setInterval(this.trackLive_, UPDATE_REFRESH_INTERVAL);
+ this.trackingInterval_ = this.setInterval(this.trackLiveHandler_, UPDATE_REFRESH_INTERVAL);
this.trackLive_();
- this.on(this.player_, 'play', this.trackLive_);
- this.on(this.player_, 'pause', this.trackLive_); // this is to prevent showing that we are not live
- // before a video starts to play
+ this.on(this.player_, ['play', 'pause'], this.trackLiveHandler_);
if (!this.timeupdateSeen_) {
- this.one(this.player_, 'play', this.handlePlay);
-
- this.handleTimeupdate = function () {
- _this2.timeupdateSeen_ = true;
- _this2.handleTimeupdate = null;
- };
-
- this.one(this.player_, 'timeupdate', this.handleTimeupdate);
+ this.one(this.player_, 'play', this.handlePlay_);
+ this.one(this.player_, 'timeupdate', this.handleFirstTimeupdate_);
+ } else {
+ this.on(this.player_, 'seeked', this.handleSeeked_);
}
- };
+ }
+ /**
+ * handle the first timeupdate on the player if it wasn't already playing
+ * when live tracker started tracking.
+ */
+ ;
+
+ _proto.handleFirstTimeupdate = function handleFirstTimeupdate() {
+ this.timeupdateSeen_ = true;
+ this.on(this.player_, 'seeked', this.handleSeeked_);
+ }
+ /**
+ * Keep track of what time a seek starts, and listen for seeked
+ * to find where a seek ends.
+ */
+ ;
+
+ _proto.handleSeeked = function handleSeeked() {
+ var timeDiff = Math.abs(this.liveCurrentTime() - this.player_.currentTime());
+ this.seekedBehindLive_ = this.nextSeekedFromUser_ && timeDiff > 2;
+ this.nextSeekedFromUser_ = false;
+ this.trackLive_();
+ }
+ /**
+ * handle the first play on the player, and make sure that we seek
+ * right to the live edge.
+ */
+ ;
_proto.handlePlay = function handlePlay() {
- this.one(this.player_, 'timeupdate', this.seekToLiveEdge);
+ this.one(this.player_, 'timeupdate', this.seekToLiveEdge_);
}
/**
* Stop tracking, and set all internal variables to
@@ -20636,23 +21895,30 @@
;
_proto.reset_ = function reset_() {
+ this.lastTime_ = -1;
this.pastSeekEnd_ = 0;
- this.lastSeekEnd_ = null;
- this.behindLiveEdge_ = null;
+ this.lastSeekEnd_ = -1;
+ this.behindLiveEdge_ = true;
this.timeupdateSeen_ = false;
+ this.seekedBehindLive_ = false;
+ this.nextSeekedFromUser_ = false;
this.clearInterval(this.trackingInterval_);
this.trackingInterval_ = null;
- this.seekableIncrement_ = 12;
- this.seekableIncrementList_ = [];
- this.off(this.player_, 'play', this.trackLive_);
- this.off(this.player_, 'pause', this.trackLive_);
- this.off(this.player_, 'play', this.handlePlay);
- this.off(this.player_, 'timeupdate', this.seekToLiveEdge);
+ this.off(this.player_, ['play', 'pause'], this.trackLiveHandler_);
+ this.off(this.player_, 'seeked', this.handleSeeked_);
+ this.off(this.player_, 'play', this.handlePlay_);
+ this.off(this.player_, 'timeupdate', this.handleFirstTimeupdate_);
+ this.off(this.player_, 'timeupdate', this.seekToLiveEdge_);
+ }
+ /**
+ * The next seeked event is from the user. Meaning that any seek
+ * > 2s behind live will be considered behind live for real and
+ * liveTolerance will be ignored.
+ */
+ ;
- if (this.handleTimeupdate) {
- this.off(this.player_, 'timeupdate', this.handleTimeupdate);
- this.handleTimeupdate = null;
- }
+ _proto.nextSeekedFromUser = function nextSeekedFromUser() {
+ this.nextSeekedFromUser_ = true;
}
/**
* stop tracking live playback
@@ -20665,10 +21931,14 @@
}
this.reset_();
+ this.trigger('liveedgechange');
}
/**
* A helper to get the player seekable end
* so that we don't have to null check everywhere
+ *
+ * @return {number}
+ * The furthest seekable end or Infinity.
*/
;
@@ -20688,6 +21958,9 @@
/**
* A helper to get the player seekable start
* so that we don't have to null check everywhere
+ *
+ * @return {number}
+ * The earliest seekable start or 0.
*/
;
@@ -20705,15 +21978,21 @@
return seekableStarts.length ? seekableStarts.sort()[0] : 0;
}
/**
- * Get the live time window
+ * Get the live time window aka
+ * the amount of time between seekable start and
+ * live current time.
+ *
+ * @return {number}
+ * The amount of seconds that are seekable in
+ * the live video.
*/
;
_proto.liveWindow = function liveWindow() {
- var liveCurrentTime = this.liveCurrentTime();
+ var liveCurrentTime = this.liveCurrentTime(); // if liveCurrentTime is Infinity then we don't have a liveWindow at all
if (liveCurrentTime === Infinity) {
- return Infinity;
+ return 0;
}
return liveCurrentTime - this.seekableStart();
@@ -20721,6 +22000,9 @@
/**
* Determines if the player is live, only checks if this component
* is tracking live playback or not
+ *
+ * @return {boolean}
+ * Whether liveTracker is tracking
*/
;
@@ -20730,6 +22012,9 @@
/**
* Determines if currentTime is at the live edge and won't fall behind
* on each seekableendchange
+ *
+ * @return {boolean}
+ * Whether playback is at the live edge
*/
;
@@ -20738,6 +22023,9 @@
}
/**
* get what we expect the live current time to be
+ *
+ * @return {number}
+ * The expected live current time
*/
;
@@ -20745,22 +22033,40 @@
return this.pastSeekEnd() + this.seekableEnd();
}
/**
- * Returns how far past seek end we expect current time to be
+ * The number of seconds that have occurred after seekable end
+ * changed. This will be reset to 0 once seekable end changes.
+ *
+ * @return {number}
+ * Seconds past the current seekable end
*/
;
_proto.pastSeekEnd = function pastSeekEnd() {
+ var seekableEnd = this.seekableEnd();
+
+ if (this.lastSeekEnd_ !== -1 && seekableEnd !== this.lastSeekEnd_) {
+ this.pastSeekEnd_ = 0;
+ }
+
+ this.lastSeekEnd_ = seekableEnd;
return this.pastSeekEnd_;
}
/**
* If we are currently behind the live edge, aka currentTime will be
* behind on a seekableendchange
+ *
+ * @return {boolean}
+ * If we are behind the live edge
*/
;
_proto.behindLiveEdge = function behindLiveEdge() {
return this.behindLiveEdge_;
- };
+ }
+ /**
+ * Whether the live tracker is currently tracking or not.
+ */
+ ;
_proto.isTracking = function isTracking() {
return typeof this.trackingInterval_ === 'number';
@@ -20771,27 +22077,31 @@
;
_proto.seekToLiveEdge = function seekToLiveEdge() {
+ this.seekedBehindLive_ = false;
+
if (this.atLiveEdge()) {
return;
}
+ this.nextSeekedFromUser_ = false;
this.player_.currentTime(this.liveCurrentTime());
-
- if (this.player_.paused()) {
- this.player_.play();
- }
- };
+ }
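// Usage sketch (illustrative; not part of the bundled video.js diff): the tracker is
// exposed as `player.liveTracker` and fires `liveedgechange` whenever `atLiveEdge()`
// flips, which is what the new `seekedBehindLive_` / `nextSeekedFromUser_` flags feed.
// `player` is assumed to be an existing video.js player on a live source.
var liveTracker = player.liveTracker;

liveTracker.on('liveedgechange', function () {
  // e.g. toggle a custom "LIVE" indicator here; clicking it could call:
  // liveTracker.seekToLiveEdge();
  console.log('behind live edge:', liveTracker.behindLiveEdge(), 'window (s):', liveTracker.liveWindow());
});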
+ /**
+ * Dispose of liveTracker
+ */
+ ;
_proto.dispose = function dispose() {
+ this.off(document, 'visibilitychange', this.handleVisibilityChange_);
this.stopTracking();
_Component.prototype.dispose.call(this);
};
return LiveTracker;
- }(Component);
+ }(Component$1);
- Component.registerComponent('LiveTracker', LiveTracker);
+ Component$1.registerComponent('LiveTracker', LiveTracker);
/**
* This function is used to fire a sourceset when there is something
@@ -20886,7 +22196,7 @@
this.innerText = ''; // now we add all of that html in one by appending the
// document fragment. This is how innerHTML does it.
- window$1.Element.prototype.appendChild.call(this, docFrag); // then return the result that innerHTML's setter would
+ window.Element.prototype.appendChild.call(this, docFrag); // then return the result that innerHTML's setter would
return this.innerHTML;
}
@@ -20913,7 +22223,7 @@
};
var getInnerHTMLDescriptor = function getInnerHTMLDescriptor(tech) {
- return getDescriptor([tech.el(), window$1.HTMLMediaElement.prototype, window$1.Element.prototype, innerHTMLDescriptorPolyfill], 'innerHTML');
+ return getDescriptor([tech.el(), window.HTMLMediaElement.prototype, window.Element.prototype, innerHTMLDescriptorPolyfill], 'innerHTML');
};
/**
* Patches browser internal functions so that we can tell synchronously
@@ -20968,7 +22278,7 @@
el[k] = appendWrapper(old[k]);
});
- Object.defineProperty(el, 'innerHTML', mergeOptions(innerDescriptor, {
+ Object.defineProperty(el, 'innerHTML', mergeOptions$3(innerDescriptor, {
set: appendWrapper(innerDescriptor.set)
}));
@@ -20992,19 +22302,19 @@
var srcDescriptorPolyfill = Object.defineProperty({}, 'src', {
get: function get() {
if (this.hasAttribute('src')) {
- return getAbsoluteURL(window$1.Element.prototype.getAttribute.call(this, 'src'));
+ return getAbsoluteURL(window.Element.prototype.getAttribute.call(this, 'src'));
}
return '';
},
set: function set(v) {
- window$1.Element.prototype.setAttribute.call(this, 'src', v);
+ window.Element.prototype.setAttribute.call(this, 'src', v);
return v;
}
});
var getSrcDescriptor = function getSrcDescriptor(tech) {
- return getDescriptor([tech.el(), window$1.HTMLMediaElement.prototype, srcDescriptorPolyfill], 'src');
+ return getDescriptor([tech.el(), window.HTMLMediaElement.prototype, srcDescriptorPolyfill], 'src');
};
/**
* setup `sourceset` handling on the `Html5` tech. This function
@@ -21037,7 +22347,7 @@
var srcDescriptor = getSrcDescriptor(tech);
var oldSetAttribute = el.setAttribute;
var oldLoad = el.load;
- Object.defineProperty(el, 'src', mergeOptions(srcDescriptor, {
+ Object.defineProperty(el, 'src', mergeOptions$3(srcDescriptor, {
set: function set(v) {
var retval = srcDescriptor.set.call(el, v); // we use the getter here to get the actual value set on src
@@ -21134,9 +22444,7 @@
* @extends Tech
*/
- var Html5 =
- /*#__PURE__*/
- function (_Tech) {
+ var Html5 = /*#__PURE__*/function (_Tech) {
inheritsLoose(Html5, _Tech);
/**
@@ -21153,7 +22461,8 @@
_this = _Tech.call(this, options, ready) || this;
var source = options.source;
- var crossoriginTracks = false; // Set the source if one is provided
+ var crossoriginTracks = false;
+ _this.featuresVideoFrameCallback = _this.featuresVideoFrameCallback && _this.el_.tagName === 'VIDEO'; // Set the source if one is provided
// 1) Check if the source is new (if not, we want to keep the original so playback isn't interrupted)
// 2) Check to see if the network state of the tag was failed at init, and if so, reset the source
// anyway so the error gets fired.
@@ -21169,6 +22478,8 @@
_this.setupSourcesetHandling_();
}
+ _this.isScrubbing_ = false;
+
if (_this.el_.hasChildNodes()) {
var nodes = _this.el_.childNodes;
var nodesLength = nodes.length;
@@ -21208,7 +22519,7 @@
_this.proxyNativeTracks_();
if (_this.featuresNativeTextTracks && crossoriginTracks) {
- log.warn('Text Tracks are being loaded from another origin but the crossorigin attribute isn\'t used.\n' + 'This may prevent text tracks from loading.');
+ log$1.warn('Text Tracks are being loaded from another origin but the crossorigin attribute isn\'t used.\n' + 'This may prevent text tracks from loading.');
} // prevent iOS Safari from disabling metadata text tracks during native playback
@@ -21400,12 +22711,23 @@
var listeners = {
change: function change(e) {
- techTracks.trigger({
+ var event = {
type: 'change',
target: techTracks,
currentTarget: techTracks,
srcElement: techTracks
- });
+ };
+ techTracks.trigger(event); // if we are a text track change event, we should also notify the
+ // remote text track list. This can potentially cause a false positive
+ // if we were to get a change event on a non-remote track and
+ // we triggered the event on the remote text track list which doesn't
+ // contain that track. However, best practices mean looping through the
+ // list of tracks and searching for the appropriate mode value, so,
+ // this shouldn't pose an issue
+
+ if (name === 'text') {
+ _this3[REMOTE.remoteText.getterName]().trigger(event);
+ }
},
addtrack: function addtrack(e) {
techTracks.addTrack(e.track);
@@ -21497,7 +22819,7 @@
el = document.createElement('video'); // determine if native controls should be used
var tagAttributes = this.options_.tag && getAttributes(this.options_.tag);
- var attributes = mergeOptions({}, tagAttributes);
+ var attributes = mergeOptions$3({}, tagAttributes);
if (!TOUCH_ENABLED || this.options_.nativeControlsForTouch !== true) {
delete attributes.controls;
@@ -21514,6 +22836,10 @@
if (typeof this.options_.preload !== 'undefined') {
setAttribute(el, 'preload', this.options_.preload);
+ }
+
+ if (this.options_.disablePictureInPicture !== undefined) {
+ el.disablePictureInPicture = this.options_.disablePictureInPicture;
} // Update specific tag settings, in case they were overridden
// `autoplay` has to be *last* so that `muted` and `playsinline` are present
// when iOS/Safari or other browsers attempt to autoplay.
@@ -21629,6 +22955,32 @@
}, this);
});
}
+ /**
+ * Set whether we are scrubbing or not.
+ * This is used to decide whether we should use `fastSeek` or not.
+ * `fastSeek` is used to provide trick play on Safari browsers.
+ *
+ * @param {boolean} isScrubbing
+ * - true if we are currently scrubbing
+ * - false if we are no longer scrubbing
+ */
+ ;
+
+ _proto.setScrubbing = function setScrubbing(isScrubbing) {
+ this.isScrubbing_ = isScrubbing;
+ }
+ /**
+ * Get whether we are scrubbing or not.
+ *
+ * @return {boolean} isScrubbing
+ * - true if we are currently scrubbing
+ * - false if we are no longer scrubbing
+ */
+ ;
+
+ _proto.scrubbing = function scrubbing() {
+ return this.isScrubbing_;
+ }
/**
* Set current time for the `HTML5` tech.
*
@@ -21639,9 +22991,13 @@
_proto.setCurrentTime = function setCurrentTime(seconds) {
try {
- this.el_.currentTime = seconds;
+ if (this.isScrubbing_ && this.el_.fastSeek && IS_ANY_SAFARI) {
+ this.el_.fastSeek(seconds);
+ } else {
+ this.el_.currentTime = seconds;
+ }
} catch (e) {
- log(e, 'Video is not ready. (Video.js)'); // this.warning(VideoJS.warnings.videoNotReady);
+ log$1(e, 'Video is not ready. (Video.js)'); // this.warning(VideoJS.warnings.videoNotReady);
}
}
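// Usage sketch (illustrative; not part of the bundled video.js diff): the player now
// forwards its scrubbing state to the tech, so Safari can use `fastSeek()` for cheap
// approximate seeks while a custom seek bar is dragged. `player` is assumed to be an
// existing video.js player instance.
function onDragStart() {
  player.scrubbing(true); // tech prefers fastSeek() on Safari while this is true
}
function onDragMove(seconds) {
  player.currentTime(seconds);
}
function onDragEnd(seconds) {
  player.scrubbing(false); // back to precise el.currentTime seeks
  player.currentTime(seconds);
}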
/**
@@ -21723,14 +23079,20 @@
var endFn = function endFn() {
this.trigger('fullscreenchange', {
isFullscreen: false
- });
+ }); // Safari will sometimes set controls on the video element when exiting fullscreen.
+
+ if (this.el_.controls && !this.options_.nativeControlsForTouch && this.controls()) {
+ this.el_.controls = false;
+ }
};
var beginFn = function beginFn() {
if ('webkitPresentationMode' in this.el_ && this.el_.webkitPresentationMode !== 'picture-in-picture') {
this.one('webkitendfullscreen', endFn);
this.trigger('fullscreenchange', {
- isFullscreen: true
+ isFullscreen: true,
+ // set a flag in case another tech triggers fullscreenchange
+ nativeIOSFullscreen: true
});
}
};
@@ -21753,7 +23115,7 @@
_proto.supportsFullScreen = function supportsFullScreen() {
if (typeof this.el_.webkitEnterFullScreen === 'function') {
- var userAgent = window$1.navigator && window$1.navigator.userAgent || ''; // Seems to be broken in Chromium/Chrome && Safari in Leopard
+ var userAgent = window.navigator && window.navigator.userAgent || ''; // Seems to be broken in Chromium/Chrome && Safari in Leopard
if (/Android/.test(userAgent) || !/Chrome|Mac OS X 10.5/.test(userAgent)) {
return true;
@@ -21773,15 +23135,24 @@
if (video.paused && video.networkState <= video.HAVE_METADATA) {
// attempt to prime the video element for programmatic access
// this isn't necessary on the desktop but shouldn't hurt
- this.el_.play(); // playing and pausing synchronously during the transition to fullscreen
+ silencePromise(this.el_.play()); // playing and pausing synchronously during the transition to fullscreen
// can get iOS ~6.1 devices into a play/pause loop
this.setTimeout(function () {
video.pause();
- video.webkitEnterFullScreen();
+
+ try {
+ video.webkitEnterFullScreen();
+ } catch (e) {
+ this.trigger('fullscreenerror', e);
+ }
}, 0);
} else {
- video.webkitEnterFullScreen();
+ try {
+ video.webkitEnterFullScreen();
+ } catch (e) {
+ this.trigger('fullscreenerror', e);
+ }
}
}
/**
@@ -21790,6 +23161,11 @@
;
_proto.exitFullScreen = function exitFullScreen() {
+ if (!this.el_.webkitDisplayingFullscreen) {
+ this.trigger('fullscreenerror', new Error('The video is not fullscreen'));
+ return;
+ }
+
this.el_.webkitExitFullScreen();
}
/**
@@ -21807,6 +23183,38 @@
_proto.requestPictureInPicture = function requestPictureInPicture() {
return this.el_.requestPictureInPicture();
}
+ /**
+ * Native requestVideoFrameCallback if supported by browser/tech, or fallback
+ * Don't use rVCF on Safari when DRM is playing, as it doesn't fire
+ * Needs to be checked later than the constructor
+ * This will be a false positive for clear sources loaded after a Fairplay source
+ *
+ * @param {function} cb function to call
+ * @return {number} id of request
+ */
+ ;
+
+ _proto.requestVideoFrameCallback = function requestVideoFrameCallback(cb) {
+ if (this.featuresVideoFrameCallback && !this.el_.webkitKeys) {
+ return this.el_.requestVideoFrameCallback(cb);
+ }
+
+ return _Tech.prototype.requestVideoFrameCallback.call(this, cb);
+ }
+ /**
+ * Native or fallback requestVideoFrameCallback
+ *
+ * @param {number} id request id to cancel
+ */
+ ;
+
+ _proto.cancelVideoFrameCallback = function cancelVideoFrameCallback(id) {
+ if (this.featuresVideoFrameCallback && !this.el_.webkitKeys) {
+ this.el_.cancelVideoFrameCallback(id);
+ } else {
+ _Tech.prototype.cancelVideoFrameCallback.call(this, id);
+ }
+ }
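// Usage sketch (illustrative; not part of the bundled video.js diff), assuming the
// player proxies requestVideoFrameCallback/cancelVideoFrameCallback to the tech as
// recent video.js releases do; otherwise the same calls work on the tech itself.
var frameId = player.requestVideoFrameCallback(function () {
  // invoked roughly once per presented frame (native rVFC where supported,
  // the Tech fallback otherwise)
  console.log('frame presented at', player.currentTime());
});

player.cancelVideoFrameCallback(frameId); // stop watching frames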
/**
* A getter/setter for the `Html5` Tech's source object.
* > Note: Please use {@link Html5#setSource}
@@ -22022,10 +23430,10 @@
videoPlaybackQuality.totalVideoFrames = this.el().webkitDecodedFrameCount;
}
- if (window$1.performance && typeof window$1.performance.now === 'function') {
- videoPlaybackQuality.creationTime = window$1.performance.now();
- } else if (window$1.performance && window$1.performance.timing && typeof window$1.performance.timing.navigationStart === 'number') {
- videoPlaybackQuality.creationTime = window$1.Date.now() - window$1.performance.timing.navigationStart;
+ if (window.performance && typeof window.performance.now === 'function') {
+ videoPlaybackQuality.creationTime = window.performance.now();
+ } else if (window.performance && window.performance.timing && typeof window.performance.timing.navigationStart === 'number') {
+ videoPlaybackQuality.creationTime = window.Date.now() - window.performance.timing.navigationStart;
}
return videoPlaybackQuality;
@@ -22117,7 +23525,23 @@
try {
var volume = Html5.TEST_VID.volume;
Html5.TEST_VID.volume = volume / 2 + 0.1;
- return volume !== Html5.TEST_VID.volume;
+ var canControl = volume !== Html5.TEST_VID.volume; // With the introduction of iOS 15, there are cases where the volume is read as
+ // changed but reverts back to its original state at the start of the next tick.
+ // To determine whether volume can be controlled on iOS,
+ // a timeout is set and the volume is checked asynchronously.
+ // Since `features` doesn't currently work asynchronously, the value is manually set.
+
+ if (canControl && IS_IOS) {
+ window.setTimeout(function () {
+ if (Html5 && Html5.prototype) {
+ Html5.prototype.featuresVolumeControl = volume !== Html5.TEST_VID.volume;
+ }
+ }); // default iOS to false, which will be updated in the timeout above.
+
+ return false;
+ }
+
+ return canControl;
} catch (e) {
return false;
}
@@ -22311,13 +23735,14 @@
* @default {@link Html5.supportsNativeAudioTracks}
*/
- [['featuresVolumeControl', 'canControlVolume'], ['featuresMuteControl', 'canMuteVolume'], ['featuresPlaybackRate', 'canControlPlaybackRate'], ['featuresSourceset', 'canOverrideAttributes'], ['featuresNativeTextTracks', 'supportsNativeTextTracks'], ['featuresNativeVideoTracks', 'supportsNativeVideoTracks'], ['featuresNativeAudioTracks', 'supportsNativeAudioTracks']].forEach(function (_ref) {
+ [['featuresMuteControl', 'canMuteVolume'], ['featuresPlaybackRate', 'canControlPlaybackRate'], ['featuresSourceset', 'canOverrideAttributes'], ['featuresNativeTextTracks', 'supportsNativeTextTracks'], ['featuresNativeVideoTracks', 'supportsNativeVideoTracks'], ['featuresNativeAudioTracks', 'supportsNativeAudioTracks']].forEach(function (_ref) {
var key = _ref[0],
fn = _ref[1];
defineLazyProperty(Html5.prototype, key, function () {
return Html5[fn]();
}, true);
});
+ Html5.prototype.featuresVolumeControl = Html5.canControlVolume();
/**
* Boolean indicating whether the `HTML5` tech currently supports the media element
* moving in the DOM. iOS breaks if you move the media element, so this is set this to
@@ -22355,7 +23780,14 @@
* @default
*/
- Html5.prototype.featuresTimeupdateEvents = true; // HTML5 Feature detection and Device Fixes --------------------------------- //
+ Html5.prototype.featuresTimeupdateEvents = true;
+ /**
+ * Whether the HTML5 el supports `requestVideoFrameCallback`
+ *
+ * @type {boolean}
+ */
+
+ Html5.prototype.featuresVideoFrameCallback = !!(Html5.TEST_VID && Html5.TEST_VID.requestVideoFrameCallback); // HTML5 Feature detection and Device Fixes --------------------------------- //
var canPlayType;
@@ -22560,7 +23992,7 @@
'muted',
/**
* Set the value of `defaultMuted` on the media element. `defaultMuted` indicates that the current
- * audio level should be silent, but will only effect the muted level on intial playback..
+ * audio level should be silent, but will only affect the muted level on initial playback.
*
* @method Html5.prototype.setDefaultMuted
* @param {boolean} defaultMuted
@@ -22610,7 +24042,7 @@
* @see [Spec]{@link https://html.spec.whatwg.org/#attr-video-playsinline}
*/
'playsinline'].forEach(function (prop) {
- Html5.prototype['set' + toTitleCase(prop)] = function (v) {
+ Html5.prototype['set' + toTitleCase$1(prop)] = function (v) {
this.el_[prop] = v;
if (v) {
@@ -22622,8 +24054,8 @@
}); // Wrap native properties with a getter
// The list is as followed
// paused, currentTime, buffered, volume, poster, preload, error, seeking
- // seekable, ended, playbackRate, defaultPlaybackRate, played, networkState
- // readyState, videoWidth, videoHeight
+ // seekable, ended, playbackRate, defaultPlaybackRate, disablePictureInPicture
+ // played, networkState, readyState, videoWidth, videoHeight, crossOrigin
[
/**
@@ -22784,6 +24216,18 @@
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-playbackrate}
*/
'defaultPlaybackRate',
+ /**
+ * Get the value of 'disablePictureInPicture' from the video element.
+ *
+ * @method Html5#disablePictureInPicture
+ * @return {boolean} value
+ * - The value of `disablePictureInPicture` from the video element.
+ * - True indicates that the video can't be played in Picture-In-Picture mode
+ * - False indicates that the video can be played in Picture-In-Picture mode
+ *
+ * @see [Spec]{@link https://w3c.github.io/picture-in-picture/#disable-pip}
+ */
+ 'disablePictureInPicture',
/**
* Get the value of `played` from the media element. `played` returns a `TimeRange`
* object representing points in the media timeline that have been played.
@@ -22853,14 +24297,28 @@
*
* @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-video-videowidth}
*/
- 'videoHeight'].forEach(function (prop) {
+ 'videoHeight',
+ /**
+ * Get the value of `crossOrigin` from the media element. `crossOrigin` indicates
+ * to the browser whether it should send cookies along with the requests for the
+ * different assets/playlists
+ *
+ * @method Html5#crossOrigin
+ * @return {string}
+ * - anonymous indicates that the media should not send cookies.
+ * - use-credentials indicates that the media should send cookies along with the requests.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/#attr-media-crossorigin}
+ */
+ 'crossOrigin'].forEach(function (prop) {
Html5.prototype[prop] = function () {
return this.el_[prop];
};
}); // Wrap native properties with a setter in this format:
// set + toTitleCase(name)
// The list is as follows:
- // setVolume, setSrc, setPoster, setPreload, setPlaybackRate, setDefaultPlaybackRate
+ // setVolume, setSrc, setPoster, setPreload, setPlaybackRate, setDefaultPlaybackRate,
+ // setDisablePictureInPicture, setCrossOrigin
[
/**
@@ -22946,8 +24404,32 @@
*
* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-defaultplaybackrate}
*/
- 'defaultPlaybackRate'].forEach(function (prop) {
- Html5.prototype['set' + toTitleCase(prop)] = function (v) {
+ 'defaultPlaybackRate',
+ /**
+ * Prevents the browser from suggesting a Picture-in-Picture context menu
+ * or to request Picture-in-Picture automatically in some cases.
+ *
+ * @method Html5#setDisablePictureInPicture
+ * @param {boolean} value
+ * The true value will disable Picture-in-Picture mode.
+ *
+ * @see [Spec]{@link https://w3c.github.io/picture-in-picture/#disable-pip}
+ */
+ 'disablePictureInPicture',
+ /**
+ * Set the value of `crossOrigin` on the media element. `crossOrigin` indicates
+ * to the browser whether it should send cookies along with the requests for the
+ * different assets/playlists
+ *
+ * @method Html5#setCrossOrigin
+ * @param {string} crossOrigin
+ * - anonymous indicates that the media should not send cookies.
+ * - use-credentials indicates that the media should send cookies along with the requests.
+ *
+ * @see [Spec]{@link https://html.spec.whatwg.org/#attr-media-crossorigin}
+ */
+ 'crossOrigin'].forEach(function (prop) {
+ Html5.prototype['set' + toTitleCase$1(prop)] = function (v) {
this.el_[prop] = v;
};
}); // wrap native functions with a function
@@ -23157,7 +24639,7 @@
*/
/**
- * Retrigger the `stalled` event that was triggered by the {@link Tech}.
+ * Retrigger the `loadedmetadata` event that was triggered by the {@link Tech}.
*
* @private
* @method Player#handleTechLoadedmetadata_
@@ -23287,9 +24769,7 @@
* @extends Component
*/
- var Player =
- /*#__PURE__*/
- function (_Component) {
+ var Player = /*#__PURE__*/function (_Component) {
inheritsLoose(Player, _Component);
/**
@@ -23350,12 +24830,54 @@
_this = _Component.call(this, null, options, ready) || this; // Create bound methods for document listeners.
- _this.boundDocumentFullscreenChange_ = bind(assertThisInitialized(_this), _this.documentFullscreenChange_);
- _this.boundFullWindowOnEscKey_ = bind(assertThisInitialized(_this), _this.fullWindowOnEscKey); // default isFullscreen_ to false
+ _this.boundDocumentFullscreenChange_ = function (e) {
+ return _this.documentFullscreenChange_(e);
+ };
+
+ _this.boundFullWindowOnEscKey_ = function (e) {
+ return _this.fullWindowOnEscKey(e);
+ };
+
+ _this.boundUpdateStyleEl_ = function (e) {
+ return _this.updateStyleEl_(e);
+ };
+
+ _this.boundApplyInitTime_ = function (e) {
+ return _this.applyInitTime_(e);
+ };
+
+ _this.boundUpdateCurrentBreakpoint_ = function (e) {
+ return _this.updateCurrentBreakpoint_(e);
+ };
+
+ _this.boundHandleTechClick_ = function (e) {
+ return _this.handleTechClick_(e);
+ };
+
+ _this.boundHandleTechDoubleClick_ = function (e) {
+ return _this.handleTechDoubleClick_(e);
+ };
+
+ _this.boundHandleTechTouchStart_ = function (e) {
+ return _this.handleTechTouchStart_(e);
+ };
+
+ _this.boundHandleTechTouchMove_ = function (e) {
+ return _this.handleTechTouchMove_(e);
+ };
+
+ _this.boundHandleTechTouchEnd_ = function (e) {
+ return _this.handleTechTouchEnd_(e);
+ };
+
+ _this.boundHandleTechTap_ = function (e) {
+ return _this.handleTechTap_(e);
+ }; // default isFullscreen_ to false
+
_this.isFullscreen_ = false; // create logger
- _this.log = createLogger$1(_this.id_); // Hold our own reference to fullscreen api so it can be mocked in tests
+ _this.log = createLogger(_this.id_); // Hold our own reference to fullscreen api so it can be mocked in tests
_this.fsApi_ = FullscreenApi; // Tracks when a tech changes the poster
@@ -23368,7 +24890,18 @@
_this.hasStarted_ = false; // Init state userActive_
- _this.userActive_ = false; // if the global option object was accidentally blown away by
+ _this.userActive_ = false; // Init debugEnabled_
+
+ _this.debugEnabled_ = false; // Init state audioOnlyMode_
+
+ _this.audioOnlyMode_ = false; // Init state audioPosterMode_
+
+ _this.audioPosterMode_ = false; // Init state audioOnlyCache_
+
+ _this.audioOnlyCache_ = {
+ playerHeight: null,
+ hiddenChildren: []
+ }; // if the global option object was accidentally blown away by
// someone, bail early with an informative error
if (!_this.options_ || !_this.options_.techOrder || !_this.options_.techOrder.length) {
@@ -23450,24 +24983,31 @@
}
if (_this.fluid_) {
- _this.on('playerreset', _this.updateStyleEl_);
+ _this.on(['playerreset', 'resize'], _this.boundUpdateStyleEl_);
} // We also want to pass the original player options to each component and plugin
// as well so they don't need to reach back into the player for options later.
// We also need to do another copy of this.options_ so we don't end up with
// an infinite loop.
- var playerOptionsCopy = mergeOptions(_this.options_); // Load plugins
+ var playerOptionsCopy = mergeOptions$3(_this.options_); // Load plugins
if (options.plugins) {
Object.keys(options.plugins).forEach(function (name) {
_this[name](options.plugins[name]);
});
+ } // Enable debug mode to fire debugon event for all plugins.
+
+
+ if (options.debug) {
+ _this.debug(true);
}
_this.options_.playerOptions = playerOptionsCopy;
_this.middleware_ = [];
+ _this.playbackRates(options.playbackRates);
+
_this.initChildren(); // Set isAudio based on whether or not an audio tag was used
@@ -23514,7 +25054,7 @@
Player.players[_this.id_] = assertThisInitialized(_this); // Add a major version class to aid css in plugins
- var majorVersion = version.split('.')[0];
+ var majorVersion = version$5.split('.')[0];
_this.addClass("vjs-v" + majorVersion); // When the player is first initialized, trigger activity so components
// like the control bar show themselves if needed
@@ -23524,15 +25064,35 @@
_this.reportUserActivity();
- _this.one('play', _this.listenForUserActivity_);
+ _this.one('play', function (e) {
+ return _this.listenForUserActivity_(e);
+ });
- _this.on('stageclick', _this.handleStageClick_);
+ _this.on('stageclick', function (e) {
+ return _this.handleStageClick_(e);
+ });
- _this.on('keydown', _this.handleKeyDown);
+ _this.on('keydown', function (e) {
+ return _this.handleKeyDown(e);
+ });
+
+ _this.on('languagechange', function (e) {
+ return _this.handleLanguagechange(e);
+ });
_this.breakpoints(_this.options_.breakpoints);
- _this.responsive(_this.options_.responsive);
+ _this.responsive(_this.options_.responsive); // Calling both the audio mode methods after the player is fully
+ // setup to be able to listen to the events triggered by them
+
+
+ _this.on('ready', function () {
+ // Calling the audioPosterMode method first so that
+ // the audioOnlyMode can take precedence when both options are set to true
+ _this.audioPosterMode(_this.options_.audioPosterMode);
+
+ _this.audioOnlyMode(_this.options_.audioOnlyMode);
+ });
return _this;
}
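// Usage sketch (illustrative; not part of the bundled video.js diff): the constructor
// above wires several new setup options once the player is ready. Assuming a global
// `videojs` and a hypothetical element id:
var optsPlayer = videojs('opts-player', {
  audioPosterMode: true,           // keep the poster up and hide the video area
  audioOnlyMode: false,            // takes precedence over audioPosterMode if both are true
  playbackRates: [0.5, 1, 1.5, 2], // cached and exposed via player.playbackRates()
  debug: true                      // fires 'debugon' so plugins can enable their logging
});

optsPlayer.on('audioonlymodechange', function () {
  console.log('audio-only mode:', optsPlayer.audioOnlyMode());
});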
@@ -23608,9 +25168,11 @@
if (list && list.off) {
list.off();
}
- }); // the actual .el_ is removed here
+ }); // the actual .el_ is removed here, or replaced if
- _Component.prototype.dispose.call(this);
+ _Component.prototype.dispose.call(this, {
+ restoreEl: this.options_.restoreEl
+ });
}
/**
* Create the `Player`'s DOM element.
@@ -23712,7 +25274,7 @@
// of the player in a way that's still overrideable by CSS, just like the
// video element
- if (window$1.VIDEOJS_NO_DYNAMIC_STYLE !== true) {
+ if (window.VIDEOJS_NO_DYNAMIC_STYLE !== true) {
this.styleEl_ = createStyleElement('vjs-styles-dimensions');
var defaultsStyleEl = $('.vjs-styles-defaults');
var head = $('head');
@@ -23726,7 +25288,9 @@
this.height(this.options_.height);
this.fill(this.options_.fill);
this.fluid(this.options_.fluid);
- this.aspectRatio(this.options_.aspectRatio); // Hide any links within the video/audio tag,
+ this.aspectRatio(this.options_.aspectRatio); // support both crossOrigin and crossorigin to reduce confusion and issues around the name
+
+ this.crossOrigin(this.options_.crossOrigin || this.options_.crossorigin); // Hide any links within the video/audio tag,
// because IE doesn't hide them completely from screen readers.
var links = tag.getElementsByTagName('a');
@@ -23755,9 +25319,40 @@
// if it's been set to something different to the doc
this.el_.setAttribute('lang', this.language_);
+ this.el_.setAttribute('translate', 'no');
this.el_ = el;
return el;
}
+ /**
+ * Get or set the `Player`'s crossOrigin option. For the HTML5 player, this
+ * sets the `crossOrigin` property on the `<video>` tag to control the CORS
+ * behavior.
+ *
+ * @see [Video Element Attributes]{@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video#attr-crossorigin}
+ *
+ * @param {string} [value]
+ * The value to set the `Player`'s crossOrigin to. If an argument is
+ * given, must be one of `anonymous` or `use-credentials`.
+ *
+ * @return {string|undefined}
+ * - The current crossOrigin value of the `Player` when getting.
+ * - undefined when setting
+ */
+ ;
+
+ _proto.crossOrigin = function crossOrigin(value) {
+ if (!value) {
+ return this.techGet_('crossOrigin');
+ }
+
+ if (value !== 'anonymous' && value !== 'use-credentials') {
+ log$1.warn("crossOrigin must be \"anonymous\" or \"use-credentials\", given \"" + value + "\"");
+ return;
+ }
+
+ this.techCall_('setCrossOrigin', value);
+ return;
+ }
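// Usage sketch (illustrative; not part of the bundled video.js diff): the option can
// be passed at setup (either `crossOrigin` or `crossorigin` spelling, per createEl
// above) or changed later through this accessor. `videojs` is assumed to be the
// global export and the element id is hypothetical.
var corsPlayer = videojs('cors-player', {
  crossOrigin: 'use-credentials' // forwarded to the tech's media element
});

corsPlayer.crossOrigin('anonymous');   // setter accepts only 'anonymous'/'use-credentials'
console.log(corsPlayer.crossOrigin()); // getter reads back from the tech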
/**
* A getter/setter for the `Player`'s width. Returns the player's configured value.
* To get the current width use `currentWidth()`.
@@ -23821,7 +25416,7 @@
var parsedVal = parseFloat(value);
if (isNaN(parsedVal)) {
- log.error("Improper value \"" + value + "\" supplied for for " + _dimension);
+ log$1.error("Improper value \"" + value + "\" supplied for for " + _dimension);
return;
}
@@ -23845,6 +25440,8 @@
;
_proto.fluid = function fluid(bool) {
+ var _this3 = this;
+
if (bool === undefined) {
return !!this.fluid_;
}
@@ -23852,14 +25449,14 @@
this.fluid_ = !!bool;
if (isEvented(this)) {
- this.off('playerreset', this.updateStyleEl_);
+ this.off(['playerreset', 'resize'], this.boundUpdateStyleEl_);
}
if (bool) {
this.addClass('vjs-fluid');
this.fill(false);
- addEventedCallback(function () {
- this.on('playerreset', this.updateStyleEl_);
+ addEventedCallback(this, function () {
+ _this3.on(['playerreset', 'resize'], _this3.boundUpdateStyleEl_);
});
} else {
this.removeClass('vjs-fluid');
@@ -23911,7 +25508,7 @@
* A getter/setter for the `Player`'s aspect ratio.
*
* @param {string} [ratio]
- * The value to set the `Player's aspect ratio to.
+ * The value to set the `Player`'s aspect ratio to.
*
* @return {string|undefined}
* - The current aspect ratio of the `Player` when getting.
@@ -23944,7 +25541,7 @@
;
_proto.updateStyleEl_ = function updateStyleEl_() {
- if (window$1.VIDEOJS_NO_DYNAMIC_STYLE === true) {
+ if (window.VIDEOJS_NO_DYNAMIC_STYLE === true) {
var _width = typeof this.width_ === 'number' ? this.width_ : this.options_.width;
var _height = typeof this.height_ === 'number' ? this.height_ : this.options_.height;
@@ -24012,7 +25609,7 @@
this.addClass(idClass);
- setTextContent(this.styleEl_, "\n ." + idClass + " {\n width: " + width + "px;\n height: " + height + "px;\n }\n\n ." + idClass + ".vjs-fluid {\n padding-top: " + ratioMultiplier * 100 + "%;\n }\n ");
+ setTextContent(this.styleEl_, "\n ." + idClass + " {\n width: " + width + "px;\n height: " + height + "px;\n }\n\n ." + idClass + ".vjs-fluid:not(.vjs-audio-only-mode) {\n padding-top: " + ratioMultiplier * 100 + "%;\n }\n ");
}
/**
* Load/Create an instance of playback {@link Tech} including element
@@ -24029,14 +25626,14 @@
;
_proto.loadTech_ = function loadTech_(techName, source) {
- var _this3 = this;
+ var _this4 = this;
// Pause and remove current playback technology
if (this.tech_) {
this.unloadTech_();
}
- var titleTechName = toTitleCase(techName);
+ var titleTechName = toTitleCase$1(techName);
var camelTechName = techName.charAt(0).toLowerCase() + techName.slice(1); // get rid of the HTML5 video tag as soon as we are using another tech
if (titleTechName !== 'Html5' && this.tag) {
@@ -24047,10 +25644,14 @@
this.techName_ = titleTechName; // Turn off API access because we're loading a new tech that might load asynchronously
- this.isReady_ = false; // if autoplay is a string we pass false to the tech
+ this.isReady_ = false;
+ var autoplay = this.autoplay(); // if autoplay is a string (or `true` with normalizeAutoplay: true) we pass false to the tech
// because the player is going to handle autoplay on `loadstart`
- var autoplay = typeof this.autoplay() === 'string' ? false : this.autoplay(); // Grab tech-specific options from player options and add source and parent element to use.
+ if (typeof this.autoplay() === 'string' || this.autoplay() === true && this.options_.normalizeAutoplay) {
+ autoplay = false;
+ } // Grab tech-specific options from player options and add source and parent element to use.
+
var techOptions = {
source: source,
@@ -24061,6 +25662,7 @@
'playsinline': this.options_.playsinline,
'preload': this.options_.preload,
'loop': this.options_.loop,
+ 'disablePictureInPicture': this.options_.disablePictureInPicture,
'muted': this.options_.muted,
'poster': this.poster(),
'language': this.language(),
@@ -24072,7 +25674,7 @@
};
ALL.names.forEach(function (name) {
var props = ALL[name];
- techOptions[props.getterName] = _this3[props.privateName];
+ techOptions[props.getterName] = _this4[props.privateName];
});
assign(techOptions, this.options_[titleTechName]);
assign(techOptions, this.options_[camelTechName]);
@@ -24099,39 +25701,76 @@
textTrackConverter.jsonToTextTracks(this.textTracksJson_ || [], this.tech_); // Listen to all HTML5-defined events and trigger them on the player
TECH_EVENTS_RETRIGGER.forEach(function (event) {
- _this3.on(_this3.tech_, event, _this3["handleTech" + toTitleCase(event) + "_"]);
+ _this4.on(_this4.tech_, event, function (e) {
+ return _this4["handleTech" + toTitleCase$1(event) + "_"](e);
+ });
});
Object.keys(TECH_EVENTS_QUEUE).forEach(function (event) {
- _this3.on(_this3.tech_, event, function (eventObj) {
- if (_this3.tech_.playbackRate() === 0 && _this3.tech_.seeking()) {
- _this3.queuedCallbacks_.push({
- callback: _this3["handleTech" + TECH_EVENTS_QUEUE[event] + "_"].bind(_this3),
+ _this4.on(_this4.tech_, event, function (eventObj) {
+ if (_this4.tech_.playbackRate() === 0 && _this4.tech_.seeking()) {
+ _this4.queuedCallbacks_.push({
+ callback: _this4["handleTech" + TECH_EVENTS_QUEUE[event] + "_"].bind(_this4),
event: eventObj
});
return;
}
- _this3["handleTech" + TECH_EVENTS_QUEUE[event] + "_"](eventObj);
+ _this4["handleTech" + TECH_EVENTS_QUEUE[event] + "_"](eventObj);
});
});
- this.on(this.tech_, 'loadstart', this.handleTechLoadStart_);
- this.on(this.tech_, 'sourceset', this.handleTechSourceset_);
- this.on(this.tech_, 'waiting', this.handleTechWaiting_);
- this.on(this.tech_, 'ended', this.handleTechEnded_);
- this.on(this.tech_, 'seeking', this.handleTechSeeking_);
- this.on(this.tech_, 'play', this.handleTechPlay_);
- this.on(this.tech_, 'firstplay', this.handleTechFirstPlay_);
- this.on(this.tech_, 'pause', this.handleTechPause_);
- this.on(this.tech_, 'durationchange', this.handleTechDurationChange_);
- this.on(this.tech_, 'fullscreenchange', this.handleTechFullscreenChange_);
- this.on(this.tech_, 'enterpictureinpicture', this.handleTechEnterPictureInPicture_);
- this.on(this.tech_, 'leavepictureinpicture', this.handleTechLeavePictureInPicture_);
- this.on(this.tech_, 'error', this.handleTechError_);
- this.on(this.tech_, 'loadedmetadata', this.updateStyleEl_);
- this.on(this.tech_, 'posterchange', this.handleTechPosterChange_);
- this.on(this.tech_, 'textdata', this.handleTechTextData_);
- this.on(this.tech_, 'ratechange', this.handleTechRateChange_);
+ this.on(this.tech_, 'loadstart', function (e) {
+ return _this4.handleTechLoadStart_(e);
+ });
+ this.on(this.tech_, 'sourceset', function (e) {
+ return _this4.handleTechSourceset_(e);
+ });
+ this.on(this.tech_, 'waiting', function (e) {
+ return _this4.handleTechWaiting_(e);
+ });
+ this.on(this.tech_, 'ended', function (e) {
+ return _this4.handleTechEnded_(e);
+ });
+ this.on(this.tech_, 'seeking', function (e) {
+ return _this4.handleTechSeeking_(e);
+ });
+ this.on(this.tech_, 'play', function (e) {
+ return _this4.handleTechPlay_(e);
+ });
+ this.on(this.tech_, 'firstplay', function (e) {
+ return _this4.handleTechFirstPlay_(e);
+ });
+ this.on(this.tech_, 'pause', function (e) {
+ return _this4.handleTechPause_(e);
+ });
+ this.on(this.tech_, 'durationchange', function (e) {
+ return _this4.handleTechDurationChange_(e);
+ });
+ this.on(this.tech_, 'fullscreenchange', function (e, data) {
+ return _this4.handleTechFullscreenChange_(e, data);
+ });
+ this.on(this.tech_, 'fullscreenerror', function (e, err) {
+ return _this4.handleTechFullscreenError_(e, err);
+ });
+ this.on(this.tech_, 'enterpictureinpicture', function (e) {
+ return _this4.handleTechEnterPictureInPicture_(e);
+ });
+ this.on(this.tech_, 'leavepictureinpicture', function (e) {
+ return _this4.handleTechLeavePictureInPicture_(e);
+ });
+ this.on(this.tech_, 'error', function (e) {
+ return _this4.handleTechError_(e);
+ });
+ this.on(this.tech_, 'posterchange', function (e) {
+ return _this4.handleTechPosterChange_(e);
+ });
+ this.on(this.tech_, 'textdata', function (e) {
+ return _this4.handleTechTextData_(e);
+ });
+ this.on(this.tech_, 'ratechange', function (e) {
+ return _this4.handleTechRateChange_(e);
+ });
+ this.on(this.tech_, 'loadedmetadata', this.boundUpdateStyleEl_);
this.usingNativeControls(this.techGet_('controls'));
if (this.controls() && !this.usingNativeControls()) {
@@ -24158,12 +25797,12 @@
;
_proto.unloadTech_ = function unloadTech_() {
- var _this4 = this;
+ var _this5 = this;
// Save the current text tracks so that we can reuse the same text tracks with the next tech
ALL.names.forEach(function (name) {
var props = ALL[name];
- _this4[props.privateName] = _this4[props.getterName]();
+ _this5[props.privateName] = _this5[props.getterName]();
});
this.textTracksJson_ = textTrackConverter.textTracksToJson(this.tech_);
this.isReady_ = false;
@@ -24192,7 +25831,7 @@
_proto.tech = function tech(safety) {
if (safety === undefined) {
- log.warn('Using the tech directly can be dangerous. I hope you know what you\'re doing.\n' + 'See https://github.com/videojs/video.js/issues/2617 for more info.\n');
+ log$1.warn('Using the tech directly can be dangerous. I hope you know what you\'re doing.\n' + 'See https://github.com/videojs/video.js/issues/2617 for more info.\n');
}
return this.tech_;
@@ -24222,22 +25861,18 @@
_proto.addTechControlsListeners_ = function addTechControlsListeners_() {
// Make sure to remove all the previous listeners in case we are called multiple times.
- this.removeTechControlsListeners_(); // Some browsers (Chrome & IE) don't trigger a click on a flash swf, but do
- // trigger mousedown/up.
- // http://stackoverflow.com/questions/1444562/javascript-onclick-event-over-flash-object
- // Any touch events are set to block the mousedown event from happening
-
- this.on(this.tech_, 'mouseup', this.handleTechClick_);
- this.on(this.tech_, 'dblclick', this.handleTechDoubleClick_); // If the controls were hidden we don't want that to change without a tap event
+ this.removeTechControlsListeners_();
+ this.on(this.tech_, 'click', this.boundHandleTechClick_);
+ this.on(this.tech_, 'dblclick', this.boundHandleTechDoubleClick_); // If the controls were hidden we don't want that to change without a tap event
// so we'll check if the controls were already showing before reporting user
// activity
- this.on(this.tech_, 'touchstart', this.handleTechTouchStart_);
- this.on(this.tech_, 'touchmove', this.handleTechTouchMove_);
- this.on(this.tech_, 'touchend', this.handleTechTouchEnd_); // The tap listener needs to come after the touchend listener because the tap
+ this.on(this.tech_, 'touchstart', this.boundHandleTechTouchStart_);
+ this.on(this.tech_, 'touchmove', this.boundHandleTechTouchMove_);
+ this.on(this.tech_, 'touchend', this.boundHandleTechTouchEnd_); // The tap listener needs to come after the touchend listener because the tap
// listener cancels out any reportedUserActivity when setting userActive(false)
- this.on(this.tech_, 'tap', this.handleTechTap_);
+ this.on(this.tech_, 'tap', this.boundHandleTechTap_);
}
/**
* Remove the listeners used for click and tap controls. This is needed for
@@ -24250,12 +25885,12 @@
_proto.removeTechControlsListeners_ = function removeTechControlsListeners_() {
// We don't want to just use `this.off()` because there might be other needed
// listeners added by techs that extend this.
- this.off(this.tech_, 'tap', this.handleTechTap_);
- this.off(this.tech_, 'touchstart', this.handleTechTouchStart_);
- this.off(this.tech_, 'touchmove', this.handleTechTouchMove_);
- this.off(this.tech_, 'touchend', this.handleTechTouchEnd_);
- this.off(this.tech_, 'mouseup', this.handleTechClick_);
- this.off(this.tech_, 'dblclick', this.handleTechDoubleClick_);
+ this.off(this.tech_, 'tap', this.boundHandleTechTap_);
+ this.off(this.tech_, 'touchstart', this.boundHandleTechTouchStart_);
+ this.off(this.tech_, 'touchmove', this.boundHandleTechTouchMove_);
+ this.off(this.tech_, 'touchend', this.boundHandleTechTouchEnd_);
+ this.off(this.tech_, 'click', this.boundHandleTechClick_);
+ this.off(this.tech_, 'dblclick', this.boundHandleTechDoubleClick_);
}
/**
* Player waits for the tech to be ready
@@ -24316,7 +25951,7 @@
// so we mimic that behavior
- this.manualAutoplay_(this.autoplay());
+ this.manualAutoplay_(this.autoplay() === true && this.options_.normalizeAutoplay ? 'play' : this.autoplay());
}
/**
* Handle autoplay string values, rather than the typical boolean
@@ -24327,44 +25962,49 @@
;
_proto.manualAutoplay_ = function manualAutoplay_(type) {
- var _this5 = this;
+ var _this6 = this;
if (!this.tech_ || typeof type !== 'string') {
return;
- }
+ } // Save original muted() value, set muted to true, and attempt to play().
+ // On promise rejection, restore muted from saved value
- var muted = function muted() {
- var previouslyMuted = _this5.muted();
- _this5.muted(true);
+ var resolveMuted = function resolveMuted() {
+ var previouslyMuted = _this6.muted();
+
+ _this6.muted(true);
var restoreMuted = function restoreMuted() {
- _this5.muted(previouslyMuted);
+ _this6.muted(previouslyMuted);
}; // restore muted on play terminatation
- _this5.playTerminatedQueue_.push(restoreMuted);
+ _this6.playTerminatedQueue_.push(restoreMuted);
- var mutedPromise = _this5.play();
+ var mutedPromise = _this6.play();
if (!isPromise(mutedPromise)) {
return;
}
- return mutedPromise["catch"](restoreMuted);
+ return mutedPromise["catch"](function (err) {
+ restoreMuted();
+ throw new Error("Rejection at manualAutoplay. Restoring muted value. " + (err ? err : ''));
+ });
};
var promise; // if muted defaults to true
// the only thing we can do is call play
- if (type === 'any' && this.muted() !== true) {
+ if (type === 'any' && !this.muted()) {
promise = this.play();
if (isPromise(promise)) {
- promise = promise["catch"](muted);
+ promise = promise["catch"](resolveMuted);
}
- } else if (type === 'muted' && this.muted() !== true) {
- promise = muted();
+ } else if (type === 'muted' && !this.muted()) {
+ promise = resolveMuted();
} else {
promise = this.play();
}
@@ -24374,12 +26014,12 @@
}
return promise.then(function () {
- _this5.trigger({
+ _this6.trigger({
type: 'autoplay-success',
autoplay: type
});
- })["catch"](function (e) {
- _this5.trigger({
+ })["catch"](function () {
+ _this6.trigger({
type: 'autoplay-failure',
autoplay: type
});
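// Usage sketch (illustrative; not part of the bundled video.js diff): with a string
// autoplay value the player drives playback itself and reports the outcome through
// the `autoplay-success` / `autoplay-failure` events triggered above. Assuming a
// global `videojs` and a hypothetical element id:
var autoPlayer = videojs('auto-player', {
  autoplay: 'muted' // 'any' and 'play' are the other string modes handled here
});

autoPlayer.on('autoplay-success', function () {
  console.log('autoplay started');
});
autoPlayer.on('autoplay-failure', function () {
  console.log('autoplay was blocked; wait for a user gesture');
});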
@@ -24421,7 +26061,7 @@
} // update `currentSource` cache always
- this.cache_.source = mergeOptions({}, srcObj, {
+ this.cache_.source = mergeOptions$3({}, srcObj, {
src: src,
type: type
});
@@ -24492,13 +26132,13 @@
;
_proto.handleTechSourceset_ = function handleTechSourceset_(event) {
- var _this6 = this;
+ var _this7 = this;
// only update the source cache when the source
// was not updated using the player api
if (!this.changingSrc_) {
var updateSourceCaches = function updateSourceCaches(src) {
- return _this6.updateSourceCaches_(src);
+ return _this7.updateSourceCaches_(src);
};
var playerSrc = this.currentSource().src;
@@ -24510,7 +26150,7 @@
if (!this.lastSource_ || this.lastSource_.tech !== eventSrc && this.lastSource_.player !== playerSrc) {
updateSourceCaches = function updateSourceCaches() {};
}
- } // update the source to the intial source right away
+ } // update the source to the initial source right away
// in some cases this will be empty string
@@ -24527,11 +26167,11 @@
return;
}
- var techSrc = _this6.techGet('currentSrc');
+ var techSrc = _this7.techGet('currentSrc');
- _this6.lastSource_.tech = techSrc;
+ _this7.lastSource_.tech = techSrc;
- _this6.updateSourceCaches_(techSrc);
+ _this7.updateSourceCaches_(techSrc);
});
}
}
@@ -24645,7 +26285,7 @@
;
_proto.handleTechWaiting_ = function handleTechWaiting_() {
- var _this7 = this;
+ var _this8 = this;
this.addClass('vjs-waiting');
/**
@@ -24661,10 +26301,10 @@
var timeWhenWaiting = this.currentTime();
var timeUpdateListener = function timeUpdateListener() {
- if (timeWhenWaiting !== _this7.currentTime()) {
- _this7.removeClass('vjs-waiting');
+ if (timeWhenWaiting !== _this8.currentTime()) {
+ _this8.removeClass('vjs-waiting');
- _this7.off('timeupdate', timeUpdateListener);
+ _this8.off('timeupdate', timeUpdateListener);
}
};
@@ -24788,7 +26428,7 @@
// If the first starttime attribute is specified
// then we will start at the given offset in seconds
if (this.options_.starttime) {
- log.warn('Passing the `starttime` option to the player will be deprecated in 6.0');
+ log$1.warn('Passing the `starttime` option to the player will be deprecated in 6.0');
this.currentTime(this.options_.starttime);
}
@@ -24837,6 +26477,7 @@
_proto.handleTechEnded_ = function handleTechEnded_() {
this.addClass('vjs-ended');
+ this.removeClass('vjs-waiting');
if (this.options_.loop) {
this.currentTime(0);
@@ -24871,26 +26512,26 @@
* @param {EventTarget~Event} event
* the event that caused this function to trigger
*
- * @listens Tech#mouseup
+ * @listens Tech#click
* @private
*/
;
_proto.handleTechClick_ = function handleTechClick_(event) {
- if (!isSingleLeftClick(event)) {
- return;
- } // When controls are disabled a click should not toggle playback because
+ // When controls are disabled a click should not toggle playback because
// the click is considered a control
-
-
if (!this.controls_) {
return;
}
- if (this.paused()) {
- silencePromise(this.play());
- } else {
- this.pause();
+ if (this.options_ === undefined || this.options_.userActions === undefined || this.options_.userActions.click === undefined || this.options_.userActions.click !== false) {
+ if (this.options_ !== undefined && this.options_.userActions !== undefined && typeof this.options_.userActions.click === 'function') {
+ this.options_.userActions.click.call(this, event);
+ } else if (this.paused()) {
+ silencePromise(this.play());
+ } else {
+ this.pause();
+ }
}
}
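// Usage sketch (illustrative; not part of the bundled video.js diff): the rewritten
// click handler consults `userActions.click`, so click-to-toggle-playback can be
// disabled (`click: false`) or replaced with a custom handler. Assuming a global
// `videojs` and a hypothetical element id:
var clickPlayer = videojs('click-player', {
  userActions: {
    click: function (event) {
      // `this` is the player inside the handler (see the .call(this, event) above)
      this.muted(!this.muted());
    }
  }
});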
/**
@@ -24984,7 +26625,9 @@
_proto.handleTechTouchEnd_ = function handleTechTouchEnd_(event) {
// Stop the mouse events from also happening
- event.preventDefault();
+ if (event.cancelable) {
+ event.preventDefault();
+ }
}
/**
* native click events on the SWF aren't triggered on IE11, Win8.1RT
@@ -25050,9 +26693,22 @@
;
_proto.handleTechFullscreenChange_ = function handleTechFullscreenChange_(event, data) {
+ var _this9 = this;
+
if (data) {
+ if (data.nativeIOSFullscreen) {
+ this.addClass('vjs-ios-native-fs');
+ this.tech_.one('webkitendfullscreen', function () {
+ _this9.removeClass('vjs-ios-native-fs');
+ });
+ }
+
this.isFullscreen(data.isFullscreen);
}
+ };
+
+ _proto.handleTechFullscreenError_ = function handleTechFullscreenError_(event, err) {
+ this.trigger('fullscreenerror', err);
}
/**
* @private
@@ -25159,6 +26815,7 @@
// we set it to zero here to ensure that if we do start actually caching
// it, we reset it along with everything else.
currentTime: 0,
+ initTime: 0,
inactivityTimeout: this.options_.inactivityTimeout,
duration: NaN,
lastVolume: 1,
@@ -25167,6 +26824,7 @@
src: '',
source: {},
sources: [],
+ playbackRates: [],
volume: 1
};
}
@@ -25197,7 +26855,7 @@
this.tech_[method](arg);
}
} catch (e) {
- log(e);
+ log$1(e);
throw e;
}
}, true);
@@ -25226,6 +26884,7 @@
return mediate(this.middleware_, this.tech_, method);
} // Flash likes to die and reload when you hide or reposition it.
// In these cases the object methods go away and we get errors.
+ // TODO: Is this needed for techs other than Flash?
// When that happens we'll catch the errors and inform tech that it's not ready any more.
@@ -25234,19 +26893,19 @@
} catch (e) {
// When building additional tech libs, an expected method may not be defined yet
if (this.tech_[method] === undefined) {
- log("Video.js: " + method + " method not defined for " + this.techName_ + " playback technology.", e);
+ log$1("Video.js: " + method + " method not defined for " + this.techName_ + " playback technology.", e);
throw e;
} // When a method isn't available on the object it throws a TypeError
if (e.name === 'TypeError') {
- log("Video.js: " + method + " unavailable on " + this.techName_ + " playback technology element.", e);
+ log$1("Video.js: " + method + " unavailable on " + this.techName_ + " playback technology element.", e);
this.tech_.isReady_ = false;
throw e;
} // If error unknown, just log and throw
- log(e);
+ log$1(e);
throw e;
}
}
@@ -25263,13 +26922,13 @@
;
_proto.play = function play() {
- var _this8 = this;
+ var _this10 = this;
- var PromiseClass = this.options_.Promise || window$1.Promise;
+ var PromiseClass = this.options_.Promise || window.Promise;
if (PromiseClass) {
return new PromiseClass(function (resolve) {
- _this8.play_(resolve);
+ _this10.play_(resolve);
});
}
@@ -25287,7 +26946,7 @@
;
_proto.play_ = function play_(callback) {
- var _this9 = this;
+ var _this11 = this;
if (callback === void 0) {
callback = silencePromise;
@@ -25305,7 +26964,7 @@
if (!this.isReady_ || !isSrcReady) {
this.waitToPlay_ = function (e) {
- _this9.play_();
+ _this11.play_();
};
this.one(['ready', 'loadstart'], this.waitToPlay_); // if we are in Safari, there is a high chance that loadstart will trigger after the gesture timeperiod
@@ -25417,6 +27076,7 @@
}
this.scrubbing_ = !!isScrubbing;
+ this.techCall_('setScrubbing', this.scrubbing_);
if (isScrubbing) {
this.addClass('vjs-scrubbing');
@@ -25441,7 +27101,15 @@
seconds = 0;
}
+ if (!this.isReady_ || this.changingSrc_ || !this.tech_ || !this.tech_.isReady_) {
+ this.cache_.initTime = seconds;
+ this.off('canplay', this.boundApplyInitTime_);
+ this.one('canplay', this.boundApplyInitTime_);
+ return;
+ }
+
this.techCall_('setCurrentTime', seconds);
+ this.cache_.initTime = 0;
return;
} // cache last currentTime and return. default to 0 seconds
//
@@ -25454,12 +27122,22 @@
this.cache_.currentTime = this.techGet_('currentTime') || 0;
return this.cache_.currentTime;
}
+ /**
+ * Apply the value of initTime stored in cache as currentTime.
+ *
+ * @private
+ */
+ ;
+
+ _proto.applyInitTime_ = function applyInitTime_() {
+ this.currentTime(this.cache_.initTime);
+ }
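// Usage sketch (illustrative; not part of the bundled video.js diff): a seek issued
// before the tech is ready is now cached as `initTime` and replayed on `canplay`
// instead of being dropped. The source URL here is hypothetical.
player.src({ src: 'https://example.com/movie.mp4', type: 'video/mp4' });
player.currentTime(42); // safe to call immediately; applied once the tech can play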
/**
* Normally gets the length in time of the video in seconds;
* in all but the rarest use cases an argument will NOT be passed to the method
*
* > **NOTE**: The video must have started loading before the duration can be
- * known, and in the case of Flash, may not be known until the video starts
+ * known, and depending on preload behaviour may not be known until the video starts
* playing.
*
* @fires Player#durationchange
@@ -25486,17 +27164,13 @@
if (seconds !== this.cache_.duration) {
// Cache the last set value for optimized scrubbing (esp. Flash)
+ // TODO: Required for techs other than Flash?
this.cache_.duration = seconds;
if (seconds === Infinity) {
this.addClass('vjs-live');
-
- if (this.options_.liveui && this.player_.liveTracker) {
- this.addClass('vjs-liveui');
- }
} else {
this.removeClass('vjs-live');
- this.removeClass('vjs-liveui');
}
if (!isNaN(seconds)) {
@@ -25707,7 +27381,7 @@
}
/**
* Check if current tech can support native fullscreen
- * (e.g. with built in controls like iOS, so not our flash swf)
+ * (e.g. with built-in controls like iOS)
*
* @return {boolean}
* if native fullscreen is supported
@@ -25772,7 +27446,42 @@
;
_proto.requestFullscreen = function requestFullscreen(fullscreenOptions) {
- var _this10 = this;
+ var PromiseClass = this.options_.Promise || window.Promise;
+
+ if (PromiseClass) {
+ var self = this;
+ return new PromiseClass(function (resolve, reject) {
+ function offHandler() {
+ self.off('fullscreenerror', errorHandler);
+ self.off('fullscreenchange', changeHandler);
+ }
+
+ function changeHandler() {
+ offHandler();
+ resolve();
+ }
+
+ function errorHandler(e, err) {
+ offHandler();
+ reject(err);
+ }
+
+ self.one('fullscreenchange', changeHandler);
+ self.one('fullscreenerror', errorHandler);
+ var promise = self.requestFullscreenHelper_(fullscreenOptions);
+
+ if (promise) {
+ promise.then(offHandler, offHandler);
+ promise.then(resolve, reject);
+ }
+ });
+ }
+
+ return this.requestFullscreenHelper_();
+ };
+
+ _proto.requestFullscreenHelper_ = function requestFullscreenHelper_(fullscreenOptions) {
+ var _this12 = this;
var fsOptions; // Only pass fullscreen options to requestFullscreen in spec-compliant browsers.
// Use defaults or player configured option unless passed directly to this method.
@@ -25797,14 +27506,14 @@
if (promise) {
promise.then(function () {
- return _this10.isFullscreen(true);
+ return _this12.isFullscreen(true);
}, function () {
- return _this10.isFullscreen(false);
+ return _this12.isFullscreen(false);
});
}
return promise;
- } else if (this.tech_.supportsFullScreen()) {
+ } else if (this.tech_.supportsFullScreen() && !this.options_.preferFullWindow === true) {
// we can't take the video.js controls fullscreen but we can go fullscreen
// with native controls
this.techCall_('enterFullScreen');
@@ -25822,19 +27531,57 @@
;
_proto.exitFullscreen = function exitFullscreen() {
- var _this11 = this;
+ var PromiseClass = this.options_.Promise || window.Promise;
+
+ if (PromiseClass) {
+ var self = this;
+ return new PromiseClass(function (resolve, reject) {
+ function offHandler() {
+ self.off('fullscreenerror', errorHandler);
+ self.off('fullscreenchange', changeHandler);
+ }
+
+ function changeHandler() {
+ offHandler();
+ resolve();
+ }
+
+ function errorHandler(e, err) {
+ offHandler();
+ reject(err);
+ }
+
+ self.one('fullscreenchange', changeHandler);
+ self.one('fullscreenerror', errorHandler);
+ var promise = self.exitFullscreenHelper_();
+
+ if (promise) {
+ promise.then(offHandler, offHandler); // map the promise to our resolve/reject methods
+
+ promise.then(resolve, reject);
+ }
+ });
+ }
+
+ return this.exitFullscreenHelper_();
+ };
+
+ _proto.exitFullscreenHelper_ = function exitFullscreenHelper_() {
+ var _this13 = this;
if (this.fsApi_.requestFullscreen) {
var promise = document[this.fsApi_.exitFullscreen]();
if (promise) {
- promise.then(function () {
- return _this11.isFullscreen(false);
- });
+ // we're splitting the promise here, so, we want to catch the
+ // potential error so that this chain doesn't have unhandled errors
+ silencePromise(promise.then(function () {
+ return _this13.isFullscreen(false);
+ }));
}
return promise;
- } else if (this.tech_.supportsFullScreen()) {
+ } else if (this.tech_.supportsFullScreen() && !this.options_.preferFullWindow === true) {
this.techCall_('exitFullScreen');
} else {
this.exitFullWindow();
@@ -25878,9 +27625,11 @@
_proto.fullWindowOnEscKey = function fullWindowOnEscKey(event) {
if (keycode.isEventKey(event, 'Esc')) {
if (this.isFullscreen() === true) {
- this.exitFullscreen();
- } else {
- this.exitFullWindow();
+ if (!this.isFullWindow) {
+ this.exitFullscreen();
+ } else {
+ this.exitFullWindow();
+ }
}
}
}
@@ -25908,6 +27657,24 @@
this.trigger('exitFullWindow');
}
+ /**
+ * Disable Picture-in-Picture mode.
+ *
+ * @param {boolean} value
+ * - true will disable Picture-in-Picture mode
+ * - false will enable Picture-in-Picture mode
+ */
+ ;
+
+ _proto.disablePictureInPicture = function disablePictureInPicture(value) {
+ if (value === undefined) {
+ return this.techGet_('disablePictureInPicture');
+ }
+
+ this.techCall_('setDisablePictureInPicture', value);
+ this.options_.disablePictureInPicture = value;
+ this.trigger('disablepictureinpicturechanged');
+ }
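// Illustrative usage of the disablePictureInPicture() getter/setter added above
// (the player id is a placeholder): setting it to true makes the guarded
// requestPictureInPicture() call below a no-op.
var pipPlayer = videojs('example-player');
pipPlayer.disablePictureInPicture(true);
pipPlayer.disablePictureInPicture(); // reads the current value back from the tech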
/**
* Check if the player is in Picture-in-Picture mode or tell the player that it
* is or is not in Picture-in-Picture mode.
@@ -25945,7 +27712,7 @@
;
_proto.requestPictureInPicture = function requestPictureInPicture() {
- if ('pictureInPictureEnabled' in document) {
+ if ('pictureInPictureEnabled' in document && this.disablePictureInPicture() === false) {
/**
* This event fires when the player enters picture in picture mode
*
@@ -26063,7 +27830,7 @@
if (fullscreenKey.call(this, event)) {
event.preventDefault();
event.stopPropagation();
- var FSToggle = Component.getComponent('FullscreenToggle');
+ var FSToggle = Component$1.getComponent('FullscreenToggle');
if (document[this.fsApi_.fullscreenEnabled] !== false) {
FSToggle.prototype.handleClick.call(this, event);
@@ -26071,12 +27838,12 @@
} else if (muteKey.call(this, event)) {
event.preventDefault();
event.stopPropagation();
- var MuteToggle = Component.getComponent('MuteToggle');
+ var MuteToggle = Component$1.getComponent('MuteToggle');
MuteToggle.prototype.handleClick.call(this, event);
} else if (playPauseKey.call(this, event)) {
event.preventDefault();
event.stopPropagation();
- var PlayToggle = Component.getComponent('PlayToggle');
+ var PlayToggle = Component$1.getComponent('PlayToggle');
PlayToggle.prototype.handleClick.call(this, event);
}
}
@@ -26102,12 +27869,12 @@
// Remove once that deprecated behavior is removed.
if (!tech) {
- tech = Component.getComponent(techName);
+ tech = Component$1.getComponent(techName);
} // Check if the current tech is defined before continuing
if (!tech) {
- log.error("The \"" + techName + "\" tech is undefined. Skipped browser support check for that tech.");
+ log$1.error("The \"" + techName + "\" tech is undefined. Skipped browser support check for that tech.");
continue;
} // Check if the browser supports this technology
@@ -26137,7 +27904,7 @@
;
_proto.selectSource = function selectSource(sources) {
- var _this12 = this;
+ var _this14 = this;
// Get only the techs specified in `techOrder` that exist and are supported by the
// current platform
@@ -26153,7 +27920,7 @@
return tech.isSupported();
}
- log.error("The \"" + techName + "\" tech is undefined. Skipped browser support check for that tech.");
+ log$1.error("The \"" + techName + "\" tech is undefined. Skipped browser support check for that tech.");
return false;
}); // Iterate over each `innerArray` element once per `outerArray` element and execute
// `tester` with both. If `tester` returns a non-falsy value, exit early and return
@@ -26185,7 +27952,7 @@
var techName = _ref2[0],
tech = _ref2[1];
- if (tech.canPlaySource(source, _this12.options_[techName.toLowerCase()])) {
+ if (tech.canPlaySource(source, _this14.options_[techName.toLowerCase()])) {
return {
source: source,
tech: techName
@@ -26205,6 +27972,122 @@
return foundSourceAndTech || false;
}
+ /**
+ * Executes source setting and getting logic
+ *
+ * @param {Tech~SourceObject|Tech~SourceObject[]|string} [source]
+ * A SourceObject, an array of SourceObjects, or a string referencing
+ * a URL to a media source. It is _highly recommended_ that an object
+ * or array of objects is used here, so that source selection
+ * algorithms can take the `type` into account.
+ *
+ * If not provided, this method acts as a getter.
+ * @param {boolean} isRetry
+ * Indicates whether this is being called internally as a result of a retry
+ *
+ * @return {string|undefined}
+ * If the `source` argument is missing, returns the current source
+ * URL. Otherwise, returns nothing/undefined.
+ */
+ ;
+
+ _proto.handleSrc_ = function handleSrc_(source, isRetry) {
+ var _this15 = this;
+
+ // getter usage
+ if (typeof source === 'undefined') {
+ return this.cache_.src || '';
+ } // Reset retry behavior for new source
+
+
+ if (this.resetRetryOnError_) {
+ this.resetRetryOnError_();
+ } // filter out invalid sources and turn our source into
+ // an array of source objects
+
+
+ var sources = filterSource(source); // if a source was passed in then it is invalid because
+ // it was filtered to a zero length Array. So we have to
+ // show an error
+
+ if (!sources.length) {
+ this.setTimeout(function () {
+ this.error({
+ code: 4,
+ message: this.options_.notSupportedMessage
+ });
+ }, 0);
+ return;
+ } // initial sources
+
+
+ this.changingSrc_ = true; // Only update the cached source list if we are not retrying a new source after error,
+ // since in that case we want to include the failed source(s) in the cache
+
+ if (!isRetry) {
+ this.cache_.sources = sources;
+ }
+
+ this.updateSourceCaches_(sources[0]); // middlewareSource is the source after it has been changed by middleware
+
+ setSource(this, sources[0], function (middlewareSource, mws) {
+ _this15.middleware_ = mws; // since sourceSet is async we have to update the cache again after we select a source since
+ // the source that is selected could be out of order from the cache update above this callback.
+
+ if (!isRetry) {
+ _this15.cache_.sources = sources;
+ }
+
+ _this15.updateSourceCaches_(middlewareSource);
+
+ var err = _this15.src_(middlewareSource);
+
+ if (err) {
+ if (sources.length > 1) {
+ return _this15.handleSrc_(sources.slice(1));
+ }
+
+ _this15.changingSrc_ = false; // We need to wrap this in a timeout to give folks a chance to add error event handlers
+
+ _this15.setTimeout(function () {
+ this.error({
+ code: 4,
+ message: this.options_.notSupportedMessage
+ });
+ }, 0); // we could not find an appropriate tech, but let's still notify the delegate that this is it
+ // this needs a better comment about why this is needed
+
+
+ _this15.triggerReady();
+
+ return;
+ }
+
+ setTech(mws, _this15.tech_);
+ }); // Try another available source if this one fails before playback.
+
+ if (this.options_.retryOnError && sources.length > 1) {
+ var retry = function retry() {
+ // Remove the error modal
+ _this15.error(null);
+
+ _this15.handleSrc_(sources.slice(1), true);
+ };
+
+ var stopListeningForErrors = function stopListeningForErrors() {
+ _this15.off('error', retry);
+ };
+
+ this.one('error', retry);
+ this.one('playing', stopListeningForErrors);
+
+ this.resetRetryOnError_ = function () {
+ _this15.off('error', retry);
+
+ _this15.off('playing', stopListeningForErrors);
+ };
+ }
+ }
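// Illustrative sketch of the retry behaviour wired up in handleSrc_() above,
// assuming a player created with the new `retryOnError` option (id and URLs are
// placeholders): if the first source errors before playback, the next is tried.
var retryPlayer = videojs('example-player', { retryOnError: true });
retryPlayer.src([
  { src: '//example.com/primary.mp4', type: 'video/mp4' },
  { src: '//example.com/fallback.mp4', type: 'video/mp4' }
]);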
/**
* Get or set the video source.
*
@@ -26223,67 +28106,7 @@
;
_proto.src = function src(source) {
- var _this13 = this;
-
- // getter usage
- if (typeof source === 'undefined') {
- return this.cache_.src || '';
- } // filter out invalid sources and turn our source into
- // an array of source objects
-
-
- var sources = filterSource(source); // if a source was passed in then it is invalid because
- // it was filtered to a zero length Array. So we have to
- // show an error
-
- if (!sources.length) {
- this.setTimeout(function () {
- this.error({
- code: 4,
- message: this.localize(this.options_.notSupportedMessage)
- });
- }, 0);
- return;
- } // intial sources
-
-
- this.changingSrc_ = true;
- this.cache_.sources = sources;
- this.updateSourceCaches_(sources[0]); // middlewareSource is the source after it has been changed by middleware
-
- setSource(this, sources[0], function (middlewareSource, mws) {
- _this13.middleware_ = mws; // since sourceSet is async we have to update the cache again after we select a source since
- // the source that is selected could be out of order from the cache update above this callback.
-
- _this13.cache_.sources = sources;
-
- _this13.updateSourceCaches_(middlewareSource);
-
- var err = _this13.src_(middlewareSource);
-
- if (err) {
- if (sources.length > 1) {
- return _this13.src(sources.slice(1));
- }
-
- _this13.changingSrc_ = false; // We need to wrap this in a timeout to give folks a chance to add error event handlers
-
- _this13.setTimeout(function () {
- this.error({
- code: 4,
- message: this.localize(this.options_.notSupportedMessage)
- });
- }, 0); // we could not find an appropriate tech, but let's still notify the delegate that this is it
- // this needs a better comment about why this is needed
-
-
- _this13.triggerReady();
-
- return;
- }
-
- setTech(mws, _this13.tech_);
- });
+ return this.handleSrc_(source, false);
}
/**
* Set the source object on the tech, returns a boolean that indicates whether
@@ -26301,7 +28124,7 @@
;
_proto.src_ = function src_(source) {
- var _this14 = this;
+ var _this16 = this;
var sourceTech = this.selectSource([source]);
@@ -26314,7 +28137,7 @@
this.loadTech_(sourceTech.tech, sourceTech.source);
this.tech_.ready(function () {
- _this14.changingSrc_ = false;
+ _this16.changingSrc_ = false;
});
return false;
} // wait until the tech is ready to set the source
@@ -26352,16 +28175,16 @@
;
_proto.reset = function reset() {
- var _this15 = this;
+ var _this17 = this;
- var PromiseClass = this.options_.Promise || window$1.Promise;
+ var PromiseClass = this.options_.Promise || window.Promise;
if (this.paused() || !PromiseClass) {
this.doReset_();
} else {
var playPromise = this.play();
silencePromise(playPromise.then(function () {
- return _this15.doReset_();
+ return _this17.doReset_();
}));
}
};
@@ -26399,9 +28222,10 @@
_proto.resetProgressBar_ = function resetProgressBar_() {
this.currentTime(0);
- var _this$controlBar = this.controlBar,
- durationDisplay = _this$controlBar.durationDisplay,
- remainingTimeDisplay = _this$controlBar.remainingTimeDisplay;
+
+ var _ref3 = this.controlBar || {},
+ durationDisplay = _ref3.durationDisplay,
+ remainingTimeDisplay = _ref3.remainingTimeDisplay;
if (durationDisplay) {
durationDisplay.updateContent();
@@ -26528,11 +28352,11 @@
return this.options_.autoplay || false;
}
- var techAutoplay; // if the value is a valid string set it to that
+ var techAutoplay; // if the value is a valid string set it to that, or normalize `true` to 'play', if need be
- if (typeof value === 'string' && /(any|play|muted)/.test(value)) {
+ if (typeof value === 'string' && /(any|play|muted)/.test(value) || value === true && this.options_.normalizeAutoplay) {
this.options_.autoplay = value;
- this.manualAutoplay_(value);
+ this.manualAutoplay_(typeof value === 'string' ? value : 'play');
techAutoplay = false; // any falsy value sets autoplay to false in the browser,
// lets do the same
} else if (!value) {
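// Illustrative sketch of the `normalizeAutoplay` option handled above (id is a
// placeholder): with it enabled, `autoplay: true` is treated like the manual
// 'play' mode rather than setting the native autoplay attribute.
var autoplayPlayer = videojs('example-player', {
  normalizeAutoplay: true,
  autoplay: true // manualAutoplay_('play') is used instead of the attribute
});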
@@ -26728,7 +28552,7 @@
}
/**
* Toggle native controls on/off. Native controls are the controls built into
- * devices (e.g. default iPhone controls), Flash, or other techs
+ * devices (e.g. default iPhone controls) or other techs
* (e.g. Vimeo Controls)
* **This should only be set by the current tech, because only the tech knows
* if it can support native controls**
@@ -26795,12 +28619,26 @@
;
_proto.error = function error(err) {
+ var _this18 = this;
+
if (err === undefined) {
return this.error_ || null;
- } // Suppress the first error message for no compatible source until
- // user interaction
+ } // allow hooks to modify error object
+ hooks('beforeerror').forEach(function (hookFunction) {
+ var newErr = hookFunction(_this18, err);
+
+ if (!(isObject$1(newErr) && !Array.isArray(newErr) || typeof newErr === 'string' || typeof newErr === 'number' || newErr === null)) {
+ _this18.log.error('please return a value that MediaError expects in beforeerror hooks');
+
+ return;
+ }
+
+ err = newErr;
+ }); // Suppress the first error message for no compatible source until
+ // user interaction
+
if (this.options_.suppressNotSupportedError && err && err.code === 4) {
var triggerSuppressedError = function triggerSuppressedError() {
this.error(err);
@@ -26831,13 +28669,17 @@
this.addClass('vjs-error'); // log the name of the error type and any message
// IE11 logs "[object object]" and required you to expand message to see error object
- log.error("(CODE:" + this.error_.code + " " + MediaError.errorTypes[this.error_.code] + ")", this.error_.message, this.error_);
+ log$1.error("(CODE:" + this.error_.code + " " + MediaError.errorTypes[this.error_.code] + ")", this.error_.message, this.error_);
/**
* @event Player#error
* @type {EventTarget~Event}
*/
- this.trigger('error');
+ this.trigger('error'); // notify hooks of the per player error
+
+ hooks('error').forEach(function (hookFunction) {
+ return hookFunction(_this18, _this18.error_);
+ });
return;
}
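// Illustrative sketch of the per-player error hooks used above (hook names come
// from the bundle; the log message is a placeholder): `beforeerror` may replace
// the error before it is wrapped in a MediaError, `error` observes the final one.
videojs.hook('beforeerror', function (player, err) {
  return err; // must return a value MediaError accepts (object, string, number or null)
});
videojs.hook('error', function (player, err) {
  videojs.log.warn('player ' + player.id() + ' errored:', err);
});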
/**
@@ -26968,7 +28810,10 @@
if (controlBar && !IS_IOS && !IS_ANDROID) {
controlBar.on('mouseenter', function (event) {
- this.player().cache_.inactivityTimeout = this.player().options_.inactivityTimeout;
+ if (this.player().options_.inactivityTimeout !== 0) {
+ this.player().cache_.inactivityTimeout = this.player().options_.inactivityTimeout;
+ }
+
this.player().options_.inactivityTimeout = 0;
});
controlBar.on('mouseleave', function (event) {
@@ -27092,6 +28937,182 @@
}
return !!this.isAudio_;
+ };
+
+ _proto.enableAudioOnlyUI_ = function enableAudioOnlyUI_() {
+ var _this19 = this;
+
+ // Update styling immediately to show the control bar so we can get its height
+ this.addClass('vjs-audio-only-mode');
+ var playerChildren = this.children();
+ var controlBar = this.getChild('ControlBar');
+ var controlBarHeight = controlBar && controlBar.currentHeight(); // Hide all player components except the control bar. Control bar components
+ // needed only for video are hidden with CSS
+
+ playerChildren.forEach(function (child) {
+ if (child === controlBar) {
+ return;
+ }
+
+ if (child.el_ && !child.hasClass('vjs-hidden')) {
+ child.hide();
+
+ _this19.audioOnlyCache_.hiddenChildren.push(child);
+ }
+ });
+ this.audioOnlyCache_.playerHeight = this.currentHeight(); // Set the player height the same as the control bar
+
+ this.height(controlBarHeight);
+ this.trigger('audioonlymodechange');
+ };
+
+ _proto.disableAudioOnlyUI_ = function disableAudioOnlyUI_() {
+ this.removeClass('vjs-audio-only-mode'); // Show player components that were previously hidden
+
+ this.audioOnlyCache_.hiddenChildren.forEach(function (child) {
+ return child.show();
+ }); // Reset player height
+
+ this.height(this.audioOnlyCache_.playerHeight);
+ this.trigger('audioonlymodechange');
+ }
+ /**
+ * Get the current audioOnlyMode state or set audioOnlyMode to true or false.
+ *
+ * Setting this to `true` will hide all player components except the control bar,
+ * as well as control bar components needed only for video.
+ *
+ * @param {boolean} [value]
+ * The value to set audioOnlyMode to.
+ *
+ * @return {Promise|boolean}
+ * A Promise is returned when setting the state, and a boolean when getting
+ * the present state
+ */
+ ;
+
+ _proto.audioOnlyMode = function audioOnlyMode(value) {
+ var _this20 = this;
+
+ if (typeof value !== 'boolean' || value === this.audioOnlyMode_) {
+ return this.audioOnlyMode_;
+ }
+
+ this.audioOnlyMode_ = value;
+ var PromiseClass = this.options_.Promise || window.Promise;
+
+ if (PromiseClass) {
+ // Enable Audio Only Mode
+ if (value) {
+ var exitPromises = []; // Fullscreen and PiP are not supported in audioOnlyMode, so exit if we need to.
+
+ if (this.isInPictureInPicture()) {
+ exitPromises.push(this.exitPictureInPicture());
+ }
+
+ if (this.isFullscreen()) {
+ exitPromises.push(this.exitFullscreen());
+ }
+
+ if (this.audioPosterMode()) {
+ exitPromises.push(this.audioPosterMode(false));
+ }
+
+ return PromiseClass.all(exitPromises).then(function () {
+ return _this20.enableAudioOnlyUI_();
+ });
+ } // Disable Audio Only Mode
+
+
+ return PromiseClass.resolve().then(function () {
+ return _this20.disableAudioOnlyUI_();
+ });
+ }
+
+ if (value) {
+ if (this.isInPictureInPicture()) {
+ this.exitPictureInPicture();
+ }
+
+ if (this.isFullscreen()) {
+ this.exitFullscreen();
+ }
+
+ this.enableAudioOnlyUI_();
+ } else {
+ this.disableAudioOnlyUI_();
+ }
+ };
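// Illustrative usage of audioOnlyMode() defined above (player id is a placeholder):
// in Promise-capable environments enabling it exits fullscreen/PiP first and
// resolves once the UI has collapsed to the control bar.
var audioPlayer = videojs('example-player');
audioPlayer.audioOnlyMode(true).then(function () {
  audioPlayer.audioOnlyMode(); // -> true
});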
+
+ _proto.enablePosterModeUI_ = function enablePosterModeUI_() {
+ // Hide the video element and show the poster image to enable posterModeUI
+ var tech = this.tech_ && this.tech_;
+ tech.hide();
+ this.addClass('vjs-audio-poster-mode');
+ this.trigger('audiopostermodechange');
+ };
+
+ _proto.disablePosterModeUI_ = function disablePosterModeUI_() {
+ // Show the video element and hide the poster image to disable posterModeUI
+ var tech = this.tech_ && this.tech_;
+ tech.show();
+ this.removeClass('vjs-audio-poster-mode');
+ this.trigger('audiopostermodechange');
+ }
+ /**
+ * Get the current audioPosterMode state or set audioPosterMode to true or false
+ *
+ * @param {boolean} [value]
+ * The value to set audioPosterMode to.
+ *
+ * @return {Promise|boolean}
+ * A Promise is returned when setting the state, and a boolean when getting
+ * the present state
+ */
+ ;
+
+ _proto.audioPosterMode = function audioPosterMode(value) {
+ var _this21 = this;
+
+ if (typeof value !== 'boolean' || value === this.audioPosterMode_) {
+ return this.audioPosterMode_;
+ }
+
+ this.audioPosterMode_ = value;
+ var PromiseClass = this.options_.Promise || window.Promise;
+
+ if (PromiseClass) {
+ if (value) {
+ if (this.audioOnlyMode()) {
+ var audioOnlyModePromise = this.audioOnlyMode(false);
+ return audioOnlyModePromise.then(function () {
+ // enable audio poster mode after audio only mode is disabled
+ _this21.enablePosterModeUI_();
+ });
+ }
+
+ return PromiseClass.resolve().then(function () {
+ // enable audio poster mode
+ _this21.enablePosterModeUI_();
+ });
+ }
+
+ return PromiseClass.resolve().then(function () {
+ // disable audio poster mode
+ _this21.disablePosterModeUI_();
+ });
+ }
+
+ if (value) {
+ if (this.audioOnlyMode()) {
+ this.audioOnlyMode(false);
+ }
+
+ this.enablePosterModeUI_();
+ return;
+ }
+
+ this.disablePosterModeUI_();
}
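// Illustrative usage of audioPosterMode() above (player id is a placeholder): in
// Promise-capable environments the setter resolves after the video element is
// hidden and the poster shown; it also turns audioOnlyMode off first if needed.
videojs('example-player').audioPosterMode(true).then(function () {
  // poster-only presentation is now active
});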
/**
* A helper method for adding a {@link TextTrack} to our
@@ -27122,15 +29143,16 @@
}
}
/**
- * Create a remote {@link TextTrack} and an {@link HTMLTrackElement}. It will
- * automatically removed from the video element whenever the source changes, unless
- * manualCleanup is set to false.
+ * Create a remote {@link TextTrack} and an {@link HTMLTrackElement}.
+ * When manualCleanup is set to false, the track will be automatically removed
+ * on source changes.
*
* @param {Object} options
* Options to pass to {@link HTMLTrackElement} during creation. See
* {@link HTMLTrackElement} for object properties that you should use.
*
* @param {boolean} [manualCleanup=true] if set to false, the TextTrack will be
+ * removed on a source change
*
* @return {HtmlTrackElement}
* the HTMLTrackElement that was created and added
@@ -27215,10 +29237,15 @@
return this.tech_ && this.tech_.videoHeight && this.tech_.videoHeight() || 0;
}
/**
- * The player's language code
- * NOTE: The language should be set in the player options if you want the
- * the controls to be built with a specific language. Changing the language
- * later will not update controls text.
+ * The player's language code.
+ *
+ * Changing the language will trigger
+ * [languagechange]{@link Player#event:languagechange}
+ * which Components can use to update control text.
+ * ClickableComponent will update its control text by default on
+ * [languagechange]{@link Player#event:languagechange}.
+ *
+ * @fires Player#languagechange
*
* @param {string} [code]
* the language code to set the player to
@@ -27233,7 +29260,19 @@
return this.language_;
}
- this.language_ = String(code).toLowerCase();
+ if (this.language_ !== String(code).toLowerCase()) {
+ this.language_ = String(code).toLowerCase(); // during first init, it's possible some things won't be evented
+
+ if (isEvented(this)) {
+ /**
+ * fires when the player language change
+ *
+ * @event Player#languagechange
+ * @type {EventTarget~Event}
+ */
+ this.trigger('languagechange');
+ }
+ }
}
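// Illustrative sketch of the languagechange event now fired by the setter above
// (id and language code are placeholders): components such as ClickableComponent
// listen for it to refresh their control text.
var localizedPlayer = videojs('example-player');
localizedPlayer.on('languagechange', function () {
  videojs.log('language is now ' + localizedPlayer.language());
});
localizedPlayer.language('es');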
/**
* Get the player's language dictionary
@@ -27246,7 +29285,7 @@
;
_proto.languages = function languages() {
- return mergeOptions(Player.prototype.options_.languages, this.languages_);
+ return mergeOptions$3(Player.prototype.options_.languages, this.languages_);
}
/**
* returns a JavaScript object representing the current track
@@ -27258,14 +29297,14 @@
;
_proto.toJSON = function toJSON() {
- var options = mergeOptions(this.options_);
+ var options = mergeOptions$3(this.options_);
var tracks = options.tracks;
options.tracks = [];
for (var i = 0; i < tracks.length; i++) {
var track = tracks[i]; // deep merge tracks and null out player so no circular references
- track = mergeOptions(track);
+ track = mergeOptions$3(track);
track.player = undefined;
options.tracks[i] = track;
}
@@ -27291,14 +29330,14 @@
;
_proto.createModal = function createModal(content, options) {
- var _this16 = this;
+ var _this22 = this;
options = options || {};
options.content = content || '';
var modal = new ModalDialog(this, options);
this.addChild(modal);
modal.on('dispose', function () {
- _this16.removeChild(modal);
+ _this22.removeChild(modal);
});
modal.open();
return modal;
@@ -27438,10 +29477,10 @@
// player is now responsive.
if (value) {
- this.on('playerresize', this.updateCurrentBreakpoint_);
+ this.on('playerresize', this.boundUpdateCurrentBreakpoint_);
this.updateCurrentBreakpoint_(); // Stop listening for breakpoints if the player is no longer responsive.
} else {
- this.off('playerresize', this.updateCurrentBreakpoint_);
+ this.off('playerresize', this.boundUpdateCurrentBreakpoint_);
this.removeCurrentBreakpoint_();
}
@@ -27529,7 +29568,7 @@
;
_proto.loadMedia = function loadMedia(media, ready) {
- var _this17 = this;
+ var _this23 = this;
if (!media || typeof media !== 'object') {
return;
@@ -27537,7 +29576,7 @@
this.reset(); // Clone the media object so it cannot be mutated from outside.
- this.cache_.media = mergeOptions(media);
+ this.cache_.media = mergeOptions$3(media);
var _this$cache_$media = this.cache_.media,
artwork = _this$cache_$media.artwork,
poster = _this$cache_$media.poster,
@@ -27561,7 +29600,7 @@
if (Array.isArray(textTracks)) {
textTracks.forEach(function (tt) {
- return _this17.addRemoteTextTrack(tt, false);
+ return _this23.addRemoteTextTrack(tt, false);
});
}
@@ -27605,7 +29644,7 @@
return media;
}
- return mergeOptions(this.cache_.media);
+ return mergeOptions$3(this.cache_.media);
}
/**
* Gets tag settings
@@ -27644,7 +29683,7 @@
data = _safeParseTuple[1];
if (err) {
- log.error(err);
+ log$1.error(err);
}
assign(tagOptions, data);
@@ -27685,10 +29724,77 @@
return !('flexBasis' in elem.style || 'webkitFlexBasis' in elem.style || 'mozFlexBasis' in elem.style || 'msFlexBasis' in elem.style || // IE10-specific (2012 flex spec), available for completeness
'msFlexOrder' in elem.style);
+ }
+ /**
+ * Set debug mode to enable/disable logs at info level.
+ *
+ * @param {boolean} enabled
+ * @fires Player#debugon
+ * @fires Player#debugoff
+ */
+ ;
+
+ _proto.debug = function debug(enabled) {
+ if (enabled === undefined) {
+ return this.debugEnabled_;
+ }
+
+ if (enabled) {
+ this.trigger('debugon');
+ this.previousLogLevel_ = this.log.level;
+ this.log.level('debug');
+ this.debugEnabled_ = true;
+ } else {
+ this.trigger('debugoff');
+ this.log.level(this.previousLogLevel_);
+ this.previousLogLevel_ = undefined;
+ this.debugEnabled_ = false;
+ }
+ }
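// Illustrative usage of the debug() toggle defined above (id is a placeholder):
// enabling it raises the player's log level to 'debug' and fires 'debugon';
// disabling restores the previous level and fires 'debugoff'.
var debugPlayer = videojs('example-player');
debugPlayer.debug(true);
debugPlayer.debug();  // -> true
debugPlayer.debug(false);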
+ /**
+ * Set or get current playback rates.
+ * Takes an array and updates the playback rates menu with the new items.
+ * Pass in an empty array to hide the menu.
+ * Values other than arrays are ignored.
+ *
+ * @fires Player#playbackrateschange
+ * @param {number[]} newRates
+ * The new rates that the playback rates menu should update to.
+ * An empty array will hide the menu
+ * @return {number[]} When used as a getter will return the current playback rates
+ */
+ ;
+
+ _proto.playbackRates = function playbackRates(newRates) {
+ if (newRates === undefined) {
+ return this.cache_.playbackRates;
+ } // ignore any value that isn't an array
+
+
+ if (!Array.isArray(newRates)) {
+ return;
+ } // ignore any arrays that don't only contain numbers
+
+
+ if (!newRates.every(function (rate) {
+ return typeof rate === 'number';
+ })) {
+ return;
+ }
+
+ this.cache_.playbackRates = newRates;
+ /**
+ * fires when the playback rates in a player are changed
+ *
+ * @event Player#playbackrateschange
+ * @type {EventTarget~Event}
+ */
+
+ this.trigger('playbackrateschange');
};
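// Illustrative usage of playbackRates() defined above (id is a placeholder): a
// numeric array updates the playback-rate menu and fires 'playbackrateschange';
// an empty array hides the menu, and non-array values are ignored.
var ratePlayer = videojs('example-player');
ratePlayer.playbackRates([0.5, 1, 1.5, 2]);
ratePlayer.playbackRates();   // -> [0.5, 1, 1.5, 2]
ratePlayer.playbackRates([]); // hides the menu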
return Player;
- }(Component);
+ }(Component$1);
/**
* Get the {@link VideoTrackList}
* @link https://html.spec.whatwg.org/multipage/embedded-content.html#videotracklist
@@ -27753,6 +29859,23 @@
return this[props.privateName];
};
});
+ /**
+ * Get or set the `Player`'s crossorigin option. For the HTML5 player, this
+ * sets the `crossOrigin` property on the `` tag to control the CORS
+ * behavior.
+ *
+ * @see [Video Element Attributes]{@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video#attr-crossorigin}
+ *
+ * @param {string} [value]
+ * The value to set the `Player`'s crossorigin to. If an argument is
+ * given, must be one of `anonymous` or `use-credentials`.
+ *
+ * @return {string|undefined}
+ * - The current crossorigin value of the `Player` when getting.
+ * - undefined when setting
+ */
+
+ Player.prototype.crossorigin = Player.prototype.crossOrigin;
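// Illustrative usage of the crossorigin alias added above (id is a placeholder):
// both spellings set the crossOrigin attribute of the underlying video element.
var corsPlayer = videojs('example-player');
corsPlayer.crossOrigin('anonymous');
corsPlayer.crossorigin(); // -> 'anonymous'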
/**
* Global enumeration of players.
*
@@ -27763,7 +29886,7 @@
*/
Player.players = {};
- var navigator = window$1.navigator;
+ var navigator = window.navigator;
/*
* Player instance options, surfaced using options
* options = Player.prototype.options_
@@ -27777,7 +29900,6 @@
// Default order of fallback technology
techOrder: Tech.defaultTechOrder_,
html5: {},
- flash: {},
// default inactivity timeout
inactivityTimeout: 2000,
// default playback rates
@@ -27792,13 +29914,16 @@
languages: {},
// Default message to show when a video cannot be played.
notSupportedMessage: 'No compatible source was found for this media.',
+ normalizeAutoplay: false,
fullscreen: {
options: {
navigationUI: 'hide'
}
},
breakpoints: {},
- responsive: false
+ responsive: false,
+ audioOnlyMode: false,
+ audioPosterMode: false
};
[
/**
@@ -27873,7 +29998,7 @@
};
});
TECH_EVENTS_RETRIGGER.forEach(function (event) {
- Player.prototype["handleTech" + toTitleCase(event) + "_"] = function () {
+ Player.prototype["handleTech" + toTitleCase$1(event) + "_"] = function () {
return this.trigger(event);
};
});
@@ -27935,7 +30060,7 @@
* Whether or not this player is using the requested plugin.
*/
- Component.registerComponent('Player', Player);
+ Component$1.registerComponent('Player', Player);
var setPrototypeOf = createCommonjsModule(function (module) {
function _setPrototypeOf(o, p) {
@@ -27950,20 +30075,22 @@
module.exports = _setPrototypeOf;
});
- var construct = createCommonjsModule(function (module) {
- function isNativeReflectConstruct() {
- if (typeof Reflect === "undefined" || !Reflect.construct) return false;
- if (Reflect.construct.sham) return false;
- if (typeof Proxy === "function") return true;
+ function _isNativeReflectConstruct() {
+ if (typeof Reflect === "undefined" || !Reflect.construct) return false;
+ if (Reflect.construct.sham) return false;
+ if (typeof Proxy === "function") return true;
- try {
- Date.prototype.toString.call(Reflect.construct(Date, [], function () {}));
- return true;
- } catch (e) {
- return false;
- }
+ try {
+ Date.prototype.toString.call(Reflect.construct(Date, [], function () {}));
+ return true;
+ } catch (e) {
+ return false;
}
+ }
+ var isNativeReflectConstruct = _isNativeReflectConstruct;
+
+ var construct = createCommonjsModule(function (module) {
function _construct(Parent, args, Class) {
if (isNativeReflectConstruct()) {
module.exports = _construct = Reflect.construct;
@@ -28182,9 +30309,7 @@
*/
- var Plugin =
- /*#__PURE__*/
- function () {
+ var Plugin = /*#__PURE__*/function () {
/**
* Creates an instance of this class.
*
@@ -28198,16 +30323,21 @@
throw new Error('Plugin must be sub-classed; not directly instantiated.');
}
- this.player = player; // Make this object evented, but remove the added `trigger` method so we
+ this.player = player;
+
+ if (!this.log) {
+ this.log = this.player.log.createLogger(this.name);
+ } // Make this object evented, but remove the added `trigger` method so we
// use the prototype version instead.
+
evented(this);
delete this.trigger;
stateful(this, this.constructor.defaultState);
markPluginAsActive(player, this.name); // Auto-bind the dispose method so we can use it as a listener and unbind
// it later easily.
- this.dispose = bind(this, this.dispose); // If the player is disposed, dispose the plugin.
+ this.dispose = this.dispose.bind(this); // If the player is disposed, dispose the plugin.
player.on('dispose', this.dispose);
}
@@ -28354,7 +30484,7 @@
}
if (pluginExists(name)) {
- log.warn("A plugin named \"" + name + "\" already exists. You may want to avoid re-registering plugins!");
+ log$1.warn("A plugin named \"" + name + "\" already exists. You may want to avoid re-registering plugins!");
} else if (Player.prototype.hasOwnProperty(name)) {
throw new Error("Illegal plugin name, \"" + name + "\", cannot share a name with an existing player method!");
}
@@ -28712,12 +30842,12 @@
*/
- function videojs$1(id, options, ready) {
- var player = videojs$1.getPlayer(id);
+ function videojs(id, options, ready) {
+ var player = videojs.getPlayer(id);
if (player) {
if (options) {
- log.warn("Player \"" + id + "\" is already initialised. Options will not be applied.");
+ log$1.warn("Player \"" + id + "\" is already initialised. Options will not be applied.");
}
if (ready) {
@@ -28740,123 +30870,43 @@
if (!el.ownerDocument.defaultView || !el.ownerDocument.body.contains(el)) {
- log.warn('The element supplied is not included in the DOM');
+ log$1.warn('The element supplied is not included in the DOM');
}
- options = options || {};
- videojs$1.hooks('beforesetup').forEach(function (hookFunction) {
- var opts = hookFunction(el, mergeOptions(options));
+ options = options || {}; // Store a copy of the el before modification, if it is to be restored in destroy()
+ // If div ingest, store the parent div
- if (!isObject(opts) || Array.isArray(opts)) {
- log.error('please return an object in beforesetup hooks');
+ if (options.restoreEl === true) {
+ options.restoreEl = (el.parentNode && el.parentNode.hasAttribute('data-vjs-player') ? el.parentNode : el).cloneNode(true);
+ }
+
+ hooks('beforesetup').forEach(function (hookFunction) {
+ var opts = hookFunction(el, mergeOptions$3(options));
+
+ if (!isObject$1(opts) || Array.isArray(opts)) {
+ log$1.error('please return an object in beforesetup hooks');
return;
}
- options = mergeOptions(options, opts);
+ options = mergeOptions$3(options, opts);
}); // We get the current "Player" component here in case an integration has
// replaced it with a custom player.
- var PlayerComponent = Component.getComponent('Player');
+ var PlayerComponent = Component$1.getComponent('Player');
player = new PlayerComponent(el, options, ready);
- videojs$1.hooks('setup').forEach(function (hookFunction) {
+ hooks('setup').forEach(function (hookFunction) {
return hookFunction(player);
});
return player;
}
- /**
- * An Object that contains lifecycle hooks as keys which point to an array
- * of functions that are run when a lifecycle is triggered
- *
- * @private
- */
+ videojs.hooks_ = hooks_;
+ videojs.hooks = hooks;
+ videojs.hook = hook;
+ videojs.hookOnce = hookOnce;
+ videojs.removeHook = removeHook; // Add default styles
- videojs$1.hooks_ = {};
- /**
- * Get a list of hooks for a specific lifecycle
- *
- * @param {string} type
- * the lifecyle to get hooks from
- *
- * @param {Function|Function[]} [fn]
- * Optionally add a hook (or hooks) to the lifecycle that your are getting.
- *
- * @return {Array}
- * an array of hooks, or an empty array if there are none.
- */
-
- videojs$1.hooks = function (type, fn) {
- videojs$1.hooks_[type] = videojs$1.hooks_[type] || [];
-
- if (fn) {
- videojs$1.hooks_[type] = videojs$1.hooks_[type].concat(fn);
- }
-
- return videojs$1.hooks_[type];
- };
- /**
- * Add a function hook to a specific videojs lifecycle.
- *
- * @param {string} type
- * the lifecycle to hook the function to.
- *
- * @param {Function|Function[]}
- * The function or array of functions to attach.
- */
-
-
- videojs$1.hook = function (type, fn) {
- videojs$1.hooks(type, fn);
- };
- /**
- * Add a function hook that will only run once to a specific videojs lifecycle.
- *
- * @param {string} type
- * the lifecycle to hook the function to.
- *
- * @param {Function|Function[]}
- * The function or array of functions to attach.
- */
-
-
- videojs$1.hookOnce = function (type, fn) {
- videojs$1.hooks(type, [].concat(fn).map(function (original) {
- var wrapper = function wrapper() {
- videojs$1.removeHook(type, wrapper);
- return original.apply(void 0, arguments);
- };
-
- return wrapper;
- }));
- };
- /**
- * Remove a hook from a specific videojs lifecycle.
- *
- * @param {string} type
- * the lifecycle that the function hooked to
- *
- * @param {Function} fn
- * The hooked function to remove
- *
- * @return {boolean}
- * The function that was removed or undef
- */
-
-
- videojs$1.removeHook = function (type, fn) {
- var index = videojs$1.hooks(type).indexOf(fn);
-
- if (index <= -1) {
- return false;
- }
-
- videojs$1.hooks_[type] = videojs$1.hooks_[type].slice();
- videojs$1.hooks_[type].splice(index, 1);
- return true;
- }; // Add default styles
-
-
- if (window$1.VIDEOJS_NO_DYNAMIC_STYLE !== true && isReal()) {
+ if (window.VIDEOJS_NO_DYNAMIC_STYLE !== true && isReal()) {
var style = $('.vjs-styles-defaults');
if (!style) {
@@ -28867,21 +30917,21 @@
head.insertBefore(style, head.firstChild);
}
- setTextContent(style, "\n .video-js {\n width: 300px;\n height: 150px;\n }\n\n .vjs-fluid {\n padding-top: 56.25%\n }\n ");
+ setTextContent(style, "\n .video-js {\n width: 300px;\n height: 150px;\n }\n\n .vjs-fluid:not(.vjs-audio-only-mode) {\n padding-top: 56.25%\n }\n ");
}
} // Run Auto-load players
// You have to wait at least once in case this script is loaded after your
// video in the DOM (weird behavior only with minified version)
- autoSetupTimeout(1, videojs$1);
+ autoSetupTimeout(1, videojs);
/**
* Current Video.js version. Follows [semantic versioning](https://semver.org/).
*
* @type {string}
*/
- videojs$1.VERSION = version;
+ videojs.VERSION = version$5;
/**
* The global options object. These are the settings that take effect
* if no overrides are specified when the player is created.
@@ -28889,7 +30939,7 @@
* @type {Object}
*/
- videojs$1.options = Player.prototype.options_;
+ videojs.options = Player.prototype.options_;
/**
* Get an object with the currently created players, keyed by player ID
*
@@ -28897,7 +30947,7 @@
* The created players
*/
- videojs$1.getPlayers = function () {
+ videojs.getPlayers = function () {
return Player.players;
};
/**
@@ -28916,7 +30966,7 @@
*/
- videojs$1.getPlayer = function (id) {
+ videojs.getPlayer = function (id) {
var players = Player.players;
var tag;
@@ -28955,7 +31005,7 @@
*/
- videojs$1.getAllPlayers = function () {
+ videojs.getAllPlayers = function () {
return (// Disposed players leave a key with a `null` value, so we need to make sure
// we filter those out.
Object.keys(Player.players).map(function (k) {
@@ -28964,8 +31014,8 @@
);
};
- videojs$1.players = Player.players;
- videojs$1.getComponent = Component.getComponent;
+ videojs.players = Player.players;
+ videojs.getComponent = Component$1.getComponent;
/**
* Register a component so it can be referred to by name. Used when adding to other
* components, either through addChild `component.addChild('myComponent')` or through
@@ -28984,17 +31034,17 @@
* The newly registered component
*/
- videojs$1.registerComponent = function (name, comp) {
+ videojs.registerComponent = function (name, comp) {
if (Tech.isTech(comp)) {
- log.warn("The " + name + " tech was registered as a component. It should instead be registered using videojs.registerTech(name, tech)");
+ log$1.warn("The " + name + " tech was registered as a component. It should instead be registered using videojs.registerTech(name, tech)");
}
- Component.registerComponent.call(Component, name, comp);
+ Component$1.registerComponent.call(Component$1, name, comp);
};
- videojs$1.getTech = Tech.getTech;
- videojs$1.registerTech = Tech.registerTech;
- videojs$1.use = use;
+ videojs.getTech = Tech.getTech;
+ videojs.registerTech = Tech.registerTech;
+ videojs.use = use;
/**
* An object that can be returned by a middleware to signify
* that the middleware is being terminated.
@@ -29003,12 +31053,12 @@
* @property {object} middleware.TERMINATOR
*/
- Object.defineProperty(videojs$1, 'middleware', {
+ Object.defineProperty(videojs, 'middleware', {
value: {},
writeable: false,
enumerable: true
});
- Object.defineProperty(videojs$1.middleware, 'TERMINATOR', {
+ Object.defineProperty(videojs.middleware, 'TERMINATOR', {
value: TERMINATOR,
writeable: false,
enumerable: true
@@ -29020,7 +31070,7 @@
* @see {@link module:browser|browser}
*/
- videojs$1.browser = browser;
+ videojs.browser = browser;
/**
* Use {@link module:browser.TOUCH_ENABLED|browser.TOUCH_ENABLED} instead; only
* included for backward-compatibility with 4.x.
@@ -29029,12 +31079,12 @@
* @type {boolean}
*/
- videojs$1.TOUCH_ENABLED = TOUCH_ENABLED;
- videojs$1.extend = extend;
- videojs$1.mergeOptions = mergeOptions;
- videojs$1.bind = bind;
- videojs$1.registerPlugin = Plugin.registerPlugin;
- videojs$1.deregisterPlugin = Plugin.deregisterPlugin;
+ videojs.TOUCH_ENABLED = TOUCH_ENABLED;
+ videojs.extend = extend;
+ videojs.mergeOptions = mergeOptions$3;
+ videojs.bind = bind;
+ videojs.registerPlugin = Plugin.registerPlugin;
+ videojs.deregisterPlugin = Plugin.deregisterPlugin;
/**
* Deprecated method to register a plugin with Video.js
*
@@ -29047,14 +31097,14 @@
* The plugin sub-class or function
*/
- videojs$1.plugin = function (name, plugin) {
- log.warn('videojs.plugin() is deprecated; use videojs.registerPlugin() instead');
+ videojs.plugin = function (name, plugin) {
+ log$1.warn('videojs.plugin() is deprecated; use videojs.registerPlugin() instead');
return Plugin.registerPlugin(name, plugin);
};
- videojs$1.getPlugins = Plugin.getPlugins;
- videojs$1.getPlugin = Plugin.getPlugin;
- videojs$1.getPluginVersion = Plugin.getPluginVersion;
+ videojs.getPlugins = Plugin.getPlugins;
+ videojs.getPlugin = Plugin.getPlugin;
+ videojs.getPluginVersion = Plugin.getPluginVersion;
/**
* Adding languages so that they're available to all players.
* Example: `videojs.addLanguage('es', { 'Hello': 'Hola' });`
@@ -29069,12 +31119,12 @@
* The resulting language dictionary object
*/
- videojs$1.addLanguage = function (code, data) {
+ videojs.addLanguage = function (code, data) {
var _mergeOptions;
code = ('' + code).toLowerCase();
- videojs$1.options.languages = mergeOptions(videojs$1.options.languages, (_mergeOptions = {}, _mergeOptions[code] = data, _mergeOptions));
- return videojs$1.options.languages[code];
+ videojs.options.languages = mergeOptions$3(videojs.options.languages, (_mergeOptions = {}, _mergeOptions[code] = data, _mergeOptions));
+ return videojs.options.languages[code];
};
/**
* A reference to the {@link module:log|log utility module} as an object.
@@ -29084,19 +31134,19 @@
*/
- videojs$1.log = log;
- videojs$1.createLogger = createLogger$1;
- videojs$1.createTimeRange = videojs$1.createTimeRanges = createTimeRanges;
- videojs$1.formatTime = formatTime;
- videojs$1.setFormatTime = setFormatTime;
- videojs$1.resetFormatTime = resetFormatTime;
- videojs$1.parseUrl = parseUrl;
- videojs$1.isCrossOrigin = isCrossOrigin;
- videojs$1.EventTarget = EventTarget;
- videojs$1.on = on;
- videojs$1.one = one;
- videojs$1.off = off;
- videojs$1.trigger = trigger;
+ videojs.log = log$1;
+ videojs.createLogger = createLogger;
+ videojs.createTimeRange = videojs.createTimeRanges = createTimeRanges;
+ videojs.formatTime = formatTime;
+ videojs.setFormatTime = setFormatTime;
+ videojs.resetFormatTime = resetFormatTime;
+ videojs.parseUrl = parseUrl;
+ videojs.isCrossOrigin = isCrossOrigin;
+ videojs.EventTarget = EventTarget$2;
+ videojs.on = on;
+ videojs.one = one;
+ videojs.off = off;
+ videojs.trigger = trigger;
/**
* A cross-browser XMLHttpRequest wrapper.
*
@@ -29110,17 +31160,17 @@
* @see https://github.com/Raynos/xhr
*/
- videojs$1.xhr = xhr;
- videojs$1.TextTrack = TextTrack;
- videojs$1.AudioTrack = AudioTrack;
- videojs$1.VideoTrack = VideoTrack;
+ videojs.xhr = lib;
+ videojs.TextTrack = TextTrack;
+ videojs.AudioTrack = AudioTrack;
+ videojs.VideoTrack = VideoTrack;
['isEl', 'isTextNode', 'createEl', 'hasClass', 'addClass', 'removeClass', 'toggleClass', 'setAttributes', 'getAttributes', 'emptyEl', 'appendContent', 'insertContent'].forEach(function (k) {
- videojs$1[k] = function () {
- log.warn("videojs." + k + "() is deprecated; use videojs.dom." + k + "() instead");
+ videojs[k] = function () {
+ log$1.warn("videojs." + k + "() is deprecated; use videojs.dom." + k + "() instead");
return Dom[k].apply(null, arguments);
};
});
- videojs$1.computedStyle = computedStyle;
+ videojs.computedStyle = computedStyle;
/**
* A reference to the {@link module:dom|DOM utility module} as an object.
*
@@ -29128,7 +31178,7 @@
* @see {@link module:dom|dom}
*/
- videojs$1.dom = Dom;
+ videojs.dom = Dom;
/**
* A reference to the {@link module:url|URL utility module} as an object.
*
@@ -29136,21 +31186,22 @@
* @see {@link module:url|url}
*/
- videojs$1.url = Url;
- videojs$1.defineLazyProperty = defineLazyProperty;
+ videojs.url = Url;
+ videojs.defineLazyProperty = defineLazyProperty; // Adding less ambiguous text for fullscreen button.
+ // In a major update this could become the default text and key.
+
+ videojs.addLanguage('en', {
+ 'Non-Fullscreen': 'Exit Fullscreen'
+ });
var urlToolkit = createCommonjsModule(function (module, exports) {
// see https://tools.ietf.org/html/rfc1808
-
- /* jshint ignore:start */
(function (root) {
- /* jshint ignore:end */
- var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/\?#]*\/)*.*?)??(;.*?)?(\?.*?)?(#.*?)?$/;
- var FIRST_SEGMENT_REGEX = /^([^\/?#]*)(.*)$/;
+ var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/?#]*\/)*[^;?#]*)?(;[^?#]*)?(\?[^#]*)?(#[^]*)?$/;
+ var FIRST_SEGMENT_REGEX = /^([^\/?#]*)([^]*)$/;
var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
- var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/).*?(?=\/)/g;
+ var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;
var URLToolkit = {
- // jshint ignore:line
// If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
// E.g
// With opts.alwaysNormalize = false (default, spec compliant)
@@ -29301,8 +31352,7 @@
// complete path segment not equal to "..", that
// "/.." is removed.
- while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {} // jshint ignore:line
-
+ while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {}
return path.split('').reverse().join('');
},
@@ -29310,61 +31360,64 @@
return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;
}
};
- /* jshint ignore:start */
-
module.exports = URLToolkit;
})();
- /* jshint ignore:end */
-
});
- /*! @name m3u8-parser @version 4.4.0 @license Apache-2.0 */
+ var DEFAULT_LOCATION = 'http://example.com';
- function _extends() {
- _extends = Object.assign || function (target) {
- for (var i = 1; i < arguments.length; i++) {
- var source = arguments[i];
+ var resolveUrl$1 = function resolveUrl(baseUrl, relativeUrl) {
+ // return early if we don't need to resolve
+ if (/^[a-z]+:/i.test(relativeUrl)) {
+ return relativeUrl;
+ } // if baseUrl is a data URI, ignore it and resolve everything relative to window.location
- for (var key in source) {
- if (Object.prototype.hasOwnProperty.call(source, key)) {
- target[key] = source[key];
- }
- }
- }
- return target;
- };
+ if (/^data:/.test(baseUrl)) {
+ baseUrl = window.location && window.location.href || '';
+ } // IE11 supports URL but not the URL constructor
+ // feature detect the behavior we want
- return _extends.apply(this, arguments);
- }
- function _inheritsLoose$1(subClass, superClass) {
- subClass.prototype = Object.create(superClass.prototype);
- subClass.prototype.constructor = subClass;
- subClass.__proto__ = superClass;
- }
+ var nativeURL = typeof window.URL === 'function';
+ var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)
+ // and if baseUrl isn't an absolute url
- function _assertThisInitialized$1(self) {
- if (self === void 0) {
- throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
+ var removeLocation = !window.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current location
+
+ if (nativeURL) {
+ baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
+ } else if (!/\/\//i.test(baseUrl)) {
+ baseUrl = urlToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);
}
- return self;
- }
+ if (nativeURL) {
+ var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
+ // and if we're location-less, remove the location
+ // otherwise, return the url unmodified
+
+ if (removeLocation) {
+ return newUrl.href.slice(DEFAULT_LOCATION.length);
+ } else if (protocolLess) {
+ return newUrl.href.slice(newUrl.protocol.length);
+ }
+
+ return newUrl.href;
+ }
+
+ return urlToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
+ };
+
/**
* @file stream.js
*/
/**
- * A lightweight readable stream implementation that handles event dispatching.
+ * A lightweight readable stream implementation that handles event dispatching.
*
* @class Stream
*/
-
-
- var Stream =
- /*#__PURE__*/
- function () {
+ var Stream = /*#__PURE__*/function () {
function Stream() {
this.listeners = {};
}
@@ -29401,7 +31454,16 @@
return false;
}
- var index = this.listeners[type].indexOf(listener);
+ var index = this.listeners[type].indexOf(listener); // TODO: which is better?
+ // In Video.js we slice listener functions
+ // on trigger so that it does not mess up the order
+ // while we loop through.
+ //
+ // Here we slice on off so that the loop in trigger
+ // can continue using its old reference to loop without
+ // messing up the order.
+
+ this.listeners[type] = this.listeners[type].slice(0);
this.listeners[type].splice(index, 1);
return index > -1;
}
@@ -29415,9 +31477,6 @@
_proto.trigger = function trigger(type) {
var callbacks = this.listeners[type];
- var i;
- var length;
- var args;
if (!callbacks) {
return;
@@ -29428,17 +31487,17 @@
if (arguments.length === 2) {
- length = callbacks.length;
+ var length = callbacks.length;
- for (i = 0; i < length; ++i) {
+ for (var i = 0; i < length; ++i) {
callbacks[i].call(this, arguments[1]);
}
} else {
- args = Array.prototype.slice.call(arguments, 1);
- length = callbacks.length;
+ var args = Array.prototype.slice.call(arguments, 1);
+ var _length = callbacks.length;
- for (i = 0; i < length; ++i) {
- callbacks[i].apply(this, args);
+ for (var _i = 0; _i < _length; ++_i) {
+ callbacks[_i].apply(this, args);
}
}
}
@@ -29468,6 +31527,23 @@
return Stream;
}();
+
+ var atob = function atob(s) {
+ return window.atob ? window.atob(s) : Buffer.from(s, 'base64').toString('binary');
+ };
+
+ function decodeB64ToUint8Array(b64Text) {
+ var decodedString = atob(b64Text);
+ var array = new Uint8Array(decodedString.length);
+
+ for (var i = 0; i < decodedString.length; i++) {
+ array[i] = decodedString.charCodeAt(i);
+ }
+
+ return array;
+ }
+
+ /*! @name m3u8-parser @version 4.7.1 @license Apache-2.0 */
/**
* A stream that buffers string input and generates a `data` event for each
* line.
@@ -29476,11 +31552,8 @@
* @extends Stream
*/
-
- var LineStream =
- /*#__PURE__*/
- function (_Stream) {
- _inheritsLoose$1(LineStream, _Stream);
+ var LineStream = /*#__PURE__*/function (_Stream) {
+ inheritsLoose(LineStream, _Stream);
function LineStream() {
var _this;
@@ -29511,6 +31584,25 @@
return LineStream;
}(Stream);
+
+ var TAB = String.fromCharCode(0x09);
+
+ var parseByterange = function parseByterange(byterangeString) {
+ // optionally match and capture 0+ digits before `@`
+ // optionally match and capture 0+ digits after `@`
+ var match = /([0-9.]*)?@?([0-9.]*)?/.exec(byterangeString || '');
+ var result = {};
+
+ if (match[1]) {
+ result.length = parseInt(match[1], 10);
+ }
+
+ if (match[2]) {
+ result.offset = parseInt(match[2], 10);
+ }
+
+ return result;
+ };
/**
* "forgiving" attribute list psuedo-grammar:
* attributes -> keyvalue (',' keyvalue)*
@@ -29533,7 +31625,7 @@
*/
- var parseAttributes = function parseAttributes(attributes) {
+ var parseAttributes$1 = function parseAttributes(attributes) {
// split the string using attributes as the separator
var attrs = attributes.split(attributeSeparator());
var result = {};
@@ -29583,10 +31675,8 @@
*/
- var ParseStream =
- /*#__PURE__*/
- function (_Stream) {
- _inheritsLoose$1(ParseStream, _Stream);
+ var ParseStream = /*#__PURE__*/function (_Stream) {
+ inheritsLoose(ParseStream, _Stream);
function ParseStream() {
var _this;
@@ -29707,23 +31797,6 @@
return;
}
- match = /^#ZEN-TOTAL-DURATION:?([0-9.]*)?/.exec(newLine);
-
- if (match) {
- event = {
- type: 'tag',
- tagType: 'totalduration'
- };
-
- if (match[1]) {
- event.duration = parseInt(match[1], 10);
- }
-
- _this2.trigger('data', event);
-
- return;
- }
-
match = /^#EXT-X-VERSION:?([0-9.]*)?/.exec(newLine);
if (match) {
@@ -29792,21 +31865,13 @@
return;
}
- match = /^#EXT-X-BYTERANGE:?([0-9.]*)?@?([0-9.]*)?/.exec(newLine);
+ match = /^#EXT-X-BYTERANGE:?(.*)?$/.exec(newLine);
if (match) {
- event = {
+ event = _extends_1(parseByterange(match[1]), {
type: 'tag',
tagType: 'byterange'
- };
-
- if (match[1]) {
- event.length = parseInt(match[1], 10);
- }
-
- if (match[2]) {
- event.offset = parseInt(match[2], 10);
- }
+ });
_this2.trigger('data', event);
@@ -29839,26 +31904,14 @@
};
if (match[1]) {
- var attributes = parseAttributes(match[1]);
+ var attributes = parseAttributes$1(match[1]);
if (attributes.URI) {
event.uri = attributes.URI;
}
if (attributes.BYTERANGE) {
- var _attributes$BYTERANGE = attributes.BYTERANGE.split('@'),
- length = _attributes$BYTERANGE[0],
- offset = _attributes$BYTERANGE[1];
-
- event.byterange = {};
-
- if (length) {
- event.byterange.length = parseInt(length, 10);
- }
-
- if (offset) {
- event.byterange.offset = parseInt(offset, 10);
- }
+ event.byterange = parseByterange(attributes.BYTERANGE);
}
}
@@ -29876,7 +31929,7 @@
};
if (match[1]) {
- event.attributes = parseAttributes(match[1]);
+ event.attributes = parseAttributes$1(match[1]);
if (event.attributes.RESOLUTION) {
var split = event.attributes.RESOLUTION.split('x');
@@ -29916,7 +31969,7 @@
};
if (match[1]) {
- event.attributes = parseAttributes(match[1]);
+ event.attributes = parseAttributes$1(match[1]);
}
_this2.trigger('data', event);
@@ -29973,7 +32026,7 @@
};
if (match[1]) {
- event.attributes = parseAttributes(match[1]); // parse the IV string into a Uint32Array
+ event.attributes = parseAttributes$1(match[1]); // parse the IV string into a Uint32Array
if (event.attributes.IV) {
if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {
@@ -30003,7 +32056,7 @@
};
if (match[1]) {
- event.attributes = parseAttributes(match[1]);
+ event.attributes = parseAttributes$1(match[1]);
event.attributes['TIME-OFFSET'] = parseFloat(event.attributes['TIME-OFFSET']);
event.attributes.PRECISE = /YES/.test(event.attributes.PRECISE);
}
@@ -30067,6 +32120,142 @@
_this2.trigger('data', event);
+ return;
+ }
+
+ match = /^#EXT-X-SKIP:(.*)$/.exec(newLine);
+
+ if (match && match[1]) {
+ event = {
+ type: 'tag',
+ tagType: 'skip'
+ };
+ event.attributes = parseAttributes$1(match[1]);
+
+ if (event.attributes.hasOwnProperty('SKIPPED-SEGMENTS')) {
+ event.attributes['SKIPPED-SEGMENTS'] = parseInt(event.attributes['SKIPPED-SEGMENTS'], 10);
+ }
+
+ if (event.attributes.hasOwnProperty('RECENTLY-REMOVED-DATERANGES')) {
+ event.attributes['RECENTLY-REMOVED-DATERANGES'] = event.attributes['RECENTLY-REMOVED-DATERANGES'].split(TAB);
+ }
+
+ _this2.trigger('data', event);
+
+ return;
+ }
+
+ match = /^#EXT-X-PART:(.*)$/.exec(newLine);
+
+ if (match && match[1]) {
+ event = {
+ type: 'tag',
+ tagType: 'part'
+ };
+ event.attributes = parseAttributes$1(match[1]);
+ ['DURATION'].forEach(function (key) {
+ if (event.attributes.hasOwnProperty(key)) {
+ event.attributes[key] = parseFloat(event.attributes[key]);
+ }
+ });
+ ['INDEPENDENT', 'GAP'].forEach(function (key) {
+ if (event.attributes.hasOwnProperty(key)) {
+ event.attributes[key] = /YES/.test(event.attributes[key]);
+ }
+ });
+
+ if (event.attributes.hasOwnProperty('BYTERANGE')) {
+ event.attributes.byterange = parseByterange(event.attributes.BYTERANGE);
+ }
+
+ _this2.trigger('data', event);
+
+ return;
+ }
+
+ match = /^#EXT-X-SERVER-CONTROL:(.*)$/.exec(newLine);
+
+ if (match && match[1]) {
+ event = {
+ type: 'tag',
+ tagType: 'server-control'
+ };
+ event.attributes = parseAttributes$1(match[1]);
+ ['CAN-SKIP-UNTIL', 'PART-HOLD-BACK', 'HOLD-BACK'].forEach(function (key) {
+ if (event.attributes.hasOwnProperty(key)) {
+ event.attributes[key] = parseFloat(event.attributes[key]);
+ }
+ });
+ ['CAN-SKIP-DATERANGES', 'CAN-BLOCK-RELOAD'].forEach(function (key) {
+ if (event.attributes.hasOwnProperty(key)) {
+ event.attributes[key] = /YES/.test(event.attributes[key]);
+ }
+ });
+
+ _this2.trigger('data', event);
+
+ return;
+ }
+
+ match = /^#EXT-X-PART-INF:(.*)$/.exec(newLine);
+
+ if (match && match[1]) {
+ event = {
+ type: 'tag',
+ tagType: 'part-inf'
+ };
+ event.attributes = parseAttributes$1(match[1]);
+ ['PART-TARGET'].forEach(function (key) {
+ if (event.attributes.hasOwnProperty(key)) {
+ event.attributes[key] = parseFloat(event.attributes[key]);
+ }
+ });
+
+ _this2.trigger('data', event);
+
+ return;
+ }
+
+ match = /^#EXT-X-PRELOAD-HINT:(.*)$/.exec(newLine);
+
+ if (match && match[1]) {
+ event = {
+ type: 'tag',
+ tagType: 'preload-hint'
+ };
+ event.attributes = parseAttributes$1(match[1]);
+ ['BYTERANGE-START', 'BYTERANGE-LENGTH'].forEach(function (key) {
+ if (event.attributes.hasOwnProperty(key)) {
+ event.attributes[key] = parseInt(event.attributes[key], 10);
+ var subkey = key === 'BYTERANGE-LENGTH' ? 'length' : 'offset';
+ event.attributes.byterange = event.attributes.byterange || {};
+ event.attributes.byterange[subkey] = event.attributes[key]; // only keep the parsed byterange object.
+
+ delete event.attributes[key];
+ }
+ });
+
+ _this2.trigger('data', event);
+
+ return;
+ }
+
+ match = /^#EXT-X-RENDITION-REPORT:(.*)$/.exec(newLine);
+
+ if (match && match[1]) {
+ event = {
+ type: 'tag',
+ tagType: 'rendition-report'
+ };
+ event.attributes = parseAttributes$1(match[1]);
+ ['LAST-MSN', 'LAST-PART'].forEach(function (key) {
+ if (event.attributes.hasOwnProperty(key)) {
+ event.attributes[key] = parseInt(event.attributes[key], 10);
+ }
+ });
+
+ _this2.trigger('data', event);
+
return;
} // unknown tag type
@@ -30144,16 +32333,69 @@
return ParseStream;
}(Stream);
- function decodeB64ToUint8Array(b64Text) {
- var decodedString = window$1.atob(b64Text || '');
- var array = new Uint8Array(decodedString.length);
+ var camelCase = function camelCase(str) {
+ return str.toLowerCase().replace(/-(\w)/g, function (a) {
+ return a[1].toUpperCase();
+ });
+ };
- for (var i = 0; i < decodedString.length; i++) {
- array[i] = decodedString.charCodeAt(i);
+ var camelCaseKeys = function camelCaseKeys(attributes) {
+ var result = {};
+ Object.keys(attributes).forEach(function (key) {
+ result[camelCase(key)] = attributes[key];
+ });
+ return result;
+ }; // set SERVER-CONTROL hold back based upon targetDuration and partTargetDuration
+ // we need this helper because defaults are based upon targetDuration and
+ // partTargetDuration being set, but they may not be if SERVER-CONTROL appears before
+ // target durations are set.
+
+
+ var setHoldBack = function setHoldBack(manifest) {
+ var serverControl = manifest.serverControl,
+ targetDuration = manifest.targetDuration,
+ partTargetDuration = manifest.partTargetDuration;
+
+ if (!serverControl) {
+ return;
}
- return array;
- }
+ var tag = '#EXT-X-SERVER-CONTROL';
+ var hb = 'holdBack';
+ var phb = 'partHoldBack';
+ var minTargetDuration = targetDuration && targetDuration * 3;
+ var minPartDuration = partTargetDuration && partTargetDuration * 2;
+
+ if (targetDuration && !serverControl.hasOwnProperty(hb)) {
+ serverControl[hb] = minTargetDuration;
+ this.trigger('info', {
+ message: tag + " defaulting HOLD-BACK to targetDuration * 3 (" + minTargetDuration + ")."
+ });
+ }
+
+ if (minTargetDuration && serverControl[hb] < minTargetDuration) {
+ this.trigger('warn', {
+ message: tag + " clamping HOLD-BACK (" + serverControl[hb] + ") to targetDuration * 3 (" + minTargetDuration + ")"
+ });
+ serverControl[hb] = minTargetDuration;
+ } // default no part hold back to part target duration * 3
+
+
+ if (partTargetDuration && !serverControl.hasOwnProperty(phb)) {
+ serverControl[phb] = partTargetDuration * 3;
+ this.trigger('info', {
+ message: tag + " defaulting PART-HOLD-BACK to partTargetDuration * 3 (" + serverControl[phb] + ")."
+ });
+ } // if part hold back is too small default it to part target duration * 2
+
+
+ if (partTargetDuration && serverControl[phb] < minPartDuration) {
+ this.trigger('warn', {
+ message: tag + " clamping PART-HOLD-BACK (" + serverControl[phb] + ") to partTargetDuration * 2 (" + minPartDuration + ")."
+ });
+ serverControl[phb] = minPartDuration;
+ }
+ };
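+ // Editorial sketch (not part of the upstream bundle): for a playlist with
+ // #EXT-X-TARGETDURATION:6 and #EXT-X-PART-INF:PART-TARGET=1, setHoldBack fills in
+ // serverControl.holdBack = 18 (targetDuration * 3) and serverControl.partHoldBack = 3
+ // (partTargetDuration * 3) when the tags omit them, and clamps author-supplied values
+ // that fall below targetDuration * 3 / partTargetDuration * 2 up to those minimums.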
/**
* A parser for M3U8 files. The current interpretation of the input is
* exposed as a property `manifest` on parser objects. It's just two lines to
@@ -30177,10 +32419,8 @@
*/
- var Parser =
- /*#__PURE__*/
- function (_Stream) {
- _inheritsLoose$1(Parser, _Stream);
+ var Parser = /*#__PURE__*/function (_Stream) {
+ inheritsLoose(Parser, _Stream);
function Parser() {
var _this;
@@ -30193,7 +32433,7 @@
/* eslint-disable consistent-this */
- var self = _assertThisInitialized$1(_this);
+ var self = assertThisInitialized(_this);
/* eslint-enable consistent-this */
@@ -30204,6 +32444,8 @@
var _key;
+ var hasParts = false;
+
var noop = function noop() {};
var defaultMediaGroups = {
@@ -30222,7 +32464,36 @@
allowCache: true,
discontinuityStarts: [],
segments: []
- }; // update the manifest with the m3u8 entry from the parse stream
+ }; // keep track of the last seen segment's byte range end, as segments are not required
+ // to provide the offset, in which case it defaults to the next byte after the
+ // previous segment
+
+ var lastByterangeEnd = 0; // keep track of the last seen part's byte range end.
+
+ var lastPartByterangeEnd = 0;
+
+ _this.on('end', function () {
+ // only add preloadSegment if we don't yet have a uri for it.
+ // and we actually have parts/preloadHints
+ if (currentUri.uri || !currentUri.parts && !currentUri.preloadHints) {
+ return;
+ }
+
+ if (!currentUri.map && currentMap) {
+ currentUri.map = currentMap;
+ }
+
+ if (!currentUri.key && _key) {
+ currentUri.key = _key;
+ }
+
+ if (!currentUri.timeline && typeof currentTimeline === 'number') {
+ currentUri.timeline = currentTimeline;
+ }
+
+ _this.manifest.preloadSegment = currentUri;
+ }); // update the manifest with the m3u8 entry from the parse stream
+
_this.parseStream.on('data', function (entry) {
var mediaGroup;
@@ -30231,6 +32502,11 @@
tag: function tag() {
// switch based on the tag type
(({
+ version: function version() {
+ if (entry.version) {
+ this.manifest.version = entry.version;
+ }
+ },
'allow-cache': function allowCache() {
this.manifest.allowCache = entry.allowed;
@@ -30249,10 +32525,17 @@
byterange.length = entry.length;
if (!('offset' in entry)) {
- this.trigger('info', {
- message: 'defaulting offset to zero'
- });
- entry.offset = 0;
+ /*
+ * From the latest spec (as of this writing):
+ * https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.2
+ *
+ * Same text since EXT-X-BYTERANGE's introduction in draft 7:
+ * https://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.1)
+ *
+ * "If o [offset] is not present, the sub-range begins at the next byte
+ * following the sub-range of the previous media segment."
+ */
+ entry.offset = lastByterangeEnd;
}
}
@@ -30260,6 +32543,8 @@
currentUri.byterange = byterange;
byterange.offset = entry.offset;
}
+
+ lastByterangeEnd = byterange.offset + byterange.length;
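+ // Editorial example (not part of the upstream bundle): if "#EXT-X-BYTERANGE:500@0"
+ // is followed by "#EXT-X-BYTERANGE:500" with no offset, the second segment starts at
+ // lastByterangeEnd (500), matching the spec language quoted above.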
},
endlist: function endlist() {
this.manifest.endList = true;
@@ -30311,6 +32596,24 @@
message: 'ignoring key declaration without URI'
});
return;
+ }
+
+ if (entry.attributes.KEYFORMAT === 'com.apple.streamingkeydelivery') {
+ this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.
+
+ this.manifest.contentProtection['com.apple.fps.1_0'] = {
+ attributes: entry.attributes
+ };
+ return;
+ }
+
+ if (entry.attributes.KEYFORMAT === 'com.microsoft.playready') {
+ this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.
+
+ this.manifest.contentProtection['com.microsoft.playready'] = {
+ uri: entry.attributes.URI
+ };
+ return;
} // check if the content is encrypted for Widevine
// Widevine/HLS spec: https://storage.googleapis.com/wvdocs/Widevine_DRM_HLS.pdf
@@ -30347,16 +32650,15 @@
// on the manifest to emulate Widevine tag structure in a DASH mpd
- this.manifest.contentProtection = {
- 'com.widevine.alpha': {
- attributes: {
- schemeIdUri: entry.attributes.KEYFORMAT,
- // remove '0x' from the key id string
- keyId: entry.attributes.KEYID.substring(2)
- },
- // decode the base64-encoded PSSH box
- pssh: decodeB64ToUint8Array(entry.attributes.URI.split(',')[1])
- }
+ this.manifest.contentProtection = this.manifest.contentProtection || {};
+ this.manifest.contentProtection['com.widevine.alpha'] = {
+ attributes: {
+ schemeIdUri: entry.attributes.KEYFORMAT,
+ // remove '0x' from the key id string
+ keyId: entry.attributes.KEYID.substring(2)
+ },
+ // decode the base64-encoded PSSH box
+ pssh: decodeB64ToUint8Array(entry.attributes.URI.split(',')[1])
};
return;
}
@@ -30418,6 +32720,10 @@
if (entry.byterange) {
currentMap.byterange = entry.byterange;
}
+
+ if (_key) {
+ currentMap.key = _key;
+ }
},
'stream-inf': function streamInf() {
this.manifest.playlists = uris;
@@ -30434,7 +32740,7 @@
currentUri.attributes = {};
}
- _extends(currentUri.attributes, entry.attributes);
+ _extends_1(currentUri.attributes, entry.attributes);
},
media: function media() {
this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
@@ -30511,16 +32817,7 @@
}
this.manifest.targetDuration = entry.duration;
- },
- totalduration: function totalduration() {
- if (!isFinite(entry.duration) || entry.duration < 0) {
- this.trigger('warn', {
- message: 'ignoring invalid total duration: ' + entry.duration
- });
- return;
- }
-
- this.manifest.totalDuration = entry.duration;
+ setHoldBack.call(this, this.manifest);
},
start: function start() {
if (!entry.attributes || isNaN(entry.attributes['TIME-OFFSET'])) {
@@ -30543,6 +32840,124 @@
},
'cue-in': function cueIn() {
currentUri.cueIn = entry.data;
+ },
+ 'skip': function skip() {
+ this.manifest.skip = camelCaseKeys(entry.attributes);
+ this.warnOnMissingAttributes_('#EXT-X-SKIP', entry.attributes, ['SKIPPED-SEGMENTS']);
+ },
+ 'part': function part() {
+ var _this2 = this;
+
+ hasParts = true; // parts are always specified before a segment
+
+ var segmentIndex = this.manifest.segments.length;
+ var part = camelCaseKeys(entry.attributes);
+ currentUri.parts = currentUri.parts || [];
+ currentUri.parts.push(part);
+
+ if (part.byterange) {
+ if (!part.byterange.hasOwnProperty('offset')) {
+ part.byterange.offset = lastPartByterangeEnd;
+ }
+
+ lastPartByterangeEnd = part.byterange.offset + part.byterange.length;
+ }
+
+ var partIndex = currentUri.parts.length - 1;
+ this.warnOnMissingAttributes_("#EXT-X-PART #" + partIndex + " for segment #" + segmentIndex, entry.attributes, ['URI', 'DURATION']);
+
+ if (this.manifest.renditionReports) {
+ this.manifest.renditionReports.forEach(function (r, i) {
+ if (!r.hasOwnProperty('lastPart')) {
+ _this2.trigger('warn', {
+ message: "#EXT-X-RENDITION-REPORT #" + i + " lacks required attribute(s): LAST-PART"
+ });
+ }
+ });
+ }
+ },
+ 'server-control': function serverControl() {
+ var attrs = this.manifest.serverControl = camelCaseKeys(entry.attributes);
+
+ if (!attrs.hasOwnProperty('canBlockReload')) {
+ attrs.canBlockReload = false;
+ this.trigger('info', {
+ message: '#EXT-X-SERVER-CONTROL defaulting CAN-BLOCK-RELOAD to false'
+ });
+ }
+
+ setHoldBack.call(this, this.manifest);
+
+ if (attrs.canSkipDateranges && !attrs.hasOwnProperty('canSkipUntil')) {
+ this.trigger('warn', {
+ message: '#EXT-X-SERVER-CONTROL lacks required attribute CAN-SKIP-UNTIL which is required when CAN-SKIP-DATERANGES is set'
+ });
+ }
+ },
+ 'preload-hint': function preloadHint() {
+ // parts are always specified before a segment
+ var segmentIndex = this.manifest.segments.length;
+ var hint = camelCaseKeys(entry.attributes);
+ var isPart = hint.type && hint.type === 'PART';
+ currentUri.preloadHints = currentUri.preloadHints || [];
+ currentUri.preloadHints.push(hint);
+
+ if (hint.byterange) {
+ if (!hint.byterange.hasOwnProperty('offset')) {
+ // use last part byterange end or zero if not a part.
+ hint.byterange.offset = isPart ? lastPartByterangeEnd : 0;
+
+ if (isPart) {
+ lastPartByterangeEnd = hint.byterange.offset + hint.byterange.length;
+ }
+ }
+ }
+
+ var index = currentUri.preloadHints.length - 1;
+ this.warnOnMissingAttributes_("#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex, entry.attributes, ['TYPE', 'URI']);
+
+ if (!hint.type) {
+ return;
+ } // search through all preload hints except for the current one for
+ // a duplicate type.
+
+
+ for (var i = 0; i < currentUri.preloadHints.length - 1; i++) {
+ var otherHint = currentUri.preloadHints[i];
+
+ if (!otherHint.type) {
+ continue;
+ }
+
+ if (otherHint.type === hint.type) {
+ this.trigger('warn', {
+ message: "#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex + " has the same TYPE " + hint.type + " as preload hint #" + i
+ });
+ }
+ }
+ },
+ 'rendition-report': function renditionReport() {
+ var report = camelCaseKeys(entry.attributes);
+ this.manifest.renditionReports = this.manifest.renditionReports || [];
+ this.manifest.renditionReports.push(report);
+ var index = this.manifest.renditionReports.length - 1;
+ var required = ['LAST-MSN', 'URI'];
+
+ if (hasParts) {
+ required.push('LAST-PART');
+ }
+
+ this.warnOnMissingAttributes_("#EXT-X-RENDITION-REPORT #" + index, entry.attributes, required);
+ },
+ 'part-inf': function partInf() {
+ this.manifest.partInf = camelCaseKeys(entry.attributes);
+ this.warnOnMissingAttributes_('#EXT-X-PART-INF', entry.attributes, ['PART-TARGET']);
+
+ if (this.manifest.partInf.partTarget) {
+ this.manifest.partTargetDuration = this.manifest.partInf.partTarget;
+ }
+
+ setHoldBack.call(this, this.manifest);
}
})[entry.tagType] || noop).call(self);
},
@@ -30566,9 +32981,11 @@
if (currentMap) {
currentUri.map = currentMap;
- } // prepare for the next URI
+ } // reset the last byterange end as it needs to be 0 between parts
+ lastPartByterangeEnd = 0; // prepare for the next URI
+
currentUri = {};
},
comment: function comment() {// comments are not important for playback
@@ -30588,14 +33005,29 @@
return _this;
}
+
+ var _proto = Parser.prototype;
+
+ _proto.warnOnMissingAttributes_ = function warnOnMissingAttributes_(identifier, attributes, required) {
+ var missing = [];
+ required.forEach(function (key) {
+ if (!attributes.hasOwnProperty(key)) {
+ missing.push(key);
+ }
+ });
+
+ if (missing.length) {
+ this.trigger('warn', {
+ message: identifier + " lacks required attribute(s): " + missing.join(', ')
+ });
+ }
+ }
/**
* Parse the input string and update the manifest object.
*
* @param {string} chunk a potentially incomplete portion of the manifest
*/
-
-
- var _proto = Parser.prototype;
+ ;
_proto.push = function push(chunk) {
this.lineStream.push(chunk);
@@ -30610,6 +33042,7 @@
_proto.end = function end() {
// flush any buffered input
this.lineStream.push('\n');
+ this.trigger('end');
}
/**
* Add an additional parser for non-standard tags
@@ -30641,9 +33074,3675 @@
return Parser;
}(Stream);
- /*! @name mpd-parser @version 0.8.1 @license Apache-2.0 */
+ var regexs = {
+ // to determine mime types
+ mp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,
+ webm: /^(vp0?[89]|av0?1|opus|vorbis)/,
+ ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,
+ // to determine if a codec is audio or video
+ video: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,
+ audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,
+ text: /^(stpp.ttml.im1t)/,
+ // mux.js support regex
+ muxerVideo: /^(avc0?1)/,
+ muxerAudio: /^(mp4a)/,
+ // match nothing as muxer does not support text right now.
+ // there can never be a character before the start of a string
+ // so this matches nothing.
+ muxerText: /a^/
+ };
+ var mediaTypes = ['video', 'audio', 'text'];
+ var upperMediaTypes = ['Video', 'Audio', 'Text'];
+ /**
+ * Replace the old apple-style `avc1..` codec string with the standard
+ * `avc1.`
+ *
+ * @param {string} codec
+ * Codec string to translate
+ * @return {string}
+ * The translated codec string
+ */
- var isObject$1 = function isObject(obj) {
+ var translateLegacyCodec = function translateLegacyCodec(codec) {
+ if (!codec) {
+ return codec;
+ }
+
+ return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
+ var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
+ var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
+ return 'avc1.' + profileHex + '00' + avcLevelHex;
+ });
+ };
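+ // Editorial example (not part of the upstream bundle): the legacy Apple-style string
+ // 'avc1.66.30' (profile 66, level 30, both decimal) becomes 'avc1.42001e'
+ // (0x42 and 0x1e padded to two hex digits around a '00' constraint byte).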
+ /**
+ * @typedef {Object} ParsedCodecInfo
+ * @property {number} codecCount
+ * Number of codecs parsed
+ * @property {string} [videoCodec]
+ * Parsed video codec (if found)
+ * @property {string} [videoObjectTypeIndicator]
+ * Video object type indicator (if found)
+ * @property {string|null} audioProfile
+ * Audio profile
+ */
+
+ /**
+ * Parses a codec string to retrieve the number of codecs specified, the video codec and
+ * object type indicator, and the audio profile.
+ *
+ * @param {string} [codecString]
+ * The codec string to parse
+ * @return {ParsedCodecInfo}
+ * Parsed codec info
+ */
+
+ var parseCodecs = function parseCodecs(codecString) {
+ if (codecString === void 0) {
+ codecString = '';
+ }
+
+ var codecs = codecString.split(',');
+ var result = [];
+ codecs.forEach(function (codec) {
+ codec = codec.trim();
+ var codecType;
+ mediaTypes.forEach(function (name) {
+ var match = regexs[name].exec(codec.toLowerCase());
+
+ if (!match || match.length <= 1) {
+ return;
+ }
+
+ codecType = name; // maintain codec case
+
+ var type = codec.substring(0, match[1].length);
+ var details = codec.replace(type, '');
+ result.push({
+ type: type,
+ details: details,
+ mediaType: name
+ });
+ });
+
+ if (!codecType) {
+ result.push({
+ type: codec,
+ details: '',
+ mediaType: 'unknown'
+ });
+ }
+ });
+ return result;
+ };
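+ // Editorial example (not part of the upstream bundle):
+ // parseCodecs('avc1.4d400d,mp4a.40.2') returns
+ // [{ type: 'avc1', details: '.4d400d', mediaType: 'video' },
+ //  { type: 'mp4a', details: '.40.2', mediaType: 'audio' }];
+ // anything that matches none of the regexes comes back with mediaType 'unknown'.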
+ /**
+ * Returns a ParsedCodecInfo object for the default alternate audio playlist if there is
+ * a default alternate audio playlist for the provided audio group.
+ *
+ * @param {Object} master
+ * The master playlist
+ * @param {string} audioGroupId
+ * ID of the audio group for which to find the default codec info
+ * @return {ParsedCodecInfo}
+ * Parsed codec info
+ */
+
+ var codecsFromDefault = function codecsFromDefault(master, audioGroupId) {
+ if (!master.mediaGroups.AUDIO || !audioGroupId) {
+ return null;
+ }
+
+ var audioGroup = master.mediaGroups.AUDIO[audioGroupId];
+
+ if (!audioGroup) {
+ return null;
+ }
+
+ for (var name in audioGroup) {
+ var audioType = audioGroup[name];
+
+ if (audioType["default"] && audioType.playlists) {
+ // codec should be the same for all playlists within the audio type
+ return parseCodecs(audioType.playlists[0].attributes.CODECS);
+ }
+ }
+
+ return null;
+ };
+ var isAudioCodec = function isAudioCodec(codec) {
+ if (codec === void 0) {
+ codec = '';
+ }
+
+ return regexs.audio.test(codec.trim().toLowerCase());
+ };
+ var isTextCodec = function isTextCodec(codec) {
+ if (codec === void 0) {
+ codec = '';
+ }
+
+ return regexs.text.test(codec.trim().toLowerCase());
+ };
+ var getMimeForCodec = function getMimeForCodec(codecString) {
+ if (!codecString || typeof codecString !== 'string') {
+ return;
+ }
+
+ var codecs = codecString.toLowerCase().split(',').map(function (c) {
+ return translateLegacyCodec(c.trim());
+ }); // default to video type
+
+ var type = 'video'; // only change to audio type if the only codec we have is
+ // audio
+
+ if (codecs.length === 1 && isAudioCodec(codecs[0])) {
+ type = 'audio';
+ } else if (codecs.length === 1 && isTextCodec(codecs[0])) {
+ // text uses application/ for now
+ type = 'application';
+ } // default the container to mp4
+
+
+ var container = 'mp4'; // every codec must be able to go into the container
+ // for that container to be the correct one
+
+ if (codecs.every(function (c) {
+ return regexs.mp4.test(c);
+ })) {
+ container = 'mp4';
+ } else if (codecs.every(function (c) {
+ return regexs.webm.test(c);
+ })) {
+ container = 'webm';
+ } else if (codecs.every(function (c) {
+ return regexs.ogg.test(c);
+ })) {
+ container = 'ogg';
+ }
+
+ return type + "/" + container + ";codecs=\"" + codecString + "\"";
+ };
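+ // Editorial example (not part of the upstream bundle):
+ // getMimeForCodec('avc1.4d400d,mp4a.40.2') === 'video/mp4;codecs="avc1.4d400d,mp4a.40.2"'
+ // and getMimeForCodec('opus') === 'audio/mp4;codecs="opus"', since a lone audio codec
+ // switches the type to 'audio' while the container check still prefers mp4.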
+ var browserSupportsCodec = function browserSupportsCodec(codecString) {
+ if (codecString === void 0) {
+ codecString = '';
+ }
+
+ return window.MediaSource && window.MediaSource.isTypeSupported && window.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
+ };
+ var muxerSupportsCodec = function muxerSupportsCodec(codecString) {
+ if (codecString === void 0) {
+ codecString = '';
+ }
+
+ return codecString.toLowerCase().split(',').every(function (codec) {
+ codec = codec.trim(); // any match is supported.
+
+ for (var i = 0; i < upperMediaTypes.length; i++) {
+ var type = upperMediaTypes[i];
+
+ if (regexs["muxer" + type].test(codec)) {
+ return true;
+ }
+ }
+
+ return false;
+ });
+ };
+ var DEFAULT_AUDIO_CODEC = 'mp4a.40.2';
+ var DEFAULT_VIDEO_CODEC = 'avc1.4d400d';
+
+ var MPEGURL_REGEX = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
+ var DASH_REGEX = /^application\/dash\+xml/i;
+ /**
+ * Returns a string that describes the type of source based on a video source object's
+ * media type.
+ *
+ * @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}
+ *
+ * @param {string} type
+ * Video source object media type
+ * @return {('hls'|'dash'|'vhs-json'|null)}
+ * VHS source type string
+ */
+
+ var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
+ if (MPEGURL_REGEX.test(type)) {
+ return 'hls';
+ }
+
+ if (DASH_REGEX.test(type)) {
+ return 'dash';
+ } // Denotes the special case of a manifest object passed to http-streaming instead of a
+ // source URL.
+ //
+ // See https://en.wikipedia.org/wiki/Media_type for details on specifying media types.
+ //
+ // In this case, vnd stands for vendor, video.js for the organization, VHS for this
+ // project, and the +json suffix identifies the structure of the media type.
+
+
+ if (type === 'application/vnd.videojs.vhs+json') {
+ return 'vhs-json';
+ }
+
+ return null;
+ };
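+ // Editorial example (not part of the upstream bundle):
+ // simpleTypeFromSourceType('application/x-mpegURL') === 'hls',
+ // simpleTypeFromSourceType('application/dash+xml') === 'dash',
+ // and unrecognized types (e.g. 'video/mp4') return null.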
+
+ // const log2 = Math.log2 ? Math.log2 : (x) => (Math.log(x) / Math.log(2));
+ // we used to do this with log2 but BigInt does not support builtin math
+ // Math.ceil(log2(x));
+
+
+ var countBits = function countBits(x) {
+ return x.toString(2).length;
+ }; // count the number of whole bytes it would take to represent a number
+
+ var countBytes = function countBytes(x) {
+ return Math.ceil(countBits(x) / 8);
+ };
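+ // Editorial example (not part of the upstream bundle): countBits(255) === 8 and
+ // countBytes(255) === 1, while countBits(256) === 9 so countBytes(256) === 2.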
+ var isArrayBufferView = function isArrayBufferView(obj) {
+ if (typeof ArrayBuffer.isView === 'function') {
+ return ArrayBuffer.isView(obj);
+ }
+
+ return obj && obj.buffer instanceof ArrayBuffer;
+ };
+ var isTypedArray = function isTypedArray(obj) {
+ return isArrayBufferView(obj);
+ };
+ var toUint8 = function toUint8(bytes) {
+ if (bytes instanceof Uint8Array) {
+ return bytes;
+ }
+
+ if (!Array.isArray(bytes) && !isTypedArray(bytes) && !(bytes instanceof ArrayBuffer)) {
+ // any non-number or NaN leads to empty uint8array
+ // eslint-disable-next-line
+ if (typeof bytes !== 'number' || typeof bytes === 'number' && bytes !== bytes) {
+ bytes = 0;
+ } else {
+ bytes = [bytes];
+ }
+ }
+
+ return new Uint8Array(bytes && bytes.buffer || bytes, bytes && bytes.byteOffset || 0, bytes && bytes.byteLength || 0);
+ };
+ var BigInt = window.BigInt || Number;
+ var BYTE_TABLE = [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
+ var bytesToNumber = function bytesToNumber(bytes, _temp) {
+ var _ref = _temp === void 0 ? {} : _temp,
+ _ref$signed = _ref.signed,
+ signed = _ref$signed === void 0 ? false : _ref$signed,
+ _ref$le = _ref.le,
+ le = _ref$le === void 0 ? false : _ref$le;
+
+ bytes = toUint8(bytes);
+ var fn = le ? 'reduce' : 'reduceRight';
+ var obj = bytes[fn] ? bytes[fn] : Array.prototype[fn];
+ var number = obj.call(bytes, function (total, _byte, i) {
+ var exponent = le ? i : Math.abs(i + 1 - bytes.length);
+ return total + BigInt(_byte) * BYTE_TABLE[exponent];
+ }, BigInt(0));
+
+ if (signed) {
+ var max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);
+ number = BigInt(number);
+
+ if (number > max) {
+ number -= max;
+ number -= max;
+ number -= BigInt(2);
+ }
+ }
+
+ return Number(number);
+ };
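+ // Editorial example (not part of the upstream bundle): bytesToNumber([0x01, 0x00])
+ // reads big-endian by default and returns 256; bytesToNumber([0x01, 0x00], { le: true })
+ // returns 1.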
+ var numberToBytes = function numberToBytes(number, _temp2) {
+ var _ref2 = _temp2 === void 0 ? {} : _temp2,
+ _ref2$le = _ref2.le,
+ le = _ref2$le === void 0 ? false : _ref2$le; // eslint-disable-next-line
+
+
+ if (typeof number !== 'bigint' && typeof number !== 'number' || typeof number === 'number' && number !== number) {
+ number = 0;
+ }
+
+ number = BigInt(number);
+ var byteCount = countBytes(number);
+ var bytes = new Uint8Array(new ArrayBuffer(byteCount));
+
+ for (var i = 0; i < byteCount; i++) {
+ var byteIndex = le ? i : Math.abs(i + 1 - bytes.length);
+ bytes[byteIndex] = Number(number / BYTE_TABLE[i] & BigInt(0xFF));
+
+ if (number < 0) {
+ bytes[byteIndex] = Math.abs(~bytes[byteIndex]);
+ bytes[byteIndex] -= i === 0 ? 1 : 2;
+ }
+ }
+
+ return bytes;
+ };
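+ // Editorial example (not part of the upstream bundle): numberToBytes(256) yields
+ // Uint8Array [0x01, 0x00] (big-endian), and numberToBytes(256, { le: true }) yields
+ // [0x00, 0x01], so it round-trips with bytesToNumber above.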
+ var stringToBytes = function stringToBytes(string, stringIsBytes) {
+ if (typeof string !== 'string' && string && typeof string.toString === 'function') {
+ string = string.toString();
+ }
+
+ if (typeof string !== 'string') {
+ return new Uint8Array();
+ } // If the string already is bytes, we don't have to do this
+ // otherwise we do this so that we split multi length characters
+ // into individual bytes
+
+
+ if (!stringIsBytes) {
+ string = unescape(encodeURIComponent(string));
+ }
+
+ var view = new Uint8Array(string.length);
+
+ for (var i = 0; i < string.length; i++) {
+ view[i] = string.charCodeAt(i);
+ }
+
+ return view;
+ };
+ var concatTypedArrays = function concatTypedArrays() {
+ for (var _len = arguments.length, buffers = new Array(_len), _key = 0; _key < _len; _key++) {
+ buffers[_key] = arguments[_key];
+ }
+
+ buffers = buffers.filter(function (b) {
+ return b && (b.byteLength || b.length) && typeof b !== 'string';
+ });
+
+ if (buffers.length <= 1) {
+ // for 0 length we will return empty uint8
+ // for 1 length we return the first uint8
+ return toUint8(buffers[0]);
+ }
+
+ var totalLen = buffers.reduce(function (total, buf, i) {
+ return total + (buf.byteLength || buf.length);
+ }, 0);
+ var tempBuffer = new Uint8Array(totalLen);
+ var offset = 0;
+ buffers.forEach(function (buf) {
+ buf = toUint8(buf);
+ tempBuffer.set(buf, offset);
+ offset += buf.byteLength;
+ });
+ return tempBuffer;
+ };
+ /**
+ * Check if the bytes "b" are contained within bytes "a".
+ *
+ * @param {Uint8Array|Array} a
+ * Bytes to check in
+ *
+ * @param {Uint8Array|Array} b
+ * Bytes to check for
+ *
+ * @param {Object} options
+ * options
+ *
+ * @param {Array|Uint8Array} [offset=0]
+ * offset to use when looking at bytes in a
+ *
+ * @param {Array|Uint8Array} [mask=[]]
+ * mask to use on bytes before comparison.
+ *
+ * @return {boolean}
+ * If all bytes in b are inside of a, taking into account
+ * bit masks.
+ */
+
+ var bytesMatch = function bytesMatch(a, b, _temp3) {
+ var _ref3 = _temp3 === void 0 ? {} : _temp3,
+ _ref3$offset = _ref3.offset,
+ offset = _ref3$offset === void 0 ? 0 : _ref3$offset,
+ _ref3$mask = _ref3.mask,
+ mask = _ref3$mask === void 0 ? [] : _ref3$mask;
+
+ a = toUint8(a);
+ b = toUint8(b); // ie 11 does not support uint8 every
+
+ var fn = b.every ? b.every : Array.prototype.every;
+ return b.length && a.length - offset >= b.length && // ie 11 doesn't support every on uint8
+ fn.call(b, function (bByte, i) {
+ var aByte = mask[i] ? mask[i] & a[offset + i] : a[offset + i];
+ return bByte === aByte;
+ });
+ };
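+ // Editorial example (not part of the upstream bundle): with an ID3 header
+ // bytes = [0x49, 0x44, 0x33, ...], bytesMatch(bytes, [0x49, 0x44, 0x33]) is true,
+ // and bytesMatch(bytes, [0x44, 0x33], { offset: 1 }) is also true; a mask entry is
+ // ANDed with the corresponding byte of `a` before the comparison.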
+
+ /**
+ * Loops through all supported media groups in master and calls the provided
+ * callback for each group
+ *
+ * @param {Object} master
+ * The parsed master manifest object
+ * @param {string[]} groups
+ * The media groups to call the callback for
+ * @param {Function} callback
+ * Callback to call for each media group
+ */
+ var forEachMediaGroup$1 = function forEachMediaGroup(master, groups, callback) {
+ groups.forEach(function (mediaType) {
+ for (var groupKey in master.mediaGroups[mediaType]) {
+ for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
+ var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
+ callback(mediaProperties, mediaType, groupKey, labelKey);
+ }
+ }
+ });
+ };
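+ // Editorial sketch (not part of the upstream bundle): given a parsed master playlist,
+ // forEachMediaGroup$1(master, ['AUDIO', 'SUBTITLES'], function (properties, type, group, label) {
+ //   // e.g. type === 'AUDIO', group === 'audio-hi', label === 'English' (hypothetical names)
+ // });
+ // visits every labelled rendition in the listed media groups.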
+
+ /**
+ * "Shallow freezes" an object to render it immutable.
+ * Uses `Object.freeze` if available,
+ * otherwise the immutability is only in the type.
+ *
+ * Is used to create "enum like" objects.
+ *
+ * @template T
+ * @param {T} object the object to freeze
+ * @param {Pick<typeof Object, 'freeze'> = Object} oc `Object` by default,
+ * allows to inject custom object constructor for tests
+ * @returns {Readonly<T>}
+ *
+ * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze
+ */
+
+ function freeze(object, oc) {
+ if (oc === undefined) {
+ oc = Object;
+ }
+
+ return oc && typeof oc.freeze === 'function' ? oc.freeze(object) : object;
+ }
+ /**
+ * All mime types that are allowed as input to `DOMParser.parseFromString`
+ *
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/DOMParser/parseFromString#Argument02 MDN
+ * @see https://html.spec.whatwg.org/multipage/dynamic-markup-insertion.html#domparsersupportedtype WHATWG HTML Spec
+ * @see DOMParser.prototype.parseFromString
+ */
+
+
+ var MIME_TYPE = freeze({
+ /**
+ * `text/html`, the only mime type that triggers treating an XML document as HTML.
+ *
+ * @see DOMParser.SupportedType.isHTML
+ * @see https://www.iana.org/assignments/media-types/text/html IANA MimeType registration
+ * @see https://en.wikipedia.org/wiki/HTML Wikipedia
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/DOMParser/parseFromString MDN
+ * @see https://html.spec.whatwg.org/multipage/dynamic-markup-insertion.html#dom-domparser-parsefromstring WHATWG HTML Spec
+ */
+ HTML: 'text/html',
+
+ /**
+ * Helper method to check a mime type if it indicates an HTML document
+ *
+ * @param {string} [value]
+ * @returns {boolean}
+ *
+ * @see https://www.iana.org/assignments/media-types/text/html IANA MimeType registration
+ * @see https://en.wikipedia.org/wiki/HTML Wikipedia
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/DOMParser/parseFromString MDN
+ * @see https://html.spec.whatwg.org/multipage/dynamic-markup-insertion.html#dom-domparser-parsefromstring */
+ isHTML: function isHTML(value) {
+ return value === MIME_TYPE.HTML;
+ },
+
+ /**
+ * `application/xml`, the standard mime type for XML documents.
+ *
+ * @see https://www.iana.org/assignments/media-types/application/xml IANA MimeType registration
+ * @see https://tools.ietf.org/html/rfc7303#section-9.1 RFC 7303
+ * @see https://en.wikipedia.org/wiki/XML_and_MIME Wikipedia
+ */
+ XML_APPLICATION: 'application/xml',
+
+ /**
+ * `text/xml`, an alias for `application/xml`.
+ *
+ * @see https://tools.ietf.org/html/rfc7303#section-9.2 RFC 7303
+ * @see https://www.iana.org/assignments/media-types/text/xml IANA MimeType registration
+ * @see https://en.wikipedia.org/wiki/XML_and_MIME Wikipedia
+ */
+ XML_TEXT: 'text/xml',
+
+ /**
+ * `application/xhtml+xml`, indicates an XML document that has the default HTML namespace,
+ * but is parsed as an XML document.
+ *
+ * @see https://www.iana.org/assignments/media-types/application/xhtml+xml IANA MimeType registration
+ * @see https://dom.spec.whatwg.org/#dom-domimplementation-createdocument WHATWG DOM Spec
+ * @see https://en.wikipedia.org/wiki/XHTML Wikipedia
+ */
+ XML_XHTML_APPLICATION: 'application/xhtml+xml',
+
+ /**
+ * `image/svg+xml`,
+ *
+ * @see https://www.iana.org/assignments/media-types/image/svg+xml IANA MimeType registration
+ * @see https://www.w3.org/TR/SVG11/ W3C SVG 1.1
+ * @see https://en.wikipedia.org/wiki/Scalable_Vector_Graphics Wikipedia
+ */
+ XML_SVG_IMAGE: 'image/svg+xml'
+ });
+ /**
+ * Namespaces that are used in this code base.
+ *
+ * @see http://www.w3.org/TR/REC-xml-names
+ */
+
+ var NAMESPACE$3 = freeze({
+ /**
+ * The XHTML namespace.
+ *
+ * @see http://www.w3.org/1999/xhtml
+ */
+ HTML: 'http://www.w3.org/1999/xhtml',
+
+ /**
+ * Checks if `uri` equals `NAMESPACE.HTML`.
+ *
+ * @param {string} [uri]
+ *
+ * @see NAMESPACE.HTML
+ */
+ isHTML: function isHTML(uri) {
+ return uri === NAMESPACE$3.HTML;
+ },
+
+ /**
+ * The SVG namespace.
+ *
+ * @see http://www.w3.org/2000/svg
+ */
+ SVG: 'http://www.w3.org/2000/svg',
+
+ /**
+ * The `xml:` namespace.
+ *
+ * @see http://www.w3.org/XML/1998/namespace
+ */
+ XML: 'http://www.w3.org/XML/1998/namespace',
+
+ /**
+ * The `xmlns:` namespace
+ *
+ * @see https://www.w3.org/2000/xmlns/
+ */
+ XMLNS: 'http://www.w3.org/2000/xmlns/'
+ });
+ var freeze_1 = freeze;
+ var MIME_TYPE_1 = MIME_TYPE;
+ var NAMESPACE_1 = NAMESPACE$3;
+ var conventions = {
+ freeze: freeze_1,
+ MIME_TYPE: MIME_TYPE_1,
+ NAMESPACE: NAMESPACE_1
+ };
+
+ var NAMESPACE$2 = conventions.NAMESPACE;
+ /**
+ * A prerequisite for `[].filter`, to drop elements that are empty
+ * @param {string} input
+ * @returns {boolean}
+ */
+
+ function notEmptyString(input) {
+ return input !== '';
+ }
+ /**
+ * @see https://infra.spec.whatwg.org/#split-on-ascii-whitespace
+ * @see https://infra.spec.whatwg.org/#ascii-whitespace
+ *
+ * @param {string} input
+ * @returns {string[]} (can be empty)
+ */
+
+
+ function splitOnASCIIWhitespace(input) {
+ // U+0009 TAB, U+000A LF, U+000C FF, U+000D CR, U+0020 SPACE
+ return input ? input.split(/[\t\n\f\r ]+/).filter(notEmptyString) : [];
+ }
+ /**
+ * Adds element as a key to current if it is not already present.
+ *
+ * @param {Record<string, boolean>} current
+ * @param {string} element
+ * @returns {Record<string, boolean>}
+ */
+
+
+ function orderedSetReducer(current, element) {
+ if (!current.hasOwnProperty(element)) {
+ current[element] = true;
+ }
+
+ return current;
+ }
+ /**
+ * @see https://infra.spec.whatwg.org/#ordered-set
+ * @param {string} input
+ * @returns {string[]}
+ */
+
+
+ function toOrderedSet(input) {
+ if (!input) return [];
+ var list = splitOnASCIIWhitespace(input);
+ return Object.keys(list.reduce(orderedSetReducer, {}));
+ }
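+ // Editorial example (not part of the upstream bundle):
+ // toOrderedSet(' foo bar\tfoo  baz ') returns ['foo', 'bar', 'baz'] -- ASCII-whitespace
+ // split, duplicates dropped, first-seen order preserved.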
+ /**
+ * Uses `list.indexOf` to implement something like `Array.prototype.includes`,
+ * which we can not rely on being available.
+ *
+ * @param {any[]} list
+ * @returns {function(any): boolean}
+ */
+
+
+ function arrayIncludes(list) {
+ return function (element) {
+ return list && list.indexOf(element) !== -1;
+ };
+ }
+
+ function copy(src, dest) {
+ for (var p in src) {
+ dest[p] = src[p];
+ }
+ }
+ /**
+ ^\w+\.prototype\.([_\w]+)\s*=\s*((?:.*\{\s*?[\r\n][\s\S]*?^})|\S.*?(?=[;\r\n]));?
+ ^\w+\.prototype\.([_\w]+)\s*=\s*(\S.*?(?=[;\r\n]));?
+ */
+
+
+ function _extends(Class, Super) {
+ var pt = Class.prototype;
+
+ if (!(pt instanceof Super)) {
+ var t = function t() {};
+ t.prototype = Super.prototype;
+ t = new t();
+ copy(pt, t);
+ Class.prototype = pt = t;
+ }
+
+ if (pt.constructor != Class) {
+ if (typeof Class != 'function') {
+ console.error("unknown Class:" + Class);
+ }
+
+ pt.constructor = Class;
+ }
+ } // Node Types
+
+
+ var NodeType = {};
+ var ELEMENT_NODE = NodeType.ELEMENT_NODE = 1;
+ var ATTRIBUTE_NODE = NodeType.ATTRIBUTE_NODE = 2;
+ var TEXT_NODE = NodeType.TEXT_NODE = 3;
+ var CDATA_SECTION_NODE = NodeType.CDATA_SECTION_NODE = 4;
+ var ENTITY_REFERENCE_NODE = NodeType.ENTITY_REFERENCE_NODE = 5;
+ var ENTITY_NODE = NodeType.ENTITY_NODE = 6;
+ var PROCESSING_INSTRUCTION_NODE = NodeType.PROCESSING_INSTRUCTION_NODE = 7;
+ var COMMENT_NODE = NodeType.COMMENT_NODE = 8;
+ var DOCUMENT_NODE = NodeType.DOCUMENT_NODE = 9;
+ var DOCUMENT_TYPE_NODE = NodeType.DOCUMENT_TYPE_NODE = 10;
+ var DOCUMENT_FRAGMENT_NODE = NodeType.DOCUMENT_FRAGMENT_NODE = 11;
+ var NOTATION_NODE = NodeType.NOTATION_NODE = 12; // ExceptionCode
+
+ var ExceptionCode = {};
+ var ExceptionMessage = {};
+ ExceptionCode.INDEX_SIZE_ERR = (ExceptionMessage[1] = "Index size error", 1);
+ ExceptionCode.DOMSTRING_SIZE_ERR = (ExceptionMessage[2] = "DOMString size error", 2);
+ var HIERARCHY_REQUEST_ERR = ExceptionCode.HIERARCHY_REQUEST_ERR = (ExceptionMessage[3] = "Hierarchy request error", 3);
+ ExceptionCode.WRONG_DOCUMENT_ERR = (ExceptionMessage[4] = "Wrong document", 4);
+ ExceptionCode.INVALID_CHARACTER_ERR = (ExceptionMessage[5] = "Invalid character", 5);
+ ExceptionCode.NO_DATA_ALLOWED_ERR = (ExceptionMessage[6] = "No data allowed", 6);
+ ExceptionCode.NO_MODIFICATION_ALLOWED_ERR = (ExceptionMessage[7] = "No modification allowed", 7);
+ var NOT_FOUND_ERR = ExceptionCode.NOT_FOUND_ERR = (ExceptionMessage[8] = "Not found", 8);
+ ExceptionCode.NOT_SUPPORTED_ERR = (ExceptionMessage[9] = "Not supported", 9);
+ var INUSE_ATTRIBUTE_ERR = ExceptionCode.INUSE_ATTRIBUTE_ERR = (ExceptionMessage[10] = "Attribute in use", 10); //level2
+
+ ExceptionCode.INVALID_STATE_ERR = (ExceptionMessage[11] = "Invalid state", 11);
+ ExceptionCode.SYNTAX_ERR = (ExceptionMessage[12] = "Syntax error", 12);
+ ExceptionCode.INVALID_MODIFICATION_ERR = (ExceptionMessage[13] = "Invalid modification", 13);
+ ExceptionCode.NAMESPACE_ERR = (ExceptionMessage[14] = "Invalid namespace", 14);
+ ExceptionCode.INVALID_ACCESS_ERR = (ExceptionMessage[15] = "Invalid access", 15);
+ /**
+ * DOM Level 2
+ * Object DOMException
+ * @see http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/ecma-script-binding.html
+ * @see http://www.w3.org/TR/REC-DOM-Level-1/ecma-script-language-binding.html
+ */
+
+ function DOMException(code, message) {
+ if (message instanceof Error) {
+ var error = message;
+ } else {
+ error = this;
+ Error.call(this, ExceptionMessage[code]);
+ this.message = ExceptionMessage[code];
+ if (Error.captureStackTrace) Error.captureStackTrace(this, DOMException);
+ }
+
+ error.code = code;
+ if (message) this.message = this.message + ": " + message;
+ return error;
+ }
+ DOMException.prototype = Error.prototype;
+ copy(ExceptionCode, DOMException);
+ /**
+ * @see http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-536297177
+ * The NodeList interface provides the abstraction of an ordered collection of nodes, without defining or constraining how this collection is implemented. NodeList objects in the DOM are live.
+ * The items in the NodeList are accessible via an integral index, starting from 0.
+ */
+
+ function NodeList() {}
+ NodeList.prototype = {
+ /**
+ * The number of nodes in the list. The range of valid child node indices is 0 to length-1 inclusive.
+ * @standard level1
+ */
+ length: 0,
+
+ /**
+ * Returns the indexth item in the collection. If index is greater than or equal to the number of nodes in the list, this returns null.
+ * @standard level1
+ * @param index unsigned long
+ * Index into the collection.
+ * @return Node
+ * The node at the indexth position in the NodeList, or null if that is not a valid index.
+ */
+ item: function item(index) {
+ return this[index] || null;
+ },
+ toString: function toString(isHTML, nodeFilter) {
+ for (var buf = [], i = 0; i < this.length; i++) {
+ serializeToString(this[i], buf, isHTML, nodeFilter);
+ }
+
+ return buf.join('');
+ }
+ };
+
+ function LiveNodeList(node, refresh) {
+ this._node = node;
+ this._refresh = refresh;
+
+ _updateLiveList(this);
+ }
+
+ function _updateLiveList(list) {
+ var inc = list._node._inc || list._node.ownerDocument._inc;
+
+ if (list._inc != inc) {
+ var ls = list._refresh(list._node); //console.log(ls.length)
+
+
+ __set__(list, 'length', ls.length);
+
+ copy(ls, list);
+ list._inc = inc;
+ }
+ }
+
+ LiveNodeList.prototype.item = function (i) {
+ _updateLiveList(this);
+
+ return this[i];
+ };
+
+ _extends(LiveNodeList, NodeList);
+ /**
+ * Objects implementing the NamedNodeMap interface are used
+ * to represent collections of nodes that can be accessed by name.
+ * Note that NamedNodeMap does not inherit from NodeList;
+ * NamedNodeMaps are not maintained in any particular order.
+ * Objects contained in an object implementing NamedNodeMap may also be accessed by an ordinal index,
+ * but this is simply to allow convenient enumeration of the contents of a NamedNodeMap,
+ * and does not imply that the DOM specifies an order to these Nodes.
+ * NamedNodeMap objects in the DOM are live.
+ * used for attributes or DocumentType entities
+ */
+
+
+ function NamedNodeMap() {}
+
+ function _findNodeIndex(list, node) {
+ var i = list.length;
+
+ while (i--) {
+ if (list[i] === node) {
+ return i;
+ }
+ }
+ }
+
+ function _addNamedNode(el, list, newAttr, oldAttr) {
+ if (oldAttr) {
+ list[_findNodeIndex(list, oldAttr)] = newAttr;
+ } else {
+ list[list.length++] = newAttr;
+ }
+
+ if (el) {
+ newAttr.ownerElement = el;
+ var doc = el.ownerDocument;
+
+ if (doc) {
+ oldAttr && _onRemoveAttribute(doc, el, oldAttr);
+
+ _onAddAttribute(doc, el, newAttr);
+ }
+ }
+ }
+
+ function _removeNamedNode(el, list, attr) {
+ //console.log('remove attr:'+attr)
+ var i = _findNodeIndex(list, attr);
+
+ if (i >= 0) {
+ var lastIndex = list.length - 1;
+
+ while (i < lastIndex) {
+ list[i] = list[++i];
+ }
+
+ list.length = lastIndex;
+
+ if (el) {
+ var doc = el.ownerDocument;
+
+ if (doc) {
+ _onRemoveAttribute(doc, el, attr);
+
+ attr.ownerElement = null;
+ }
+ }
+ } else {
+ throw DOMException(NOT_FOUND_ERR, new Error(el.tagName + '@' + attr));
+ }
+ }
+
+ NamedNodeMap.prototype = {
+ length: 0,
+ item: NodeList.prototype.item,
+ getNamedItem: function getNamedItem(key) {
+ // if(key.indexOf(':')>0 || key == 'xmlns'){
+ // return null;
+ // }
+ //console.log()
+ var i = this.length;
+
+ while (i--) {
+ var attr = this[i]; //console.log(attr.nodeName,key)
+
+ if (attr.nodeName == key) {
+ return attr;
+ }
+ }
+ },
+ setNamedItem: function setNamedItem(attr) {
+ var el = attr.ownerElement;
+
+ if (el && el != this._ownerElement) {
+ throw new DOMException(INUSE_ATTRIBUTE_ERR);
+ }
+
+ var oldAttr = this.getNamedItem(attr.nodeName);
+
+ _addNamedNode(this._ownerElement, this, attr, oldAttr);
+
+ return oldAttr;
+ },
+
+ /* returns Node */
+ setNamedItemNS: function setNamedItemNS(attr) {
+ // raises: WRONG_DOCUMENT_ERR,NO_MODIFICATION_ALLOWED_ERR,INUSE_ATTRIBUTE_ERR
+ var el = attr.ownerElement,
+ oldAttr;
+
+ if (el && el != this._ownerElement) {
+ throw new DOMException(INUSE_ATTRIBUTE_ERR);
+ }
+
+ oldAttr = this.getNamedItemNS(attr.namespaceURI, attr.localName);
+
+ _addNamedNode(this._ownerElement, this, attr, oldAttr);
+
+ return oldAttr;
+ },
+
+ /* returns Node */
+ removeNamedItem: function removeNamedItem(key) {
+ var attr = this.getNamedItem(key);
+
+ _removeNamedNode(this._ownerElement, this, attr);
+
+ return attr;
+ },
+ // raises: NOT_FOUND_ERR,NO_MODIFICATION_ALLOWED_ERR
+ //for level2
+ removeNamedItemNS: function removeNamedItemNS(namespaceURI, localName) {
+ var attr = this.getNamedItemNS(namespaceURI, localName);
+
+ _removeNamedNode(this._ownerElement, this, attr);
+
+ return attr;
+ },
+ getNamedItemNS: function getNamedItemNS(namespaceURI, localName) {
+ var i = this.length;
+
+ while (i--) {
+ var node = this[i];
+
+ if (node.localName == localName && node.namespaceURI == namespaceURI) {
+ return node;
+ }
+ }
+
+ return null;
+ }
+ };
+ /**
+ * The DOMImplementation interface represents an object providing methods
+ * which are not dependent on any particular document.
+ * Such an object is returned by the `Document.implementation` property.
+ *
+ * __The individual methods describe the differences compared to the specs.__
+ *
+ * @constructor
+ *
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/DOMImplementation MDN
+ * @see https://www.w3.org/TR/REC-DOM-Level-1/level-one-core.html#ID-102161490 DOM Level 1 Core (Initial)
+ * @see https://www.w3.org/TR/DOM-Level-2-Core/core.html#ID-102161490 DOM Level 2 Core
+ * @see https://www.w3.org/TR/DOM-Level-3-Core/core.html#ID-102161490 DOM Level 3 Core
+ * @see https://dom.spec.whatwg.org/#domimplementation DOM Living Standard
+ */
+
+ function DOMImplementation$1() {}
+
+ DOMImplementation$1.prototype = {
+ /**
+ * The DOMImplementation.hasFeature() method returns a Boolean flag indicating if a given feature is supported.
+ * The different implementations fairly diverged in what kind of features were reported.
+ * The latest version of the spec settled to force this method to always return true, where the functionality was accurate and in use.
+ *
+ * @deprecated It is deprecated and modern browsers return true in all cases.
+ *
+ * @param {string} feature
+ * @param {string} [version]
+ * @returns {boolean} always true
+ *
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/DOMImplementation/hasFeature MDN
+ * @see https://www.w3.org/TR/REC-DOM-Level-1/level-one-core.html#ID-5CED94D7 DOM Level 1 Core
+ * @see https://dom.spec.whatwg.org/#dom-domimplementation-hasfeature DOM Living Standard
+ */
+ hasFeature: function hasFeature(feature, version) {
+ return true;
+ },
+
+ /**
+ * Creates an XML Document object of the specified type with its document element.
+ *
+ * __It behaves slightly different from the description in the living standard__:
+ * - There is no interface/class `XMLDocument`, it returns a `Document` instance.
+ * - `contentType`, `encoding`, `mode`, `origin`, `url` fields are currently not declared.
+ * - this implementation is not validating names or qualified names
+ * (when parsing XML strings, the SAX parser takes care of that)
+ *
+ * @param {string|null} namespaceURI
+ * @param {string} qualifiedName
+ * @param {DocumentType=null} doctype
+ * @returns {Document}
+ *
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/DOMImplementation/createDocument MDN
+ * @see https://www.w3.org/TR/DOM-Level-2-Core/core.html#Level-2-Core-DOM-createDocument DOM Level 2 Core (initial)
+ * @see https://dom.spec.whatwg.org/#dom-domimplementation-createdocument DOM Level 2 Core
+ *
+ * @see https://dom.spec.whatwg.org/#validate-and-extract DOM: Validate and extract
+ * @see https://www.w3.org/TR/xml/#NT-NameStartChar XML Spec: Names
+ * @see https://www.w3.org/TR/xml-names/#ns-qualnames XML Namespaces: Qualified names
+ */
+ createDocument: function createDocument(namespaceURI, qualifiedName, doctype) {
+ var doc = new Document();
+ doc.implementation = this;
+ doc.childNodes = new NodeList();
+ doc.doctype = doctype || null;
+
+ if (doctype) {
+ doc.appendChild(doctype);
+ }
+
+ if (qualifiedName) {
+ var root = doc.createElementNS(namespaceURI, qualifiedName);
+ doc.appendChild(root);
+ }
+
+ return doc;
+ },
+
+ /**
+ * Returns a doctype, with the given `qualifiedName`, `publicId`, and `systemId`.
+ *
+ * __This behavior is slightly different from the one described in the specs__:
+ * - this implementation is not validating names or qualified names
+ * (when parsing XML strings, the SAX parser takes care of that)
+ *
+ * @param {string} qualifiedName
+ * @param {string} [publicId]
+ * @param {string} [systemId]
+ * @returns {DocumentType} which can either be used with `DOMImplementation.createDocument` upon document creation
+ * or can be put into the document via methods like `Node.insertBefore()` or `Node.replaceChild()`
+ *
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/DOMImplementation/createDocumentType MDN
+ * @see https://www.w3.org/TR/DOM-Level-2-Core/core.html#Level-2-Core-DOM-createDocType DOM Level 2 Core
+ * @see https://dom.spec.whatwg.org/#dom-domimplementation-createdocumenttype DOM Living Standard
+ *
+ * @see https://dom.spec.whatwg.org/#validate-and-extract DOM: Validate and extract
+ * @see https://www.w3.org/TR/xml/#NT-NameStartChar XML Spec: Names
+ * @see https://www.w3.org/TR/xml-names/#ns-qualnames XML Namespaces: Qualified names
+ */
+ createDocumentType: function createDocumentType(qualifiedName, publicId, systemId) {
+ var node = new DocumentType();
+ node.name = qualifiedName;
+ node.nodeName = qualifiedName;
+ node.publicId = publicId || '';
+ node.systemId = systemId || '';
+ return node;
+ }
+ };
+ /**
+ * @see http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-1950641247
+ */
+
+ function Node() {}
+ Node.prototype = {
+ firstChild: null,
+ lastChild: null,
+ previousSibling: null,
+ nextSibling: null,
+ attributes: null,
+ parentNode: null,
+ childNodes: null,
+ ownerDocument: null,
+ nodeValue: null,
+ namespaceURI: null,
+ prefix: null,
+ localName: null,
+ // Modified in DOM Level 2:
+ insertBefore: function insertBefore(newChild, refChild) {
+ //raises
+ return _insertBefore(this, newChild, refChild);
+ },
+ replaceChild: function replaceChild(newChild, oldChild) {
+ //raises
+ this.insertBefore(newChild, oldChild);
+
+ if (oldChild) {
+ this.removeChild(oldChild);
+ }
+ },
+ removeChild: function removeChild(oldChild) {
+ return _removeChild(this, oldChild);
+ },
+ appendChild: function appendChild(newChild) {
+ return this.insertBefore(newChild, null);
+ },
+ hasChildNodes: function hasChildNodes() {
+ return this.firstChild != null;
+ },
+ cloneNode: function cloneNode(deep) {
+ return _cloneNode(this.ownerDocument || this, this, deep);
+ },
+ // Modified in DOM Level 2:
+ normalize: function normalize() {
+ var child = this.firstChild;
+
+ while (child) {
+ var next = child.nextSibling;
+
+ if (next && next.nodeType == TEXT_NODE && child.nodeType == TEXT_NODE) {
+ this.removeChild(next);
+ child.appendData(next.data);
+ } else {
+ child.normalize();
+ child = next;
+ }
+ }
+ },
+ // Introduced in DOM Level 2:
+ isSupported: function isSupported(feature, version) {
+ return this.ownerDocument.implementation.hasFeature(feature, version);
+ },
+ // Introduced in DOM Level 2:
+ hasAttributes: function hasAttributes() {
+ return this.attributes.length > 0;
+ },
+
+ /**
+ * Look up the prefix associated to the given namespace URI, starting from this node.
+ * **The default namespace declarations are ignored by this method.**
+ * See Namespace Prefix Lookup for details on the algorithm used by this method.
+ *
+ * _Note: The implementation seems to be incomplete when compared to the algorithm described in the specs._
+ *
+ * @param {string | null} namespaceURI
+ * @returns {string | null}
+ * @see https://www.w3.org/TR/DOM-Level-3-Core/core.html#Node3-lookupNamespacePrefix
+ * @see https://www.w3.org/TR/DOM-Level-3-Core/namespaces-algorithms.html#lookupNamespacePrefixAlgo
+ * @see https://dom.spec.whatwg.org/#dom-node-lookupprefix
+ * @see https://github.com/xmldom/xmldom/issues/322
+ */
+ lookupPrefix: function lookupPrefix(namespaceURI) {
+ var el = this;
+
+ while (el) {
+ var map = el._nsMap; //console.dir(map)
+
+ if (map) {
+ for (var n in map) {
+ if (map[n] == namespaceURI) {
+ return n;
+ }
+ }
+ }
+
+ el = el.nodeType == ATTRIBUTE_NODE ? el.ownerDocument : el.parentNode;
+ }
+
+ return null;
+ },
+ // Introduced in DOM Level 3:
+ lookupNamespaceURI: function lookupNamespaceURI(prefix) {
+ var el = this;
+
+ while (el) {
+ var map = el._nsMap; //console.dir(map)
+
+ if (map) {
+ if (prefix in map) {
+ return map[prefix];
+ }
+ }
+
+ el = el.nodeType == ATTRIBUTE_NODE ? el.ownerDocument : el.parentNode;
+ }
+
+ return null;
+ },
+ // Introduced in DOM Level 3:
+ isDefaultNamespace: function isDefaultNamespace(namespaceURI) {
+ var prefix = this.lookupPrefix(namespaceURI);
+ return prefix == null;
+ }
+ };
+
+ function _xmlEncoder(c) {
+ return c == '<' && '&lt;' || c == '>' && '&gt;' || c == '&' && '&amp;' || c == '"' && '&quot;' || '&#' + c.charCodeAt() + ';';
+ }
+
+ copy(NodeType, Node);
+ copy(NodeType, Node.prototype);
+ /**
+ * @param callback return true for continue,false for break
+ * @return boolean true: break visit;
+ */
+
+ function _visitNode(node, callback) {
+ if (callback(node)) {
+ return true;
+ }
+
+ if (node = node.firstChild) {
+ do {
+ if (_visitNode(node, callback)) {
+ return true;
+ }
+ } while (node = node.nextSibling);
+ }
+ }
+
+ function Document() {}
+
+ function _onAddAttribute(doc, el, newAttr) {
+ doc && doc._inc++;
+ var ns = newAttr.namespaceURI;
+
+ if (ns === NAMESPACE$2.XMLNS) {
+ //update namespace
+ el._nsMap[newAttr.prefix ? newAttr.localName : ''] = newAttr.value;
+ }
+ }
+
+ function _onRemoveAttribute(doc, el, newAttr, remove) {
+ doc && doc._inc++;
+ var ns = newAttr.namespaceURI;
+
+ if (ns === NAMESPACE$2.XMLNS) {
+ //update namespace
+ delete el._nsMap[newAttr.prefix ? newAttr.localName : ''];
+ }
+ }
+
+ function _onUpdateChild(doc, el, newChild) {
+ if (doc && doc._inc) {
+ doc._inc++; //update childNodes
+
+ var cs = el.childNodes;
+
+ if (newChild) {
+ cs[cs.length++] = newChild;
+ } else {
+ //console.log(1)
+ var child = el.firstChild;
+ var i = 0;
+
+ while (child) {
+ cs[i++] = child;
+ child = child.nextSibling;
+ }
+
+ cs.length = i;
+ }
+ }
+ }
+ /**
+ * attributes;
+ * children;
+ *
+ * writeable properties:
+ * nodeValue,Attr:value,CharacterData:data
+ * prefix
+ */
+
+
+ function _removeChild(parentNode, child) {
+ var previous = child.previousSibling;
+ var next = child.nextSibling;
+
+ if (previous) {
+ previous.nextSibling = next;
+ } else {
+ parentNode.firstChild = next;
+ }
+
+ if (next) {
+ next.previousSibling = previous;
+ } else {
+ parentNode.lastChild = previous;
+ }
+
+ _onUpdateChild(parentNode.ownerDocument, parentNode);
+
+ return child;
+ }
+ /**
+ * performance key (refChild == null)
+ */
+
+
+ function _insertBefore(parentNode, newChild, nextChild) {
+ var cp = newChild.parentNode;
+
+ if (cp) {
+ cp.removeChild(newChild); //remove and update
+ }
+
+ if (newChild.nodeType === DOCUMENT_FRAGMENT_NODE) {
+ var newFirst = newChild.firstChild;
+
+ if (newFirst == null) {
+ return newChild;
+ }
+
+ var newLast = newChild.lastChild;
+ } else {
+ newFirst = newLast = newChild;
+ }
+
+ var pre = nextChild ? nextChild.previousSibling : parentNode.lastChild;
+ newFirst.previousSibling = pre;
+ newLast.nextSibling = nextChild;
+
+ if (pre) {
+ pre.nextSibling = newFirst;
+ } else {
+ parentNode.firstChild = newFirst;
+ }
+
+ if (nextChild == null) {
+ parentNode.lastChild = newLast;
+ } else {
+ nextChild.previousSibling = newLast;
+ }
+
+ do {
+ newFirst.parentNode = parentNode;
+ } while (newFirst !== newLast && (newFirst = newFirst.nextSibling));
+
+ _onUpdateChild(parentNode.ownerDocument || parentNode, parentNode); //console.log(parentNode.lastChild.nextSibling == null)
+
+
+ if (newChild.nodeType == DOCUMENT_FRAGMENT_NODE) {
+ newChild.firstChild = newChild.lastChild = null;
+ }
+
+ return newChild;
+ }
+
+ function _appendSingleChild(parentNode, newChild) {
+ var cp = newChild.parentNode;
+
+ if (cp) {
+ var pre = parentNode.lastChild;
+ cp.removeChild(newChild); //remove and update
+
+ var pre = parentNode.lastChild;
+ }
+
+ var pre = parentNode.lastChild;
+ newChild.parentNode = parentNode;
+ newChild.previousSibling = pre;
+ newChild.nextSibling = null;
+
+ if (pre) {
+ pre.nextSibling = newChild;
+ } else {
+ parentNode.firstChild = newChild;
+ }
+
+ parentNode.lastChild = newChild;
+
+ _onUpdateChild(parentNode.ownerDocument, parentNode, newChild);
+
+ return newChild; //console.log("__aa",parentNode.lastChild.nextSibling == null)
+ }
+
+ Document.prototype = {
+ //implementation : null,
+ nodeName: '#document',
+ nodeType: DOCUMENT_NODE,
+
+ /**
+ * The DocumentType node of the document.
+ *
+ * @readonly
+ * @type DocumentType
+ */
+ doctype: null,
+ documentElement: null,
+ _inc: 1,
+ insertBefore: function insertBefore(newChild, refChild) {
+ //raises
+ if (newChild.nodeType == DOCUMENT_FRAGMENT_NODE) {
+ var child = newChild.firstChild;
+
+ while (child) {
+ var next = child.nextSibling;
+ this.insertBefore(child, refChild);
+ child = next;
+ }
+
+ return newChild;
+ }
+
+ if (this.documentElement == null && newChild.nodeType == ELEMENT_NODE) {
+ this.documentElement = newChild;
+ }
+
+ return _insertBefore(this, newChild, refChild), newChild.ownerDocument = this, newChild;
+ },
+ removeChild: function removeChild(oldChild) {
+ if (this.documentElement == oldChild) {
+ this.documentElement = null;
+ }
+
+ return _removeChild(this, oldChild);
+ },
+ // Introduced in DOM Level 2:
+ importNode: function importNode(importedNode, deep) {
+ return _importNode(this, importedNode, deep);
+ },
+ // Introduced in DOM Level 2:
+ getElementById: function getElementById(id) {
+ var rtv = null;
+
+ _visitNode(this.documentElement, function (node) {
+ if (node.nodeType == ELEMENT_NODE) {
+ if (node.getAttribute('id') == id) {
+ rtv = node;
+ return true;
+ }
+ }
+ });
+
+ return rtv;
+ },
+
+ /**
+ * The `getElementsByClassName` method of `Document` interface returns an array-like object
+ * of all child elements which have **all** of the given class name(s).
+ *
+ * Returns an empty list if `classNames` is an empty string or only contains HTML white space characters.
+ *
+ *
+ * Warning: This is a live LiveNodeList.
+ * Changes in the DOM will reflect in the array as the changes occur.
+ * If an element selected by this array no longer qualifies for the selector,
+ * it will automatically be removed. Be aware of this for iteration purposes.
+ *
+ * @param {string} classNames is a string representing the class name(s) to match; multiple class names are separated by (ASCII-)whitespace
+ *
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/Document/getElementsByClassName
+ * @see https://dom.spec.whatwg.org/#concept-getelementsbyclassname
+ */
+ getElementsByClassName: function getElementsByClassName(classNames) {
+ var classNamesSet = toOrderedSet(classNames);
+ return new LiveNodeList(this, function (base) {
+ var ls = [];
+
+ if (classNamesSet.length > 0) {
+ _visitNode(base.documentElement, function (node) {
+ if (node !== base && node.nodeType === ELEMENT_NODE) {
+ var nodeClassNames = node.getAttribute('class'); // can be null if the attribute does not exist
+
+ if (nodeClassNames) {
+ // before splitting and iterating just compare them for the most common case
+ var matches = classNames === nodeClassNames;
+
+ if (!matches) {
+ var nodeClassNamesSet = toOrderedSet(nodeClassNames);
+ matches = classNamesSet.every(arrayIncludes(nodeClassNamesSet));
+ }
+
+ if (matches) {
+ ls.push(node);
+ }
+ }
+ }
+ });
+ }
+
+ return ls;
+ });
+ },
+ //document factory method:
+ createElement: function createElement(tagName) {
+ var node = new Element();
+ node.ownerDocument = this;
+ node.nodeName = tagName;
+ node.tagName = tagName;
+ node.localName = tagName;
+ node.childNodes = new NodeList();
+ var attrs = node.attributes = new NamedNodeMap();
+ attrs._ownerElement = node;
+ return node;
+ },
+ createDocumentFragment: function createDocumentFragment() {
+ var node = new DocumentFragment();
+ node.ownerDocument = this;
+ node.childNodes = new NodeList();
+ return node;
+ },
+ createTextNode: function createTextNode(data) {
+ var node = new Text();
+ node.ownerDocument = this;
+ node.appendData(data);
+ return node;
+ },
+ createComment: function createComment(data) {
+ var node = new Comment();
+ node.ownerDocument = this;
+ node.appendData(data);
+ return node;
+ },
+ createCDATASection: function createCDATASection(data) {
+ var node = new CDATASection();
+ node.ownerDocument = this;
+ node.appendData(data);
+ return node;
+ },
+ createProcessingInstruction: function createProcessingInstruction(target, data) {
+ var node = new ProcessingInstruction();
+ node.ownerDocument = this;
+ node.tagName = node.target = target;
+ node.nodeValue = node.data = data;
+ return node;
+ },
+ createAttribute: function createAttribute(name) {
+ var node = new Attr();
+ node.ownerDocument = this;
+ node.name = name;
+ node.nodeName = name;
+ node.localName = name;
+ node.specified = true;
+ return node;
+ },
+ createEntityReference: function createEntityReference(name) {
+ var node = new EntityReference();
+ node.ownerDocument = this;
+ node.nodeName = name;
+ return node;
+ },
+ // Introduced in DOM Level 2:
+ createElementNS: function createElementNS(namespaceURI, qualifiedName) {
+ var node = new Element();
+ var pl = qualifiedName.split(':');
+ var attrs = node.attributes = new NamedNodeMap();
+ node.childNodes = new NodeList();
+ node.ownerDocument = this;
+ node.nodeName = qualifiedName;
+ node.tagName = qualifiedName;
+ node.namespaceURI = namespaceURI;
+
+ if (pl.length == 2) {
+ node.prefix = pl[0];
+ node.localName = pl[1];
+ } else {
+ //el.prefix = null;
+ node.localName = qualifiedName;
+ }
+
+ attrs._ownerElement = node;
+ return node;
+ },
+ // Introduced in DOM Level 2:
+ createAttributeNS: function createAttributeNS(namespaceURI, qualifiedName) {
+ var node = new Attr();
+ var pl = qualifiedName.split(':');
+ node.ownerDocument = this;
+ node.nodeName = qualifiedName;
+ node.name = qualifiedName;
+ node.namespaceURI = namespaceURI;
+ node.specified = true;
+
+ if (pl.length == 2) {
+ node.prefix = pl[0];
+ node.localName = pl[1];
+ } else {
+ //el.prefix = null;
+ node.localName = qualifiedName;
+ }
+
+ return node;
+ }
+ };
+
+ _extends(Document, Node);
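+ // Illustrative usage sketch, not part of the upstream xmldom source. It assumes a
+ // Document produced by the DOMImplementation defined earlier in this bundle, and shows
+ // how the factory methods and the live getElementsByClassName list above behave:
+ //
+ // var doc = new DOMImplementation().createDocument(null, 'root', null);
+ // var el = doc.createElement('item');
+ // el.setAttribute('class', 'foo bar');
+ // doc.documentElement.appendChild(el);
+ // doc.getElementsByClassName('bar foo').length; // 1 -- order of class names does not matter
+ // doc.getElementsByClassName('foo baz').length; // 0 -- every requested class must be present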
+
+ function Element() {
+ this._nsMap = {};
+ }
+ Element.prototype = {
+ nodeType: ELEMENT_NODE,
+ hasAttribute: function hasAttribute(name) {
+ return this.getAttributeNode(name) != null;
+ },
+ getAttribute: function getAttribute(name) {
+ var attr = this.getAttributeNode(name);
+ return attr && attr.value || '';
+ },
+ getAttributeNode: function getAttributeNode(name) {
+ return this.attributes.getNamedItem(name);
+ },
+ setAttribute: function setAttribute(name, value) {
+ var attr = this.ownerDocument.createAttribute(name);
+ attr.value = attr.nodeValue = "" + value;
+ this.setAttributeNode(attr);
+ },
+ removeAttribute: function removeAttribute(name) {
+ var attr = this.getAttributeNode(name);
+ attr && this.removeAttributeNode(attr);
+ },
+ //four real operation methods
+ appendChild: function appendChild(newChild) {
+ if (newChild.nodeType === DOCUMENT_FRAGMENT_NODE) {
+ return this.insertBefore(newChild, null);
+ } else {
+ return _appendSingleChild(this, newChild);
+ }
+ },
+ setAttributeNode: function setAttributeNode(newAttr) {
+ return this.attributes.setNamedItem(newAttr);
+ },
+ setAttributeNodeNS: function setAttributeNodeNS(newAttr) {
+ return this.attributes.setNamedItemNS(newAttr);
+ },
+ removeAttributeNode: function removeAttributeNode(oldAttr) {
+ //console.log(this == oldAttr.ownerElement)
+ return this.attributes.removeNamedItem(oldAttr.nodeName);
+ },
+ //get real attribute name, and remove it by removeAttributeNode
+ removeAttributeNS: function removeAttributeNS(namespaceURI, localName) {
+ var old = this.getAttributeNodeNS(namespaceURI, localName);
+ old && this.removeAttributeNode(old);
+ },
+ hasAttributeNS: function hasAttributeNS(namespaceURI, localName) {
+ return this.getAttributeNodeNS(namespaceURI, localName) != null;
+ },
+ getAttributeNS: function getAttributeNS(namespaceURI, localName) {
+ var attr = this.getAttributeNodeNS(namespaceURI, localName);
+ return attr && attr.value || '';
+ },
+ setAttributeNS: function setAttributeNS(namespaceURI, qualifiedName, value) {
+ var attr = this.ownerDocument.createAttributeNS(namespaceURI, qualifiedName);
+ attr.value = attr.nodeValue = "" + value;
+ this.setAttributeNode(attr);
+ },
+ getAttributeNodeNS: function getAttributeNodeNS(namespaceURI, localName) {
+ return this.attributes.getNamedItemNS(namespaceURI, localName);
+ },
+ getElementsByTagName: function getElementsByTagName(tagName) {
+ return new LiveNodeList(this, function (base) {
+ var ls = [];
+
+ _visitNode(base, function (node) {
+ if (node !== base && node.nodeType == ELEMENT_NODE && (tagName === '*' || node.tagName == tagName)) {
+ ls.push(node);
+ }
+ });
+
+ return ls;
+ });
+ },
+ getElementsByTagNameNS: function getElementsByTagNameNS(namespaceURI, localName) {
+ return new LiveNodeList(this, function (base) {
+ var ls = [];
+
+ _visitNode(base, function (node) {
+ if (node !== base && node.nodeType === ELEMENT_NODE && (namespaceURI === '*' || node.namespaceURI === namespaceURI) && (localName === '*' || node.localName == localName)) {
+ ls.push(node);
+ }
+ });
+
+ return ls;
+ });
+ }
+ };
+ Document.prototype.getElementsByTagName = Element.prototype.getElementsByTagName;
+ Document.prototype.getElementsByTagNameNS = Element.prototype.getElementsByTagNameNS;
+
+ _extends(Element, Node);
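+ // Sketch (not upstream documentation) of the attribute API above, assuming `doc` is a
+ // Document from this bundle: setAttribute routes through createAttribute/setAttributeNode,
+ // so values are coerced to strings, and a missing attribute reads back as ''.
+ //
+ // var el = doc.createElement('video');
+ // el.setAttribute('width', 640); // stored as the string "640"
+ // el.getAttribute('width'); // -> "640"
+ // el.getAttribute('height'); // -> "" (never null here)
+ // el.hasAttribute('height'); // -> false
+ // el.removeAttribute('width'); // no-op if the attribute is absent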
+
+ function Attr() {}
+ Attr.prototype.nodeType = ATTRIBUTE_NODE;
+
+ _extends(Attr, Node);
+
+ function CharacterData() {}
+ CharacterData.prototype = {
+ data: '',
+ substringData: function substringData(offset, count) {
+ return this.data.substring(offset, offset + count);
+ },
+ appendData: function appendData(text) {
+ text = this.data + text;
+ this.nodeValue = this.data = text;
+ this.length = text.length;
+ },
+ insertData: function insertData(offset, text) {
+ this.replaceData(offset, 0, text);
+ },
+ appendChild: function appendChild(newChild) {
+ throw new Error(ExceptionMessage[HIERARCHY_REQUEST_ERR]);
+ },
+ deleteData: function deleteData(offset, count) {
+ this.replaceData(offset, count, "");
+ },
+ replaceData: function replaceData(offset, count, text) {
+ var start = this.data.substring(0, offset);
+ var end = this.data.substring(offset + count);
+ text = start + text + end;
+ this.nodeValue = this.data = text;
+ this.length = text.length;
+ }
+ };
+
+ _extends(CharacterData, Node);
+
+ function Text() {}
+ Text.prototype = {
+ nodeName: "#text",
+ nodeType: TEXT_NODE,
+ splitText: function splitText(offset) {
+ var text = this.data;
+ var newText = text.substring(offset);
+ text = text.substring(0, offset);
+ this.data = this.nodeValue = text;
+ this.length = text.length;
+ var newNode = this.ownerDocument.createTextNode(newText);
+
+ if (this.parentNode) {
+ this.parentNode.insertBefore(newNode, this.nextSibling);
+ }
+
+ return newNode;
+ }
+ };
+
+ _extends(Text, CharacterData);
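+ // Sketch (not upstream documentation), assuming a `doc` from this bundle: splitText
+ // truncates the original node and, when it is attached, inserts the remainder right after it.
+ //
+ // var t = doc.createTextNode('hello world');
+ // var rest = t.splitText(5);
+ // t.data; // -> 'hello'
+ // rest.data; // -> ' world'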
+
+ function Comment() {}
+ Comment.prototype = {
+ nodeName: "#comment",
+ nodeType: COMMENT_NODE
+ };
+
+ _extends(Comment, CharacterData);
+
+ function CDATASection() {}
+ CDATASection.prototype = {
+ nodeName: "#cdata-section",
+ nodeType: CDATA_SECTION_NODE
+ };
+
+ _extends(CDATASection, CharacterData);
+
+ function DocumentType() {}
+ DocumentType.prototype.nodeType = DOCUMENT_TYPE_NODE;
+
+ _extends(DocumentType, Node);
+
+ function Notation() {}
+ Notation.prototype.nodeType = NOTATION_NODE;
+
+ _extends(Notation, Node);
+
+ function Entity() {}
+ Entity.prototype.nodeType = ENTITY_NODE;
+
+ _extends(Entity, Node);
+
+ function EntityReference() {}
+ EntityReference.prototype.nodeType = ENTITY_REFERENCE_NODE;
+
+ _extends(EntityReference, Node);
+
+ function DocumentFragment() {}
+ DocumentFragment.prototype.nodeName = "#document-fragment";
+ DocumentFragment.prototype.nodeType = DOCUMENT_FRAGMENT_NODE;
+
+ _extends(DocumentFragment, Node);
+
+ function ProcessingInstruction() {}
+
+ ProcessingInstruction.prototype.nodeType = PROCESSING_INSTRUCTION_NODE;
+
+ _extends(ProcessingInstruction, Node);
+
+ function XMLSerializer$1() {}
+
+ XMLSerializer$1.prototype.serializeToString = function (node, isHtml, nodeFilter) {
+ return nodeSerializeToString.call(node, isHtml, nodeFilter);
+ };
+
+ Node.prototype.toString = nodeSerializeToString;
+
+ function nodeSerializeToString(isHtml, nodeFilter) {
+ var buf = [];
+ var refNode = this.nodeType == 9 && this.documentElement || this;
+ var prefix = refNode.prefix;
+ var uri = refNode.namespaceURI;
+
+ if (uri && prefix == null) {
+ //console.log(prefix)
+ var prefix = refNode.lookupPrefix(uri);
+
+ if (prefix == null) {
+ //isHTML = true;
+ var visibleNamespaces = [{
+ namespace: uri,
+ prefix: null
+ } //{namespace:uri,prefix:''}
+ ];
+ }
+ }
+
+ serializeToString(this, buf, isHtml, nodeFilter, visibleNamespaces); //console.log('###',this.nodeType,uri,prefix,buf.join(''))
+
+ return buf.join('');
+ }
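+ // Sketch (not upstream documentation): both serialization entry points defined above end
+ // up in serializeToString below, so for any node from this bundle these are equivalent:
+ //
+ // var xml = new XMLSerializer$1().serializeToString(node);
+ // var same = node.toString();
+ //
+ // An optional nodeFilter may return the node (keep it), a string (inline it) or a falsy
+ // value (drop it): node.toString(false, function (n) { return n; });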
+
+ function needNamespaceDefine(node, isHTML, visibleNamespaces) {
+ var prefix = node.prefix || '';
+ var uri = node.namespaceURI; // According to [Namespaces in XML 1.0](https://www.w3.org/TR/REC-xml-names/#ns-using) ,
+ // and more specifically https://www.w3.org/TR/REC-xml-names/#nsc-NoPrefixUndecl :
+ // > In a namespace declaration for a prefix [...], the attribute value MUST NOT be empty.
+ // in a similar manner [Namespaces in XML 1.1](https://www.w3.org/TR/xml-names11/#ns-using)
+ // and more specifically https://www.w3.org/TR/xml-names11/#nsc-NSDeclared :
+ // > [...] Furthermore, the attribute value [...] must not be an empty string.
+ // so serializing empty namespace value like xmlns:ds="" would produce an invalid XML document.
+
+ if (!uri) {
+ return false;
+ }
+
+ if (prefix === "xml" && uri === NAMESPACE$2.XML || uri === NAMESPACE$2.XMLNS) {
+ return false;
+ }
+
+ var i = visibleNamespaces.length;
+
+ while (i--) {
+ var ns = visibleNamespaces[i]; // get namespace prefix
+
+ if (ns.prefix === prefix) {
+ return ns.namespace !== uri;
+ }
+ }
+
+ return true;
+ }
+ /**
+ * Well-formed constraint: No < in Attribute Values
+ * The replacement text of any entity referred to directly or indirectly in an attribute value must not contain a <.
+ * @see https://www.w3.org/TR/xml/#CleanAttrVals
+ * @see https://www.w3.org/TR/xml/#NT-AttValue
+ */
+
+
+ function addSerializedAttribute(buf, qualifiedName, value) {
+ buf.push(' ', qualifiedName, '="', value.replace(/[<&"]/g, _xmlEncoder), '"');
+ }
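+ // Worked example (not upstream documentation), assuming _xmlEncoder maps '<' to '&lt;',
+ // '&' to '&amp;' and '"' to '&quot;':
+ //
+ // addSerializedAttribute(buf, 'title', 'a<b & "c"');
+ // // pushes: ' title="a&lt;b &amp; &quot;c&quot;"'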
+
+ function serializeToString(node, buf, isHTML, nodeFilter, visibleNamespaces) {
+ if (!visibleNamespaces) {
+ visibleNamespaces = [];
+ }
+
+ if (nodeFilter) {
+ node = nodeFilter(node);
+
+ if (node) {
+ if (typeof node == 'string') {
+ buf.push(node);
+ return;
+ }
+ } else {
+ return;
+ } //buf.sort.apply(attrs, attributeSorter);
+
+ }
+
+ switch (node.nodeType) {
+ case ELEMENT_NODE:
+ var attrs = node.attributes;
+ var len = attrs.length;
+ var child = node.firstChild;
+ var nodeName = node.tagName;
+ isHTML = NAMESPACE$2.isHTML(node.namespaceURI) || isHTML;
+ var prefixedNodeName = nodeName;
+
+ if (!isHTML && !node.prefix && node.namespaceURI) {
+ var defaultNS; // lookup current default ns from `xmlns` attribute
+
+ for (var ai = 0; ai < attrs.length; ai++) {
+ if (attrs.item(ai).name === 'xmlns') {
+ defaultNS = attrs.item(ai).value;
+ break;
+ }
+ }
+
+ if (!defaultNS) {
+ // lookup current default ns in visibleNamespaces
+ for (var nsi = visibleNamespaces.length - 1; nsi >= 0; nsi--) {
+ var namespace = visibleNamespaces[nsi];
+
+ if (namespace.prefix === '' && namespace.namespace === node.namespaceURI) {
+ defaultNS = namespace.namespace;
+ break;
+ }
+ }
+ }
+
+ if (defaultNS !== node.namespaceURI) {
+ for (var nsi = visibleNamespaces.length - 1; nsi >= 0; nsi--) {
+ var namespace = visibleNamespaces[nsi];
+
+ if (namespace.namespace === node.namespaceURI) {
+ if (namespace.prefix) {
+ prefixedNodeName = namespace.prefix + ':' + nodeName;
+ }
+
+ break;
+ }
+ }
+ }
+ }
+
+ buf.push('<', prefixedNodeName);
+
+ for (var i = 0; i < len; i++) {
+ // add namespaces for attributes
+ var attr = attrs.item(i);
+
+ if (attr.prefix == 'xmlns') {
+ visibleNamespaces.push({
+ prefix: attr.localName,
+ namespace: attr.value
+ });
+ } else if (attr.nodeName == 'xmlns') {
+ visibleNamespaces.push({
+ prefix: '',
+ namespace: attr.value
+ });
+ }
+ }
+
+ for (var i = 0; i < len; i++) {
+ var attr = attrs.item(i);
+
+ if (needNamespaceDefine(attr, isHTML, visibleNamespaces)) {
+ var prefix = attr.prefix || '';
+ var uri = attr.namespaceURI;
+ addSerializedAttribute(buf, prefix ? 'xmlns:' + prefix : "xmlns", uri);
+ visibleNamespaces.push({
+ prefix: prefix,
+ namespace: uri
+ });
+ }
+
+ serializeToString(attr, buf, isHTML, nodeFilter, visibleNamespaces);
+ } // add namespace for current node
+
+
+ if (nodeName === prefixedNodeName && needNamespaceDefine(node, isHTML, visibleNamespaces)) {
+ var prefix = node.prefix || '';
+ var uri = node.namespaceURI;
+ addSerializedAttribute(buf, prefix ? 'xmlns:' + prefix : "xmlns", uri);
+ visibleNamespaces.push({
+ prefix: prefix,
+ namespace: uri
+ });
+ }
+
+ if (child || isHTML && !/^(?:meta|link|img|br|hr|input)$/i.test(nodeName)) {
+ buf.push('>'); //if is cdata child node
+
+ if (isHTML && /^script$/i.test(nodeName)) {
+ while (child) {
+ if (child.data) {
+ buf.push(child.data);
+ } else {
+ serializeToString(child, buf, isHTML, nodeFilter, visibleNamespaces.slice());
+ }
+
+ child = child.nextSibling;
+ }
+ } else {
+ while (child) {
+ serializeToString(child, buf, isHTML, nodeFilter, visibleNamespaces.slice());
+ child = child.nextSibling;
+ }
+ }
+
+ buf.push('</', prefixedNodeName, '>');
+ } else {
+ buf.push('/>');
+ } // remove added visible namespaces
+ //visibleNamespaces.length = startVisibleNamespaces;
+
+
+ return;
+
+ case DOCUMENT_NODE:
+ case DOCUMENT_FRAGMENT_NODE:
+ var child = node.firstChild;
+
+ while (child) {
+ serializeToString(child, buf, isHTML, nodeFilter, visibleNamespaces.slice());
+ child = child.nextSibling;
+ }
+
+ return;
+
+ case ATTRIBUTE_NODE:
+ return addSerializedAttribute(buf, node.name, node.value);
+
+ case TEXT_NODE:
+ /**
+ * The ampersand character (&) and the left angle bracket (<) must not appear in their literal form,
+ * except when used as markup delimiters, or within a comment, a processing instruction, or a CDATA section.
+ * If they are needed elsewhere, they must be escaped using either numeric character references or the strings
+ * `&amp;` and `&lt;` respectively.
+ * The right angle bracket (>) may be represented using the string `&gt;`, and must, for compatibility,
+ * be escaped using either `&gt;` or a character reference when it appears in the string `]]>` in content,
+ * when that string is not marking the end of a CDATA section.
+ *
+ * In the content of elements, character data is any string of characters
+ * which does not contain the start-delimiter of any markup
+ * and does not include the CDATA-section-close delimiter, `]]>`.
+ *
+ * @see https://www.w3.org/TR/xml/#NT-CharData
+ */
+ return buf.push(node.data.replace(/[<&]/g, _xmlEncoder).replace(/]]>/g, ']]&gt;'));
+
+ case CDATA_SECTION_NODE:
+ return buf.push('<![CDATA[', node.data, ']]>');
+
+ case COMMENT_NODE:
+ return buf.push("");
+
+ case DOCUMENT_TYPE_NODE:
+ var pubid = node.publicId;
+ var sysid = node.systemId;
+ buf.push('<!DOCTYPE ', node.name);
+
+ if (pubid) {
+ buf.push(' PUBLIC ', pubid);
+
+ if (sysid && sysid != '.') {
+ buf.push(' ', sysid);
+ }
+
+ buf.push('>');
+ } else if (sysid && sysid != '.') {
+ buf.push(' SYSTEM ', sysid, '>');
+ } else {
+ var sub = node.internalSubset;
+
+ if (sub) {
+ buf.push(" [", sub, "]");
+ }
+
+ buf.push(">");
+ }
+
+ return;
+
+ case PROCESSING_INSTRUCTION_NODE:
+ return buf.push("", node.target, " ", node.data, "?>");
+
+ case ENTITY_REFERENCE_NODE:
+ return buf.push('&', node.nodeName, ';');
+ //case ENTITY_NODE:
+ //case NOTATION_NODE:
+
+ default:
+ buf.push('??', node.nodeName);
+ }
+ }
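+ // Sketch (not upstream documentation) of the text escaping handled above, assuming the
+ // DOMParser exported by this bundle: only '&', '<' and the ']]>' sequence are rewritten.
+ //
+ // var d = new DOMParser().parseFromString('<a/>', 'text/xml');
+ // d.documentElement.appendChild(d.createTextNode('1 < 2 & x ]]>'));
+ // d.toString(); // -> '<a>1 &lt; 2 &amp; x ]]&gt;</a>'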
+
+ function _importNode(doc, node, deep) {
+ var node2;
+
+ switch (node.nodeType) {
+ case ELEMENT_NODE:
+ node2 = node.cloneNode(false);
+ node2.ownerDocument = doc;
+ //var attrs = node2.attributes;
+ //var len = attrs.length;
+ //for(var i=0;i<len;i++){
+ gt: '>',
+ lt: '<',
+ quot: '"'
+ });
+ /**
+ * A map of currently 241 entities that are detected in an HTML document.
+ * They contain all entries from `XML_ENTITIES`.
+ *
+ * @see XML_ENTITIES
+ * @see DOMParser.parseFromString
+ * @see DOMImplementation.prototype.createHTMLDocument
+ * @see https://html.spec.whatwg.org/#named-character-references WHATWG HTML(5) Spec
+ * @see https://www.w3.org/TR/xml-entity-names/ W3C XML Entity Names
+ * @see https://www.w3.org/TR/html4/sgml/entities.html W3C HTML4/SGML
+ * @see https://en.wikipedia.org/wiki/List_of_XML_and_HTML_character_entity_references#Character_entity_references_in_HTML Wikipedia (HTML)
+ * @see https://en.wikipedia.org/wiki/List_of_XML_and_HTML_character_entity_references#Entities_representing_special_characters_in_XHTML Wikipedia (XHTML)
+ */
+
+ exports.HTML_ENTITIES = freeze({
+ lt: '<',
+ gt: '>',
+ amp: '&',
+ quot: '"',
+ apos: "'",
+ Agrave: "À",
+ Aacute: "Á",
+ Acirc: "Â",
+ Atilde: "Ã",
+ Auml: "Ä",
+ Aring: "Å",
+ AElig: "Æ",
+ Ccedil: "Ç",
+ Egrave: "È",
+ Eacute: "É",
+ Ecirc: "Ê",
+ Euml: "Ë",
+ Igrave: "Ì",
+ Iacute: "Í",
+ Icirc: "Î",
+ Iuml: "Ï",
+ ETH: "Ð",
+ Ntilde: "Ñ",
+ Ograve: "Ò",
+ Oacute: "Ó",
+ Ocirc: "Ô",
+ Otilde: "Õ",
+ Ouml: "Ö",
+ Oslash: "Ø",
+ Ugrave: "Ù",
+ Uacute: "Ú",
+ Ucirc: "Û",
+ Uuml: "Ü",
+ Yacute: "Ý",
+ THORN: "Þ",
+ szlig: "ß",
+ agrave: "à",
+ aacute: "á",
+ acirc: "â",
+ atilde: "ã",
+ auml: "ä",
+ aring: "å",
+ aelig: "æ",
+ ccedil: "ç",
+ egrave: "è",
+ eacute: "é",
+ ecirc: "ê",
+ euml: "ë",
+ igrave: "ì",
+ iacute: "í",
+ icirc: "î",
+ iuml: "ï",
+ eth: "ð",
+ ntilde: "ñ",
+ ograve: "ò",
+ oacute: "ó",
+ ocirc: "ô",
+ otilde: "õ",
+ ouml: "ö",
+ oslash: "ø",
+ ugrave: "ù",
+ uacute: "ú",
+ ucirc: "û",
+ uuml: "ü",
+ yacute: "ý",
+ thorn: "þ",
+ yuml: "ÿ",
+ nbsp: "\xA0",
+ iexcl: "¡",
+ cent: "¢",
+ pound: "£",
+ curren: "¤",
+ yen: "¥",
+ brvbar: "¦",
+ sect: "§",
+ uml: "¨",
+ copy: "©",
+ ordf: "ª",
+ laquo: "«",
+ not: "¬",
+ shy: "\xAD",
+ reg: "®",
+ macr: "¯",
+ deg: "°",
+ plusmn: "±",
+ sup2: "²",
+ sup3: "³",
+ acute: "´",
+ micro: "µ",
+ para: "¶",
+ middot: "·",
+ cedil: "¸",
+ sup1: "¹",
+ ordm: "º",
+ raquo: "»",
+ frac14: "¼",
+ frac12: "½",
+ frac34: "¾",
+ iquest: "¿",
+ times: "×",
+ divide: "÷",
+ forall: "∀",
+ part: "∂",
+ exist: "∃",
+ empty: "∅",
+ nabla: "∇",
+ isin: "∈",
+ notin: "∉",
+ ni: "∋",
+ prod: "∏",
+ sum: "∑",
+ minus: "−",
+ lowast: "∗",
+ radic: "√",
+ prop: "∝",
+ infin: "∞",
+ ang: "∠",
+ and: "∧",
+ or: "∨",
+ cap: "∩",
+ cup: "∪",
+ 'int': "∫",
+ there4: "∴",
+ sim: "∼",
+ cong: "≅",
+ asymp: "≈",
+ ne: "≠",
+ equiv: "≡",
+ le: "≤",
+ ge: "≥",
+ sub: "⊂",
+ sup: "⊃",
+ nsub: "⊄",
+ sube: "⊆",
+ supe: "⊇",
+ oplus: "⊕",
+ otimes: "⊗",
+ perp: "⊥",
+ sdot: "⋅",
+ Alpha: "Α",
+ Beta: "Β",
+ Gamma: "Γ",
+ Delta: "Δ",
+ Epsilon: "Ε",
+ Zeta: "Ζ",
+ Eta: "Η",
+ Theta: "Θ",
+ Iota: "Ι",
+ Kappa: "Κ",
+ Lambda: "Λ",
+ Mu: "Μ",
+ Nu: "Ν",
+ Xi: "Ξ",
+ Omicron: "Ο",
+ Pi: "Π",
+ Rho: "Ρ",
+ Sigma: "Σ",
+ Tau: "Τ",
+ Upsilon: "Υ",
+ Phi: "Φ",
+ Chi: "Χ",
+ Psi: "Ψ",
+ Omega: "Ω",
+ alpha: "α",
+ beta: "β",
+ gamma: "γ",
+ delta: "δ",
+ epsilon: "ε",
+ zeta: "ζ",
+ eta: "η",
+ theta: "θ",
+ iota: "ι",
+ kappa: "κ",
+ lambda: "λ",
+ mu: "μ",
+ nu: "ν",
+ xi: "ξ",
+ omicron: "ο",
+ pi: "π",
+ rho: "ρ",
+ sigmaf: "ς",
+ sigma: "σ",
+ tau: "τ",
+ upsilon: "υ",
+ phi: "φ",
+ chi: "χ",
+ psi: "ψ",
+ omega: "ω",
+ thetasym: "ϑ",
+ upsih: "ϒ",
+ piv: "ϖ",
+ OElig: "Œ",
+ oelig: "œ",
+ Scaron: "Š",
+ scaron: "š",
+ Yuml: "Ÿ",
+ fnof: "ƒ",
+ circ: "ˆ",
+ tilde: "˜",
+ ensp: "\u2002",
+ emsp: "\u2003",
+ thinsp: "\u2009",
+ zwnj: "\u200C",
+ zwj: "\u200D",
+ lrm: "\u200E",
+ rlm: "\u200F",
+ ndash: "–",
+ mdash: "—",
+ lsquo: "‘",
+ rsquo: "’",
+ sbquo: "‚",
+ ldquo: "“",
+ rdquo: "”",
+ bdquo: "„",
+ dagger: "†",
+ Dagger: "‡",
+ bull: "•",
+ hellip: "…",
+ permil: "‰",
+ prime: "′",
+ Prime: "″",
+ lsaquo: "‹",
+ rsaquo: "›",
+ oline: "‾",
+ euro: "€",
+ trade: "™",
+ larr: "←",
+ uarr: "↑",
+ rarr: "→",
+ darr: "↓",
+ harr: "↔",
+ crarr: "↵",
+ lceil: "⌈",
+ rceil: "⌉",
+ lfloor: "⌊",
+ rfloor: "⌋",
+ loz: "◊",
+ spades: "♠",
+ clubs: "♣",
+ hearts: "♥",
+ diams: "♦"
+ });
+ /**
+ * @deprecated use `HTML_ENTITIES` instead
+ * @see HTML_ENTITIES
+ */
+
+ exports.entityMap = exports.HTML_ENTITIES;
+ });
+ entities.XML_ENTITIES;
+ entities.HTML_ENTITIES;
+ entities.entityMap;
+
+ var NAMESPACE$1 = conventions.NAMESPACE; //[4] NameStartChar ::= ":" | [A-Z] | "_" | [a-z] | [#xC0-#xD6] | [#xD8-#xF6] | [#xF8-#x2FF] | [#x370-#x37D] | [#x37F-#x1FFF] | [#x200C-#x200D] | [#x2070-#x218F] | [#x2C00-#x2FEF] | [#x3001-#xD7FF] | [#xF900-#xFDCF] | [#xFDF0-#xFFFD] | [#x10000-#xEFFFF]
+ //[4a] NameChar ::= NameStartChar | "-" | "." | [0-9] | #xB7 | [#x0300-#x036F] | [#x203F-#x2040]
+ //[5] Name ::= NameStartChar (NameChar)*
+
+ var nameStartChar = /[A-Z_a-z\xC0-\xD6\xD8-\xF6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/; //\u10000-\uEFFFF
+
+ var nameChar = new RegExp("[\\-\\.0-9" + nameStartChar.source.slice(1, -1) + "\\u00B7\\u0300-\\u036F\\u203F-\\u2040]");
+ var tagNamePattern = new RegExp('^' + nameStartChar.source + nameChar.source + '*(?:\:' + nameStartChar.source + nameChar.source + '*)?$'); //var tagNamePattern = /^[a-zA-Z_][\w\-\.]*(?:\:[a-zA-Z_][\w\-\.]*)?$/
+ //var handlers = 'resolveEntity,getExternalSubset,characters,endDocument,endElement,endPrefixMapping,ignorableWhitespace,processingInstruction,setDocumentLocator,skippedEntity,startDocument,startElement,startPrefixMapping,notationDecl,unparsedEntityDecl,error,fatalError,warning,attributeDecl,elementDecl,externalEntityDecl,internalEntityDecl,comment,endCDATA,endDTD,endEntity,startCDATA,startDTD,startEntity'.split(',')
+ //S_TAG, S_ATTR, S_EQ, S_ATTR_NOQUOT_VALUE
+ //S_ATTR_SPACE, S_ATTR_END, S_TAG_SPACE, S_TAG_CLOSE
+
+ var S_TAG = 0; //tag name offering
+
+ var S_ATTR = 1; //attr name offering
+
+ var S_ATTR_SPACE = 2; //attr name end and space offered
+
+ var S_EQ = 3; //=space?
+
+ var S_ATTR_NOQUOT_VALUE = 4; //attr value(no quot value only)
+
+ var S_ATTR_END = 5; //attr value end and no space(quot end)
+
+ var S_TAG_SPACE = 6; //(attr value end || tag end ) && (space offer)
+
+ var S_TAG_CLOSE = 7; //closed el
+
+ /**
+ * Creates an error that will not be caught by XMLReader aka the SAX parser.
+ *
+ * @param {string} message
+ * @param {any?} locator Optional, can provide details about the location in the source
+ * @constructor
+ */
+
+ function ParseError$1(message, locator) {
+ this.message = message;
+ this.locator = locator;
+ if (Error.captureStackTrace) Error.captureStackTrace(this, ParseError$1);
+ }
+
+ ParseError$1.prototype = new Error();
+ ParseError$1.prototype.name = ParseError$1.name;
+
+ function XMLReader$1() {}
+
+ XMLReader$1.prototype = {
+ parse: function parse(source, defaultNSMap, entityMap) {
+ var domBuilder = this.domBuilder;
+ domBuilder.startDocument();
+
+ _copy(defaultNSMap, defaultNSMap = {});
+
+ _parse(source, defaultNSMap, entityMap, domBuilder, this.errorHandler);
+
+ domBuilder.endDocument();
+ }
+ };
+
+ function _parse(source, defaultNSMapCopy, entityMap, domBuilder, errorHandler) {
+ function fixedFromCharCode(code) {
+ // String.fromCharCode does not support code points
+ // above 0xFFFF (more than 2 bytes) directly, so build a surrogate pair
+ if (code > 0xffff) {
+ code -= 0x10000;
+ var surrogate1 = 0xd800 + (code >> 10),
+ surrogate2 = 0xdc00 + (code & 0x3ff);
+ return String.fromCharCode(surrogate1, surrogate2);
+ } else {
+ return String.fromCharCode(code);
+ }
+ }
+
+ function entityReplacer(a) {
+ var k = a.slice(1, -1);
+
+ if (k in entityMap) {
+ return entityMap[k];
+ } else if (k.charAt(0) === '#') {
+ return fixedFromCharCode(parseInt(k.substr(1).replace('x', '0x')));
+ } else {
+ errorHandler.error('entity not found:' + a);
+ return a;
+ }
+ }
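+ // Worked example (not upstream documentation) of the numeric-reference path above:
+ // '&#x1F600;' is above 0xFFFF, so fixedFromCharCode builds a surrogate pair.
+ //
+ // code = 0x1F600 - 0x10000; // 0xF600
+ // surrogate1 = 0xD800 + (code >> 10); // 0xD83D
+ // surrogate2 = 0xDC00 + (code & 0x3ff); // 0xDE00
+ // entityReplacer('&#x1F600;'); // -> '\uD83D\uDE00'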
+
+ function appendText(end) {
+ //has some bugs
+ if (end > start) {
+ var xt = source.substring(start, end).replace(/&#?\w+;/g, entityReplacer);
+ locator && position(start);
+ domBuilder.characters(xt, 0, end - start);
+ start = end;
+ }
+ }
+
+ function position(p, m) {
+ while (p >= lineEnd && (m = linePattern.exec(source))) {
+ lineStart = m.index;
+ lineEnd = lineStart + m[0].length;
+ locator.lineNumber++; //console.log('line++:',locator,startPos,endPos)
+ }
+
+ locator.columnNumber = p - lineStart + 1;
+ }
+
+ var lineStart = 0;
+ var lineEnd = 0;
+ var linePattern = /.*(?:\r\n?|\n)|.*$/g;
+ var locator = domBuilder.locator;
+ var parseStack = [{
+ currentNSMap: defaultNSMapCopy
+ }];
+ var closeMap = {};
+ var start = 0;
+
+ while (true) {
+ try {
+ var tagStart = source.indexOf('<', start);
+
+ if (tagStart < 0) {
+ if (!source.substr(start).match(/^\s*$/)) {
+ var doc = domBuilder.doc;
+ var text = doc.createTextNode(source.substr(start));
+ doc.appendChild(text);
+ domBuilder.currentElement = text;
+ }
+
+ return;
+ }
+
+ if (tagStart > start) {
+ appendText(tagStart);
+ }
+
+ switch (source.charAt(tagStart + 1)) {
+ case '/':
+ var end = source.indexOf('>', tagStart + 3);
+ var tagName = source.substring(tagStart + 2, end).replace(/[ \t\n\r]+$/g, '');
+ var config = parseStack.pop();
+
+ if (end < 0) {
+ tagName = source.substring(tagStart + 2).replace(/[\s<].*/, '');
+ errorHandler.error("end tag name: " + tagName + ' is not complete:' + config.tagName);
+ end = tagStart + 1 + tagName.length;
+ } else if (tagName.match(/\s</)) {
+ tagName = tagName.replace(/[\s<].*/, '');
+ errorHandler.error("end tag name: " + tagName + ' maybe not complete');
+ end = tagStart + 1 + tagName.length;
+ }
+
+ var localNSMap = config.localNSMap;
+ var endMatch = config.tagName == tagName;
+ var endIgnoreCaseMach = endMatch || config.tagName && config.tagName.toLowerCase() == tagName.toLowerCase();
+
+ if (endIgnoreCaseMach) {
+ domBuilder.endElement(config.uri, config.localName, tagName);
+
+ if (localNSMap) {
+ for (var prefix in localNSMap) {
+ domBuilder.endPrefixMapping(prefix);
+ }
+ }
+
+ if (!endMatch) {
+ errorHandler.fatalError("end tag name: " + tagName + ' is not match the current start tagName:' + config.tagName); // No known test case
+ }
+ } else {
+ parseStack.push(config);
+ }
+
+ end++;
+ break;
+ // end element
+
+ case '?':
+ // <?...?>
+ locator && position(tagStart);
+ end = parseInstruction(source, tagStart, domBuilder);
+ break;
+
+ case '!':
+ // <!doctype,<![CDATA,<!--
+ locator && position(tagStart);
+ end = parseDCC(source, tagStart, domBuilder, errorHandler);
+ break;
+ }
+ } catch (e) {
+ if (e instanceof ParseError$1) {
+ throw e;
+ }
+
+ errorHandler.error('element parse error: ' + e);
+ end = -1;
+ }
+
+ if (end > start) {
+ start = end;
+ } else {
+ //TODO: the SAX parser may backtrack here; risk of reporting a wrong position
+ appendText(Math.max(tagStart, start) + 1);
+ }
+ }
+ }
+
+ function copyLocator(f, t) {
+ t.lineNumber = f.lineNumber;
+ t.columnNumber = f.columnNumber;
+ return t;
+ }
+ /**
+ * @see #appendElement(source,elStartEnd,el,selfClosed,entityReplacer,domBuilder,parseStack);
+ * @return end of the elementStartPart(end of elementEndPart for selfClosed el)
+ */
+
+
+ function parseElementStartPart(source, start, el, currentNSMap, entityReplacer, errorHandler) {
+ /**
+ * @param {string} qname
+ * @param {string} value
+ * @param {number} startIndex
+ */
+ function addAttribute(qname, value, startIndex) {
+ if (el.attributeNames.hasOwnProperty(qname)) {
+ errorHandler.fatalError('Attribute ' + qname + ' redefined');
+ }
+
+ el.addValue(qname, value, startIndex);
+ }
+
+ var attrName;
+ var value;
+ var p = ++start;
+ var s = S_TAG; //status
+
+ while (true) {
+ var c = source.charAt(p);
+
+ switch (c) {
+ case '=':
+ if (s === S_ATTR) {
+ //attrName
+ attrName = source.slice(start, p);
+ s = S_EQ;
+ } else if (s === S_ATTR_SPACE) {
+ s = S_EQ;
+ } else {
+ //fatalError: equal must after attrName or space after attrName
+ throw new Error('attribute equal must after attrName'); // No known test case
+ }
+
+ break;
+
+ case '\'':
+ case '"':
+ if (s === S_EQ || s === S_ATTR //|| s == S_ATTR_SPACE
+ ) {
+ //equal
+ if (s === S_ATTR) {
+ errorHandler.warning('attribute value must after "="');
+ attrName = source.slice(start, p);
+ }
+
+ start = p + 1;
+ p = source.indexOf(c, start);
+
+ if (p > 0) {
+ value = source.slice(start, p).replace(/&#?\w+;/g, entityReplacer);
+ addAttribute(attrName, value, start - 1);
+ s = S_ATTR_END;
+ } else {
+ //fatalError: no end quot match
+ throw new Error('attribute value no end \'' + c + '\' match');
+ }
+ } else if (s == S_ATTR_NOQUOT_VALUE) {
+ value = source.slice(start, p).replace(/&#?\w+;/g, entityReplacer); //console.log(attrName,value,start,p)
+
+ addAttribute(attrName, value, start); //console.dir(el)
+
+ errorHandler.warning('attribute "' + attrName + '" missed start quot(' + c + ')!!');
+ start = p + 1;
+ s = S_ATTR_END;
+ } else {
+ //fatalError: no equal before
+ throw new Error('attribute value must after "="'); // No known test case
+ }
+
+ break;
+
+ case '/':
+ switch (s) {
+ case S_TAG:
+ el.setTagName(source.slice(start, p));
+
+ case S_ATTR_END:
+ case S_TAG_SPACE:
+ case S_TAG_CLOSE:
+ s = S_TAG_CLOSE;
+ el.closed = true;
+
+ case S_ATTR_NOQUOT_VALUE:
+ case S_ATTR:
+ case S_ATTR_SPACE:
+ break;
+ //case S_EQ:
+
+ default:
+ throw new Error("attribute invalid close char('/')");
+ // No known test case
+ }
+
+ break;
+
+ case '':
+ //end document
+ errorHandler.error('unexpected end of input');
+
+ if (s == S_TAG) {
+ el.setTagName(source.slice(start, p));
+ }
+
+ return p;
+
+ case '>':
+ switch (s) {
+ case S_TAG:
+ el.setTagName(source.slice(start, p));
+
+ case S_ATTR_END:
+ case S_TAG_SPACE:
+ case S_TAG_CLOSE:
+ break;
+ //normal
+
+ case S_ATTR_NOQUOT_VALUE: //Compatible state
+
+ case S_ATTR:
+ value = source.slice(start, p);
+
+ if (value.slice(-1) === '/') {
+ el.closed = true;
+ value = value.slice(0, -1);
+ }
+
+ case S_ATTR_SPACE:
+ if (s === S_ATTR_SPACE) {
+ value = attrName;
+ }
+
+ if (s == S_ATTR_NOQUOT_VALUE) {
+ errorHandler.warning('attribute "' + value + '" missed quot(")!');
+ addAttribute(attrName, value.replace(/&#?\w+;/g, entityReplacer), start);
+ } else {
+ if (!NAMESPACE$1.isHTML(currentNSMap['']) || !value.match(/^(?:disabled|checked|selected)$/i)) {
+ errorHandler.warning('attribute "' + value + '" missed value!! "' + value + '" instead!!');
+ }
+
+ addAttribute(value, value, start);
+ }
+
+ break;
+
+ case S_EQ:
+ throw new Error('attribute value missed!!');
+ } // console.log(tagName,tagNamePattern,tagNamePattern.test(tagName))
+
+
+ return p;
+
+ /*xml space '\x20' | #x9 | #xD | #xA; */
+
+ case "\x80":
+ c = ' ';
+
+ default:
+ if (c <= ' ') {
+ //space
+ switch (s) {
+ case S_TAG:
+ el.setTagName(source.slice(start, p)); //tagName
+
+ s = S_TAG_SPACE;
+ break;
+
+ case S_ATTR:
+ attrName = source.slice(start, p);
+ s = S_ATTR_SPACE;
+ break;
+
+ case S_ATTR_NOQUOT_VALUE:
+ var value = source.slice(start, p).replace(/&#?\w+;/g, entityReplacer);
+ errorHandler.warning('attribute "' + value + '" missed quot(")!!');
+ addAttribute(attrName, value, start);
+
+ case S_ATTR_END:
+ s = S_TAG_SPACE;
+ break;
+ //case S_TAG_SPACE:
+ //case S_EQ:
+ //case S_ATTR_SPACE:
+ // void();break;
+ //case S_TAG_CLOSE:
+ //ignore warning
+ }
+ } else {
+ //not space
+ //S_TAG, S_ATTR, S_EQ, S_ATTR_NOQUOT_VALUE
+ //S_ATTR_SPACE, S_ATTR_END, S_TAG_SPACE, S_TAG_CLOSE
+ switch (s) {
+ //case S_TAG:void();break;
+ //case S_ATTR:void();break;
+ //case S_ATTR_NOQUOT_VALUE:void();break;
+ case S_ATTR_SPACE:
+ el.tagName;
+
+ if (!NAMESPACE$1.isHTML(currentNSMap['']) || !attrName.match(/^(?:disabled|checked|selected)$/i)) {
+ errorHandler.warning('attribute "' + attrName + '" missed value!! "' + attrName + '" instead2!!');
+ }
+
+ addAttribute(attrName, attrName, start);
+ start = p;
+ s = S_ATTR;
+ break;
+
+ case S_ATTR_END:
+ errorHandler.warning('attribute space is required"' + attrName + '"!!');
+
+ case S_TAG_SPACE:
+ s = S_ATTR;
+ start = p;
+ break;
+
+ case S_EQ:
+ s = S_ATTR_NOQUOT_VALUE;
+ start = p;
+ break;
+
+ case S_TAG_CLOSE:
+ throw new Error("elements closed character '/' and '>' must be connected to");
+ }
+ }
+
+ } //end outer switch
+ //console.log('p++',p)
+
+
+ p++;
+ }
+ }
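+ // Illustrative trace (not upstream documentation) of the state machine above for the
+ // input '<a href="x"/>', starting just after the '<':
+ //
+ // S_TAG -> ' ' : setTagName('a'), S_TAG_SPACE
+ // S_TAG_SPACE -> 'h' : start of an attribute name, S_ATTR
+ // S_ATTR -> '=' : attrName = 'href', S_EQ
+ // S_EQ -> '"' : quoted value read to the closing quote, addAttribute('href', 'x'), S_ATTR_END
+ // S_ATTR_END -> '/' : S_TAG_CLOSE, el.closed = true
+ // S_TAG_CLOSE -> '>' : the index of '>' is returned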
+ /**
+ * @return true if has new namespace define
+ */
+
+
+ function appendElement$1(el, domBuilder, currentNSMap) {
+ var tagName = el.tagName;
+ var localNSMap = null; //var currentNSMap = parseStack[parseStack.length-1].currentNSMap;
+
+ var i = el.length;
+
+ while (i--) {
+ var a = el[i];
+ var qName = a.qName;
+ var value = a.value;
+ var nsp = qName.indexOf(':');
+
+ if (nsp > 0) {
+ var prefix = a.prefix = qName.slice(0, nsp);
+ var localName = qName.slice(nsp + 1);
+ var nsPrefix = prefix === 'xmlns' && localName;
+ } else {
+ localName = qName;
+ prefix = null;
+ nsPrefix = qName === 'xmlns' && '';
+ } //can not set prefix,because prefix !== ''
+
+
+ a.localName = localName; //prefix == null for no ns prefix attribute
+
+ if (nsPrefix !== false) {
+ //hack!!
+ if (localNSMap == null) {
+ localNSMap = {}; //console.log(currentNSMap,0)
+
+ _copy(currentNSMap, currentNSMap = {}); //console.log(currentNSMap,1)
+
+ }
+
+ currentNSMap[nsPrefix] = localNSMap[nsPrefix] = value;
+ a.uri = NAMESPACE$1.XMLNS;
+ domBuilder.startPrefixMapping(nsPrefix, value);
+ }
+ }
+
+ var i = el.length;
+
+ while (i--) {
+ a = el[i];
+ var prefix = a.prefix;
+
+ if (prefix) {
+ //no prefix attribute has no namespace
+ if (prefix === 'xml') {
+ a.uri = NAMESPACE$1.XML;
+ }
+
+ if (prefix !== 'xmlns') {
+ a.uri = currentNSMap[prefix || '']; //{console.log('###'+a.qName,domBuilder.locator.systemId+'',currentNSMap,a.uri)}
+ }
+ }
+ }
+
+ var nsp = tagName.indexOf(':');
+
+ if (nsp > 0) {
+ prefix = el.prefix = tagName.slice(0, nsp);
+ localName = el.localName = tagName.slice(nsp + 1);
+ } else {
+ prefix = null; //important!!
+
+ localName = el.localName = tagName;
+ } //no prefix element has default namespace
+
+
+ var ns = el.uri = currentNSMap[prefix || ''];
+ domBuilder.startElement(ns, localName, tagName, el); //endPrefixMapping and startPrefixMapping have not any help for dom builder
+ //localNSMap = null
+
+ if (el.closed) {
+ domBuilder.endElement(ns, localName, tagName);
+
+ if (localNSMap) {
+ for (prefix in localNSMap) {
+ domBuilder.endPrefixMapping(prefix);
+ }
+ }
+ } else {
+ el.currentNSMap = currentNSMap;
+ el.localNSMap = localNSMap; //parseStack.push(el);
+
+ return true;
+ }
+ }
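+ // Sketch (not upstream documentation) of the namespace resolution above for
+ // <x:a xmlns:x="urn:one" b="c">: the first pass over the attributes records the
+ // xmlns:x declaration, the second leaves the unprefixed attribute 'b' without a
+ // namespace, and the element itself ends up with:
+ //
+ // el.prefix === 'x'
+ // el.localName === 'a'
+ // el.uri === 'urn:one' // currentNSMap['x']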
+
+ function parseHtmlSpecialContent(source, elStartEnd, tagName, entityReplacer, domBuilder) {
+ if (/^(?:script|textarea)$/i.test(tagName)) {
+ var elEndStart = source.indexOf('</' + tagName + '>', elStartEnd);
+ var text = source.substring(elStartEnd + 1, elEndStart);
+
+ if (/[&<]/.test(text)) {
+ if (/^script$/i.test(tagName)) {
+ //if(!/\]\]>/.test(text)){
+ //lexHandler.startCDATA();
+ domBuilder.characters(text, 0, text.length); //lexHandler.endCDATA();
+
+ return elEndStart; //}
+ } //}else{//text area
+
+
+ text = text.replace(/&#?\w+;/g, entityReplacer);
+ domBuilder.characters(text, 0, text.length);
+ return elEndStart; //}
+ }
+ }
+
+ return elStartEnd + 1;
+ }
+
+ function fixSelfClosed(source, elStartEnd, tagName, closeMap) {
+ //if(tagName in closeMap){
+ var pos = closeMap[tagName];
+
+ if (pos == null) {
+ //console.log(tagName)
+ pos = source.lastIndexOf('</' + tagName + '>');
+
+ if (pos < elStartEnd) {
+ //the close tag was forgotten
+ pos = source.lastIndexOf('</' + tagName);
+ }
+
+ closeMap[tagName] = pos;
+ }
+
+ return pos < elStartEnd; //}
+ }
+
+ function _copy(source, target) {
+ for (var n in source) {
+ target[n] = source[n];
+ }
+ }
+
+ function parseDCC(source, start, domBuilder, errorHandler) {
+ //sure start with '<!'
+ var next = source.charAt(start + 2);
+
+ switch (next) {
+ case '-':
+ if (source.charAt(start + 3) === '-') {
+ var end = source.indexOf('-->', start + 4); //append comment source.substring(4,end)//<!--
- // 'video/mp4; codecs="avc1"' and 'audio/mp4; codecs="mp4"')
- } else {
- var parsedMimeType = parseContentType(mainSegmentLoader.mimeType_);
- var codecs = parsedMimeType.parameters.codecs.split(',');
- var audioCodec = void 0;
- var videoCodec = void 0;
- codecs.forEach(function (codec) {
- codec = codec.trim();
+ if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
+ codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));
+ }
- if (isAudioCodec(codec)) {
- audioCodec = codec;
- } else if (isVideoCodec(codec)) {
- videoCodec = codec;
- }
- });
- videoMimeType = parsedMimeType.type + '; codecs="' + videoCodec + '"';
- audioMimeType = parsedMimeType.type.replace('video', 'audio') + '; codecs="' + audioCodec + '"';
- } // upsert the content types based on the selected playlist
+ if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
+ codecs.audio = audioPlaylist.attributes.CODECS;
+ }
+ var videoContentType = getMimeForCodec(codecs.video);
+ var audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist
var keySystemContentTypes = {};
- var videoPlaylist = mainSegmentLoader.playlist_;
for (var keySystem in keySystemOptions) {
- keySystemContentTypes[keySystem] = {
- audioContentType: audioMimeType,
- videoContentType: videoMimeType
- };
+ keySystemContentTypes[keySystem] = {};
- if (videoPlaylist.contentProtection && videoPlaylist.contentProtection[keySystem] && videoPlaylist.contentProtection[keySystem].pssh) {
- keySystemContentTypes[keySystem].pssh = videoPlaylist.contentProtection[keySystem].pssh;
+ if (audioContentType) {
+ keySystemContentTypes[keySystem].audioContentType = audioContentType;
+ }
+
+ if (videoContentType) {
+ keySystemContentTypes[keySystem].videoContentType = videoContentType;
+ } // Default to using the video playlist's PSSH even though they may be different, as
+ // videojs-contrib-eme will only accept one in the options.
+ //
+ // This shouldn't be an issue for most cases as early intialization will handle all
+ // unique PSSH values, and if they aren't, then encrypted events should have the
+ // specific information needed for the unique license.
+
+
+ if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
+ keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
} // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
// so we need to prevent overwriting the URL entirely
@@ -54972,26 +65556,179 @@
}
}
- return videojs$1.mergeOptions(keySystemOptions, keySystemContentTypes);
+ return videojs.mergeOptions(keySystemOptions, keySystemContentTypes);
};
+ /**
+ * @typedef {Object} KeySystems
+ *
+ * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
+ * Note: not all options are listed here.
+ *
+ * @property {Uint8Array} [pssh]
+ * Protection System Specific Header
+ */
- var setupEmeOptions = function setupEmeOptions(hlsHandler) {
- var mainSegmentLoader = hlsHandler.masterPlaylistController_.mainSegmentLoader_;
- var audioSegmentLoader = hlsHandler.masterPlaylistController_.audioSegmentLoader_;
- var player = videojs$1.players[hlsHandler.tech_.options_.playerId];
+ /**
+ * Goes through all the playlists and collects an array of KeySystems options objects
+ * containing each playlist's keySystems and their pssh values, if available.
+ *
+ * @param {Object[]} playlists
+ * The playlists to look through
+ * @param {string[]} keySystems
+ * The keySystems to collect pssh values for
+ *
+ * @return {KeySystems[]}
+ * An array of KeySystems objects containing available key systems and their
+ * pssh values
+ */
- if (player.eme) {
- var sourceOptions = emeKeySystems(hlsHandler.source_.keySystems, mainSegmentLoader, audioSegmentLoader);
- if (sourceOptions) {
- player.currentSource().keySystems = sourceOptions; // Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449
- // in non-IE11 browsers. In IE11 this is too early to initialize media keys
-
- if (!(videojs$1.browser.IE_VERSION === 11) && player.eme.initializeMediaKeys) {
- player.eme.initializeMediaKeys();
- }
+ var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
+ return playlists.reduce(function (keySystemsArr, playlist) {
+ if (!playlist.contentProtection) {
+ return keySystemsArr;
}
+
+ var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
+ var keySystemOptions = playlist.contentProtection[keySystem];
+
+ if (keySystemOptions && keySystemOptions.pssh) {
+ keySystemsObj[keySystem] = {
+ pssh: keySystemOptions.pssh
+ };
+ }
+
+ return keySystemsObj;
+ }, {});
+
+ if (Object.keys(keySystemsOptions).length) {
+ keySystemsArr.push(keySystemsOptions);
+ }
+
+ return keySystemsArr;
+ }, []);
+ };
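+ // Shape-only sketch (not upstream documentation) of the reduce above:
+ //
+ // getAllPsshKeySystemsOptions(
+ //   [
+ //     { contentProtection: { 'com.widevine.alpha': { pssh: new Uint8Array([1]) } } },
+ //     { attributes: {} } // no contentProtection, skipped
+ //   ],
+ //   ['com.widevine.alpha']
+ // );
+ // // -> [{ 'com.widevine.alpha': { pssh: new Uint8Array([1]) } }]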
+ /**
+ * Returns a promise that waits for the
+ * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
+ *
+ * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
+ * browsers.
+ *
+ * As per the above ticket, this is particularly important for Chrome, where, if
+ * unencrypted content is appended before encrypted content and the key session has not
+ * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
+ * during playback.
+ *
+ * @param {Object} player
+ * The player instance
+ * @param {Object[]} sourceKeySystems
+ * The key systems options from the player source
+ * @param {Object} [audioMedia]
+ * The active audio media playlist (optional)
+ * @param {Object[]} mainPlaylists
+ * The playlists found on the master playlist object
+ *
+ * @return {Object}
+ * Promise that resolves when the key session has been created
+ */
+
+
+ var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
+ var player = _ref.player,
+ sourceKeySystems = _ref.sourceKeySystems,
+ audioMedia = _ref.audioMedia,
+ mainPlaylists = _ref.mainPlaylists;
+
+ if (!player.eme.initializeMediaKeys) {
+ return Promise.resolve();
+ } // TODO should all audio PSSH values be initialized for DRM?
+ //
+ // All unique video rendition pssh values are initialized for DRM, but here only
+ // the initial audio playlist license is initialized. In theory, an encrypted
+ // event should be fired if the user switches to an alternative audio playlist
+ // where a license is required, but this case hasn't yet been tested. In addition, there
+ // may be many alternate audio playlists unlikely to be used (e.g., multiple different
+ // languages).
+
+
+ var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
+ var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
+ var initializationFinishedPromises = [];
+ var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
+ // only place where it should not be deduped is for ms-prefixed APIs, but the early
+ // return for IE11 above, and the existence of modern EME APIs in addition to
+ // ms-prefixed APIs on Edge should prevent this from being a concern.
+ // initializeMediaKeys also won't use the webkit-prefixed APIs.
+
+ keySystemsOptionsArr.forEach(function (keySystemsOptions) {
+ keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
+ player.tech_.one('keysessioncreated', resolve);
+ }));
+ initializationFinishedPromises.push(new Promise(function (resolve, reject) {
+ player.eme.initializeMediaKeys({
+ keySystems: keySystemsOptions
+ }, function (err) {
+ if (err) {
+ reject(err);
+ return;
+ }
+
+ resolve();
+ });
+ }));
+ }); // The reasons Promise.race is chosen over Promise.any:
+ //
+ // * Promise.any is only available in Safari 14+.
+ // * None of these promises are expected to reject. If they do reject, it might be
+ // better here for the race to surface the rejection, rather than mask it by using
+ // Promise.any.
+
+ return Promise.race([// If a session was previously created, these will all finish resolving without
+ // creating a new session, otherwise it will take until the end of all license
+ // requests, which is why the key session check is used (to make setup much faster).
+ Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
+ Promise.race(keySessionCreatedPromises)]);
+ };
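+ // Usage sketch (hypothetical player and playlists, not upstream documentation), assuming
+ // videojs-contrib-eme is registered on the player:
+ //
+ // waitForKeySessionCreation({
+ //   player: player,
+ //   sourceKeySystems: { 'com.widevine.alpha': { url: 'https://license.example.com' } },
+ //   audioMedia: null,
+ //   mainPlaylists: masterPlaylists // e.g. the master manifest's playlists array
+ // }).then(function () { /* safe to append encrypted segments */ });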
+ /**
+ * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
+ * there are keySystems on the source, sets up source options to prepare the source for
+ * eme.
+ *
+ * @param {Object} player
+ * The player instance
+ * @param {Object[]} sourceKeySystems
+ * The key systems options from the player source
+ * @param {Object} media
+ * The active media playlist
+ * @param {Object} [audioMedia]
+ * The active audio media playlist (optional)
+ *
+ * @return {boolean}
+ * Whether or not options were configured and EME is available
+ */
+
+
+ var setupEmeOptions = function setupEmeOptions(_ref2) {
+ var player = _ref2.player,
+ sourceKeySystems = _ref2.sourceKeySystems,
+ media = _ref2.media,
+ audioMedia = _ref2.audioMedia;
+ var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
+
+ if (!sourceOptions) {
+ return false;
}
+
+ player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
+ // do nothing.
+
+ if (sourceOptions && !player.eme) {
+ videojs.log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
+ return false;
+ }
+
+ return true;
};
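+ // Usage sketch (hypothetical values, not upstream documentation): the boolean result lets
+ // callers tell "configured" apart from "eme plugin missing or no keySystems":
+ //
+ // var configured = setupEmeOptions({
+ //   player: player,
+ //   sourceKeySystems: { 'com.widevine.alpha': 'https://license.example.com' },
+ //   media: selectedPlaylist, // active media playlist
+ //   audioMedia: null
+ // });
+ // if (configured) { /* player.currentSource().keySystems now carries content types/pssh */ }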
var getVhsLocalStorage = function getVhsLocalStorage() {
@@ -54999,7 +65736,7 @@
return null;
}
- var storedObject = window.localStorage.getItem(LOCAL_STORAGE_KEY$1);
+ var storedObject = window.localStorage.getItem(LOCAL_STORAGE_KEY);
if (!storedObject) {
return null;
@@ -55019,10 +65756,10 @@
}
var objectToStore = getVhsLocalStorage();
- objectToStore = objectToStore ? videojs$1.mergeOptions(objectToStore, options) : options;
+ objectToStore = objectToStore ? videojs.mergeOptions(objectToStore, options) : options;
try {
- window.localStorage.setItem(LOCAL_STORAGE_KEY$1, JSON.stringify(objectToStore));
+ window.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
} catch (e) {
// Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
// storage is set to 0).
@@ -55033,15 +65770,41 @@
return objectToStore;
};
+ /**
+ * Parses VHS-supported media types from data URIs. See
+ * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
+ * for information on data URIs.
+ *
+ * @param {string} dataUri
+ * The data URI
+ *
+ * @return {string|Object}
+ * The parsed object/string, or the original string if no supported media type
+ * was found
+ */
+
+
+ var expandDataUri = function expandDataUri(dataUri) {
+ if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
+ return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
+ } // no known case for this data URI, return the string as-is
+
+
+ return dataUri;
+ };
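+ // Example (not upstream documentation) of the data-URI handling above:
+ //
+ // expandDataUri('data:application/vnd.videojs.vhs+json,{"uri":"manifest.m3u8"}');
+ // // -> { uri: 'manifest.m3u8' } (parsed JSON)
+ // expandDataUri('https://example.com/master.m3u8');
+ // // -> 'https://example.com/master.m3u8' (returned unchanged)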
/**
* Whether the browser has built-in HLS support.
*/
- Hls$1.supportsNativeHls = function () {
+ Vhs.supportsNativeHls = function () {
+ if (!document || !document.createElement) {
+ return false;
+ }
+
var video = document.createElement('video'); // native HLS is definitely not supported if HTML5 video isn't
- if (!videojs$1.getTech('Html5').isSupported()) {
+ if (!videojs.getTech('Html5').isSupported()) {
return false;
} // HLS manifests can go by many mime-types
@@ -55057,21 +65820,21 @@
});
}();
- Hls$1.supportsNativeDash = function () {
- if (!videojs$1.getTech('Html5').isSupported()) {
+ Vhs.supportsNativeDash = function () {
+ if (!document || !document.createElement || !videojs.getTech('Html5').isSupported()) {
return false;
}
return /maybe|probably/i.test(document.createElement('video').canPlayType('application/dash+xml'));
}();
- Hls$1.supportsTypeNatively = function (type) {
+ Vhs.supportsTypeNatively = function (type) {
if (type === 'hls') {
- return Hls$1.supportsNativeHls;
+ return Vhs.supportsNativeHls;
}
if (type === 'dash') {
- return Hls$1.supportsNativeDash;
+ return Vhs.supportsNativeDash;
}
return false;
@@ -55082,62 +65845,91 @@
*/
- Hls$1.isSupported = function () {
- return videojs$1.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
+ Vhs.isSupported = function () {
+ return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
};
- var Component$1 = videojs$1.getComponent('Component');
+ var Component = videojs.getComponent('Component');
/**
- * The Hls Handler object, where we orchestrate all of the parts
+ * The Vhs Handler object, where we orchestrate all of the parts
* of HLS to interact with video.js
*
- * @class HlsHandler
+ * @class VhsHandler
* @extends videojs.Component
* @param {Object} source the soruce object
* @param {Tech} tech the parent tech object
* @param {Object} options optional and required options
*/
- var HlsHandler = function (_Component) {
- inherits$2(HlsHandler, _Component);
+ var VhsHandler = /*#__PURE__*/function (_Component) {
+ inheritsLoose(VhsHandler, _Component);
- function HlsHandler(source, tech, options) {
- classCallCheck$1(this, HlsHandler); // tech.player() is deprecated but setup a reference to HLS for
+ function VhsHandler(source, tech, options) {
+ var _this;
+
+ _this = _Component.call(this, tech, videojs.mergeOptions(options.hls, options.vhs)) || this;
+
+ if (options.hls && Object.keys(options.hls).length) {
+ videojs.log.warn('Using hls options is deprecated. Please rename `hls` to `vhs` in your options object.');
+ } // if a tech level `initialBandwidth` option was passed
+ // use that over the VHS level `bandwidth` option
+
+
+ if (typeof options.initialBandwidth === 'number') {
+ _this.options_.bandwidth = options.initialBandwidth;
+ }
+
+ _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated but setup a reference to HLS for
// backwards-compatibility
- var _this = possibleConstructorReturn$1(this, (HlsHandler.__proto__ || Object.getPrototypeOf(HlsHandler)).call(this, tech, options.hls));
-
if (tech.options_ && tech.options_.playerId) {
- var _player = videojs$1(tech.options_.playerId);
+ var _player = videojs(tech.options_.playerId);
if (!_player.hasOwnProperty('hls')) {
Object.defineProperty(_player, 'hls', {
- get: function get$$1() {
- videojs$1.log.warn('player.hls is deprecated. Use player.tech().hls instead.');
+ get: function get() {
+ videojs.log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
tech.trigger({
type: 'usage',
name: 'hls-player-access'
});
- return _this;
+ return assertThisInitialized(_this);
},
configurable: true
});
- } // Set up a reference to the HlsHandler from player.vhs. This allows users to start
- // migrating from player.tech_.hls... to player.vhs... for API access. Although this
- // isn't the most appropriate form of reference for video.js (since all APIs should
- // be provided through core video.js), it is a common pattern for plugins, and vhs
- // will act accordingly.
+ }
+ if (!_player.hasOwnProperty('vhs')) {
+ Object.defineProperty(_player, 'vhs', {
+ get: function get() {
+ videojs.log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
+ tech.trigger({
+ type: 'usage',
+ name: 'vhs-player-access'
+ });
+ return assertThisInitialized(_this);
+ },
+ configurable: true
+ });
+ }
- _player.vhs = _this; // deprecated, for backwards compatibility
+ if (!_player.hasOwnProperty('dash')) {
+ Object.defineProperty(_player, 'dash', {
+ get: function get() {
+ videojs.log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
+ return assertThisInitialized(_this);
+ },
+ configurable: true
+ });
+ }
- _player.dash = _this;
_this.player_ = _player;
}
_this.tech_ = tech;
_this.source_ = source;
_this.stats = {};
+ _this.ignoreNextSeekingEvent_ = false;
_this.setOptions_();
@@ -55156,19 +65948,28 @@
var fullscreenElement = document.fullscreenElement || document.webkitFullscreenElement || document.mozFullScreenElement || document.msFullscreenElement;
if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
- _this.masterPlaylistController_.smoothQualityChange_();
- }
- }); // Handle seeking when looping - middleware doesn't handle this seek event from the tech
-
-
- _this.on(_this.tech_, 'seeking', function () {
- if (this.tech_.currentTime() === 0 && this.tech_.player_.loop()) {
- this.setCurrentTime(0);
+ _this.masterPlaylistController_.fastQualityChange_();
+ } else {
+ // When leaving fullscreen, since the in page pixel dimensions should be smaller
+ // than full screen, see if there should be a rendition switch down to preserve
+ // bandwidth.
+ _this.masterPlaylistController_.checkABR_();
}
});
+ _this.on(_this.tech_, 'seeking', function () {
+ if (this.ignoreNextSeekingEvent_) {
+ this.ignoreNextSeekingEvent_ = false;
+ return;
+ }
+
+ this.setCurrentTime(this.tech_.currentTime());
+ });
+
_this.on(_this.tech_, 'error', function () {
- if (this.masterPlaylistController_) {
+ // verify that the error was real and we are loaded
+ // enough to have mpc loaded.
+ if (this.tech_.error() && this.masterPlaylistController_) {
this.masterPlaylistController_.pauseLoading();
}
});
@@ -55178,450 +65979,650 @@
return _this;
}
- createClass$1(HlsHandler, [{
- key: 'setOptions_',
- value: function setOptions_() {
- var _this2 = this; // defaults
+ var _proto = VhsHandler.prototype;
+
+ _proto.setOptions_ = function setOptions_() {
+ var _this2 = this; // defaults
- this.options_.withCredentials = this.options_.withCredentials || false;
- this.options_.handleManifestRedirects = this.options_.handleManifestRedirects || false;
- this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
- this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
- this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
- this.options_.customTagParsers = this.options_.customTagParsers || [];
- this.options_.customTagMappers = this.options_.customTagMappers || [];
- this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
+ this.options_.withCredentials = this.options_.withCredentials || false;
+ this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
+ this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
+ this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
+ this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
+ this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
+ this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;
+ this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;
+ this.options_.customTagParsers = this.options_.customTagParsers || [];
+ this.options_.customTagMappers = this.options_.customTagMappers || [];
+ this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
- if (typeof this.options_.blacklistDuration !== 'number') {
- this.options_.blacklistDuration = 5 * 60;
- }
-
- if (typeof this.options_.bandwidth !== 'number') {
- if (this.options_.useBandwidthFromLocalStorage) {
- var storedObject = getVhsLocalStorage();
-
- if (storedObject && storedObject.bandwidth) {
- this.options_.bandwidth = storedObject.bandwidth;
- this.tech_.trigger({
- type: 'usage',
- name: 'hls-bandwidth-from-local-storage'
- });
- }
-
- if (storedObject && storedObject.throughput) {
- this.options_.throughput = storedObject.throughput;
- this.tech_.trigger({
- type: 'usage',
- name: 'hls-throughput-from-local-storage'
- });
- }
- }
- } // if bandwidth was not set by options or pulled from local storage, start playlist
- // selection at a reasonable bandwidth
-
-
- if (typeof this.options_.bandwidth !== 'number') {
- this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
- } // If the bandwidth number is unchanged from the initial setting
- // then this takes precedence over the enableLowInitialPlaylist option
-
-
- this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
-
- ['withCredentials', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys'].forEach(function (option) {
- if (typeof _this2.source_[option] !== 'undefined') {
- _this2.options_[option] = _this2.source_[option];
- }
- });
- this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
+ if (typeof this.options_.blacklistDuration !== 'number') {
+ this.options_.blacklistDuration = 5 * 60;
}
- /**
- * called when player.src gets called, handle a new source
- *
- * @param {Object} src the source object to handle
- */
- }, {
- key: 'src',
- value: function src(_src, type) {
- var _this3 = this; // do nothing if the src is falsey
+ if (typeof this.options_.bandwidth !== 'number') {
+ if (this.options_.useBandwidthFromLocalStorage) {
+ var storedObject = getVhsLocalStorage();
-
- if (!_src) {
- return;
- }
-
- this.setOptions_(); // add master playlist controller options
-
- this.options_.url = this.source_.src;
- this.options_.tech = this.tech_;
- this.options_.externHls = Hls$1;
- this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update both the tech and call our own
- // setCurrentTime function. This is needed because "seeking" events aren't always
- // reliable. External seeks (via the player object) are handled via middleware.
-
- this.options_.seekTo = function (time) {
- _this3.tech_.setCurrentTime(time);
-
- _this3.setCurrentTime(time);
- };
-
- this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
- this.playbackWatcher_ = new PlaybackWatcher(videojs$1.mergeOptions(this.options_, {
- seekable: function seekable$$1() {
- return _this3.seekable();
- },
- media: function media() {
- return _this3.masterPlaylistController_.media();
- }
- }));
- this.masterPlaylistController_.on('error', function () {
- var player = videojs$1.players[_this3.tech_.options_.playerId];
- player.error(_this3.masterPlaylistController_.error);
- }); // `this` in selectPlaylist should be the HlsHandler for backwards
- // compatibility with < v2
-
- this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : Hls$1.STANDARD_PLAYLIST_SELECTOR.bind(this);
- this.masterPlaylistController_.selectInitialPlaylist = Hls$1.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
-
- this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
- this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
- // controller. Using a custom property for backwards compatibility
- // with < v2
-
- Object.defineProperties(this, {
- selectPlaylist: {
- get: function get$$1() {
- return this.masterPlaylistController_.selectPlaylist;
- },
- set: function set$$1(selectPlaylist) {
- this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
- }
- },
- throughput: {
- get: function get$$1() {
- return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
- },
- set: function set$$1(throughput) {
- this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
- // for the cumulative average
-
- this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
- }
- },
- bandwidth: {
- get: function get$$1() {
- return this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
- },
- set: function set$$1(bandwidth) {
- this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
- // `count` is set to zero that current value of `rate` isn't included
- // in the cumulative average
-
- this.masterPlaylistController_.mainSegmentLoader_.throughput = {
- rate: 0,
- count: 0
- };
- }
- },
-
- /**
- * `systemBandwidth` is a combination of two serial processes bit-rates. The first
- * is the network bitrate provided by `bandwidth` and the second is the bitrate of
- * the entire process after that - decryption, transmuxing, and appending - provided
- * by `throughput`.
- *
- * Since the two process are serial, the overall system bandwidth is given by:
- * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
- */
- systemBandwidth: {
- get: function get$$1() {
- var invBandwidth = 1 / (this.bandwidth || 1);
- var invThroughput = void 0;
-
- if (this.throughput > 0) {
- invThroughput = 1 / this.throughput;
- } else {
- invThroughput = 0;
- }
-
- var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
- return systemBitrate;
- },
- set: function set$$1() {
- videojs$1.log.error('The "systemBandwidth" property is read-only');
- }
- }
- });
-
- if (this.options_.bandwidth) {
- this.bandwidth = this.options_.bandwidth;
- }
-
- if (this.options_.throughput) {
- this.throughput = this.options_.throughput;
- }
-
- Object.defineProperties(this.stats, {
- bandwidth: {
- get: function get$$1() {
- return _this3.bandwidth || 0;
- },
- enumerable: true
- },
- mediaRequests: {
- get: function get$$1() {
- return _this3.masterPlaylistController_.mediaRequests_() || 0;
- },
- enumerable: true
- },
- mediaRequestsAborted: {
- get: function get$$1() {
- return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
- },
- enumerable: true
- },
- mediaRequestsTimedout: {
- get: function get$$1() {
- return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
- },
- enumerable: true
- },
- mediaRequestsErrored: {
- get: function get$$1() {
- return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
- },
- enumerable: true
- },
- mediaTransferDuration: {
- get: function get$$1() {
- return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
- },
- enumerable: true
- },
- mediaBytesTransferred: {
- get: function get$$1() {
- return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
- },
- enumerable: true
- },
- mediaSecondsLoaded: {
- get: function get$$1() {
- return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
- },
- enumerable: true
- },
- buffered: {
- get: function get$$1() {
- return timeRangesToArray(_this3.tech_.buffered());
- },
- enumerable: true
- },
- currentTime: {
- get: function get$$1() {
- return _this3.tech_.currentTime();
- },
- enumerable: true
- },
- currentSource: {
- get: function get$$1() {
- return _this3.tech_.currentSource_;
- },
- enumerable: true
- },
- currentTech: {
- get: function get$$1() {
- return _this3.tech_.name_;
- },
- enumerable: true
- },
- duration: {
- get: function get$$1() {
- return _this3.tech_.duration();
- },
- enumerable: true
- },
- master: {
- get: function get$$1() {
- return _this3.playlists.master;
- },
- enumerable: true
- },
- playerDimensions: {
- get: function get$$1() {
- return _this3.tech_.currentDimensions();
- },
- enumerable: true
- },
- seekable: {
- get: function get$$1() {
- return timeRangesToArray(_this3.tech_.seekable());
- },
- enumerable: true
- },
- timestamp: {
- get: function get$$1() {
- return Date.now();
- },
- enumerable: true
- },
- videoPlaybackQuality: {
- get: function get$$1() {
- return _this3.tech_.getVideoPlaybackQuality();
- },
- enumerable: true
- }
- });
- this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
- this.tech_.on('bandwidthupdate', function () {
- if (_this3.options_.useBandwidthFromLocalStorage) {
- updateVhsLocalStorage({
- bandwidth: _this3.bandwidth,
- throughput: Math.round(_this3.throughput)
+ if (storedObject && storedObject.bandwidth) {
+ this.options_.bandwidth = storedObject.bandwidth;
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-bandwidth-from-local-storage'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-bandwidth-from-local-storage'
});
}
- });
- this.masterPlaylistController_.on('selectedinitialmedia', function () {
- // Add the manual rendition mix-in to HlsHandler
- renditionSelectionMixin(_this3);
- setupEmeOptions(_this3);
- }); // the bandwidth of the primary segment loader is our best
- // estimate of overall bandwidth
- this.on(this.masterPlaylistController_, 'progress', function () {
- this.tech_.trigger('progress');
- });
- this.tech_.ready(function () {
- return _this3.setupQualityLevels_();
- }); // do nothing if the tech has been disposed already
- // this can occur if someone sets the src in player.ready(), for instance
+ if (storedObject && storedObject.throughput) {
+ this.options_.throughput = storedObject.throughput;
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'vhs-throughput-from-local-storage'
+ });
+ this.tech_.trigger({
+ type: 'usage',
+ name: 'hls-throughput-from-local-storage'
+ });
+ }
+ }
+ } // if bandwidth was not set by options or pulled from local storage, start playlist
+ // selection at a reasonable bandwidth
- if (!this.tech_.el()) {
- return;
+
+ if (typeof this.options_.bandwidth !== 'number') {
+ this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
+ } // If the bandwidth number is unchanged from the initial setting
+ // then this takes precedence over the enableLowInitialPlaylist option
+
+
+ this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
+
+ ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS', 'useNetworkInformationApi', 'useDtsForTimestampOffset', 'experimentalExactManifestTimings', 'experimentalLeastPixelDiffSelector'].forEach(function (option) {
+ if (typeof _this2.source_[option] !== 'undefined') {
+ _this2.options_[option] = _this2.source_[option];
+ }
+ });
+ this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
+ this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
+ }
+ /**
+ * called when player.src gets called, handle a new source
+ *
+ * @param {Object} src the source object to handle
+ */
+ ;
+
+ _proto.src = function src(_src, type) {
+ var _this3 = this; // do nothing if the src is falsey
+
+
+ if (!_src) {
+ return;
+ }
+
+ this.setOptions_(); // add master playlist controller options
+
+ this.options_.src = expandDataUri(this.source_.src);
+ this.options_.tech = this.tech_;
+ this.options_.externVhs = Vhs;
+ this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
+
+ this.options_.seekTo = function (time) {
+ _this3.tech_.setCurrentTime(time);
+ };
+
+ if (this.options_.smoothQualityChange) {
+ videojs.log.warn('smoothQualityChange is deprecated and will be removed in the next major version');
+ }
+
+ this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
+ var playbackWatcherOptions = videojs.mergeOptions({
+ liveRangeSafeTimeDelta: SAFE_TIME_DELTA
+ }, this.options_, {
+ seekable: function seekable() {
+ return _this3.seekable();
+ },
+ media: function media() {
+ return _this3.masterPlaylistController_.media();
+ },
+ masterPlaylistController: this.masterPlaylistController_
+ });
+ this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
+ this.masterPlaylistController_.on('error', function () {
+ var player = videojs.players[_this3.tech_.options_.playerId];
+ var error = _this3.masterPlaylistController_.error;
+
+ if (typeof error === 'object' && !error.code) {
+ error.code = 3;
+ } else if (typeof error === 'string') {
+ error = {
+ message: error,
+ code: 3
+ };
}
- this.tech_.src(videojs$1.URL.createObjectURL(this.masterPlaylistController_.mediaSource));
+ player.error(error);
+ });
+ var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
+ // compatibility with < v2
+
+ this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
+ this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
+
+ this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
+ this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
+ // controller. Using a custom property for backwards compatibility
+ // with < v2
+
+ Object.defineProperties(this, {
+ selectPlaylist: {
+ get: function get() {
+ return this.masterPlaylistController_.selectPlaylist;
+ },
+ set: function set(selectPlaylist) {
+ this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
+ }
+ },
+ throughput: {
+ get: function get() {
+ return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
+ },
+ set: function set(throughput) {
+ this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
+ // for the cumulative average
+
+ this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
+ }
+ },
+ bandwidth: {
+ get: function get() {
+ var playerBandwidthEst = this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
+ var networkInformation = window.navigator.connection || window.navigator.mozConnection || window.navigator.webkitConnection;
+ var tenMbpsAsBitsPerSecond = 10e6;
+
+ if (this.options_.useNetworkInformationApi && networkInformation) {
+ // downlink returns Mbps
+ // https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlink
+ var networkInfoBandwidthEstBitsPerSec = networkInformation.downlink * 1000 * 1000; // downlink maxes out at 10 Mbps. In the event that both networkInformationApi and the player
+ // estimate a bandwidth greater than 10 Mbps, use the larger of the two estimates to ensure that
+ // high quality streams are not filtered out.
+
+ if (networkInfoBandwidthEstBitsPerSec >= tenMbpsAsBitsPerSecond && playerBandwidthEst >= tenMbpsAsBitsPerSecond) {
+ playerBandwidthEst = Math.max(playerBandwidthEst, networkInfoBandwidthEstBitsPerSec);
+ } else {
+ playerBandwidthEst = networkInfoBandwidthEstBitsPerSec;
+ }
+ }
+
+ return playerBandwidthEst;
+ },
+ set: function set(bandwidth) {
+ this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
+            // `count` is set to zero so that the current value of `rate` isn't included
+ // in the cumulative average
+
+ this.masterPlaylistController_.mainSegmentLoader_.throughput = {
+ rate: 0,
+ count: 0
+ };
+ }
+ },
+
+ /**
+       * `systemBandwidth` is a combination of two serial processes' bit-rates. The first
+ * is the network bitrate provided by `bandwidth` and the second is the bitrate of
+ * the entire process after that - decryption, transmuxing, and appending - provided
+ * by `throughput`.
+ *
+       * Since the two processes are serial, the overall system bandwidth is given by:
+ * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
+ */
+ systemBandwidth: {
+ get: function get() {
+ var invBandwidth = 1 / (this.bandwidth || 1);
+ var invThroughput;
+
+ if (this.throughput > 0) {
+ invThroughput = 1 / this.throughput;
+ } else {
+ invThroughput = 0;
+ }
+
+ var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
+ return systemBitrate;
+ },
+ set: function set() {
+ videojs.log.error('The "systemBandwidth" property is read-only');
+ }
+ }
+ });
+
+ if (this.options_.bandwidth) {
+ this.bandwidth = this.options_.bandwidth;
}
- /**
- * Initializes the quality levels and sets listeners to update them.
- *
- * @method setupQualityLevels_
- * @private
- */
- }, {
- key: 'setupQualityLevels_',
- value: function setupQualityLevels_() {
- var _this4 = this;
+ if (this.options_.throughput) {
+ this.throughput = this.options_.throughput;
+ }
- var player = videojs$1.players[this.tech_.options_.playerId];
-
- if (player && player.qualityLevels) {
- this.qualityLevels_ = player.qualityLevels();
- this.masterPlaylistController_.on('selectedinitialmedia', function () {
- handleHlsLoadedMetadata(_this4.qualityLevels_, _this4);
- });
- this.playlists.on('mediachange', function () {
- handleHlsMediaChange(_this4.qualityLevels_, _this4.playlists);
+ Object.defineProperties(this.stats, {
+ bandwidth: {
+ get: function get() {
+ return _this3.bandwidth || 0;
+ },
+ enumerable: true
+ },
+ mediaRequests: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaRequests_() || 0;
+ },
+ enumerable: true
+ },
+ mediaRequestsAborted: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
+ },
+ enumerable: true
+ },
+ mediaRequestsTimedout: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
+ },
+ enumerable: true
+ },
+ mediaRequestsErrored: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
+ },
+ enumerable: true
+ },
+ mediaTransferDuration: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
+ },
+ enumerable: true
+ },
+ mediaBytesTransferred: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
+ },
+ enumerable: true
+ },
+ mediaSecondsLoaded: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
+ },
+ enumerable: true
+ },
+ mediaAppends: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mediaAppends_() || 0;
+ },
+ enumerable: true
+ },
+ mainAppendsToLoadedData: {
+ get: function get() {
+ return _this3.masterPlaylistController_.mainAppendsToLoadedData_() || 0;
+ },
+ enumerable: true
+ },
+ audioAppendsToLoadedData: {
+ get: function get() {
+ return _this3.masterPlaylistController_.audioAppendsToLoadedData_() || 0;
+ },
+ enumerable: true
+ },
+ appendsToLoadedData: {
+ get: function get() {
+ return _this3.masterPlaylistController_.appendsToLoadedData_() || 0;
+ },
+ enumerable: true
+ },
+ timeToLoadedData: {
+ get: function get() {
+ return _this3.masterPlaylistController_.timeToLoadedData_() || 0;
+ },
+ enumerable: true
+ },
+ buffered: {
+ get: function get() {
+ return timeRangesToArray(_this3.tech_.buffered());
+ },
+ enumerable: true
+ },
+ currentTime: {
+ get: function get() {
+ return _this3.tech_.currentTime();
+ },
+ enumerable: true
+ },
+ currentSource: {
+ get: function get() {
+ return _this3.tech_.currentSource_;
+ },
+ enumerable: true
+ },
+ currentTech: {
+ get: function get() {
+ return _this3.tech_.name_;
+ },
+ enumerable: true
+ },
+ duration: {
+ get: function get() {
+ return _this3.tech_.duration();
+ },
+ enumerable: true
+ },
+ master: {
+ get: function get() {
+ return _this3.playlists.master;
+ },
+ enumerable: true
+ },
+ playerDimensions: {
+ get: function get() {
+ return _this3.tech_.currentDimensions();
+ },
+ enumerable: true
+ },
+ seekable: {
+ get: function get() {
+ return timeRangesToArray(_this3.tech_.seekable());
+ },
+ enumerable: true
+ },
+ timestamp: {
+ get: function get() {
+ return Date.now();
+ },
+ enumerable: true
+ },
+ videoPlaybackQuality: {
+ get: function get() {
+ return _this3.tech_.getVideoPlaybackQuality();
+ },
+ enumerable: true
+ }
+ });
+ this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
+ this.tech_.on('bandwidthupdate', function () {
+ if (_this3.options_.useBandwidthFromLocalStorage) {
+ updateVhsLocalStorage({
+ bandwidth: _this3.bandwidth,
+ throughput: Math.round(_this3.throughput)
});
}
+ });
+ this.masterPlaylistController_.on('selectedinitialmedia', function () {
+ // Add the manual rendition mix-in to VhsHandler
+ renditionSelectionMixin(_this3);
+ });
+ this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
+ _this3.setupEme_();
+ }); // the bandwidth of the primary segment loader is our best
+ // estimate of overall bandwidth
+
+ this.on(this.masterPlaylistController_, 'progress', function () {
+ this.tech_.trigger('progress');
+ }); // In the live case, we need to ignore the very first `seeking` event since
+ // that will be the result of the seek-to-live behavior
+
+ this.on(this.masterPlaylistController_, 'firstplay', function () {
+ this.ignoreNextSeekingEvent_ = true;
+ });
+ this.setupQualityLevels_(); // do nothing if the tech has been disposed already
+ // this can occur if someone sets the src in player.ready(), for instance
+
+ if (!this.tech_.el()) {
+ return;
}
- /**
- * Begin playing the video.
- */
- }, {
- key: 'play',
- value: function play() {
- this.masterPlaylistController_.play();
- }
- /**
- * a wrapper around the function in MasterPlaylistController
- */
+ this.mediaSourceUrl_ = window.URL.createObjectURL(this.masterPlaylistController_.mediaSource);
+ this.tech_.src(this.mediaSourceUrl_);
+ };
- }, {
- key: 'setCurrentTime',
- value: function setCurrentTime(currentTime) {
- this.masterPlaylistController_.setCurrentTime(currentTime);
- }
- /**
- * a wrapper around the function in MasterPlaylistController
- */
+ _proto.createKeySessions_ = function createKeySessions_() {
+ var _this4 = this;
- }, {
- key: 'duration',
- value: function duration$$1() {
- return this.masterPlaylistController_.duration();
- }
- /**
- * a wrapper around the function in MasterPlaylistController
- */
+ var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
+ this.logger_('waiting for EME key session creation');
+ waitForKeySessionCreation({
+ player: this.player_,
+ sourceKeySystems: this.source_.keySystems,
+ audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
+ mainPlaylists: this.playlists.master.playlists
+ }).then(function () {
+ _this4.logger_('created EME key session');
- }, {
- key: 'seekable',
- value: function seekable$$1() {
- return this.masterPlaylistController_.seekable();
- }
- /**
- * Abort all outstanding work and cleanup.
- */
+ _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
+ })["catch"](function (err) {
+ _this4.logger_('error while creating EME key session', err);
- }, {
- key: 'dispose',
- value: function dispose() {
- if (this.playbackWatcher_) {
- this.playbackWatcher_.dispose();
- }
-
- if (this.masterPlaylistController_) {
- this.masterPlaylistController_.dispose();
- }
-
- if (this.qualityLevels_) {
- this.qualityLevels_.dispose();
- }
-
- if (this.player_) {
- delete this.player_.vhs;
- delete this.player_.dash;
- delete this.player_.hls;
- }
-
- if (this.tech_ && this.tech_.hls) {
- delete this.tech_.hls;
- }
-
- get$1(HlsHandler.prototype.__proto__ || Object.getPrototypeOf(HlsHandler.prototype), 'dispose', this).call(this);
- }
- }, {
- key: 'convertToProgramTime',
- value: function convertToProgramTime(time, callback) {
- return getProgramTime({
- playlist: this.masterPlaylistController_.media(),
- time: time,
- callback: callback
+ _this4.player_.error({
+ message: 'Failed to initialize media keys for EME',
+ code: 3
});
- } // the player must be playing before calling this
+ });
+ };
- }, {
- key: 'seekToProgramTime',
- value: function seekToProgramTime$$1(programTime, callback) {
- var pauseAfterSeek = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : true;
- var retryCount = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 2;
- return seekToProgramTime({
- programTime: programTime,
- playlist: this.masterPlaylistController_.media(),
- retryCount: retryCount,
- pauseAfterSeek: pauseAfterSeek,
- seekTo: this.options_.seekTo,
- tech: this.options_.tech,
- callback: callback
- });
+ _proto.handleWaitingForKey_ = function handleWaitingForKey_() {
+ // If waitingforkey is fired, it's possible that the data that's necessary to retrieve
+ // the key is in the manifest. While this should've happened on initial source load, it
+ // may happen again in live streams where the keys change, and the manifest info
+ // reflects the update.
+ //
+ // Because videojs-contrib-eme compares the PSSH data we send to that of PSSH data it's
+ // already requested keys for, we don't have to worry about this generating extraneous
+ // requests.
+ this.logger_('waitingforkey fired, attempting to create any new key sessions');
+ this.createKeySessions_();
+ }
+ /**
+ * If necessary and EME is available, sets up EME options and waits for key session
+ * creation.
+ *
+   * This function also updates the source updater so that it can be used, as for some
+ * browsers, EME must be configured before content is appended (if appending unencrypted
+ * content before encrypted content).
+ */
+ ;
+
+ _proto.setupEme_ = function setupEme_() {
+ var _this5 = this;
+
+ var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
+ var didSetupEmeOptions = setupEmeOptions({
+ player: this.player_,
+ sourceKeySystems: this.source_.keySystems,
+ media: this.playlists.media(),
+ audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
+ });
+ this.player_.tech_.on('keystatuschange', function (e) {
+ if (e.status === 'output-restricted') {
+ _this5.masterPlaylistController_.blacklistCurrentPlaylist({
+ playlist: _this5.masterPlaylistController_.media(),
+ message: "DRM keystatus changed to " + e.status + ". Playlist will fail to play. Check for HDCP content.",
+ blacklistDuration: Infinity
+ });
+ }
+ });
+ this.handleWaitingForKey_ = this.handleWaitingForKey_.bind(this);
+ this.player_.tech_.on('waitingforkey', this.handleWaitingForKey_); // In IE11 this is too early to initialize media keys, and IE11 does not support
+ // promises.
+
+ if (videojs.browser.IE_VERSION === 11 || !didSetupEmeOptions) {
+ // If EME options were not set up, we've done all we could to initialize EME.
+ this.masterPlaylistController_.sourceUpdater_.initializedEme();
+ return;
}
- }]);
- return HlsHandler;
- }(Component$1);
+
+ this.createKeySessions_();
+ }
+ /**
+ * Initializes the quality levels and sets listeners to update them.
+ *
+ * @method setupQualityLevels_
+ * @private
+ */
+ ;
+
+ _proto.setupQualityLevels_ = function setupQualityLevels_() {
+ var _this6 = this;
+
+ var player = videojs.players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
+ // or qualityLevels_ listeners have already been setup, do nothing.
+
+ if (!player || !player.qualityLevels || this.qualityLevels_) {
+ return;
+ }
+
+ this.qualityLevels_ = player.qualityLevels();
+ this.masterPlaylistController_.on('selectedinitialmedia', function () {
+ handleVhsLoadedMetadata(_this6.qualityLevels_, _this6);
+ });
+ this.playlists.on('mediachange', function () {
+ handleVhsMediaChange(_this6.qualityLevels_, _this6.playlists);
+ });
+ }
+ /**
+ * return the version
+ */
+ ;
+
+ VhsHandler.version = function version$5() {
+ return {
+ '@videojs/http-streaming': version$4,
+ 'mux.js': version$3,
+ 'mpd-parser': version$2,
+ 'm3u8-parser': version$1,
+ 'aes-decrypter': version
+ };
+ }
+ /**
+ * return the version
+ */
+ ;
+
+ _proto.version = function version() {
+ return this.constructor.version();
+ };
+
+ _proto.canChangeType = function canChangeType() {
+ return SourceUpdater.canChangeType();
+ }
+ /**
+ * Begin playing the video.
+ */
+ ;
+
+ _proto.play = function play() {
+ this.masterPlaylistController_.play();
+ }
+ /**
+ * a wrapper around the function in MasterPlaylistController
+ */
+ ;
+
+ _proto.setCurrentTime = function setCurrentTime(currentTime) {
+ this.masterPlaylistController_.setCurrentTime(currentTime);
+ }
+ /**
+ * a wrapper around the function in MasterPlaylistController
+ */
+ ;
+
+ _proto.duration = function duration() {
+ return this.masterPlaylistController_.duration();
+ }
+ /**
+ * a wrapper around the function in MasterPlaylistController
+ */
+ ;
+
+ _proto.seekable = function seekable() {
+ return this.masterPlaylistController_.seekable();
+ }
+ /**
+ * Abort all outstanding work and cleanup.
+ */
+ ;
+
+ _proto.dispose = function dispose() {
+ if (this.playbackWatcher_) {
+ this.playbackWatcher_.dispose();
+ }
+
+ if (this.masterPlaylistController_) {
+ this.masterPlaylistController_.dispose();
+ }
+
+ if (this.qualityLevels_) {
+ this.qualityLevels_.dispose();
+ }
+
+ if (this.player_) {
+ delete this.player_.vhs;
+ delete this.player_.dash;
+ delete this.player_.hls;
+ }
+
+ if (this.tech_ && this.tech_.vhs) {
+ delete this.tech_.vhs;
+ } // don't check this.tech_.hls as it will log a deprecated warning
+
+
+ if (this.tech_) {
+ delete this.tech_.hls;
+ }
+
+ if (this.mediaSourceUrl_ && window.URL.revokeObjectURL) {
+ window.URL.revokeObjectURL(this.mediaSourceUrl_);
+ this.mediaSourceUrl_ = null;
+ }
+
+ if (this.tech_) {
+ this.tech_.off('waitingforkey', this.handleWaitingForKey_);
+ }
+
+ _Component.prototype.dispose.call(this);
+ };
+
+ _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
+ return getProgramTime({
+ playlist: this.masterPlaylistController_.media(),
+ time: time,
+ callback: callback
+ });
+ } // the player must be playing before calling this
+ ;
+
+ _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
+ if (pauseAfterSeek === void 0) {
+ pauseAfterSeek = true;
+ }
+
+ if (retryCount === void 0) {
+ retryCount = 2;
+ }
+
+ return seekToProgramTime({
+ programTime: programTime,
+ playlist: this.masterPlaylistController_.media(),
+ retryCount: retryCount,
+ pauseAfterSeek: pauseAfterSeek,
+ seekTo: this.options_.seekTo,
+ tech: this.options_.tech,
+ callback: callback
+ });
+ };
+
+ return VhsHandler;
+ }(Component);
/**
* The Source Handler object, which informs video.js what additional
* MIME types are supported and sets up playback. It is registered
@@ -55631,62 +66632,113 @@
*/
- var HlsSourceHandler = {
+ var VhsSourceHandler = {
name: 'videojs-http-streaming',
- VERSION: version$1,
- canHandleSource: function canHandleSource(srcObj) {
- var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
- var localOptions = videojs$1.mergeOptions(videojs$1.options, options);
- return HlsSourceHandler.canPlayType(srcObj.type, localOptions);
- },
- handleSource: function handleSource(source, tech) {
- var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
- var localOptions = videojs$1.mergeOptions(videojs$1.options, options);
- tech.hls = new HlsHandler(source, tech, localOptions);
- tech.hls.xhr = xhrFactory();
- tech.hls.src(source.src, source.type);
- return tech.hls;
- },
- canPlayType: function canPlayType(type) {
- var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+ VERSION: version$4,
+ canHandleSource: function canHandleSource(srcObj, options) {
+ if (options === void 0) {
+ options = {};
+ }
- var _videojs$mergeOptions = videojs$1.mergeOptions(videojs$1.options, options),
- overrideNative = _videojs$mergeOptions.hls.overrideNative;
+ var localOptions = videojs.mergeOptions(videojs.options, options);
+ return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
+ },
+ handleSource: function handleSource(source, tech, options) {
+ if (options === void 0) {
+ options = {};
+ }
+ var localOptions = videojs.mergeOptions(videojs.options, options);
+ tech.vhs = new VhsHandler(source, tech, localOptions);
+
+ if (!videojs.hasOwnProperty('hls')) {
+ Object.defineProperty(tech, 'hls', {
+ get: function get() {
+ videojs.log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
+ return tech.vhs;
+ },
+ configurable: true
+ });
+ }
+
+ tech.vhs.xhr = xhrFactory();
+ tech.vhs.src(source.src, source.type);
+ return tech.vhs;
+ },
+ canPlayType: function canPlayType(type, options) {
+ if (options === void 0) {
+ options = {};
+ }
+
+ var _videojs$mergeOptions = videojs.mergeOptions(videojs.options, options),
+ _videojs$mergeOptions2 = _videojs$mergeOptions.vhs;
+
+ _videojs$mergeOptions2 = _videojs$mergeOptions2 === void 0 ? {} : _videojs$mergeOptions2;
+ var _videojs$mergeOptions3 = _videojs$mergeOptions2.overrideNative,
+ overrideNative = _videojs$mergeOptions3 === void 0 ? !videojs.browser.IS_ANY_SAFARI : _videojs$mergeOptions3,
+ _videojs$mergeOptions4 = _videojs$mergeOptions.hls;
+ _videojs$mergeOptions4 = _videojs$mergeOptions4 === void 0 ? {} : _videojs$mergeOptions4;
+ var _videojs$mergeOptions5 = _videojs$mergeOptions4.overrideNative,
+ legacyOverrideNative = _videojs$mergeOptions5 === void 0 ? false : _videojs$mergeOptions5;
var supportedType = simpleTypeFromSourceType(type);
- var canUseMsePlayback = supportedType && (!Hls$1.supportsTypeNatively(supportedType) || overrideNative);
+ var canUseMsePlayback = supportedType && (!Vhs.supportsTypeNatively(supportedType) || legacyOverrideNative || overrideNative);
return canUseMsePlayback ? 'maybe' : '';
}
};
+ /**
+ * Check to see if the native MediaSource object exists and supports
+ * an MP4 container with both H.264 video and AAC-LC audio.
+ *
+ * @return {boolean} if native media sources are supported
+ */
- if (typeof videojs$1.MediaSource === 'undefined' || typeof videojs$1.URL === 'undefined') {
- videojs$1.MediaSource = MediaSource;
- videojs$1.URL = URL$1;
- } // register source handlers with the appropriate techs
+ var supportsNativeMediaSources = function supportsNativeMediaSources() {
+ return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
+ }; // register source handlers with the appropriate techs
- if (MediaSource.supportsNativeMediaSources()) {
- videojs$1.getTech('Html5').registerSourceHandler(HlsSourceHandler, 0);
+ if (supportsNativeMediaSources()) {
+ videojs.getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
}
- videojs$1.HlsHandler = HlsHandler;
- videojs$1.HlsSourceHandler = HlsSourceHandler;
- videojs$1.Hls = Hls$1;
+ videojs.VhsHandler = VhsHandler;
+ Object.defineProperty(videojs, 'HlsHandler', {
+ get: function get() {
+ videojs.log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
+ return VhsHandler;
+ },
+ configurable: true
+ });
+ videojs.VhsSourceHandler = VhsSourceHandler;
+ Object.defineProperty(videojs, 'HlsSourceHandler', {
+ get: function get() {
+ videojs.log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
+ return VhsSourceHandler;
+ },
+ configurable: true
+ });
+ videojs.Vhs = Vhs;
+ Object.defineProperty(videojs, 'Hls', {
+ get: function get() {
+ videojs.log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
+ return Vhs;
+ },
+ configurable: true
+ });
- if (!videojs$1.use) {
- videojs$1.registerComponent('Hls', Hls$1);
+ if (!videojs.use) {
+ videojs.registerComponent('Hls', Vhs);
+ videojs.registerComponent('Vhs', Vhs);
}
- videojs$1.options.hls = videojs$1.options.hls || {};
+ videojs.options.vhs = videojs.options.vhs || {};
+ videojs.options.hls = videojs.options.hls || {};
- if (videojs$1.registerPlugin) {
- videojs$1.registerPlugin('reloadSourceOnError', reloadSourceOnError);
- } else {
- videojs$1.plugin('reloadSourceOnError', reloadSourceOnError);
+ if (!videojs.getPlugin || !videojs.getPlugin('reloadSourceOnError')) {
+ var registerPlugin = videojs.registerPlugin || videojs.plugin;
+ registerPlugin('reloadSourceOnError', reloadSourceOnError);
}
- return videojs$1;
+ return videojs;
-}));
-
-!function(){!function(a){var b=a&&a.videojs;b&&(b.CDN_VERSION="7.7.5")}(window)}();
\ No newline at end of file
+})));
diff --git a/frontend/src/static/lib/video-js/7.20.2/video.min.js b/frontend/src/static/lib/video-js/7.20.2/video.min.js
new file mode 100644
index 0000000..3bc604f
--- /dev/null
+++ b/frontend/src/static/lib/video-js/7.20.2/video.min.js
@@ -0,0 +1,25 @@
+/**
+ * @license
+ * Video.js 7.20.2
+ * Copyright Brightcove, Inc.
+ * Available under Apache License Version 2.0
+ *
+ *
+ * Includes vtt.js
+ * Available under Apache License Version 2.0
+ *
+ */
+!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).videojs=t()}(this,function(){"use strict";for(var e,u="7.20.2",i={},a=function(e,t){return i[e]=i[e]||[],t&&(i[e]=i[e].concat(t)),i[e]},n=function(e,t){t=a(e).indexOf(t);return!(t<=-1)&&(i[e]=i[e].slice(),i[e].splice(t,1),!0)},l={prefixed:!0},t=[["requestFullscreen","exitFullscreen","fullscreenElement","fullscreenEnabled","fullscreenchange","fullscreenerror","fullscreen"],["webkitRequestFullscreen","webkitExitFullscreen","webkitFullscreenElement","webkitFullscreenEnabled","webkitfullscreenchange","webkitfullscreenerror","-webkit-full-screen"],["mozRequestFullScreen","mozCancelFullScreen","mozFullScreenElement","mozFullScreenEnabled","mozfullscreenchange","mozfullscreenerror","-moz-full-screen"],["msRequestFullscreen","msExitFullscreen","msFullscreenElement","msFullscreenEnabled","MSFullscreenChange","MSFullscreenError","-ms-fullscreen"]],r=t[0],s=0;s
+ * Copyright (c) 2014 David Björklund
+ * Available under the MIT license
+ *
+ */
+var $t=function(e){var n={};return e&&e.trim().split("\n").forEach(function(e){var t=e.indexOf(":"),i=e.slice(0,t).trim().toLowerCase(),t=e.slice(t+1).trim();"undefined"==typeof n[i]?n[i]=t:Array.isArray(n[i])?n[i].push(t):n[i]=[n[i],t]}),n},Jt=ei,I=ei;function Zt(e,t,i){var n=e;return Yt(t)?(i=t,"string"==typeof e&&(n={uri:e})):n=g({},t,{uri:e}),n.callback=i,n}function ei(e,t,i){return ti(t=Zt(e,t,i))}function ti(n){if("undefined"==typeof n.callback)throw new Error("callback argument missing");var r=!1,a=function(e,t,i){r||(r=!0,n.callback(e,t,i))};function s(){var e=void 0,e=l.response||l.responseText||function(e){try{if("document"===e.responseType)return e.responseXML;var t=e.responseXML&&"parsererror"===e.responseXML.documentElement.nodeName;if(""===e.responseType&&!t)return e.responseXML}catch(e){}return null}(l);if(m)try{e=JSON.parse(e)}catch(e){}return e}function t(e){return clearTimeout(u),(e=!(e instanceof Error)?new Error(""+(e||"Unknown XMLHttpRequest Error")):e).statusCode=0,a(e,g)}function e(){if(!o){clearTimeout(u);var e=n.useXDR&&void 0===l.status?200:1223===l.status?204:l.status,t=g,i=null;return 0!==e?(t={body:s(),statusCode:e,method:d,headers:{},url:c,rawRequest:l},l.getAllResponseHeaders&&(t.headers=$t(l.getAllResponseHeaders()))):i=new Error("Internal XMLHttpRequest Error"),a(i,t,t.body)}}var i,o,u,l=n.xhr||null,c=(l=l||new(n.cors||n.useXDR?ei.XDomainRequest:ei.XMLHttpRequest)).url=n.uri||n.url,d=l.method=n.method||"GET",h=n.body||n.data,p=l.headers=n.headers||{},f=!!n.sync,m=!1,g={body:void 0,headers:{},statusCode:0,method:d,url:c,rawRequest:l};if("json"in n&&!1!==n.json&&(m=!0,p.accept||p.Accept||(p.Accept="application/json"),"GET"!==d&&"HEAD"!==d&&(p["content-type"]||p["Content-Type"]||(p["Content-Type"]="application/json"),h=JSON.stringify(!0===n.json?h:n.json))),l.onreadystatechange=function(){4===l.readyState&&setTimeout(e,0)},l.onload=e,l.onerror=t,l.onprogress=function(){},l.onabort=function(){o=!0},l.ontimeout=t,l.open(d,c,!f,n.username,n.password),f||(l.withCredentials=!!n.withCredentials),!f&&0=e||r.startTime===r.endTime&&r.startTime<=e&&r.startTime+.5>=e)&&t.push(r)}if(o=!1,t.length!==this.activeCues_.length)o=!0;else for(var a=0;a]*>?)?/);return e=e[1]||e[2],t=t.substr(e.length),e}());)"<"!==i[0]?p.appendChild(e.document.createTextNode((s=i,yi.innerHTML=s,s=yi.textContent,yi.textContent="",s))):"/"!==i[1]?(a=pi(i.substr(1,i.length-2)))?(n=e.document.createProcessingInstruction("timestamp",a),p.appendChild(n)):(r=i.match(/^<([^.\s/0-9>]+)(\.[^\s\\>]+)?([^>\\]+)?(\\?)>?$/))&&(l=r[1],c=r[3],d=void 0,d=vi[l],(n=d?(d=e.document.createElement(d),(l=bi[l])&&c&&(d[l]=c.trim()),d):null)&&(o=p,Ti[(u=n).localName]&&Ti[u.localName]!==o.localName||(r[2]&&((a=r[2].split(".")).forEach(function(e){var t=/^bg_/.test(e),e=t?e.slice(3):e;_i.hasOwnProperty(e)&&(e=_i[e],n.style[t?"background-color":"color"]=e)}),n.className=a.join(" ")),f.push(r[1]),p.appendChild(n),p=n))):f.length&&f[f.length-1]===i.substr(2).replace(">","")&&(f.pop(),p=p.parentNode);return h}var 
wi=[[1470,1470],[1472,1472],[1475,1475],[1478,1478],[1488,1514],[1520,1524],[1544,1544],[1547,1547],[1549,1549],[1563,1563],[1566,1610],[1645,1647],[1649,1749],[1765,1766],[1774,1775],[1786,1805],[1807,1808],[1810,1839],[1869,1957],[1969,1969],[1984,2026],[2036,2037],[2042,2042],[2048,2069],[2074,2074],[2084,2084],[2088,2088],[2096,2110],[2112,2136],[2142,2142],[2208,2208],[2210,2220],[8207,8207],[64285,64285],[64287,64296],[64298,64310],[64312,64316],[64318,64318],[64320,64321],[64323,64324],[64326,64449],[64467,64829],[64848,64911],[64914,64967],[65008,65020],[65136,65140],[65142,65276],[67584,67589],[67592,67592],[67594,67637],[67639,67640],[67644,67644],[67647,67669],[67671,67679],[67840,67867],[67872,67897],[67903,67903],[67968,68023],[68030,68031],[68096,68096],[68112,68115],[68117,68119],[68121,68147],[68160,68167],[68176,68184],[68192,68223],[68352,68405],[68416,68437],[68440,68466],[68472,68479],[68608,68680],[126464,126467],[126469,126495],[126497,126498],[126500,126500],[126503,126503],[126505,126514],[126516,126519],[126521,126521],[126523,126523],[126530,126530],[126535,126535],[126537,126537],[126539,126539],[126541,126543],[126545,126546],[126548,126548],[126551,126551],[126553,126553],[126555,126555],[126557,126557],[126559,126559],[126561,126562],[126564,126564],[126567,126570],[126572,126578],[126580,126583],[126585,126588],[126590,126590],[126592,126601],[126603,126619],[126625,126627],[126629,126633],[126635,126651],[1114109,1114109]];function Ei(e){var t=[],i="";if(!e||!e.childNodes)return"ltr";function a(e,t){for(var i=t.childNodes.length-1;0<=i;i--)e.push(t.childNodes[i])}for(a(t,e);i=function e(t){if(!t||!t.length)return null;var i=t.pop(),n=i.textContent||i.innerText;if(n){var r=n.match(/^.*(\n|\r)/);return r?r[t.length=0]:n}return"ruby"===i.tagName?e(t):i.childNodes?(a(t,i),e(t)):void 0}(t);)for(var n=0;n=i[0]&&e<=i[1])return 1}}(i.charCodeAt(n)))return"rtl";return"ltr"}function ki(){}function Ci(e,t,i){ki.call(this),this.cue=t,this.cueDiv=Si(e,t.text);var n={color:"rgba(255, 255, 255, 1)",backgroundColor:"rgba(0, 0, 0, 0.8)",position:"relative",left:0,right:0,top:0,bottom:0,display:"inline",writingMode:""===t.vertical?"horizontal-tb":"lr"===t.vertical?"vertical-lr":"vertical-rl",unicodeBidi:"plaintext"};this.applyStyles(n,this.cueDiv),this.div=e.document.createElement("div"),n={direction:Ei(this.cueDiv),writingMode:""===t.vertical?"horizontal-tb":"lr"===t.vertical?"vertical-lr":"vertical-rl",unicodeBidi:"plaintext",textAlign:"middle"===t.align?"center":t.align,font:i.font,whiteSpace:"pre-line",position:"absolute"},this.applyStyles(n),this.div.appendChild(this.cueDiv);var r=0;switch(t.positionAlign){case"start":r=t.position;break;case"center":r=t.position-t.size/2;break;case"end":r=t.position-t.size}""===t.vertical?this.applyStyles({left:this.formatStyle(r,"%"),width:this.formatStyle(t.size,"%")}):this.applyStyles({top:this.formatStyle(r,"%"),height:this.formatStyle(t.size,"%")}),this.move=function(e){this.applyStyles({top:this.formatStyle(e.top,"px"),bottom:this.formatStyle(e.bottom,"px"),left:this.formatStyle(e.left,"px"),right:this.formatStyle(e.right,"px"),height:this.formatStyle(e.height,"px"),width:this.formatStyle(e.width,"px")})}}function Ii(e){var 
t,i,n,r;e.div&&(t=e.div.offsetHeight,i=e.div.offsetWidth,n=e.div.offsetTop,r=(r=e.div.childNodes)&&(r=r[0])&&r.getClientRects&&r.getClientRects(),e=e.div.getBoundingClientRect(),r=r?Math.max(r[0]&&r[0].height||0,e.height/r.length):0),this.left=e.left,this.right=e.right,this.top=e.top||n,this.height=e.height||t,this.bottom=e.bottom||n+(e.height||t),this.width=e.width||i,this.lineHeight=void 0!==r?r:e.lineHeight}function xi(e,t,o,u){var i,n=new Ii(t),r=t.cue,a=function(e){if("number"==typeof e.line&&(e.snapToLines||0<=e.line&&e.line<=100))return e.line;if(!e.track||!e.track.textTrackList||!e.track.textTrackList.mediaElement)return-1;for(var t=e.track,i=t.textTrackList,n=0,r=0;rd&&(c=c<0?-1:1,c*=Math.ceil(d/l)*l),a<0&&(c+=""===r.vertical?o.height:o.width,s=s.reverse()),n.move(h,c)}else{var p=n.lineHeight/o.height*100;switch(r.lineAlign){case"center":a-=p/2;break;case"end":a-=p}switch(r.vertical){case"":t.applyStyles({top:t.formatStyle(a,"%")});break;case"rl":t.applyStyles({left:t.formatStyle(a,"%")});break;case"lr":t.applyStyles({right:t.formatStyle(a,"%")})}s=["+y","-x","+x","-y"],n=new Ii(t)}n=function(e,t){for(var i,n=new Ii(e),r=1,a=0;ae.left&&this.tope.top},Ii.prototype.overlapsAny=function(e){for(var t=0;t=e.top&&this.bottom<=e.bottom&&this.left>=e.left&&this.right<=e.right},Ii.prototype.overlapsOppositeAxis=function(e,t){switch(t){case"+x":return this.lefte.right;case"+y":return this.tope.bottom}},Ii.prototype.intersectPercentage=function(e){return Math.max(0,Math.min(this.right,e.right)-Math.max(this.left,e.left))*Math.max(0,Math.min(this.bottom,e.bottom)-Math.max(this.top,e.top))/(this.height*this.width)},Ii.prototype.toCSSCompatValues=function(e){return{top:this.top-e.top,bottom:e.bottom-this.bottom,left:this.left-e.left,right:e.right-this.right,height:this.height,width:this.width}},Ii.getSimpleBoxPosition=function(e){var t=e.div?e.div.offsetHeight:e.tagName?e.offsetHeight:0,i=e.div?e.div.offsetWidth:e.tagName?e.offsetWidth:0,n=e.div?e.div.offsetTop:e.tagName?e.offsetTop:0;return{left:(e=e.div?e.div.getBoundingClientRect():e.tagName?e.getBoundingClientRect():e).left,right:e.right,top:e.top||n,height:e.height||t,bottom:e.bottom||n+(e.height||t),width:e.width||i}},Ai.StringDecoder=function(){return{decode:function(e){if(!e)return"";if("string"!=typeof e)throw new Error("Error - expected string data.");return decodeURIComponent(encodeURIComponent(e))}}},Ai.convertCueToDOMTree=function(e,t){return e&&t?Si(e,t):null};Ai.processCues=function(n,r,e){if(!n||!r||!e)return null;for(;e.firstChild;)e.removeChild(e.firstChild);var a=n.document.createElement("div");if(a.style.position="absolute",a.style.left="0",a.style.right="0",a.style.top="0",a.style.bottom="0",a.style.margin="1.5%",e.appendChild(a),function(e){for(var t=0;tt.length;u--)l.el_.removeChild(n[u-1]);n.length=t.length})},e}(pt)),pt.registerComponent("TimeTooltip",function(i){function e(e,t){t=i.call(this,e,t)||this;return t.update=We(Ve(ft(t),t.update),30),t}mt(e,i);var t=e.prototype;return t.createEl=function(){return i.prototype.createEl.call(this,"div",{className:"vjs-time-tooltip"},{"aria-hidden":"true"})},t.update=function(e,t,i){var n=he(this.el_),r=de(this.player_.el()),a=e.width*t;r&&n&&(t=e.left-r.left+a,r=e.width-a+(r.right-e.right),t<(e=n.width/2)?e+=e-t:rn.width&&(e=n.width),e=Math.round(e),this.el_.style.right="-"+e+"px",this.write(i))},t.write=function(e){J(this.el_,e)},t.updateTime=function(n,r,a,s){var o=this;this.requestNamedAnimationFrame("TimeTooltip#updateTime",function(){var 
e,t,i=o.player_.duration();i=o.player_.liveTracker&&o.player_.liveTracker.isLive()?((t=(e=o.player_.liveTracker.liveWindow())-r*e)<1?"":"-")+ln(t,e):ln(a,i),o.update(n,r,i),s&&s()})},e}(pt));Xt=function(i){function e(e,t){t=i.call(this,e,t)||this;return t.update=We(Ve(ft(t),t.update),30),t}mt(e,i);var t=e.prototype;return t.createEl=function(){return i.prototype.createEl.call(this,"div",{className:"vjs-play-progress vjs-slider-bar"},{"aria-hidden":"true"})},t.update=function(e,t){var i,n=this.getChild("timeTooltip");n&&(i=this.player_.scrubbing()?this.player_.getCache().currentTime:this.player_.currentTime(),n.updateTime(e,t,i))},e}(pt);Xt.prototype.options_={children:[]},q||A||Xt.prototype.options_.children.push("timeTooltip"),pt.registerComponent("PlayProgressBar",Xt);I=function(i){function e(e,t){t=i.call(this,e,t)||this;return t.update=We(Ve(ft(t),t.update),30),t}mt(e,i);var t=e.prototype;return t.createEl=function(){return i.prototype.createEl.call(this,"div",{className:"vjs-mouse-display"})},t.update=function(e,t){var i=this,n=t*this.player_.duration();this.getChild("timeTooltip").updateTime(e,t,n,function(){i.el_.style.left=e.width*t+"px"})},e}(pt);I.prototype.options_={children:["timeTooltip"]},pt.registerComponent("MouseTimeDisplay",I);Bt=function(a){function e(e,t){t=a.call(this,e,t)||this;return t.setEventHandlers_(),t}mt(e,a);var t=e.prototype;return t.setEventHandlers_=function(){var t=this;this.update_=Ve(this,this.update),this.update=We(this.update_,30),this.on(this.player_,["ended","durationchange","timeupdate"],this.update),this.player_.liveTracker&&this.on(this.player_.liveTracker,"liveedgechange",this.update),this.updateInterval=null,this.enableIntervalHandler_=function(e){return t.enableInterval_(e)},this.disableIntervalHandler_=function(e){return t.disableInterval_(e)},this.on(this.player_,["playing"],this.enableIntervalHandler_),this.on(this.player_,["ended","pause","waiting"],this.disableIntervalHandler_),"hidden"in document&&"visibilityState"in document&&this.on(document,"visibilitychange",this.toggleVisibility_)},t.toggleVisibility_=function(e){"hidden"===document.visibilityState?(this.cancelNamedAnimationFrame("SeekBar#update"),this.cancelNamedAnimationFrame("Slider#update"),this.disableInterval_(e)):(this.player_.ended()||this.player_.paused()||this.enableInterval_(),this.update())},t.enableInterval_=function(){this.updateInterval||(this.updateInterval=this.setInterval(this.update,30))},t.disableInterval_=function(e){this.player_.liveTracker&&this.player_.liveTracker.isLive()&&e&&"ended"!==e.type||this.updateInterval&&(this.clearInterval(this.updateInterval),this.updateInterval=null)},t.createEl=function(){return a.prototype.createEl.call(this,"div",{className:"vjs-progress-holder"},{"aria-label":this.localize("Progress Bar")})},t.update=function(e){var n=this;if("hidden"!==document.visibilityState){var r=a.prototype.update.call(this);return this.requestNamedAnimationFrame("SeekBar#update",function(){var e=n.player_.ended()?n.player_.duration():n.getCurrentTime_(),t=n.player_.liveTracker,i=n.player_.duration();t&&t.isLive()&&(i=n.player_.liveTracker.liveCurrentTime()),n.percent_!==r&&(n.el_.setAttribute("aria-valuenow",(100*r).toFixed(2)),n.percent_=r),n.currentTime_===e&&n.duration_===i||(n.el_.setAttribute("aria-valuetext",n.localize("progress bar timing: currentTime={1} duration={2}",[ln(e,i),ln(i,i)],"{1} of 
{2}")),n.currentTime_=e,n.duration_=i),n.bar&&n.bar.update(de(n.el()),n.getProgress())}),r}},t.userSeek_=function(e){this.player_.liveTracker&&this.player_.liveTracker.isLive()&&this.player_.liveTracker.nextSeekedFromUser(),this.player_.currentTime(e)},t.getCurrentTime_=function(){return this.player_.scrubbing()?this.player_.getCache().currentTime:this.player_.currentTime()},t.getPercent=function(){var e,t=this.getCurrentTime_(),i=this.player_.liveTracker;return i&&i.isLive()?(e=(t-i.seekableStart())/i.liveWindow(),i.atLiveEdge()&&(e=1)):e=t/this.player_.duration(),e},t.handleMouseDown=function(e){_e(e)&&(e.stopPropagation(),this.videoWasPlaying=!this.player_.paused(),this.player_.pause(),a.prototype.handleMouseDown.call(this,e))},t.handleMouseMove=function(e,t){if(void 0===t&&(t=!1),_e(e)){t||this.player_.scrubbing()||this.player_.scrubbing(!0);var i=this.calculateDistance(e),n=this.player_.liveTracker;if(n&&n.isLive()){if(.99<=i)return void n.seekToLiveEdge();var r,t=n.seekableStart(),e=n.liveCurrentTime();if((r=(r=e<=(r=t+i*n.liveWindow())?e:r)<=t?t+.1:r)===1/0)return}else(r=i*this.player_.duration())===this.player_.duration()&&(r-=.1);this.userSeek_(r)}},t.enable=function(){a.prototype.enable.call(this);var e=this.getChild("mouseTimeDisplay");e&&e.show()},t.disable=function(){a.prototype.disable.call(this);var e=this.getChild("mouseTimeDisplay");e&&e.hide()},t.handleMouseUp=function(e){a.prototype.handleMouseUp.call(this,e),e&&e.stopPropagation(),this.player_.scrubbing(!1),this.player_.trigger({type:"timeupdate",target:this,manuallyTriggered:!0}),this.videoWasPlaying?Et(this.player_.play()):this.update_()},t.stepForward=function(){this.userSeek_(this.player_.currentTime()+5)},t.stepBack=function(){this.userSeek_(this.player_.currentTime()-5)},t.handleAction=function(e){this.player_.paused()?this.player_.play():this.player_.pause()},t.handleKeyDown=function(e){var t,i=this.player_.liveTracker;ht.isEventKey(e,"Space")||ht.isEventKey(e,"Enter")?(e.preventDefault(),e.stopPropagation(),this.handleAction(e)):ht.isEventKey(e,"Home")?(e.preventDefault(),e.stopPropagation(),this.userSeek_(0)):ht.isEventKey(e,"End")?(e.preventDefault(),e.stopPropagation(),i&&i.isLive()?this.userSeek_(i.liveCurrentTime()):this.userSeek_(this.player_.duration())):/^[0-9]$/.test(ht(e))?(e.preventDefault(),e.stopPropagation(),t=10*(ht.codes[ht(e)]-ht.codes[0])/100,i&&i.isLive()?this.userSeek_(i.seekableStart()+i.liveWindow()*t):this.userSeek_(this.player_.duration()*t)):ht.isEventKey(e,"PgDn")?(e.preventDefault(),e.stopPropagation(),this.userSeek_(this.player_.currentTime()-60)):ht.isEventKey(e,"PgUp")?(e.preventDefault(),e.stopPropagation(),this.userSeek_(this.player_.currentTime()+60)):a.prototype.handleKeyDown.call(this,e)},t.dispose=function(){this.disableInterval_(),this.off(this.player_,["ended","durationchange","timeupdate"],this.update),this.player_.liveTracker&&this.off(this.player_.liveTracker,"liveedgechange",this.update),this.off(this.player_,["playing"],this.enableIntervalHandler_),this.off(this.player_,["ended","pause","waiting"],this.disableIntervalHandler_),"hidden"in document&&"visibilityState"in document&&this.off(document,"visibilitychange",this.toggleVisibility_),a.prototype.dispose.call(this)},e}(li);Bt.prototype.options_={children:["loadProgressBar","playProgressBar"],barName:"playProgressBar"},q||A||Bt.prototype.options_.children.splice(1,0,"mouseTimeDisplay"),pt.registerComponent("SeekBar",Bt);Ft=function(n){function e(e,t){var i=n.call(this,e,t)||this;return 
i.handleMouseMove=We(Ve(ft(i),i.handleMouseMove),30),i.throttledHandleMouseSeek=We(Ve(ft(i),i.handleMouseSeek),30),i.handleMouseUpHandler_=function(e){return i.handleMouseUp(e)},i.handleMouseDownHandler_=function(e){return i.handleMouseDown(e)},i.enable(),i}mt(e,n);var t=e.prototype;return t.createEl=function(){return n.prototype.createEl.call(this,"div",{className:"vjs-progress-control vjs-control"})},t.handleMouseMove=function(e){var t,i,n,r,a=this.getChild("seekBar");a&&(t=a.getChild("playProgressBar"),i=a.getChild("mouseTimeDisplay"),(t||i)&&(r=he(n=a.el()),e=pe(n,e).x,e=cn(e,0,1),i&&i.update(r,e),t&&t.update(r,a.getProgress())))},t.handleMouseSeek=function(e){var t=this.getChild("seekBar");t&&t.handleMouseMove(e)},t.enabled=function(){return this.enabled_},t.disable=function(){var e;this.children().forEach(function(e){return e.disable&&e.disable()}),this.enabled()&&(this.off(["mousedown","touchstart"],this.handleMouseDownHandler_),this.off(this.el_,"mousemove",this.handleMouseMove),this.removeListenersAddedOnMousedownAndTouchstart(),this.addClass("disabled"),this.enabled_=!1,this.player_.scrubbing()&&(e=this.getChild("seekBar"),this.player_.scrubbing(!1),e.videoWasPlaying&&Et(this.player_.play())))},t.enable=function(){this.children().forEach(function(e){return e.enable&&e.enable()}),this.enabled()||(this.on(["mousedown","touchstart"],this.handleMouseDownHandler_),this.on(this.el_,"mousemove",this.handleMouseMove),this.removeClass("disabled"),this.enabled_=!0)},t.removeListenersAddedOnMousedownAndTouchstart=function(){var e=this.el_.ownerDocument;this.off(e,"mousemove",this.throttledHandleMouseSeek),this.off(e,"touchmove",this.throttledHandleMouseSeek),this.off(e,"mouseup",this.handleMouseUpHandler_),this.off(e,"touchend",this.handleMouseUpHandler_)},t.handleMouseDown=function(e){var t=this.el_.ownerDocument,i=this.getChild("seekBar");i&&i.handleMouseDown(e),this.on(t,"mousemove",this.throttledHandleMouseSeek),this.on(t,"touchmove",this.throttledHandleMouseSeek),this.on(t,"mouseup",this.handleMouseUpHandler_),this.on(t,"touchend",this.handleMouseUpHandler_)},t.handleMouseUp=function(e){var t=this.getChild("seekBar");t&&t.handleMouseUp(e),this.removeListenersAddedOnMousedownAndTouchstart()},e}(pt);Ft.prototype.options_={children:["seekBar"]},pt.registerComponent("ProgressControl",Ft);jt=function(n){function e(e,t){var i=n.call(this,e,t)||this;return i.on(e,["enterpictureinpicture","leavepictureinpicture"],function(e){return i.handlePictureInPictureChange(e)}),i.on(e,["disablepictureinpicturechanged","loadedmetadata"],function(e){return i.handlePictureInPictureEnabledChange(e)}),i.on(e,["loadedmetadata","audioonlymodechange","audiopostermodechange"],function(){"audio"===e.currentType().substring(0,5)||e.audioPosterMode()||e.audioOnlyMode()?(e.isInPictureInPicture()&&e.exitPictureInPicture(),i.hide()):i.show()}),i.disable(),i}mt(e,n);var t=e.prototype;return t.buildCSSClass=function(){return"vjs-picture-in-picture-control "+n.prototype.buildCSSClass.call(this)},t.handlePictureInPictureEnabledChange=function(){document.pictureInPictureEnabled&&!1===this.player_.disablePictureInPicture()?this.enable():this.disable()},t.handlePictureInPictureChange=function(e){this.player_.isInPictureInPicture()?this.controlText("Exit 
Picture-in-Picture"):this.controlText("Picture-in-Picture"),this.handlePictureInPictureEnabledChange()},t.handleClick=function(e){this.player_.isInPictureInPicture()?this.player_.exitPictureInPicture():this.player_.requestPictureInPicture()},e}(sn);jt.prototype.controlText_="Picture-in-Picture",pt.registerComponent("PictureInPictureToggle",jt);j=function(n){function e(e,t){var i=n.call(this,e,t)||this;return i.on(e,"fullscreenchange",function(e){return i.handleFullscreenChange(e)}),!1===document[e.fsApi_.fullscreenEnabled]&&i.disable(),i}mt(e,n);var t=e.prototype;return t.buildCSSClass=function(){return"vjs-fullscreen-control "+n.prototype.buildCSSClass.call(this)},t.handleFullscreenChange=function(e){this.player_.isFullscreen()?this.controlText("Non-Fullscreen"):this.controlText("Fullscreen")},t.handleClick=function(e){this.player_.isFullscreen()?this.player_.exitFullscreen():this.player_.requestFullscreen()},e}(sn);j.prototype.controlText_="Fullscreen",pt.registerComponent("FullscreenToggle",j);pt.registerComponent("VolumeLevel",function(t){function e(){return t.apply(this,arguments)||this}return mt(e,t),e.prototype.createEl=function(){var e=t.prototype.createEl.call(this,"div",{className:"vjs-volume-level"});return e.appendChild(t.prototype.createEl.call(this,"span",{className:"vjs-control-text"})),e},e}(pt)),pt.registerComponent("VolumeLevelTooltip",function(i){function e(e,t){t=i.call(this,e,t)||this;return t.update=We(Ve(ft(t),t.update),30),t}mt(e,i);var t=e.prototype;return t.createEl=function(){return i.prototype.createEl.call(this,"div",{className:"vjs-volume-tooltip"},{"aria-hidden":"true"})},t.update=function(e,t,i,n){if(!i){var r=de(this.el_),a=de(this.player_.el()),i=e.width*t;if(!a||!r)return;t=e.left-a.left+i,a=e.width-i+(a.right-e.right),e=r.width/2;tr.width&&(e=r.width),this.el_.style.right="-"+e+"px"}this.write(n+"%")},t.write=function(e){J(this.el_,e)},t.updateVolume=function(e,t,i,n,r){var a=this;this.requestNamedAnimationFrame("VolumeLevelTooltip#updateVolume",function(){a.update(e,t,i,n.toFixed(0)),r&&r()})},e}(pt));k=function(i){function e(e,t){t=i.call(this,e,t)||this;return t.update=We(Ve(ft(t),t.update),30),t}mt(e,i);var t=e.prototype;return t.createEl=function(){return i.prototype.createEl.call(this,"div",{className:"vjs-mouse-display"})},t.update=function(e,t,i){var n=this,r=100*t;this.getChild("volumeLevelTooltip").updateVolume(e,t,i,r,function(){i?n.el_.style.bottom=e.height*t+"px":n.el_.style.left=e.width*t+"px"})},e}(pt);k.prototype.options_={children:["volumeLevelTooltip"]},pt.registerComponent("MouseVolumeLevelDisplay",k);f=function(n){function e(e,t){var i=n.call(this,e,t)||this;return i.on("slideractive",function(e){return i.updateLastVolume_(e)}),i.on(e,"volumechange",function(e){return i.updateARIAAttributes(e)}),e.ready(function(){return i.updateARIAAttributes()}),i}mt(e,n);var t=e.prototype;return t.createEl=function(){return n.prototype.createEl.call(this,"div",{className:"vjs-volume-bar vjs-slider-bar"},{"aria-label":this.localize("Volume Level"),"aria-live":"polite"})},t.handleMouseDown=function(e){_e(e)&&n.prototype.handleMouseDown.call(this,e)},t.handleMouseMove=function(e){var t,i,n,r=this.getChild("mouseVolumeLevelDisplay");r&&(t=de(n=this.el()),i=this.vertical(),n=pe(n,e),n=i?n.y:n.x,n=cn(n,0,1),r.update(t,n,i)),_e(e)&&(this.checkMuted(),this.player_.volume(this.calculateDistance(e)))},t.checkMuted=function(){this.player_.muted()&&this.player_.muted(!1)},t.getPercent=function(){return 
this.player_.muted()?0:this.player_.volume()},t.stepForward=function(){this.checkMuted(),this.player_.volume(this.player_.volume()+.1)},t.stepBack=function(){this.checkMuted(),this.player_.volume(this.player_.volume()-.1)},t.updateARIAAttributes=function(e){var t=this.player_.muted()?0:this.volumeAsPercentage_();this.el_.setAttribute("aria-valuenow",t),this.el_.setAttribute("aria-valuetext",t+"%")},t.volumeAsPercentage_=function(){return Math.round(100*this.player_.volume())},t.updateLastVolume_=function(){var e=this,t=this.player_.volume();this.one("sliderinactive",function(){0===e.player_.volume()&&e.player_.lastVolume_(t)})},e}(li);f.prototype.options_={children:["volumeLevel"],barName:"volumeLevel"},q||A||f.prototype.options_.children.splice(0,0,"mouseVolumeLevelDisplay"),f.prototype.playerEvent="volumechange",pt.registerComponent("VolumeBar",f);ui=function(a){function e(e,t){var i,n,r;return(t=void 0===t?{}:t).vertical=t.vertical||!1,"undefined"!=typeof t.volumeBar&&!S(t.volumeBar)||(t.volumeBar=t.volumeBar||{},t.volumeBar.vertical=t.vertical),i=a.call(this,e,t)||this,n=ft(i),(r=e).tech_&&!r.tech_.featuresVolumeControl&&n.addClass("vjs-hidden"),n.on(r,"loadstart",function(){r.tech_.featuresVolumeControl?n.removeClass("vjs-hidden"):n.addClass("vjs-hidden")}),i.throttledHandleMouseMove=We(Ve(ft(i),i.handleMouseMove),30),i.handleMouseUpHandler_=function(e){return i.handleMouseUp(e)},i.on("mousedown",function(e){return i.handleMouseDown(e)}),i.on("touchstart",function(e){return i.handleMouseDown(e)}),i.on("mousemove",function(e){return i.handleMouseMove(e)}),i.on(i.volumeBar,["focus","slideractive"],function(){i.volumeBar.addClass("vjs-slider-active"),i.addClass("vjs-slider-active"),i.trigger("slideractive")}),i.on(i.volumeBar,["blur","sliderinactive"],function(){i.volumeBar.removeClass("vjs-slider-active"),i.removeClass("vjs-slider-active"),i.trigger("sliderinactive")}),i}mt(e,a);var t=e.prototype;return t.createEl=function(){var e="vjs-volume-horizontal";return this.options_.vertical&&(e="vjs-volume-vertical"),a.prototype.createEl.call(this,"div",{className:"vjs-volume-control vjs-control "+e})},t.handleMouseDown=function(e){var t=this.el_.ownerDocument;this.on(t,"mousemove",this.throttledHandleMouseMove),this.on(t,"touchmove",this.throttledHandleMouseMove),this.on(t,"mouseup",this.handleMouseUpHandler_),this.on(t,"touchend",this.handleMouseUpHandler_)},t.handleMouseUp=function(e){var t=this.el_.ownerDocument;this.off(t,"mousemove",this.throttledHandleMouseMove),this.off(t,"touchmove",this.throttledHandleMouseMove),this.off(t,"mouseup",this.handleMouseUpHandler_),this.off(t,"touchend",this.handleMouseUpHandler_)},t.handleMouseMove=function(e){this.volumeBar.handleMouseMove(e)},e}(pt);ui.prototype.options_={children:["volumeBar"]},pt.registerComponent("VolumeControl",ui);Xt=function(a){function e(e,t){var i,n,r=a.call(this,e,t)||this;return i=ft(r),(n=e).tech_&&!n.tech_.featuresMuteControl&&i.addClass("vjs-hidden"),i.on(n,"loadstart",function(){n.tech_.featuresMuteControl?i.removeClass("vjs-hidden"):i.addClass("vjs-hidden")}),r.on(e,["loadstart","volumechange"],function(e){return r.update(e)}),r}mt(e,a);var t=e.prototype;return t.buildCSSClass=function(){return"vjs-mute-control "+a.prototype.buildCSSClass.call(this)},t.handleClick=function(e){var 
t=this.player_.volume(),i=this.player_.lastVolume_();0===t?(this.player_.volume(i<.1?.1:i),this.player_.muted(!1)):this.player_.muted(!this.player_.muted())},t.update=function(e){this.updateIcon_(),this.updateControlText_()},t.updateIcon_=function(){var e=this.player_.volume(),t=3;q&&this.player_.tech_&&this.player_.tech_.el_&&this.player_.muted(this.player_.tech_.el_.muted),0===e||this.player_.muted()?t=0:e<.33?t=1:e<.67&&(t=2);for(var i=0;i<4;i++)ie(this.el_,"vjs-vol-"+i);te(this.el_,"vjs-vol-"+t)},t.updateControlText_=function(){var e=this.player_.muted()||0===this.player_.volume()?"Unmute":"Mute";this.controlText()!==e&&this.controlText(e)},e}(sn);Xt.prototype.controlText_="Mute",pt.registerComponent("MuteToggle",Xt);I=function(n){function e(e,t){var i;return"undefined"!=typeof(t=void 0===t?{}:t).inline?t.inline=t.inline:t.inline=!0,"undefined"!=typeof t.volumeControl&&!S(t.volumeControl)||(t.volumeControl=t.volumeControl||{},t.volumeControl.vertical=!t.inline),(i=n.call(this,e,t)||this).handleKeyPressHandler_=function(e){return i.handleKeyPress(e)},i.on(e,["loadstart"],function(e){return i.volumePanelState_(e)}),i.on(i.muteToggle,"keyup",function(e){return i.handleKeyPress(e)}),i.on(i.volumeControl,"keyup",function(e){return i.handleVolumeControlKeyUp(e)}),i.on("keydown",function(e){return i.handleKeyPress(e)}),i.on("mouseover",function(e){return i.handleMouseOver(e)}),i.on("mouseout",function(e){return i.handleMouseOut(e)}),i.on(i.volumeControl,["slideractive"],i.sliderActive_),i.on(i.volumeControl,["sliderinactive"],i.sliderInactive_),i}mt(e,n);var t=e.prototype;return t.sliderActive_=function(){this.addClass("vjs-slider-active")},t.sliderInactive_=function(){this.removeClass("vjs-slider-active")},t.volumePanelState_=function(){this.volumeControl.hasClass("vjs-hidden")&&this.muteToggle.hasClass("vjs-hidden")&&this.addClass("vjs-hidden"),this.volumeControl.hasClass("vjs-hidden")&&!this.muteToggle.hasClass("vjs-hidden")&&this.addClass("vjs-mute-toggle-only")},t.createEl=function(){var e="vjs-volume-panel-horizontal";return this.options_.inline||(e="vjs-volume-panel-vertical"),n.prototype.createEl.call(this,"div",{className:"vjs-volume-panel vjs-control "+e})},t.dispose=function(){this.handleMouseOut(),n.prototype.dispose.call(this)},t.handleVolumeControlKeyUp=function(e){ht.isEventKey(e,"Esc")&&this.muteToggle.focus()},t.handleMouseOver=function(e){this.addClass("vjs-hover"),Be(document,"keyup",this.handleKeyPressHandler_)},t.handleMouseOut=function(e){this.removeClass("vjs-hover"),Fe(document,"keyup",this.handleKeyPressHandler_)},t.handleKeyPress=function(e){ht.isEventKey(e,"Esc")&&this.handleMouseOut()},e}(pt);I.prototype.options_={children:["muteToggle","volumeControl"]},pt.registerComponent("VolumePanel",I);var hn=function(n){function e(e,t){var i=n.call(this,e,t)||this;return t&&(i.menuButton_=t.menuButton),i.focusedChild_=-1,i.on("keydown",function(e){return i.handleKeyDown(e)}),i.boundHandleBlur_=function(e){return i.handleBlur(e)},i.boundHandleTapClick_=function(e){return i.handleTapClick(e)},i}mt(e,n);var t=e.prototype;return t.addEventListenerForItem=function(e){e instanceof pt&&(this.on(e,"blur",this.boundHandleBlur_),this.on(e,["tap","click"],this.boundHandleTapClick_))},t.removeEventListenerForItem=function(e){e instanceof pt&&(this.off(e,"blur",this.boundHandleBlur_),this.off(e,["tap","click"],this.boundHandleTapClick_))},t.removeChild=function(e){"string"==typeof 
e&&(e=this.getChild(e)),this.removeEventListenerForItem(e),n.prototype.removeChild.call(this,e)},t.addItem=function(e){e=this.addChild(e);e&&this.addEventListenerForItem(e)},t.createEl=function(){var e=this.options_.contentElType||"ul";this.contentEl_=$(e,{className:"vjs-menu-content"}),this.contentEl_.setAttribute("role","menu");e=n.prototype.createEl.call(this,"div",{append:this.contentEl_,className:"vjs-menu"});return e.appendChild(this.contentEl_),Be(e,"click",function(e){e.preventDefault(),e.stopImmediatePropagation()}),e},t.dispose=function(){this.contentEl_=null,this.boundHandleBlur_=null,this.boundHandleTapClick_=null,n.prototype.dispose.call(this)},t.handleBlur=function(e){var t=e.relatedTarget||document.activeElement;this.children().some(function(e){return e.el()===t})||(e=this.menuButton_)&&e.buttonPressed_&&t!==e.el().firstChild&&e.unpressButton()},t.handleTapClick=function(t){var e;this.menuButton_&&(this.menuButton_.unpressButton(),e=this.children(),!Array.isArray(e)||(e=e.filter(function(e){return e.el()===t.target})[0])&&"CaptionSettingsMenuItem"!==e.name()&&this.menuButton_.focus())},t.handleKeyDown=function(e){ht.isEventKey(e,"Left")||ht.isEventKey(e,"Down")?(e.preventDefault(),e.stopPropagation(),this.stepForward()):(ht.isEventKey(e,"Right")||ht.isEventKey(e,"Up"))&&(e.preventDefault(),e.stopPropagation(),this.stepBack())},t.stepForward=function(){var e=0;void 0!==this.focusedChild_&&(e=this.focusedChild_+1),this.focus(e)},t.stepBack=function(){var e=0;void 0!==this.focusedChild_&&(e=this.focusedChild_-1),this.focus(e)},t.focus=function(e){void 0===e&&(e=0);var t=this.children().slice();t.length&&t[0].hasClass("vjs-menu-title")&&t.shift(),0=t.length&&(e=t.length-1),t[this.focusedChild_=e].el_.focus())},e}(pt);pt.registerComponent("Menu",hn);Bt=function(n){function e(e,t){var i;(i=n.call(this,e,t=void 0===t?{}:t)||this).menuButton_=new sn(e,t),i.menuButton_.controlText(i.controlText_),i.menuButton_.el_.setAttribute("aria-haspopup","true");t=sn.prototype.buildCSSClass();i.menuButton_.el_.className=i.buildCSSClass()+" "+t,i.menuButton_.removeClass("vjs-control"),i.addChild(i.menuButton_),i.update(),i.enabled_=!0;t=function(e){return i.handleClick(e)};return i.handleMenuKeyUp_=function(e){return i.handleMenuKeyUp(e)},i.on(i.menuButton_,"tap",t),i.on(i.menuButton_,"click",t),i.on(i.menuButton_,"keydown",function(e){return i.handleKeyDown(e)}),i.on(i.menuButton_,"mouseenter",function(){i.addClass("vjs-hover"),i.menu.show(),Be(document,"keyup",i.handleMenuKeyUp_)}),i.on("mouseleave",function(e){return i.handleMouseLeave(e)}),i.on("keydown",function(e){return i.handleSubmenuKeyDown(e)}),i}mt(e,n);var t=e.prototype;return t.update=function(){var e=this.createMenu();this.menu&&(this.menu.dispose(),this.removeChild(this.menu)),this.menu=e,this.addChild(e),this.buttonPressed_=!1,this.menuButton_.el_.setAttribute("aria-expanded","false"),this.items&&this.items.length<=this.hideThreshold_?(this.hide(),this.menu.contentEl_.removeAttribute("role")):(this.show(),this.menu.contentEl_.setAttribute("role","menu"))},t.createMenu=function(){var e,t=new hn(this.player_,{menuButton:this});if(this.hideThreshold_=0,this.options_.title&&(e=$("li",{className:"vjs-menu-title",textContent:ut(this.options_.title),tabIndex:-1}),e=new pt(this.player_,{el:e}),t.addItem(e)),this.items=this.createItems(),this.items)for(var i=0;i select",id:"captions-background-color-%s",label:"Color",options:[ui,Bt,jt,Ft,j,C,I,Xt]},backgroundOpacity:{selector:".vjs-bg-opacity > 
select",id:"captions-background-opacity-%s",label:"Transparency",options:[k,li,f]},color:{selector:".vjs-fg-color > select",id:"captions-foreground-color-%s",label:"Color",options:[Bt,ui,jt,Ft,j,C,I,Xt]},edgeStyle:{selector:".vjs-edge-style > select",id:"%s",label:"Text Edge Style",options:[["none","None"],["raised","Raised"],["depressed","Depressed"],["uniform","Uniform"],["dropshadow","Dropshadow"]]},fontFamily:{selector:".vjs-font-family > select",id:"captions-font-family-%s",label:"Font Family",options:[["proportionalSansSerif","Proportional Sans-Serif"],["monospaceSansSerif","Monospace Sans-Serif"],["proportionalSerif","Proportional Serif"],["monospaceSerif","Monospace Serif"],["casual","Casual"],["script","Script"],["small-caps","Small Caps"]]},fontPercent:{selector:".vjs-font-percent > select",id:"captions-font-size-%s",label:"Font Size",options:[["0.50","50%"],["0.75","75%"],["1.00","100%"],["1.25","125%"],["1.50","150%"],["1.75","175%"],["2.00","200%"],["3.00","300%"],["4.00","400%"]],default:2,parser:function(e){return"1.00"===e?null:Number(e)}},textOpacity:{selector:".vjs-text-opacity > select",id:"captions-foreground-opacity-%s",label:"Transparency",options:[k,li]},windowColor:{selector:".vjs-window-color > select",id:"captions-window-color-%s",label:"Color"},windowOpacity:{selector:".vjs-window-opacity > select",id:"captions-window-opacity-%s",label:"Transparency",options:[f,li,k]}};function wn(e,t){if((e=t?t(e):e)&&"none"!==e)return e}Sn.windowColor.options=Sn.backgroundColor.options,pt.registerComponent("TextTrackSettings",function(n){function e(e,t){var i;return t.temporary=!1,(i=n.call(this,e,t)||this).updateDisplay=i.updateDisplay.bind(ft(i)),i.fill(),i.hasBeenOpened_=i.hasBeenFilled_=!0,i.endDialog=$("p",{className:"vjs-control-text",textContent:i.localize("End of dialog window.")}),i.el().appendChild(i.endDialog),i.setDefaults(),void 0===t.persistTextTrackSettings&&(i.options_.persistTextTrackSettings=i.options_.playerOptions.persistTextTrackSettings),i.on(i.$(".vjs-done-button"),"click",function(){i.saveSettings(),i.close()}),i.on(i.$(".vjs-default-button"),"click",function(){i.setDefaults(),i.updateDisplay()}),_(Sn,function(e){i.on(i.$(e.selector),"change",i.updateDisplay)}),i.options_.persistTextTrackSettings&&i.restoreSettings(),i}mt(e,n);var t=e.prototype;return t.dispose=function(){this.endDialog=null,n.prototype.dispose.call(this)},t.createElSelect_=function(e,t,i){var n=this;void 0===t&&(t=""),void 0===i&&(i="label");var e=Sn[e],r=e.id.replace("%s",this.id_),a=[t,r].join(" ").trim();return["<"+i+' id="'+r+'" class="'+("label"===i?"vjs-label":"")+'">',this.localize(e.label),""+i+">",''].concat(e.options.map(function(e){var t=r+"-"+e[1].replace(/\W+/g,"");return['',n.localize(e[1])," "].join("")})).concat(" ").join("")},t.createElFgColor_=function(){var e="captions-text-legend-"+this.id_;return['','',this.localize("Text")," ",this.createElSelect_("color",e),'',this.createElSelect_("textOpacity",e)," "," "].join("")},t.createElBgColor_=function(){var e="captions-background-"+this.id_;return['','',this.localize("Background")," ",this.createElSelect_("backgroundColor",e),'',this.createElSelect_("backgroundOpacity",e)," "," "].join("")},t.createElWinColor_=function(){var e="captions-window-"+this.id_;return['','',this.localize("Window")," ",this.createElSelect_("windowColor",e),'',this.createElSelect_("windowOpacity",e)," "," "].join("")},t.createElColors_=function(){return 
$("div",{className:"vjs-track-settings-colors",innerHTML:[this.createElFgColor_(),this.createElBgColor_(),this.createElWinColor_()].join("")})},t.createElFont_=function(){return $("div",{className:"vjs-track-settings-font",innerHTML:['',this.createElSelect_("fontPercent","","legend")," ",'',this.createElSelect_("edgeStyle","","legend")," ",'',this.createElSelect_("fontFamily","","legend")," "].join("")})},t.createElControls_=function(){var e=this.localize("restore all settings to the default values");return $("div",{className:"vjs-track-settings-controls",innerHTML:['',this.localize("Reset"),' '+e+" "," ",''+this.localize("Done")+" "].join("")})},t.content=function(){return[this.createElColors_(),this.createElFont_(),this.createElControls_()]},t.label=function(){return this.localize("Caption Settings Dialog")},t.description=function(){return this.localize("Beginning of dialog window. Escape will cancel and close the window.")},t.buildCSSClass=function(){return n.prototype.buildCSSClass.call(this)+" vjs-text-track-settings"},t.getValues=function(){var i,n,e,r=this;return n=function(e,t,i){var n,t=(n=r.$(t.selector),t=t.parser,wn(n.options[n.options.selectedIndex].value,t));return void 0!==t&&(e[i]=t),e},void 0===(e={})&&(e=0),v(i=Sn).reduce(function(e,t){return n(e,i[t],t)},e)},t.setValues=function(i){var n=this;_(Sn,function(e,t){!function(e,t,i){if(t)for(var n=0;nthis.options_.liveTolerance,(t=!this.timeupdateSeen_||e===1/0?!1:t)!==this.behindLiveEdge_&&(this.behindLiveEdge_=t,this.trigger("liveedgechange")))},t.handleDurationchange=function(){this.toggleTracking()},t.toggleTracking=function(){this.player_.duration()===1/0&&this.liveWindow()>=this.options_.trackingThreshold?(this.player_.options_.liveui&&this.player_.addClass("vjs-liveui"),this.startTracking()):(this.player_.removeClass("vjs-liveui"),this.stopTracking())},t.startTracking=function(){this.isTracking()||(this.timeupdateSeen_||(this.timeupdateSeen_=this.player_.hasStarted()),this.trackingInterval_=this.setInterval(this.trackLiveHandler_,30),this.trackLive_(),this.on(this.player_,["play","pause"],this.trackLiveHandler_),this.timeupdateSeen_?this.on(this.player_,"seeked",this.handleSeeked_):(this.one(this.player_,"play",this.handlePlay_),this.one(this.player_,"timeupdate",this.handleFirstTimeupdate_)))},t.handleFirstTimeupdate=function(){this.timeupdateSeen_=!0,this.on(this.player_,"seeked",this.handleSeeked_)},t.handleSeeked=function(){var e=Math.abs(this.liveCurrentTime()-this.player_.currentTime());this.seekedBehindLive_=this.nextSeekedFromUser_&&2=e.length&&n.call(e,function(e,t){return e===(a[t]?a[t]&i[r+t]:i[r+t])})}var Er=function(t){function e(){var e=t.call(this)||this;return e.buffer="",e}return mt(e,t),e.prototype.push=function(e){var t;for(this.buffer+=e,t=this.buffer.indexOf("\n");-1"==e&&">")||"&"==e&&"&"||'"'==e&&"""||""+e.charCodeAt()+";"}function ga(e,t){if(t(e))return 1;if(e=e.firstChild)do{if(ga(e,t))return 1}while(e=e.nextSibling)}function ya(){}function va(e,t,i){e&&e._inc++,i.namespaceURI===Hr.XMLNS&&delete t._nsMap[i.prefix?i.localName:""]}function _a(e,t,i){if(e&&e._inc){e._inc++;var n=t.childNodes;if(i)n[n.length++]=i;else{for(var r=t.firstChild,a=0;r;)r=(n[a++]=r).nextSibling;n.length=a}}}function ba(e,t){var i=t.previousSibling,n=t.nextSibling;return i?i.nextSibling=n:e.firstChild=n,n?n.previousSibling=i:e.lastChild=i,_a(e.ownerDocument,e),t}function Ta(e,t,i){var n=t.parentNode;if(n&&n.removeChild(t),t.nodeType===ia){var r=t.firstChild;if(null==r)return t;var a=t.lastChild}else 
r=a=t;n=i?i.previousSibling:e.lastChild;for(r.previousSibling=n,a.nextSibling=i,n?n.nextSibling=r:e.firstChild=r,null==i?e.lastChild=a:i.previousSibling=a;r.parentNode=e,r!==a&&(r=r.nextSibling););return _a(e.ownerDocument||e,e),t.nodeType==ia&&(t.firstChild=t.lastChild=null),t}function Sa(){this._nsMap={}}function wa(){}function Ea(){}function ka(){}function Ca(){}function Ia(){}function xa(){}function Aa(){}function Pa(){}function La(){}function Da(){}function Oa(){}function Ma(){}function Ra(e,t){var i,n=[],r=9==this.nodeType&&this.documentElement||this,a=r.prefix,s=r.namespaceURI;return Ba(this,n,e,t,i=s&&null==a&&null==(a=r.lookupPrefix(s))?[{namespace:s,prefix:null}]:i),n.join("")}function Na(e,t,i){var n=e.prefix||"",r=e.namespaceURI;if(r&&("xml"!==n||r!==Hr.XML)&&r!==Hr.XMLNS){for(var a=i.length;a--;){var s=i[a];if(s.prefix===n)return s.namespace!==r}return 1}}function Ua(e,t,i){e.push(" ",t,'="',i.replace(/[<&"]/g,ma),'"')}function Ba(e,t,i,n,r){if(r=r||[],n){if(!(e=n(e)))return;if("string"==typeof e)return void t.push(e)}switch(e.nodeType){case Xr:var a=e.attributes,s=a.length,o=e.firstChild,u=e.tagName,l=u;if(!(i=Hr.isHTML(e.namespaceURI)||i)&&!e.prefix&&e.namespaceURI){for(var c,d=0;d"),i&&/^script$/i.test(u))for(;o;)o.data?t.push(o.data):Ba(o,t,i,n,r.slice()),o=o.nextSibling;else for(;o;)Ba(o,t,i,n,r.slice()),o=o.nextSibling;t.push("",l,">")}else t.push("/>");return;case ea:case ia:for(o=e.firstChild;o;)Ba(o,t,i,n,r.slice()),o=o.nextSibling;return;case Kr:return Ua(t,e.name,e.value),0;case Yr:return t.push(e.data.replace(/[<&]/g,ma).replace(/]]>/g,"]]>"));case Qr:return t.push("");case Zr:return t.push("\x3c!--",e.data,"--\x3e");case ta:var v=e.publicId,_=e.systemId;return t.push("")):_&&"."!=_?t.push(" SYSTEM ",_,">"):((_=e.internalSubset)&&t.push(" [",_,"]"),t.push(">")));case Jr:return t.push("",e.target," ",e.data,"?>");case $r:return t.push("&",e.nodeName,";");default:t.push("??",e.nodeName)}}function Fa(e,t,i){e[t]=i}x.INVALID_STATE_ERR=(na[11]="Invalid state",11),x.SYNTAX_ERR=(na[12]="Syntax error",12),x.INVALID_MODIFICATION_ERR=(na[13]="Invalid modification",13),x.NAMESPACE_ERR=(na[14]="Invalid namespace",14),x.INVALID_ACCESS_ERR=(na[15]="Invalid access",15),aa.prototype=Error.prototype,Gr(x,aa),sa.prototype={length:0,item:function(e){return this[e]||null},toString:function(e,t){for(var i=[],n=0;n",lt:"<",quot:'"'}),t.HTML_ENTITIES=i({lt:"<",gt:">",amp:"&",quot:'"',apos:"'",Agrave:"À",Aacute:"Á",Acirc:"Â",Atilde:"Ã",Auml:"Ä",Aring:"Å",AElig:"Æ",Ccedil:"Ç",Egrave:"È",Eacute:"É",Ecirc:"Ê",Euml:"Ë",Igrave:"Ì",Iacute:"Í",Icirc:"Î",Iuml:"Ï",ETH:"Ð",Ntilde:"Ñ",Ograve:"Ò",Oacute:"Ó",Ocirc:"Ô",Otilde:"Õ",Ouml:"Ö",Oslash:"Ø",Ugrave:"Ù",Uacute:"Ú",Ucirc:"Û",Uuml:"Ü",Yacute:"Ý",THORN:"Þ",szlig:"ß",agrave:"à",aacute:"á",acirc:"â",atilde:"ã",auml:"ä",aring:"å",aelig:"æ",ccedil:"ç",egrave:"è",eacute:"é",ecirc:"ê",euml:"ë",igrave:"ì",iacute:"í",icirc:"î",iuml:"ï",eth:"ð",ntilde:"ñ",ograve:"ò",oacute:"ó",ocirc:"ô",otilde:"õ",ouml:"ö",oslash:"ø",ugrave:"ù",uacute:"ú",ucirc:"û",uuml:"ü",yacute:"ý",thorn:"þ",yuml:"ÿ",nbsp:" 
",iexcl:"¡",cent:"¢",pound:"£",curren:"¤",yen:"¥",brvbar:"¦",sect:"§",uml:"¨",copy:"©",ordf:"ª",laquo:"«",not:"¬",shy:"",reg:"®",macr:"¯",deg:"°",plusmn:"±",sup2:"²",sup3:"³",acute:"´",micro:"µ",para:"¶",middot:"·",cedil:"¸",sup1:"¹",ordm:"º",raquo:"»",frac14:"¼",frac12:"½",frac34:"¾",iquest:"¿",times:"×",divide:"÷",forall:"∀",part:"∂",exist:"∃",empty:"∅",nabla:"∇",isin:"∈",notin:"∉",ni:"∋",prod:"∏",sum:"∑",minus:"−",lowast:"∗",radic:"√",prop:"∝",infin:"∞",ang:"∠",and:"∧",or:"∨",cap:"∩",cup:"∪",int:"∫",there4:"∴",sim:"∼",cong:"≅",asymp:"≈",ne:"≠",equiv:"≡",le:"≤",ge:"≥",sub:"⊂",sup:"⊃",nsub:"⊄",sube:"⊆",supe:"⊇",oplus:"⊕",otimes:"⊗",perp:"⊥",sdot:"⋅",Alpha:"Α",Beta:"Β",Gamma:"Γ",Delta:"Δ",Epsilon:"Ε",Zeta:"Ζ",Eta:"Η",Theta:"Θ",Iota:"Ι",Kappa:"Κ",Lambda:"Λ",Mu:"Μ",Nu:"Ν",Xi:"Ξ",Omicron:"Ο",Pi:"Π",Rho:"Ρ",Sigma:"Σ",Tau:"Τ",Upsilon:"Υ",Phi:"Φ",Chi:"Χ",Psi:"Ψ",Omega:"Ω",alpha:"α",beta:"β",gamma:"γ",delta:"δ",epsilon:"ε",zeta:"ζ",eta:"η",theta:"θ",iota:"ι",kappa:"κ",lambda:"λ",mu:"μ",nu:"ν",xi:"ξ",omicron:"ο",pi:"π",rho:"ρ",sigmaf:"ς",sigma:"σ",tau:"τ",upsilon:"υ",phi:"φ",chi:"χ",psi:"ψ",omega:"ω",thetasym:"ϑ",upsih:"ϒ",piv:"ϖ",OElig:"Œ",oelig:"œ",Scaron:"Š",scaron:"š",Yuml:"Ÿ",fnof:"ƒ",circ:"ˆ",tilde:"˜",ensp:" ",emsp:" ",thinsp:" ",zwnj:"",zwj:"",lrm:"",rlm:"",ndash:"–",mdash:"—",lsquo:"‘",rsquo:"’",sbquo:"‚",ldquo:"“",rdquo:"”",bdquo:"„",dagger:"†",Dagger:"‡",bull:"•",hellip:"…",permil:"‰",prime:"′",Prime:"″",lsaquo:"‹",rsaquo:"›",oline:"‾",euro:"€",trade:"™",larr:"←",uarr:"↑",rarr:"→",darr:"↓",harr:"↔",crarr:"↵",lceil:"⌈",rceil:"⌉",lfloor:"⌊",rfloor:"⌋",loz:"◊",spades:"♠",clubs:"♣",hearts:"♥",diams:"♦"}),t.entityMap=t.HTML_ENTITIES});ja.XML_ENTITIES,ja.HTML_ENTITIES,ja.entityMap;var Ha=jr.NAMESPACE,zt=/[A-Z_a-z\xC0-\xD6\xD8-\xF6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/,ar=new RegExp("[\\-\\.0-9"+zt.source.slice(1,-1)+"\\u00B7\\u0300-\\u036F\\u203F-\\u2040]"),qa=new RegExp("^"+zt.source+ar.source+"*(?::"+zt.source+ar.source+"*)?$"),Va=0,Wa=1,Ga=2,za=3,Xa=4,Ka=5,Ya=6,Qa=7;function $a(e,t){this.message=e,this.locator=t,Error.captureStackTrace&&Error.captureStackTrace(this,$a)}function Ja(){}function Za(e,t){return t.lineNumber=e.lineNumber,t.columnNumber=e.columnNumber,t}function es(e,t,i){for(var n=e.tagName,r=null,a=e.length;a--;){var s=e[a],o=s.qName,u=s.value,o=0<(c=o.indexOf(":"))?(l=s.prefix=o.slice(0,c),d=o.slice(c+1),"xmlns"===l&&d):(l=null,"xmlns"===(d=o)&&"");s.localName=d,!1!==o&&(null==r&&(r={},ts(i,i={})),i[o]=r[o]=u,s.uri=Ha.XMLNS,t.startPrefixMapping(o,u))}for(var l,a=e.length;a--;)(l=(s=e[a]).prefix)&&("xml"===l&&(s.uri=Ha.XML),"xmlns"!==l&&(s.uri=i[l||""]));var c,d=0<(c=n.indexOf(":"))?(l=e.prefix=n.slice(0,c),e.localName=n.slice(c+1)):(l=null,e.localName=n),h=e.uri=i[l||""];if(t.startElement(h,d,n,e),!e.closed)return e.currentNSMap=i,e.localNSMap=r,1;if(t.endElement(h,d,n),r)for(l in r)t.endPrefixMapping(l)}function ts(e,t){for(var i in e)t[i]=e[i]}function is(){this.attributeNames={}}($a.prototype=new Error).name=$a.name,Ja.prototype={parse:function(e,t,i){var n=this.domBuilder;n.startDocument(),ts(t,t={}),function(i,e,n,r,a){function s(e){var t=e.slice(1,-1);return t in n?n[t]:"#"===t.charAt(0)?65535<(t=parseInt(t.substr(1).replace("x","0x")))?(t-=65536,String.fromCharCode(55296+(t>>10),56320+(1023&t))):String.fromCharCode(t):(a.error("entity not found:"+e),e)}function t(e){var t;f",y+3),_=i.substring(y+2,v).replace(/[ 
\t\n\r]+$/g,""),b=h.pop();v<0?(_=i.substring(y+2).replace(/[\s<].*/,""),a.error("end tag name: "+_+" is not complete:"+b.tagName),v=y+1+_.length):_.match(/\s)&&(_=_.replace(/[\s<].*/,""),a.error("end tag name: "+_+" maybe not complete"),v=y+1+_.length);var T=b.localNSMap,S=b.tagName==_;if(S||b.tagName&&b.tagName.toLowerCase()==_.toLowerCase()){if(r.endElement(b.uri,b.localName,_),T)for(var w in T)r.endPrefixMapping(w);S||a.fatalError("end tag name: "+_+" is not match the current start tagName:"+b.tagName)}else h.push(b);v++;break;case"?":d&&o(y),v=function(e,t,i){var n=e.indexOf("?>",t);if(n){t=e.substring(t,n).match(/^<\?(\S*)\s*([\s\S]*?)\s*$/);return t?(t[0].length,i.processingInstruction(t[1],t[2]),n+2):-1}return-1}(i,y,r);break;case"!":d&&o(y),v=function(e,t,i,n){{if("-"===e.charAt(t+2)){if("-"!==e.charAt(t+3))return-1;var r=e.indexOf("--\x3e",t+4);return t",t+9);return i.startCDATA(),i.characters(e,t+9,r-t-9),i.endCDATA(),r+3}var a=function(e,t){var i,n=[],r=/'[^']+'|"[^"]+"|[^\s<>\/=]+=?|(\/?\s*>|<)/g;r.lastIndex=t,r.exec(e);for(;i=r.exec(e);)if(n.push(i),i[1])return n}(e,t),n=a.length;if(1":switch(l){case Va:n.setTagName(e.slice(t,u));case Ka:case Ya:case Qa:break;case Xa:case Wa:"/"===(d=e.slice(t,u)).slice(-1)&&(n.closed=!0,d=d.slice(0,-1));case Ga:l===Ga&&(d=o),l==Xa?(a.warning('attribute "'+d+'" missed quot(")!'),s(o,d.replace(/?\w+;/g,r),t)):(Ha.isHTML(i[""])&&d.match(/^(?:disabled|checked|selected)$/i)||a.warning('attribute "'+d+'" missed value!! "'+d+'" instead!!'),s(d,d,t));break;case za:throw new Error("attribute value missed!!")}return u;case"":c=" ";default:if(c<=" ")switch(l){case Va:n.setTagName(e.slice(t,u)),l=Ya;break;case Wa:o=e.slice(t,u),l=Ga;break;case Xa:var d=e.slice(t,u).replace(/?\w+;/g,r);a.warning('attribute "'+d+'" missed quot(")!!'),s(o,d,t);case Ka:l=Ya}else switch(l){case Ga:n.tagName,Ha.isHTML(i[""])&&o.match(/^(?:disabled|checked|selected)$/i)||a.warning('attribute "'+o+'" missed value!! 
"'+o+'" instead2!!'),s(o,o,t),t=u,l=Wa;break;case Ka:a.warning('attribute space is required"'+o+'"!!');case Ya:l=Wa,t=u;break;case za:l=Xa,t=u;break;case Qa:throw new Error("elements closed character '/' and '>' must be connected to")}}u++}}(i,y,E,k,s,a),C=E.length;if(!E.closed&&function(e,t,i,n){var r=n[i];null==r&&((r=e.lastIndexOf(""+i+">"))",t),e=e.substring(t+1,a);if(/[&<]/.test(e))return/^script$/i.test(i)||(e=e.replace(/?\w+;/g,n)),r.characters(e,0,e.length),a}return t+1}(i,v,E.tagName,s,r):v++}}catch(e){if(e instanceof $a)throw e;a.error("element parse error: "+e),v=-1}f=t+i||t?new java.lang.String(e,t,i)+"":e}function hs(e,t){(e.currentElement||e.doc).appendChild(t)}os.prototype.parseFromString=function(e,t){var i=this.options,n=new ss,r=i.domBuilder||new us,a=i.errorHandler,s=i.locator,o=i.xmlns||{},u=/\/x?html?$/.test(t),t=u?ja.HTML_ENTITIES:ja.XML_ENTITIES;return s&&r.setDocumentLocator(s),n.errorHandler=function(n,e,r){if(!n){if(e instanceof us)return e;n=e}var a={},s=n instanceof Function;function t(t){var i=n[t];!i&&s&&(i=2==n.length?function(e){n(t,e)}:n),a[t]=i?function(e){i("[xmldom "+t+"]\t"+e+cs(r))}:function(){}}return r=r||{},t("warning"),t("error"),t("fatalError"),a}(a,r,s),n.domBuilder=i.domBuilder||r,u&&(o[""]=rs.HTML),o.xml=o.xml||rs.XML,e&&"string"==typeof e?n.parse(e,o,t):n.errorHandler.error("invalid doc source"),r.doc},us.prototype={startDocument:function(){this.doc=(new ns).createDocument(null,null,null),this.locator&&(this.doc.documentURI=this.locator.systemId)},startElement:function(e,t,i,n){var r=this.doc,a=r.createElementNS(e,i||t),s=n.length;hs(this,a),this.currentElement=a,this.locator&&ls(this.locator,a);for(var o=0;ot.timeline?1:-1});var i}function ks(e){var r,a,s=[];return r=e,a=function(e,t,i,n){s=s.concat(e.playlists||[])},yo.forEach(function(e){for(var t in r.mediaGroups[e])for(var i in r.mediaGroups[e][t]){var n=r.mediaGroups[e][t][i];a(n,e,t,i)}}),s}function Cs(e){var i=e.playlist,e=e.mediaSequence;i.mediaSequence=e,i.segments.forEach(function(e,t){e.number=i.mediaSequence+t})}function Is(e){var r,a,t=e.oldManifest,i=e.newManifest,n=t.playlists.concat(ks(t)),e=i.playlists.concat(ks(i));return i.timelineStarts=Es([t.timelineStarts,i.timelineStarts]),n={oldPlaylists:n,newPlaylists:e,timelineStarts:i.timelineStarts},r=n.oldPlaylists,e=n.newPlaylists,a=n.timelineStarts,e.forEach(function(t){t.discontinuitySequence=vs(a,function(e){return e.timeline===t.timeline});var e=function(e,t){for(var i=0;ie.timeline||e.segments.length&&t.timeline>e.segments[e.segments.length-1].timeline)&&t.discontinuitySequence--);e.segments[n].discontinuity&&!i.discontinuity&&(i.discontinuity=!0,t.discontinuityStarts.unshift(0),t.discontinuitySequence--),Cs({playlist:t,mediaSequence:e.segments[n].number})}}),i}function xs(e){return e&&e.uri+"-"+(t=e.byterange,e="bigint"==typeof t.offset||"bigint"==typeof t.length?window.BigInt(t.offset)+window.BigInt(t.length)-window.BigInt(1):t.offset+t.length-1,t.offset+"-"+e);var t}function As(e){return ms(e.reduce(function(e,t){var i,n=t.attributes.id+(t.attributes.lang||"");return e[n]?(t.segments&&(t.segments[0]&&(t.segments[0].discontinuity=!0),(i=e[n].segments).push.apply(i,t.segments)),t.attributes.contentProtection&&(e[n].attributes.contentProtection=t.attributes.contentProtection)):(e[n]=t,e[n].attributes.timelineStarts=[]),e[n].attributes.timelineStarts.push({start:t.attributes.periodStart,timeline:t.attributes.periodStart}),e},{})).map(function(e){var t,n;return 
e.discontinuityStarts=(t=e.segments||[],n="discontinuity",t.reduce(function(e,t,i){return t[n]&&e.push(i),e},[])),e})}function Ps(e,t){var i=xs(e.sidx);return(i=i&&t[i]&&t[i].sidx)&&ws(e,i,e.sidx.resolvedUri),e}function Ls(e,h,p){var f;return void 0===h&&(h={}),void 0===p&&(p=!1),e=e.reduce(function(e,t){var i=t.attributes.role&&t.attributes.role.value||"",n=t.attributes.lang||"",r=t.attributes.label||"main";e[r=n&&!t.attributes.label?t.attributes.lang+(i?" ("+i+")":""):r]||(e[r]={language:n,autoselect:!0,default:"main"===i,playlists:[],uri:""});var a,s,o,u,l,c,d,u=Ps((s=p,o=(a=t).attributes,u=a.segments,l=a.sidx,c=a.mediaSequence,d=a.discontinuitySequence,n=a.discontinuityStarts,u={attributes:((a={NAME:o.id,BANDWIDTH:o.bandwidth,CODECS:o.codecs})["PROGRAM-ID"]=1,a),uri:"",endList:"static"===o.type,timeline:o.periodStart,resolvedUri:"",targetDuration:o.duration,discontinuitySequence:d,discontinuityStarts:n,timelineStarts:o.timelineStarts,mediaSequence:c,segments:u},o.contentProtection&&(u.contentProtection=o.contentProtection),l&&(u.sidx=l),s&&(u.attributes.AUDIO="audio",u.attributes.SUBTITLES="subs"),u),h);return e[r].playlists.push(u),"undefined"==typeof f&&"main"===i&&((f=t).default=!0),e},{}),f||(e[Object.keys(e)[0]].default=!0),e}function Ds(e){var t=e.attributes,i=e.segments,n=e.sidx,r=e.discontinuityStarts,i={attributes:((e={NAME:t.id,AUDIO:"audio",SUBTITLES:"subs",RESOLUTION:{width:t.width,height:t.height},CODECS:t.codecs,BANDWIDTH:t.bandwidth})["PROGRAM-ID"]=1,e),uri:"",endList:"static"===t.type,timeline:t.periodStart,resolvedUri:"",targetDuration:t.duration,discontinuityStarts:r,timelineStarts:t.timelineStarts,segments:i};return t.contentProtection&&(i.contentProtection=t.contentProtection),n&&(i.sidx=n),i}function Os(e){return"video/mp4"===(e=e.attributes).mimeType||"video/webm"===e.mimeType||"video"===e.contentType}function Ms(e){return"audio/mp4"===(e=e.attributes).mimeType||"audio/webm"===e.mimeType||"audio"===e.contentType}function Rs(e){return"text/vtt"===(e=e.attributes).mimeType||"text"===e.contentType}function Ns(i){return i?Object.keys(i).reduce(function(e,t){t=i[t];return e.concat(t.playlists)},[]):[]}function Us(e){var t=e.dashPlaylists,i=e.locations,n=void 0===(c=e.sidxMapping)?{}:c,r=e.previousManifest;if(!t.length)return{};var a=(d=t[0].attributes).sourceDuration,s=d.type,o=d.suggestedPresentationDelay,u=d.minimumUpdatePeriod,l=As(t.filter(Os)).map(Ds),c=As(t.filter(Ms)),e=As(t.filter(Rs)),d=t.map(function(e){return e.attributes.captionServices}).filter(Boolean),a={allowCache:!0,discontinuityStarts:[],segments:[],endList:!0,mediaGroups:((t={AUDIO:{},VIDEO:{}})["CLOSED-CAPTIONS"]={},t.SUBTITLES={},t),uri:"",duration:a,playlists:function(e,t){if(void 0===t&&(t={}),!Object.keys(t).length)return e;for(var i in e)e[i]=Ps(e[i],t);return e}(l,n)};0<=u&&(a.minimumUpdatePeriod=1e3*u),i&&(a.locations=i),"dynamic"===s&&(a.suggestedPresentationDelay=o);var h,p,o=0===a.playlists.length,o=c.length?Ls(c,n,o):null,n=e.length?(void 0===(h=n)&&(h={}),e.reduce(function(e,t){var i=t.attributes.lang||"text";return e[i]||(e[i]={language:i,default:!1,autoselect:!1,playlists:[],uri:""}),e[i].playlists.push(Ps(function(e){var t=e.attributes,i=e.segments,n=e.mediaSequence,r=e.discontinuityStarts,a=e.discontinuitySequence;"undefined"==typeof i&&(i=[{uri:t.baseUrl,timeline:t.periodStart,resolvedUri:t.baseUrl||"",duration:t.sourceDuration,number:0}],t.duration=t.sourceDuration);(e={NAME:t.id,BANDWIDTH:t.bandwidth})["PROGRAM-ID"]=1;return 
t.codecs&&(e.CODECS=t.codecs),{attributes:e,uri:"",endList:"static"===t.type,timeline:t.periodStart,resolvedUri:t.baseUrl||"",targetDuration:t.duration,timelineStarts:t.timelineStarts,discontinuityStarts:r,discontinuitySequence:a,mediaSequence:n,segments:i}}(t),h)),e},{})):null,l=(e=l.concat(Ns(o),Ns(n))).map(function(e){return e.timelineStarts});return a.timelineStarts=Es(l),e=e,p=a.timelineStarts,e.forEach(function(t){t.mediaSequence=0,t.discontinuitySequence=vs(p,function(e){return e.timeline===t.timeline}),t.segments&&t.segments.forEach(function(e,t){e.number=t})}),o&&(a.mediaGroups.AUDIO.audio=o),n&&(a.mediaGroups.SUBTITLES.subs=n),d.length&&(a.mediaGroups["CLOSED-CAPTIONS"].cc=d.reduce(function(n,e){return e&&e.forEach(function(e){var t=e.channel,i=e.language;n[i]={autoselect:!1,default:!1,instreamId:t,language:i},e.hasOwnProperty("aspectRatio")&&(n[i].aspectRatio=e.aspectRatio),e.hasOwnProperty("easyReader")&&(n[i].easyReader=e.easyReader),e.hasOwnProperty("3D")&&(n[i]["3D"]=e["3D"])}),n},{})),r?Is({oldManifest:r,newManifest:a}):a}function Bs(e,t){for(var i,n,r,a,s,o,u=e.type,l=e.minimumUpdatePeriod,c=void 0===l?0:l,d=void 0===(l=e.media)?"":l,h=e.sourceDuration,p=void 0===(l=e.timescale)?1:l,f=void 0===(l=e.startNumber)?1:l,m=e.periodStart,g=[],y=-1,v=0;v>4?20+t:10+t}(e,t))}function to(e){return"string"==typeof e?Sr(e):e}function io(e,t,i){var n;void 0===i&&(i=!1),n=t,t=Array.isArray(n)?n.map(to):[to(n)],e=br(e);var r=[];if(!t.length)return r;for(var a=0;a>>0,o=e.subarray(a+4,a+8);if(0==s)break;var u=a+s;if(u>e.length){if(i)break;u=e.length}s=e.subarray(a+8,u);wr(o,t[0])&&(1===t.length?r.push(s):r.push.apply(r,io(s,t.slice(1),i))),a=u}return r}function no(e,t,i,n){void 0===i&&(i=!0),void 0===n&&(n=!1);var r=function(e){for(var t=1,i=0;i=t.length)return t.length;var n=no(t,i,!1);if(wr(e.bytes,n.bytes))return i;var r=no(t,i+n.length);return ao(e,t,i+r.length+r.value+n.length)}function so(e,t){var i;i=t,t=Array.isArray(i)?i.map(ro):[ro(i)],e=br(e);var n=[];if(!t.length)return n;for(var r=0;re.length?e.length:o+s.value,u=e.subarray(o,u);wr(t[0],a.bytes)&&(1===t.length?n.push(u):n=n.concat(so(u,t.slice(1)))),r+=a.length+s.length+u.length}return n}function oo(e,t,i,n){void 0===n&&(n=1/0),e=br(e),i=[].concat(i);for(var r,a=0,s=0;a>1&63),-1!==i.indexOf(u)&&(r=a+o),a+=o+("h264"===t?1:2)}else a++}return e.subarray(0,0)}var uo={__DOMHandler:us,DOMParser:os,DOMImplementation:U.DOMImplementation,XMLSerializer:U.XMLSerializer}.DOMParser,lo="INVALID_NUMBER_OF_PERIOD",co="DASH_EMPTY_MANIFEST",ho="DASH_INVALID_XML",po="NO_BASE_URL",fo="SEGMENT_TIME_UNSPECIFIED",mo="UNSUPPORTED_UTC_TIMING_SCHEME",go={static:function(e){var t=e.duration,i=e.timescale,n=void 0===i?1:i,r=e.sourceDuration,i=e.periodDuration,e=bs(e.endNumber),n=t/n;return"number"==typeof e?{start:0,end:e}:"number"==typeof i?{start:0,end:i/n}:{start:0,end:r/n}},dynamic:function(e){var t=e.NOW,i=e.clientOffset,n=e.availabilityStartTime,r=e.timescale,a=void 0===r?1:r,s=e.duration,o=e.periodStart,u=void 0===o?0:o,r=e.minimumUpdatePeriod,o=void 0===r?0:r,r=e.timeShiftBufferDepth,r=void 0===r?1/0:r,e=bs(e.endNumber),i=(t+i)/1e3,u=n+u,o=Math.ceil((i+o-u)*a/s),r=Math.floor((i-u-r)*a/s),s=Math.floor((i-u)*a/s);return{start:Math.max(0,r),end:"number"==typeof 
e?e:Math.min(o,s)}}},yo=["AUDIO","SUBTITLES"],vo=/\$([A-z]*)(?:(%0)([0-9]+)d)?\$/g,_o={mediaPresentationDuration:Gs,availabilityStartTime:function(e){return/^\d+-\d+-\d+T\d+:\d+:\d+(\.\d+)?$/.test(e=e)&&(e+="Z"),Date.parse(e)/1e3},minimumUpdatePeriod:Gs,suggestedPresentationDelay:Gs,type:function(e){return e},timeShiftBufferDepth:Gs,start:Gs,width:function(e){return parseInt(e,10)},height:function(e){return parseInt(e,10)},bandwidth:function(e){return parseInt(e,10)},startNumber:function(e){return parseInt(e,10)},timescale:function(e){return parseInt(e,10)},presentationTimeOffset:function(e){return parseInt(e,10)},duration:function(e){var t=parseInt(e,10);return isNaN(t)?Gs(e):t},d:function(e){return parseInt(e,10)},t:function(e){return parseInt(e,10)},r:function(e){return parseInt(e,10)},DEFAULT:function(e){return e}},bo={"urn:uuid:1077efec-c0b2-4d02-ace3-3c1e52e2fb4b":"org.w3.clearkey","urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed":"com.widevine.alpha","urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95":"com.microsoft.playready","urn:uuid:f239e769-efa3-4850-9c16-a903c6932efb":"com.adobe.primetime"},To=Math.pow(2,32),So=function(e){var t=new DataView(e.buffer,e.byteOffset,e.byteLength);return t.getBigUint64?(e=t.getBigUint64(0))>>7,referencedSize:2147483647&t.getUint32(n),subsegmentDuration:t.getUint32(n+4),startsWithSap:!!(128&e[n+8]),sapType:(112&e[n+8])>>>4,sapDeltaTime:268435455&t.getUint32(n+8)});return i},Eo=br([73,68,51]),ko={EBML:br([26,69,223,163]),DocType:br([66,130]),Segment:br([24,83,128,103]),SegmentInfo:br([21,73,169,102]),Tracks:br([22,84,174,107]),Track:br([174]),TrackNumber:br([215]),DefaultDuration:br([35,227,131]),TrackEntry:br([174]),TrackType:br([131]),FlagDefault:br([136]),CodecID:br([134]),CodecPrivate:br([99,162]),VideoTrack:br([224]),AudioTrack:br([225]),Cluster:br([31,67,182,117]),Timestamp:br([231]),TimestampScale:br([42,215,177]),BlockGroup:br([160]),BlockDuration:br([155]),Block:br([161]),SimpleBlock:br([163])},Co=[128,64,32,16,8,4,2,1],Io=br([0,0,0,1]),xo=br([0,0,1]),Ao=br([0,0,3]),Po={webm:br([119,101,98,109]),matroska:br([109,97,116,114,111,115,107,97]),flac:br([102,76,97,67]),ogg:br([79,103,103,83]),ac3:br([11,119]),riff:br([82,73,70,70]),avi:br([65,86,73]),wav:br([87,65,86,69]),"3gp":br([102,116,121,112,51,103]),mp4:br([102,116,121,112]),fmp4:br([115,116,121,112]),mov:br([102,116,121,112,113,116]),moov:br([109,111,111,118]),moof:br([109,111,111,102])},Lo={aac:function(e){var t=eo(e);return wr(e,[255,16],{offset:t,mask:[255,22]})},mp3:function(e){var t=eo(e);return wr(e,[255,2],{offset:t,mask:[255,6]})},webm:function(e){e=so(e,[ko.EBML,ko.DocType])[0];return wr(e,Po.webm)},mkv:function(e){e=so(e,[ko.EBML,ko.DocType])[0];return wr(e,Po.matroska)},mp4:function(e){return!Lo["3gp"](e)&&!Lo.mov(e)&&(!(!wr(e,Po.mp4,{offset:4})&&!wr(e,Po.fmp4,{offset:4}))||(!(!wr(e,Po.moof,{offset:4})&&!wr(e,Po.moov,{offset:4}))||void 0))},mov:function(e){return wr(e,Po.mov,{offset:4})},"3gp":function(e){return wr(e,Po["3gp"],{offset:4})},ac3:function(e){var t=eo(e);return wr(e,Po.ac3,{offset:t})},ts:function(e){if(e.length<189&&1<=e.length)return 71===e[0];for(var t=0;t+188"):function(){}}function No(e,t){var i,n=[];if(e&&e.length)for(i=0;i "+e.end(i));return t.join(", ")}function jo(e){for(var t=[],i=0;iDate.now()}function Zo(e){return e.excludeUntil&&e.excludeUntil===1/0}function eu(e){var t=Jo(e);return!e.disabled&&!t}function tu(e,t){return t.attributes&&t.attributes[e]}function iu(e,t){if(1===e.playlists.length)return!0;var i=t.attributes.BANDWIDTH||Number.MAX_VALUE;return 
0===e.playlists.filter(function(e){return!!eu(e)&&(e.attributes.BANDWIDTH||0)n+.25*a.duration)return null;i=a}return{segment:i,estimatedStart:i.videoTimingInfo?i.videoTimingInfo.transmuxedPresentationStart:n-i.duration,type:i.videoTimingInfo?"accurate":"estimate"}}(n,t))?"estimate"===e.type?i({message:"Accurate programTime could not be determined. Please seek to e.seekTime and try again",seekTime:e.estimatedStart}):(t={mediaSeconds:n},(e=function(e,t){if(!t.dateTimeObject)return null;var i=t.videoTimingInfo.transmuxerPrependedSeconds,i=e-(t.videoTimingInfo.transmuxedPresentationStart+i);return new Date(t.dateTimeObject.getTime()+1e3*i)}(n,e.segment))&&(t.programDateTime=e.toISOString()),i(null,t)):i({message:"valid programTime was not found"}):i({message:"getProgramTime: playlist and time must be provided"})}function Cu(e){var t=e.programTime,i=e.playlist,n=e.retryCount,r=void 0===n?2:n,a=e.seekTo,s=e.pauseAfterSeek,o=void 0===s||s,u=e.tech,l=e.callback;if(!l)throw new Error("seekToProgramTime: callback must be provided");return"undefined"!=typeof t&&i&&a?i.endList||u.hasStarted_?function(e){if(!e.segments||0===e.segments.length)return!1;for(var t=0;ti||e.height>n})).filter(function(e){return e.width===h[0].width&&e.height===h[0].height}),c=p[p.length-1],p=p.filter(function(e){return e.bandwidth===c.bandwidth})[0]),a.experimentalLeastPixelDiffSelector&&(m=d.map(function(e){return e.pixelDiff=Math.abs(e.width-i)+Math.abs(e.height-n),e}),el(m,function(e,t){return e.pixelDiff===t.pixelDiff?t.bandwidth-e.bandwidth:e.pixelDiff-t.pixelDiff}),f=m[0]);var m=f||p||e||o||l[0]||u[0];if(m&&m.playlist){u="sortedPlaylistReps";return f?u="leastPixelDiffRep":p?u="resolutionPlusOneRep":e?u="resolutionBestRep":o?u="bandwidthBestRep":l[0]&&(u="enabledPlaylistReps"),Ol("choosing "+Ju(m)+" using "+u+" with options",s),m.playlist}return Ol("could not choose a playlist with options",s),null}}function nl(e){var t=e.inbandTextTracks,i=e.metadataArray,r=e.timestampOffset,n=e.videoDuration;if(i){var a=window.WebKitDataCue||window.VTTCue,s=t.metadataTrack_;if(s&&(i.forEach(function(e){var n=e.cueTime+r;!("number"!=typeof n||window.isNaN(n)||n<0)&&n<1/0&&e.frames.forEach(function(e){var t,i=new a(n,n,e.value||e.url||e.data||"");i.frame=e,i.value=e,t=i,Object.defineProperties(t.frame,{id:{get:function(){return tr.log.warn("cue.frame.id is deprecated. Use cue.value.key instead."),t.value.key}},value:{get:function(){return tr.log.warn("cue.frame.value is deprecated. Use cue.value.data instead."),t.value.data}},privateData:{get:function(){return tr.log.warn("cue.frame.privateData is deprecated. Use cue.value.data instead."),t.value.data}}}),s.addCue(i)})}),s.cues&&s.cues.length)){for(var o=s.cues,u=[],l=0;l=e&&r.endTime<=t&&i.removeCue(r)}function al(e){return"number"==typeof e&&isFinite(e)}function sl(e){var t=e.startOfSegment,i=e.duration,n=e.segment,r=e.part,a=e.playlist,s=a.mediaSequence,o=a.id,u=a.segments,l=e.mediaIndex,c=e.partIndex,d=e.timeline,h=(void 0===u?[]:u).length-1,p="mediaIndex/partIndex increment";return e.getMediaInfoForTime?p="getMediaInfoForTime ("+e.getMediaInfoForTime+")":e.isSyncRequest&&(p="getSyncSegmentCandidate (isSyncRequest)"),e.independent&&(p+=" with independent "+e.independent),a="number"==typeof c,u=e.segment.uri?"segment":"pre-segment",e=a?zo({preloadSegment:n})-1:0,u+" ["+(s+l)+"/"+(s+h)+"]"+(a?" part ["+c+"/"+e+"]":"")+" segment start/end ["+n.start+" => "+n.end+"]"+(a?" 
part start/end ["+r.start+" => "+r.end+"]":"")+" startOfSegment ["+t+"] duration ["+i+"] timeline ["+d+"] selected by ["+p+"] playlist ["+o+"]"}function ol(e){return e+"TimingInfo"}function ul(e){var t=e.timelineChangeController,i=e.currentTimeline,n=e.segmentTimeline,r=e.loaderType,e=e.audioDisabled;if(i!==n){if("audio"===r){i=t.lastTimelineChange({type:"main"});return!i||i.to!==n}if("main"===r&&e){t=t.pendingTimelineChange({type:"audio"});return t&&t.to===n?!1:!0}}}function ll(e){var t=e.segmentDuration,e=e.maxDuration;return!!t&&Math.round(t)>e+fl}function cl(e,t){if("hls"!==t)return null;var n,r,i=(n={audioTimingInfo:e.audioTimingInfo,videoTimingInfo:e.videoTimingInfo},r=0,["video","audio"].forEach(function(e){var t,i=n[e+"TimingInfo"];i&&(e=i.start,i=i.end,"bigint"==typeof e||"bigint"==typeof i?t=window.BigInt(i)-window.BigInt(e):"number"==typeof e&&"number"==typeof i&&(t=i-e),"undefined"!=typeof t&&r=r+i)return o(e,{response:n.subarray(i,i+r),status:t.status,uri:t.uri});u.request=u.vhs_.xhr({uri:s,responseType:"arraybuffer",headers:vu({byterange:a.sidx.byterange})},o)})):this.mediaRequest_=window.setTimeout(function(){return r(!1)},0)},t.dispose=function(){this.trigger("dispose"),this.stopRequest(),this.loadedPlaylists_={},window.clearTimeout(this.minimumUpdatePeriodTimeout_),window.clearTimeout(this.mediaRequest_),window.clearTimeout(this.mediaUpdateTimeout),this.mediaUpdateTimeout=null,this.mediaRequest_=null,this.minimumUpdatePeriodTimeout_=null,this.masterPlaylistLoader_.createMupOnMedia_&&(this.off("loadedmetadata",this.masterPlaylistLoader_.createMupOnMedia_),this.masterPlaylistLoader_.createMupOnMedia_=null),this.off()},t.hasPendingRequest=function(){return this.request||this.mediaRequest_},t.stopRequest=function(){var e;this.request&&(e=this.request,this.request=null,e.onreadystatechange=null,e.abort())},t.media=function(t){var i=this;if(!t)return this.media_;if("HAVE_NOTHING"===this.state)throw new Error("Cannot switch media playlist from "+this.state);var n=this.state;if("string"==typeof t){if(!this.masterPlaylistLoader_.master.playlists[t])throw new Error("Unknown playlist URI: "+t);t=this.masterPlaylistLoader_.master.playlists[t]}var e=!this.media_||t.id!==this.media_.id;if(e&&this.loadedPlaylists_[t.id]&&this.loadedPlaylists_[t.id].endList)return this.state="HAVE_METADATA",this.media_=t,void(e&&(this.trigger("mediachanging"),this.trigger("mediachange")));e&&(this.media_&&this.trigger("mediachanging"),this.addSidxSegments_(t,n,function(e){i.haveMetadata({startingState:n,playlist:t})}))},t.haveMetadata=function(e){var t=e.startingState,e=e.playlist;this.state="HAVE_METADATA",this.loadedPlaylists_[e.id]=e,this.mediaRequest_=null,this.refreshMedia_(e.id),"HAVE_MASTER"===t?this.trigger("loadedmetadata"):this.trigger("mediachange")},t.pause=function(){this.masterPlaylistLoader_.createMupOnMedia_&&(this.off("loadedmetadata",this.masterPlaylistLoader_.createMupOnMedia_),this.masterPlaylistLoader_.createMupOnMedia_=null),this.stopRequest(),window.clearTimeout(this.mediaUpdateTimeout),this.mediaUpdateTimeout=null,this.isMaster_&&(window.clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_),this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_=null),"HAVE_NOTHING"===this.state&&(this.started=!1)},t.load=function(e){var t=this;window.clearTimeout(this.mediaUpdateTimeout),this.mediaUpdateTimeout=null;var i=this.media();e?(e=i?i.targetDuration/2*1e3:5e3,this.mediaUpdateTimeout=window.setTimeout(function(){return 
t.load()},e)):this.started?i&&!i.endList?(this.isMaster_&&!this.minimumUpdatePeriodTimeout_&&(this.trigger("minimumUpdatePeriod"),this.updateMinimumUpdatePeriodTimeout_()),this.trigger("mediaupdatetimeout")):this.trigger("loadedplaylist"):this.start()},t.start=function(){var i=this;this.started=!0,this.isMaster_?this.requestMaster_(function(e,t){i.haveMaster_(),i.hasPendingRequest()||i.media_||i.media(i.masterPlaylistLoader_.master.playlists[0])}):this.mediaRequest_=window.setTimeout(function(){return i.haveMaster_()},0)},t.requestMaster_=function(n){var r=this;this.request=this.vhs_.xhr({uri:this.masterPlaylistLoader_.srcUrl,withCredentials:this.withCredentials},function(e,t){if(!r.requestErrored_(e,t)){var i=t.responseText!==r.masterPlaylistLoader_.masterXml_;return r.masterPlaylistLoader_.masterXml_=t.responseText,t.responseHeaders&&t.responseHeaders.date?r.masterLoaded_=Date.parse(t.responseHeaders.date):r.masterLoaded_=Date.now(),r.masterPlaylistLoader_.srcUrl=Mo(r.handleManifestRedirects,r.masterPlaylistLoader_.srcUrl,t),i?(r.handleMaster_(),void r.syncClientServerClock_(function(){return n(t,i)})):n(t,i)}"HAVE_NOTHING"===r.state&&(r.started=!1)})},t.syncClientServerClock_=function(i){var n=this,r=Zs(this.masterPlaylistLoader_.masterXml_);return null===r?(this.masterPlaylistLoader_.clientOffset_=this.masterLoaded_-Date.now(),i()):"DIRECT"===r.method?(this.masterPlaylistLoader_.clientOffset_=r.value-Date.now(),i()):void(this.request=this.vhs_.xhr({uri:pl(this.masterPlaylistLoader_.srcUrl,r.value),method:r.method,withCredentials:this.withCredentials},function(e,t){if(n.request){if(e)return n.masterPlaylistLoader_.clientOffset_=n.masterLoaded_-Date.now(),i();t="HEAD"===r.method?t.responseHeaders&&t.responseHeaders.date?Date.parse(t.responseHeaders.date):n.masterLoaded_:Date.parse(t.responseText);n.masterPlaylistLoader_.clientOffset_=t-Date.now(),i()}}))},t.haveMaster_=function(){this.state="HAVE_MASTER",this.isMaster_?this.trigger("loadedplaylist"):this.media_||this.media(this.childPlaylist_)},t.handleMaster_=function(){this.mediaRequest_=null;var e,t,i,n,r=this.masterPlaylistLoader_.master,t=(a={masterXml:this.masterPlaylistLoader_.masterXml_,srcUrl:this.masterPlaylistLoader_.srcUrl,clientOffset:this.masterPlaylistLoader_.clientOffset_,sidxMapping:this.masterPlaylistLoader_.sidxMapping_,previousManifest:r},e=a.masterXml,t=a.srcUrl,i=a.clientOffset,n=a.sidxMapping,a=a.previousManifest,a=Js(e,{manifestUri:t,clientOffset:i,sidxMapping:n,previousManifest:a}),lu(a,t),a);r&&(t=function(e,t,i){for(var a=!0,s=Sl(e,{duration:t.duration,minimumUpdatePeriod:t.minimumUpdatePeriod,timelineStarts:t.timelineStarts}),n=0;n>>1,e.samplingfrequencyindex<<7|e.channelcount<<3,6,1,2]))},f=function(e){return u(T.hdlr,I[e])},p=function(e){var t=new Uint8Array([0,0,0,0,0,0,0,2,0,0,0,3,0,1,95,144,e.duration>>>24&255,e.duration>>>16&255,e.duration>>>8&255,255&e.duration,85,196,0,0]);return e.samplerate&&(t[12]=e.samplerate>>>24&255,t[13]=e.samplerate>>>16&255,t[14]=e.samplerate>>>8&255,t[15]=255&e.samplerate),u(T.mdhd,t)},h=function(e){return u(T.mdia,p(e),f(e.type),a(e))},r=function(e){return u(T.mfhd,new Uint8Array([0,0,0,0,(4278190080&e)>>24,(16711680&e)>>16,(65280&e)>>8,255&e]))},a=function(e){return u(T.minf,"video"===e.type?u(T.vmhd,x):u(T.smhd,A),t(),g(e))},We=function(e,t){for(var i=[],n=t.length;n--;)i[n]=v(t[n]);return u.apply(null,[T.moof,r(e)].concat(i))},s=function(e){for(var t=e.length,i=[];t--;)i[t]=c(e[t]);return u.apply(null,[T.moov,l(4294967295)].concat(i).concat(o(e)))},o=function(e){for(var 
t=e.length,i=[];t--;)i[t]=_(e[t]);return u.apply(null,[T.mvex].concat(i))},l=function(e){e=new Uint8Array([0,0,0,0,0,0,0,1,0,0,0,2,0,1,95,144,(4278190080&e)>>24,(16711680&e)>>16,(65280&e)>>8,255&e,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,255,255,255,255]);return u(T.mvhd,e)},m=function(e){for(var t,i=e.samples||[],n=new Uint8Array(4+i.length),r=0;r>>8),a.push(255&n[o].byteLength),a=a.concat(Array.prototype.slice.call(n[o]));for(o=0;o>>8),s.push(255&r[o].byteLength),s=s.concat(Array.prototype.slice.call(r[o]));return t=[T.avc1,new Uint8Array([0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,(65280&e.width)>>8,255&e.width,(65280&e.height)>>8,255&e.height,0,72,0,0,0,72,0,0,0,0,0,0,0,1,19,118,105,100,101,111,106,115,45,99,111,110,116,114,105,98,45,104,108,115,0,0,0,0,0,0,0,0,0,0,0,0,0,24,17,17]),u(T.avcC,new Uint8Array([1,e.profileIdc,e.profileCompatibility,e.levelIdc,255].concat([n.length],a,[r.length],s))),u(T.btrt,new Uint8Array([0,28,156,128,0,45,198,192,0,45,198,192]))],e.sarRatio&&(i=e.sarRatio[0],e=e.sarRatio[1],t.push(u(T.pasp,new Uint8Array([(4278190080&i)>>24,(16711680&i)>>16,(65280&i)>>8,255&i,(4278190080&e)>>24,(16711680&e)>>16,(65280&e)>>8,255&e])))),u.apply(null,t)},N=function(e){return u(T.mp4a,new Uint8Array([0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,(65280&e.channelcount)>>8,255&e.channelcount,(65280&e.samplesize)>>8,255&e.samplesize,0,0,0,0,(65280&e.samplerate)>>8,255&e.samplerate,0,0]),i(e))},d=function(e){e=new Uint8Array([0,0,0,7,0,0,0,0,0,0,0,0,(4278190080&e.id)>>24,(16711680&e.id)>>16,(65280&e.id)>>8,255&e.id,0,0,0,0,(4278190080&e.duration)>>24,(16711680&e.duration)>>16,(65280&e.duration)>>8,255&e.duration,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,(65280&e.width)>>8,255&e.width,0,0,(65280&e.height)>>8,255&e.height,0,0]);return u(T.tkhd,e)},v=function(e){var t,i=u(T.tfhd,new Uint8Array([0,0,0,58,(4278190080&e.id)>>24,(16711680&e.id)>>16,(65280&e.id)>>8,255&e.id,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0])),n=Math.floor(e.baseMediaDecodeTime/V),r=Math.floor(e.baseMediaDecodeTime%V),n=u(T.tfdt,new Uint8Array([1,0,0,0,n>>>24&255,n>>>16&255,n>>>8&255,255&n,r>>>24&255,r>>>16&255,r>>>8&255,255&r]));return"audio"===e.type?(t=b(e,92),u(T.traf,i,n,t)):(r=m(e),t=b(e,r.length+92),u(T.traf,i,n,t,r))},c=function(e){return e.duration=e.duration||4294967295,u(T.trak,d(e),h(e))},_=function(e){var t=new Uint8Array([0,0,0,0,(4278190080&e.id)>>24,(16711680&e.id)>>16,(65280&e.id)>>8,255&e.id,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1]);return"video"!==e.type&&(t[t.length-1]=0),u(T.trex,t)},U=function(e,t){var i=0,n=0,r=0,a=0;return e.length&&(void 0!==e[0].duration&&(i=1),void 0!==e[0].size&&(n=2),void 0!==e[0].flags&&(r=4),void 0!==e[0].compositionTimeOffset&&(a=8)),[0,0,i|n|r|a,1,(4278190080&e.length)>>>24,(16711680&e.length)>>>16,(65280&e.length)>>>8,255&e.length,(4278190080&t)>>>24,(16711680&t)>>>16,(65280&t)>>>8,255&t]},B=function(e,t){var i,n,r,a,s=e.samples||[];for(t+=20+16*s.length,t=U(s,t),(n=new 
Uint8Array(t.length+16*s.length)).set(t),i=t.length,a=0;a>>24,n[i++]=(16711680&r.duration)>>>16,n[i++]=(65280&r.duration)>>>8,n[i++]=255&r.duration,n[i++]=(4278190080&r.size)>>>24,n[i++]=(16711680&r.size)>>>16,n[i++]=(65280&r.size)>>>8,n[i++]=255&r.size,n[i++]=r.flags.isLeading<<2|r.flags.dependsOn,n[i++]=r.flags.isDependedOn<<6|r.flags.hasRedundancy<<4|r.flags.paddingValue<<1|r.flags.isNonSyncSample,n[i++]=61440&r.flags.degradationPriority,n[i++]=15&r.flags.degradationPriority,n[i++]=(4278190080&r.compositionTimeOffset)>>>24,n[i++]=(16711680&r.compositionTimeOffset)>>>16,n[i++]=(65280&r.compositionTimeOffset)>>>8,n[i++]=255&r.compositionTimeOffset;return u(T.trun,n)},F=function(e,t){var i,n,r,a,s=e.samples||[];for(t+=20+8*s.length,t=U(s,t),(i=new Uint8Array(t.length+8*s.length)).set(t),n=t.length,a=0;a>>24,i[n++]=(16711680&r.duration)>>>16,i[n++]=(65280&r.duration)>>>8,i[n++]=255&r.duration,i[n++]=(4278190080&r.size)>>>24,i[n++]=(16711680&r.size)>>>16,i[n++]=(65280&r.size)>>>8,i[n++]=255&r.size;return u(T.trun,i)},b=function(e,t){return("audio"===e.type?F:B)(e,t)};n=function(){return u(T.ftyp,S,w,S,E)};function W(e,t){var i={size:0,flags:{isLeading:0,dependsOn:1,isDependedOn:0,hasRedundancy:0,degradationPriority:0,isNonSyncSample:1}};return i.dataOffset=t,i.compositionTimeOffset=e.pts-e.dts,i.duration=e.duration,i.size=4*e.length,i.size+=e.byteLength,e.keyFrame&&(i.flags.dependsOn=2,i.flags.isNonSyncSample=0),i}function G(e){for(var t=[];e--;)t.push(0);return t}function z(){var e,i;return X||(e={96e3:[ie,[227,64],G(154),[56]],88200:[ie,[231],G(170),[56]],64e3:[ie,[248,192],G(240),[56]],48e3:[ie,[255,192],G(268),[55,148,128],G(54),[112]],44100:[ie,[255,192],G(268),[55,163,128],G(84),[112]],32e3:[ie,[255,192],G(268),[55,234],G(226),[112]],24e3:[ie,[255,192],G(268),[55,255,128],G(268),[111,112],G(126),[224]],16e3:[ie,[255,192],G(268),[55,255,128],G(268),[111,255],G(269),[223,108],G(195),[1,192]],12e3:[ne,G(268),[3,127,248],G(268),[6,255,240],G(268),[13,255,224],G(268),[27,253,128],G(259),[56]],11025:[ne,G(268),[3,127,248],G(268),[6,255,240],G(268),[13,255,224],G(268),[27,255,192],G(268),[55,175,128],G(108),[112]],8e3:[ne,G(268),[3,121,16],G(47),[7]]},i=e,X=Object.keys(i).reduce(function(e,t){return e[t]=new Uint8Array(i[t].reduce(function(e,t){return e.concat(t)},[])),e},{})),X}var X,K=function(e){return u(T.mdat,e)},Y=We,Q=function(e){var t=n(),i=s(e),e=new Uint8Array(t.byteLength+i.byteLength);return e.set(t),e.set(i,t.byteLength),e},$=function(e){var t,i,n=[],r=[];for(r.byteLength=0,r.nalCount=0,r.duration=0,t=n.byteLength=0;t=i?e:(t.minSegmentDts=1/0,e.filter(function(e){return e.dts>=i&&(t.minSegmentDts=Math.min(t.minSegmentDts,e.dts),t.minSegmentPts=t.minSegmentDts,!0)}))},ge=function(e){for(var t,i=[],n=0;n=this.virtualRowCount&&"function"==typeof this.beforeRowOverflow&&this.beforeRowOverflow(e),0this.virtualRowCount;)this.rows.shift(),this.rowIdx--},Ae.prototype.isEmpty=function(){return 0===this.rows.length||1===this.rows.length&&""===this.rows[0]},Ae.prototype.addText=function(e){this.rows[this.rowIdx]+=e},Ae.prototype.backspace=function(){var e;this.isEmpty()||(e=this.rows[this.rowIdx],this.rows[this.rowIdx]=e.substr(0,e.length-1))};function Le(e,t,i){this.serviceNum=e,this.text="",this.currentWindow=new Ae(-1),this.windows=[],this.stream=i,"string"==typeof t&&this.createTextDecoder(t)}Le.prototype.init=function(e,t){this.startPts=e;for(var i=0;i<8;i++)this.windows[i]=new Ae(i),"function"==typeof 
t&&(this.windows[i].beforeRowOverflow=t)},Le.prototype.setCurrentWindow=function(e){this.currentWindow=this.windows[e]},Le.prototype.createTextDecoder=function(t){if("undefined"==typeof TextDecoder)this.stream.trigger("log",{level:"warn",message:"The `encoding` option is unsupported without TextDecoder support"});else try{this.textDecoder_=new TextDecoder(t)}catch(e){this.stream.trigger("log",{level:"warn",message:"TextDecoder could not be created with "+t+" encoding. "+e})}};var De=function e(t){t=t||{},e.prototype.init.call(this);var i,n=this,r=t.captionServices||{},a={};Object.keys(r).forEach(function(e){i=r[e],/^SERVICE/.test(e)&&(a[e]=i.encoding)}),this.serviceEncodings=a,this.current708Packet=null,this.services={},this.push=function(e){(3===e.type||null===n.current708Packet)&&n.new708Packet(),n.add708Bytes(e)}};De.prototype=new j,De.prototype.new708Packet=function(){null!==this.current708Packet&&this.push708Packet(),this.current708Packet={data:[],ptsVals:[]}},De.prototype.add708Bytes=function(e){var t=e.ccData,i=t>>>8,t=255&t;this.current708Packet.ptsVals.push(e.pts),this.current708Packet.data.push(i),this.current708Packet.data.push(t)},De.prototype.push708Packet=function(){var e,t=this.current708Packet,i=t.data,n=null,r=0,a=i[r++];for(t.seq=a>>6,t.sizeCode=63&a;r>5)&&0>5,t.rowLock=(16&n)>>4,t.columnLock=(8&n)>>3,t.priority=7&n,n=i[++e],t.relativePositioning=(128&n)>>7,t.anchorVertical=127&n,n=i[++e],t.anchorHorizontal=n,n=i[++e],t.anchorPoint=(240&n)>>4,t.rowCount=15&n,n=i[++e],t.columnCount=63&n,n=i[++e],t.windowStyle=(56&n)>>3,t.penStyle=7&n,t.virtualRowCount=t.rowCount+1,e},De.prototype.setWindowAttributes=function(e,t){var i=this.current708Packet.data,n=i[e],t=t.currentWindow.winAttr,n=i[++e];return t.fillOpacity=(192&n)>>6,t.fillRed=(48&n)>>4,t.fillGreen=(12&n)>>2,t.fillBlue=3&n,n=i[++e],t.borderType=(192&n)>>6,t.borderRed=(48&n)>>4,t.borderGreen=(12&n)>>2,t.borderBlue=3&n,n=i[++e],t.borderType+=(128&n)>>5,t.wordWrap=(64&n)>>6,t.printDirection=(48&n)>>4,t.scrollDirection=(12&n)>>2,t.justify=3&n,n=i[++e],t.effectSpeed=(240&n)>>4,t.effectDirection=(12&n)>>2,t.displayEffect=3&n,e},De.prototype.flushDisplayed=function(e,t){for(var i=[],n=0;n<8;n++)t.windows[n].visible&&!t.windows[n].isEmpty()&&i.push(t.windows[n].getText());t.endPts=e,t.text=i.join("\n\n"),this.pushCaption(t),t.startPts=e},De.prototype.pushCaption=function(e){""!==e.text&&(this.trigger("data",{startPts:e.startPts,endPts:e.endPts,text:e.text,stream:"cc708_"+e.serviceNum}),e.text="",e.startPts=e.endPts)},De.prototype.displayWindows=function(e,t){var i=this.current708Packet.data[++e],n=this.getPts(e);this.flushDisplayed(n,t);for(var r=0;r<8;r++)i&1<>4,t.offset=(12&n)>>2,t.penSize=3&n,n=i[++e],t.italics=(128&n)>>7,t.underline=(64&n)>>6,t.edgeType=(56&n)>>3,t.fontStyle=7&n,e},De.prototype.setPenColor=function(e,t){var i=this.current708Packet.data,n=i[e],t=t.currentWindow.penColor,n=i[++e];return t.fgOpacity=(192&n)>>6,t.fgRed=(48&n)>>4,t.fgGreen=(12&n)>>2,t.fgBlue=3&n,n=i[++e],t.bgOpacity=(192&n)>>6,t.bgRed=(48&n)>>4,t.bgGreen=(12&n)>>2,t.bgBlue=3&n,n=i[++e],t.edgeRed=(48&n)>>4,t.edgeGreen=(12&n)>>2,t.edgeBlue=3&n,e},De.prototype.setPenLocation=function(e,t){var i=this.current708Packet.data,n=i[e],r=t.currentWindow.penLoc;return t.currentWindow.pendingNewLine=!0,n=i[++e],r.row=15&n,n=i[++e],r.column=63&n,e},De.prototype.reset=function(e,t){var i=this.getPts(e);return this.flushDisplayed(i,t),this.initService(t.serviceNum,e)};function Oe(e){return null===e?"":(e=Re[e]||e,String.fromCharCode(e))}function Me(){for(var 
e=[],t=15;t--;)e.push("");return e}var Re={42:225,92:233,94:237,95:243,96:250,123:231,124:247,125:209,126:241,127:9608,304:174,305:176,306:189,307:191,308:8482,309:162,310:163,311:9834,312:224,313:160,314:232,315:226,316:234,317:238,318:244,319:251,544:193,545:201,546:211,547:218,548:220,549:252,550:8216,551:161,552:42,553:39,554:8212,555:169,556:8480,557:8226,558:8220,559:8221,560:192,561:194,562:199,563:200,564:202,565:203,566:235,567:206,568:207,569:239,570:212,571:217,572:249,573:219,574:171,575:187,800:195,801:227,802:205,803:204,804:236,805:210,806:242,807:213,808:245,809:123,810:125,811:92,812:94,813:95,814:124,815:126,816:196,817:228,818:214,819:246,820:223,821:165,822:164,823:9474,824:197,825:229,826:216,827:248,828:9484,829:9488,830:9492,831:9496},Ne=[4352,4384,4608,4640,5376,5408,5632,5664,5888,5920,4096,4864,4896,5120,5152],Ue=function e(t,i){e.prototype.init.call(this),this.field_=t||0,this.dataChannel_=i||0,this.name_="CC"+(1+(this.field_<<1|this.dataChannel_)),this.setConstants(),this.reset(),this.push=function(e){var t,i,n,r,a=32639&e.ccData;a!==this.lastControlCode_?(4096==(61440&a)?this.lastControlCode_=a:a!==this.PADDING_&&(this.lastControlCode_=null),t=a>>>8,i=255&a,a===this.PADDING_||(a===this.RESUME_CAPTION_LOADING_?this.mode_="popOn":a===this.END_OF_CAPTION_?(this.mode_="popOn",this.clearFormatting(e.pts),this.flushDisplayed(e.pts),r=this.displayed_,this.displayed_=this.nonDisplayed_,this.nonDisplayed_=r,this.startPts_=e.pts):a===this.ROLL_UP_2_ROWS_?(this.rollUpRows_=2,this.setRollUp(e.pts)):a===this.ROLL_UP_3_ROWS_?(this.rollUpRows_=3,this.setRollUp(e.pts)):a===this.ROLL_UP_4_ROWS_?(this.rollUpRows_=4,this.setRollUp(e.pts)):a===this.CARRIAGE_RETURN_?(this.clearFormatting(e.pts),this.flushDisplayed(e.pts),this.shiftRowsUp_(),this.startPts_=e.pts):a===this.BACKSPACE_?"popOn"===this.mode_?this.nonDisplayed_[this.row_]=this.nonDisplayed_[this.row_].slice(0,-1):this.displayed_[this.row_]=this.displayed_[this.row_].slice(0,-1):a===this.ERASE_DISPLAYED_MEMORY_?(this.flushDisplayed(e.pts),this.displayed_=Me()):a===this.ERASE_NON_DISPLAYED_MEMORY_?this.nonDisplayed_=Me():a===this.RESUME_DIRECT_CAPTIONING_?("paintOn"!==this.mode_&&(this.flushDisplayed(e.pts),this.displayed_=Me()),this.mode_="paintOn",this.startPts_=e.pts):this.isSpecialCharacter(t,i)?(n=Oe((t=(3&t)<<8)|i),this[this.mode_](e.pts,n),this.column_++):this.isExtCharacter(t,i)?("popOn"===this.mode_?this.nonDisplayed_[this.row_]=this.nonDisplayed_[this.row_].slice(0,-1):this.displayed_[this.row_]=this.displayed_[this.row_].slice(0,-1),n=Oe((t=(3&t)<<8)|i),this[this.mode_](e.pts,n),this.column_++):this.isMidRowCode(t,i)?(this.clearFormatting(e.pts),this[this.mode_](e.pts," "),this.column_++,14==(14&i)&&this.addFormatting(e.pts,["i"]),1==(1&i)&&this.addFormatting(e.pts,["u"])):this.isOffsetControlCode(t,i)?this.column_+=3&i:this.isPAC(t,i)?(r=Ne.indexOf(7968&a),"rollUp"===this.mode_&&(r-this.rollUpRows_+1<0&&(r=this.rollUpRows_-1),this.setRollUp(e.pts,r)),r!==this.row_&&(this.clearFormatting(e.pts),this.row_=r),1&i&&-1===this.formatting_.indexOf("u")&&this.addFormatting(e.pts,["u"]),16==(16&a)&&(this.column_=4*((14&a)>>1)),this.isColorPAC(i)&&14==(14&i)&&this.addFormatting(e.pts,["i"])):this.isNormalChar(t)&&(0===i&&(i=null),n=Oe(t),n+=Oe(i),this[this.mode_](e.pts,n),this.column_+=n.length))):this.lastControlCode_=null}};Ue.prototype=new j,Ue.prototype.flushDisplayed=function(e){var t=this.displayed_.map(function(e,t){try{return e.trim()}catch(e){return this.trigger("log",{level:"warn",message:"Skipping a malformed 
608 caption at index "+t+"."}),""}},this).join("\n").replace(/^\n+|\n+$/g,"");t.length&&this.trigger("data",{startPts:this.startPts_,endPts:e,text:t,stream:this.name_})},Ue.prototype.reset=function(){this.mode_="popOn",this.topRow_=0,this.startPts_=0,this.displayed_=Me(),this.nonDisplayed_=Me(),this.lastControlCode_=null,this.column_=0,this.row_=14,this.rollUpRows_=2,this.formatting_=[]},Ue.prototype.setConstants=function(){0===this.dataChannel_?(this.BASE_=16,this.EXT_=17,this.CONTROL_=(20|this.field_)<<8,this.OFFSET_=23):1===this.dataChannel_&&(this.BASE_=24,this.EXT_=25,this.CONTROL_=(28|this.field_)<<8,this.OFFSET_=31),this.PADDING_=0,this.RESUME_CAPTION_LOADING_=32|this.CONTROL_,this.END_OF_CAPTION_=47|this.CONTROL_,this.ROLL_UP_2_ROWS_=37|this.CONTROL_,this.ROLL_UP_3_ROWS_=38|this.CONTROL_,this.ROLL_UP_4_ROWS_=39|this.CONTROL_,this.CARRIAGE_RETURN_=45|this.CONTROL_,this.RESUME_DIRECT_CAPTIONING_=41|this.CONTROL_,this.BACKSPACE_=33|this.CONTROL_,this.ERASE_DISPLAYED_MEMORY_=44|this.CONTROL_,this.ERASE_NON_DISPLAYED_MEMORY_=46|this.CONTROL_},Ue.prototype.isSpecialCharacter=function(e,t){return e===this.EXT_&&48<=t&&t<=63},Ue.prototype.isExtCharacter=function(e,t){return(e===this.EXT_+1||e===this.EXT_+2)&&32<=t&&t<=63},Ue.prototype.isMidRowCode=function(e,t){return e===this.EXT_&&32<=t&&t<=47},Ue.prototype.isOffsetControlCode=function(e,t){return e===this.OFFSET_&&33<=t&&t<=35},Ue.prototype.isPAC=function(e,t){return e>=this.BASE_&&e"},"");this[this.mode_](e,t)},Ue.prototype.clearFormatting=function(e){var t;this.formatting_.length&&(t=this.formatting_.reverse().reduce(function(e,t){return e+""+t+">"},""),this.formatting_=[],this[this.mode_](e,t))},Ue.prototype.popOn=function(e,t){var i=this.nonDisplayed_[this.row_];this.nonDisplayed_[this.row_]=i+=t},Ue.prototype.rollUp=function(e,t){var i=this.displayed_[this.row_];this.displayed_[this.row_]=i+=t},Ue.prototype.shiftRowsUp_=function(){for(var e=0;e>>2,s*=4,s+=3&a[7],o.timeStamp=s,void 0===t.pts&&void 0===t.dts&&(t.pts=o.timeStamp,t.dts=o.timeStamp),this.trigger("timestamp",o))),t.frames.push(o),i+=10,(i+=n)>>4&&(i+=e[i]+1),0===t.pid)t.type="pat",n(e.subarray(i),t),this.trigger("data",t);else if(t.pid===this.pmtPid)for(t.type="pmt",n(e.subarray(i),t),this.trigger("data",t);this.packetsWaitingForPmt.length;)this.processPes_.apply(this,this.packetsWaitingForPmt.shift());else void 0===this.programMapTable?this.packetsWaitingForPmt.push([e,i,t]):this.processPes_(e,i,t)},this.processPes_=function(e,t,i){i.pid===this.programMapTable.video?i.streamType=je.H264_STREAM_TYPE:i.pid===this.programMapTable.audio?i.streamType=je.ADTS_STREAM_TYPE:i.streamType=this.programMapTable["timed-metadata"][i.pid],i.type="pes",i.data=e.subarray(t),this.trigger("data",i)}}).prototype=new j,Xe.STREAM_TYPES={h264:27,adts:15},(Ke=function(){function n(e,t,i){var n,r,a,s,o=new Uint8Array(e.size),u={type:t},l=0,c=0;if(e.data.length&&!(e.size<9)){for(u.trackId=e.data[0].pid,l=0;l>>3,a.pts*=4,a.pts+=(6&r[13])>>>1,a.dts=a.pts,64&s&&(a.dts=(14&r[14])<<27|(255&r[15])<<20|(254&r[16])<<12|(255&r[17])<<5|(254&r[18])>>>3,a.dts*=4,a.dts+=(6&r[18])>>>1)),a.data=r.subarray(9+r[8])),t="video"===t||u.packetLength<=e.size,(i||t)&&(e.size=0,e.data.length=0),t&&d.trigger("data",u)}}var t,d=this,r=!1,a={data:[],size:0},s={data:[],size:0},o={data:[],size:0};Ke.prototype.init.call(this),this.push=function(i){({pat:function(){},pes:function(){var e,t;switch(i.streamType){case je.H264_STREAM_TYPE:e=a,t="video";break;case je.ADTS_STREAM_TYPE:e=s,t="audio";break;case 
je.METADATA_STREAM_TYPE:e=o,t="timed-metadata";break;default:return}i.payloadUnitStartIndicator&&n(e,t,!0),e.data.push(i),e.size+=i.data.byteLength},pmt:function(){var e={type:"metadata",tracks:[]};null!==(t=i.programMapTable).video&&e.tracks.push({timelineStartInfo:{baseMediaDecodeTime:0},id:+t.video,codec:"avc",type:"video"}),null!==t.audio&&e.tracks.push({timelineStartInfo:{baseMediaDecodeTime:0},id:+t.audio,codec:"adts",type:"audio"}),r=!0,d.trigger("data",e)}})[i.type]()},this.reset=function(){a.size=0,a.data.length=0,s.size=0,s.data.length=0,this.trigger("reset")},this.flushStreams_=function(){n(a,"video"),n(s,"audio"),n(o,"timed-metadata")},this.flush=function(){var e;!r&&t&&(e={type:"metadata",tracks:[]},null!==t.video&&e.tracks.push({timelineStartInfo:{baseMediaDecodeTime:0},id:+t.video,codec:"avc",type:"video"}),null!==t.audio&&e.tracks.push({timelineStartInfo:{baseMediaDecodeTime:0},id:+t.audio,codec:"adts",type:"audio"}),d.trigger("data",e)),r=!1,this.flushStreams_(),this.trigger("done")}}).prototype=new j;var Qe,$e={PAT_PID:0,MP2T_PACKET_LENGTH:188,TransportPacketStream:Ye,TransportParseStream:Xe,ElementaryStream:Ke,TimestampRolloverStream:We,CaptionStream:Fe.CaptionStream,Cea608Stream:Fe.Cea608Stream,Cea708Stream:Fe.Cea708Stream,MetadataStream:e};for(Qe in je)je.hasOwnProperty(Qe)&&($e[Qe]=je[Qe]);var Je=$e,Ze=ue,et=[96e3,88200,64e3,48e3,44100,32e3,24e3,22050,16e3,12e3,11025,8e3,7350],tt=function(u){var l,c=0;tt.prototype.init.call(this),this.skipWarn_=function(e,t){this.trigger("log",{level:"warn",message:"adts skiping bytes "+e+" to "+t+" in frame "+c+" outside syncword"})},this.push=function(e){var t,i,n,r,a,s,o=0;if(u||(c=0),"audio"===e.type){for(l&&l.length?(n=l,(l=new Uint8Array(n.byteLength+e.data.byteLength)).set(n),l.set(e.data,n.byteLength)):l=e.data;o+7>5,a=(r=1024*(1+(3&l[o+6])))*Ze/et[(60&l[o+2])>>>2],l.byteLength-o>>6&3),channelcount:(1&l[o+2])<<2|(192&l[o+3])>>>6,samplerate:et[(60&l[o+2])>>>2],samplingfrequencyindex:(60&l[o+2])>>>2,samplesize:16,data:l.subarray(o+7+i,o+t)}),c++,o+=t}else"number"!=typeof s&&(s=o),o++;"number"==typeof s&&(this.skipWarn_(s,o),s=null),l=l.subarray(o)}},this.flush=function(){c=0,this.trigger("done")},this.reset=function(){l=void 0,this.trigger("reset")},this.endTimeline=function(){l=void 0,this.trigger("endedtimeline")}};tt.prototype=new j;var it,nt,rt=tt,at=function(n){var r=n.byteLength,a=0,s=0;this.length=function(){return 8*r},this.bitsAvailable=function(){return 8*r+s},this.loadWord=function(){var e=n.byteLength-r,t=new Uint8Array(4),i=Math.min(4,r);if(0===i)throw new Error("no bytes available");t.set(n.subarray(e,e+i)),a=new DataView(t.buffer).getUint32(0),s=8*i,r-=i},this.skipBits=function(e){var t;e>>32-t;return 0<(s-=t)?a<<=t:0>>e))return a<<=e,s-=e,e;return this.loadWord(),e+this.skipLeadingZeros()},this.skipUnsignedExpGolomb=function(){this.skipBits(1+this.skipLeadingZeros())},this.skipExpGolomb=function(){this.skipBits(1+this.skipLeadingZeros())},this.readUnsignedExpGolomb=function(){var e=this.skipLeadingZeros();return this.readBits(e+1)-1},this.readExpGolomb=function(){var e=this.readUnsignedExpGolomb();return 1&e?1+e>>>1:-1*(e>>>1)},this.readBoolean=function(){return 1===this.readBits(1)},this.readUnsignedByte=function(){return this.readBits(8)},this.loadWord()},st=function(){var n,r,a=0;st.prototype.init.call(this),this.push=function(e){for(var t,i=(r=r?((t=new Uint8Array(r.byteLength+e.data.byteLength)).set(r),t.set(e.data,r.byteLength),t):e.data).byteLength;a>4?i+20:i+10}function ut(e,t){return 
e.length-t<10||e[t]!=="I".charCodeAt(0)||e[t+1]!=="D".charCodeAt(0)||e[t+2]!=="3".charCodeAt(0)?t:ut(e,t+=ot(e,t))}function lt(e){return e[0]<<21|e[1]<<14|e[2]<<7|e[3]}var e={H264Stream:it,NalByteStream:st},ct=[96e3,88200,64e3,48e3,44100,32e3,24e3,22050,16e3,12e3,11025,8e3,7350],dt={isLikelyAacData:function(e){var t=ut(e,0);return e.length>=t+2&&255==(255&e[t])&&240==(240&e[t+1])&&16==(22&e[t+1])},parseId3TagSize:ot,parseAdtsSize:function(e,t){var i=(224&e[t+5])>>5,n=e[t+4]<<3;return 6144&e[t+3]|n|i},parseType:function(e,t){return e[t]==="I".charCodeAt(0)&&e[t+1]==="D".charCodeAt(0)&&e[t+2]==="3".charCodeAt(0)?"timed-metadata":!0&e[t]&&240==(240&e[t+1])?"audio":null},parseSampleRate:function(e){for(var t=0;t+5>>2];t++}return null},parseAacTimestamp:function(e){var t,i=10;64&e[5]&&(i+=4,i+=lt(e.subarray(10,14)));do{if((t=lt(e.subarray(i+4,i+8)))<1)return null;if("PRIV"===String.fromCharCode(e[i],e[i+1],e[i+2],e[i+3]))for(var n=e.subarray(i+10,i+t+10),r=0;r>>2;return s*=4,s+=3&a[7]}}while(i+=10,(i+=t)a.length)break;t={type:"audio",data:a.subarray(r,r+n),pts:s,dts:s},this.trigger("data",t),r+=n}else{if(a.length-r<10)break;if(r+(n=dt.parseId3TagSize(a,r))>a.length)break;t={type:"timed-metadata",data:a.subarray(r,r+n)},this.trigger("data",t),r+=n}e=a.length-r,a=0i.pts?u++:(t++,a-=n.byteLength,s-=n.nalCount,o-=n.duration);return 0===t?e:t===e.length?null:((r=e.slice(t)).byteLength=a,r.duration=o,r.nalCount=s,r.pts=r[0].pts,r.dts=r[0].dts,r)},this.alignGopsAtEnd_=function(e){for(var t,i,n=l.length-1,r=e.length-1,a=null,s=!1;0<=n&&0<=r;){if(t=l[n],i=e[r],t.pts===i.pts){s=!0;break}t.pts>i.pts?n--:(n===l.length-1&&(a=r),r--)}if(!s&&null===a)return null;if(0===(u=s?r:a))return e;var o=e.slice(u),u=o.reduce(function(e,t){return e.byteLength+=t.byteLength,e.duration+=t.duration,e.nalCount+=t.nalCount,e},{byteLength:0,duration:0,nalCount:0});return o.byteLength=u.byteLength,o.duration=u.duration,o.nalCount=u.nalCount,o.pts=o[0].pts,o.dts=o[0].dts,o},this.alignGopsWith=function(e){l=e}}).prototype=new j,(_t=function(e,t){this.numberOfTracks=0,this.metadataStream=t,"undefined"!=typeof(e=e||{}).remux?this.remuxTracks=!!e.remux:this.remuxTracks=!0,"boolean"==typeof e.keepOriginalTimestamps?this.keepOriginalTimestamps=e.keepOriginalTimestamps:this.keepOriginalTimestamps=!1,this.pendingTracks=[],this.videoTrack=null,this.pendingBoxes=[],this.pendingCaptions=[],this.pendingMetadata=[],this.pendingBytes=0,this.emittedTracks=0,_t.prototype.init.call(this),this.push=function(e){return e.text?this.pendingCaptions.push(e):e.frames?this.pendingMetadata.push(e):(this.pendingTracks.push(e.track),this.pendingBytes+=e.boxes.byteLength,"video"===e.track.type&&(this.videoTrack=e.track,this.pendingBoxes.push(e.boxes)),void("audio"===e.track.type&&(this.audioTrack=e.track,this.pendingBoxes.unshift(e.boxes))))}}).prototype=new j,_t.prototype.flush=function(e){var t,i,n,r=0,a={captions:[],captionStreams:{},metadata:[],info:{}},s=0;if(this.pendingTracks.length=this.numberOfTracks&&(this.trigger("done"),this.emittedTracks=0))}if(this.videoTrack?(s=this.videoTrack.timelineStartInfo.pts,St.forEach(function(e){a.info[e]=this.videoTrack[e]},this)):this.audioTrack&&(s=this.audioTrack.timelineStartInfo.pts,Tt.forEach(function(e){a.info[e]=this.audioTrack[e]},this)),this.videoTrack||this.audioTrack){for(1===this.pendingTracks.length?a.type=this.pendingTracks[0].type:a.type="combined",this.emittedTracks+=this.pendingTracks.length,e=Q(this.pendingTracks),a.initSegment=new Uint8Array(e.byteLength),a.initSegment.set(e),a.data=new 
Uint8Array(this.pendingBytes),n=0;n=this.numberOfTracks&&(this.trigger("done"),this.emittedTracks=0)},_t.prototype.setRemux=function(e){this.remuxTracks=e},(vt=function(n){var r,a,s=this,i=!0;vt.prototype.init.call(this),this.baseMediaDecodeTime=(n=n||{}).baseMediaDecodeTime||0,this.transmuxPipeline_={},this.setupAacPipeline=function(){var t={};(this.transmuxPipeline_=t).type="aac",t.metadataStream=new Je.MetadataStream,t.aacStream=new bt,t.audioTimestampRolloverStream=new Je.TimestampRolloverStream("audio"),t.timedMetadataTimestampRolloverStream=new Je.TimestampRolloverStream("timed-metadata"),t.adtsStream=new rt,t.coalesceStream=new _t(n,t.metadataStream),t.headOfPipeline=t.aacStream,t.aacStream.pipe(t.audioTimestampRolloverStream).pipe(t.adtsStream),t.aacStream.pipe(t.timedMetadataTimestampRolloverStream).pipe(t.metadataStream).pipe(t.coalesceStream),t.metadataStream.on("timestamp",function(e){t.aacStream.setTimestamp(e.timeStamp)}),t.aacStream.on("data",function(e){"timed-metadata"!==e.type&&"audio"!==e.type||t.audioSegmentStream||(a=a||{timelineStartInfo:{baseMediaDecodeTime:s.baseMediaDecodeTime},codec:"adts",type:"audio"},t.coalesceStream.numberOfTracks++,t.audioSegmentStream=new Ct(a,n),t.audioSegmentStream.on("log",s.getLogTrigger_("audioSegmentStream")),t.audioSegmentStream.on("timingInfo",s.trigger.bind(s,"audioTimingInfo")),t.adtsStream.pipe(t.audioSegmentStream).pipe(t.coalesceStream),s.trigger("trackinfo",{hasAudio:!!a,hasVideo:!!r}))}),t.coalesceStream.on("data",this.trigger.bind(this,"data")),t.coalesceStream.on("done",this.trigger.bind(this,"done")),ft(this,t)},this.setupTsPipeline=function(){var i={};(this.transmuxPipeline_=i).type="ts",i.metadataStream=new Je.MetadataStream,i.packetStream=new Je.TransportPacketStream,i.parseStream=new Je.TransportParseStream,i.elementaryStream=new Je.ElementaryStream,i.timestampRolloverStream=new Je.TimestampRolloverStream,i.adtsStream=new rt,i.h264Stream=new wt,i.captionStream=new Je.CaptionStream(n),i.coalesceStream=new _t(n,i.metadataStream),i.headOfPipeline=i.packetStream,i.packetStream.pipe(i.parseStream).pipe(i.elementaryStream).pipe(i.timestampRolloverStream),i.timestampRolloverStream.pipe(i.h264Stream),i.timestampRolloverStream.pipe(i.adtsStream),i.timestampRolloverStream.pipe(i.metadataStream).pipe(i.coalesceStream),i.h264Stream.pipe(i.captionStream).pipe(i.coalesceStream),i.elementaryStream.on("data",function(e){var t;if("metadata"===e.type){for(t=e.tracks.length;t--;)r||"video"!==e.tracks[t].type?a||"audio"!==e.tracks[t].type||((a=e.tracks[t]).timelineStartInfo.baseMediaDecodeTime=s.baseMediaDecodeTime):(r=e.tracks[t]).timelineStartInfo.baseMediaDecodeTime=s.baseMediaDecodeTime;r&&!i.videoSegmentStream&&(i.coalesceStream.numberOfTracks++,i.videoSegmentStream=new yt(r,n),i.videoSegmentStream.on("log",s.getLogTrigger_("videoSegmentStream")),i.videoSegmentStream.on("timelineStartInfo",function(e){a&&!n.keepOriginalTimestamps&&(a.timelineStartInfo=e,i.audioSegmentStream.setEarliestDts(e.dts-s.baseMediaDecodeTime))}),i.videoSegmentStream.on("processedGopsInfo",s.trigger.bind(s,"gopInfo")),i.videoSegmentStream.on("segmentTimingInfo",s.trigger.bind(s,"videoSegmentTimingInfo")),i.videoSegmentStream.on("baseMediaDecodeTime",function(e){a&&i.audioSegmentStream.setVideoBaseMediaDecodeTime(e)}),i.videoSegmentStream.on("timingInfo",s.trigger.bind(s,"videoTimingInfo")),i.h264Stream.pipe(i.videoSegmentStream).pipe(i.coalesceStream)),a&&!i.audioSegmentStream&&(i.coalesceStream.numberOfTracks++,i.audioSegmentStream=new 
Ct(a,n),i.audioSegmentStream.on("log",s.getLogTrigger_("audioSegmentStream")),i.audioSegmentStream.on("timingInfo",s.trigger.bind(s,"audioTimingInfo")),i.audioSegmentStream.on("segmentTimingInfo",s.trigger.bind(s,"audioSegmentTimingInfo")),i.adtsStream.pipe(i.audioSegmentStream).pipe(i.coalesceStream)),s.trigger("trackinfo",{hasAudio:!!a,hasVideo:!!r})}}),i.coalesceStream.on("data",this.trigger.bind(this,"data")),i.coalesceStream.on("id3Frame",function(e){e.dispatchType=i.metadataStream.dispatchType,s.trigger("id3Frame",e)}),i.coalesceStream.on("caption",this.trigger.bind(this,"caption")),i.coalesceStream.on("done",this.trigger.bind(this,"done")),ft(this,i)},this.setBaseMediaDecodeTime=function(e){var t=this.transmuxPipeline_;n.keepOriginalTimestamps||(this.baseMediaDecodeTime=e),a&&(a.timelineStartInfo.dts=void 0,a.timelineStartInfo.pts=void 0,_e(a),t.audioTimestampRolloverStream&&t.audioTimestampRolloverStream.discontinuity()),r&&(t.videoSegmentStream&&(t.videoSegmentStream.gopCache_=[]),r.timelineStartInfo.dts=void 0,r.timelineStartInfo.pts=void 0,_e(r),t.captionStream.reset()),t.timestampRolloverStream&&t.timestampRolloverStream.discontinuity()},this.setAudioAppendStart=function(e){a&&this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(e)},this.setRemux=function(e){var t=this.transmuxPipeline_;n.remux=e,t&&t.coalesceStream&&t.coalesceStream.setRemux(e)},this.alignGopsWith=function(e){r&&this.transmuxPipeline_.videoSegmentStream&&this.transmuxPipeline_.videoSegmentStream.alignGopsWith(e)},this.getLogTrigger_=function(t){var i=this;return function(e){e.stream=t,i.trigger("log",e)}},this.push=function(e){var t;i&&((t=Et(e))&&"aac"!==this.transmuxPipeline_.type?this.setupAacPipeline():t||"ts"===this.transmuxPipeline_.type||this.setupTsPipeline(),i=!1),this.transmuxPipeline_.headOfPipeline.push(e)},this.flush=function(){i=!0,this.transmuxPipeline_.headOfPipeline.flush()},this.endTimeline=function(){this.transmuxPipeline_.headOfPipeline.endTimeline()},this.reset=function(){this.transmuxPipeline_.headOfPipeline&&this.transmuxPipeline_.headOfPipeline.reset()},this.resetCaptions=function(){this.transmuxPipeline_.captionStream&&this.transmuxPipeline_.captionStream.reset()}}).prototype=new j;function It(e,c){var i=Rt(e,["moof","traf"]),e=Rt(e,["mdat"]),d={},n=[];return e.forEach(function(e,t){t=i[t];n.push({mdat:e,traf:t})}),n.forEach(function(e){var t,i,n,r,a,s=e.mdat,o=e.traf,u=Rt(o,["tfhd"]),l=Ht(u[0]),e=l.trackId,u=Rt(o,["tfdt"]),u=0>>4&&(t+=e[4]+1),t}function Lt(e){switch(e){case 5:return"slice_layer_without_partitioning_rbsp_idr";case 6:return"sei_rbsp";case 7:return"seq_parameter_set_rbsp";case 8:return"pic_parameter_set_rbsp";case 9:return"access_unit_delimiter_rbsp";default:return null}}var Dt={Transmuxer:vt,VideoSegmentStream:yt,AudioSegmentStream:Ct,AUDIO_PROPERTIES:Tt,VIDEO_PROPERTIES:St,generateSegmentTimingInfo:gt},e=function(e){return e>>>0},Ot=function(e){var t="";return t+=String.fromCharCode(e[0]),t+=String.fromCharCode(e[1]),t+=String.fromCharCode(e[2]),t+=String.fromCharCode(e[3])},Mt=e,Rt=function e(t,i){var n,r,a,s=[];if(!i.length)return null;for(n=0;n>>2,dependsOn:3&e[0],isDependedOn:(192&e[1])>>>6,hasRedundancy:(48&e[1])>>>4,paddingValue:(14&e[1])>>>1,isNonSyncSample:1&e[1],degradationPriority:e[2]<<8|e[3]}},jt=function(e){var t,i={version:e[0],flags:new Uint8Array(e.subarray(1,4)),samples:[]},n=new 
DataView(e.buffer,e.byteOffset,e.byteLength),r=1&i.flags[2],a=4&i.flags[2],s=1&i.flags[1],o=2&i.flags[1],u=4&i.flags[1],l=8&i.flags[1],c=n.getUint32(4),d=8;for(r&&(i.dataOffset=n.getInt32(d),d+=4),a&&c&&(t={flags:Ft(e.subarray(d,d+4))},d+=4,s&&(t.duration=n.getUint32(d),d+=4),o&&(t.size=n.getUint32(d),d+=4),l&&(1===i.version?t.compositionTimeOffset=n.getInt32(d):t.compositionTimeOffset=n.getUint32(d),d+=4),i.samples.push(t),c--);c--;)t={},s&&(t.duration=n.getUint32(d),d+=4),o&&(t.size=n.getUint32(d),d+=4),u&&(t.flags=Ft(e.subarray(d,d+4)),d+=4),l&&(1===i.version?t.compositionTimeOffset=n.getInt32(d):t.compositionTimeOffset=n.getUint32(d),d+=4),i.samples.push(t);return i},Ht=function(e){var t=new DataView(e.buffer,e.byteOffset,e.byteLength),i={version:e[0],flags:new Uint8Array(e.subarray(1,4)),trackId:t.getUint32(4)},n=1&i.flags[2],r=2&i.flags[2],a=8&i.flags[2],s=16&i.flags[2],o=32&i.flags[2],u=65536&i.flags[0],l=131072&i.flags[0],e=8;return n&&(e+=4,i.baseDataOffset=t.getUint32(12),e+=4),r&&(i.sampleDescriptionIndex=t.getUint32(e),e+=4),a&&(i.defaultSampleDuration=t.getUint32(e),e+=4),s&&(i.defaultSampleSize=t.getUint32(e),e+=4),o&&(i.defaultSampleFlags=t.getUint32(e)),u&&(i.durationIsEmpty=!0),!n&&l&&(i.baseDataOffsetIsMoof=!0),i},j="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{},j="undefined"!=typeof window?window:"undefined"!=typeof j?j:"undefined"!=typeof self?self:{},qt=j,Vt=ke,Wt=Fe.CaptionStream,Gt=function(){var t,r,a,s,o,i,n=!1;this.isInitialized=function(){return n},this.init=function(e){t=new Wt,n=!0,i=!!e&&e.isPartial,t.on("data",function(e){e.startTime=e.startPts/s,e.endTime=e.endPts/s,o.captions.push(e),o.captionStreams[e.stream]=!0}),t.on("log",function(e){o.logs.push(e)})},this.isNewInit=function(e,t){return!(e&&0===e.length||t&&"object"==typeof t&&0===Object.keys(t).length)&&(a!==e[0]||s!==t[a])},this.parse=function(e,t,i){if(!this.isInitialized())return null;if(!t||!i)return null;if(this.isNewInit(t,i))a=t[0],s=i[a];else if(null===a||!s)return r.push(e),null;for(;0>>2&63).replace(/^0/,"")):t.codec="mp4a.40.2"):t.codec=t.codec.toLowerCase()));e=Rt(e,["mdia","mdhd"])[0];e&&(t.timescale=Yt(e)),s.push(t)}),s},Qt=ke,$t=q,Jt=Ie,Zt={};Zt.ts={parseType:function(e,t){e=xt(e);return 0===e?"pat":e===t?"pmt":t?"pes":null},parsePat:function(e){var t=At(e),i=4+Pt(e);return t&&(i+=e[i]+1),(31&e[i+10])<<8|e[i+11]},parsePmt:function(e){var t={},i=At(e),n=4+Pt(e);if(i&&(n+=e[n]+1),1&e[n+5]){for(var r=3+((15&e[n+1])<<8|e[n+2])-4,a=12+((15&e[n+10])<<8|e[n+11]);a=e.byteLength)return null;var i=null,n=e[t+7];return 192&n&&((i={}).pts=(14&e[t+9])<<27|(255&e[t+10])<<20|(254&e[t+11])<<12|(255&e[t+12])<<5|(254&e[t+13])>>>3,i.pts*=4,i.pts+=(6&e[t+13])>>>1,i.dts=i.pts,64&n&&(i.dts=(14&e[t+14])<<27|(255&e[t+15])<<20|(254&e[t+16])<<12|(255&e[t+17])<<5|(254&e[t+18])>>>3,i.dts*=4,i.dts+=(6&e[t+18])>>>1)),i},videoPacketContainsKeyFrame:function(e){for(var t=4+Pt(e),i=e.subarray(t),n=0,r=0,a=!1;re.length){i=!0;break}null===a&&(t=e.subarray(o,o+s),a=Zt.aac.parseAacTimestamp(t)),o+=s;break;case"audio":if(e.length-o<7){i=!0;break}if((s=Zt.aac.parseAdtsSize(e,o))>e.length){i=!0;break}null===r&&(t=e.subarray(o,o+s),r=Zt.aac.parseSampleRate(t)),n++,o+=s;break;default:o++}if(i)return null}if(null===r||null===a)return null;var u=ii/r;return{audio:[{type:"audio",dts:a,pts:a},{type:"audio",dts:a+1024*n*u,pts:a+1024*n*u}]}}:ti)(e);return 
r&&(r.audio||r.video)?(e=t,(t=r).audio&&t.audio.length&&("undefined"!=typeof(i=e)&&!isNaN(i)||(i=t.audio[0].dts),t.audio.forEach(function(e){e.dts=Jt(e.dts,i),e.pts=Jt(e.pts,i),e.dtsTime=e.dts/ii,e.ptsTime=e.pts/ii})),t.video&&t.video.length&&("undefined"!=typeof(n=e)&&!isNaN(n)||(n=t.video[0].dts),t.video.forEach(function(e){e.dts=Jt(e.dts,n),e.pts=Jt(e.pts,n),e.dtsTime=e.dts/ii,e.ptsTime=e.pts/ii}),t.firstKeyFrame&&((t=t.firstKeyFrame).dts=Jt(t.dts,n),t.pts=Jt(t.pts,n),t.dtsTime=t.dts/ii,t.ptsTime=t.pts/ii)),r):null},ri=function(){function e(e,t){this.options=t||{},this.self=e,this.init()}var t=e.prototype;return t.init=function(){var i,e;this.transmuxer&&this.transmuxer.dispose(),this.transmuxer=new Dt.Transmuxer(this.options),i=this.self,(e=this.transmuxer).on("data",function(e){var t=e.initSegment;e.initSegment={data:t.buffer,byteOffset:t.byteOffset,byteLength:t.byteLength};t=e.data;e.data=t.buffer,i.postMessage({action:"data",segment:e,byteOffset:t.byteOffset,byteLength:t.byteLength},[e.data])}),e.on("done",function(e){i.postMessage({action:"done"})}),e.on("gopInfo",function(e){i.postMessage({action:"gopInfo",gopInfo:e})}),e.on("videoSegmentTimingInfo",function(e){var t={start:{decode:ce(e.start.dts),presentation:ce(e.start.pts)},end:{decode:ce(e.end.dts),presentation:ce(e.end.pts)},baseMediaDecodeTime:ce(e.baseMediaDecodeTime)};e.prependedContentDuration&&(t.prependedContentDuration=ce(e.prependedContentDuration)),i.postMessage({action:"videoSegmentTimingInfo",videoSegmentTimingInfo:t})}),e.on("audioSegmentTimingInfo",function(e){var t={start:{decode:ce(e.start.dts),presentation:ce(e.start.pts)},end:{decode:ce(e.end.dts),presentation:ce(e.end.pts)},baseMediaDecodeTime:ce(e.baseMediaDecodeTime)};e.prependedContentDuration&&(t.prependedContentDuration=ce(e.prependedContentDuration)),i.postMessage({action:"audioSegmentTimingInfo",audioSegmentTimingInfo:t})}),e.on("id3Frame",function(e){i.postMessage({action:"id3Frame",id3Frame:e})}),e.on("caption",function(e){i.postMessage({action:"caption",caption:e})}),e.on("trackinfo",function(e){i.postMessage({action:"trackinfo",trackInfo:e})}),e.on("audioTimingInfo",function(e){i.postMessage({action:"audioTimingInfo",audioTimingInfo:{start:ce(e.start),end:ce(e.end)}})}),e.on("videoTimingInfo",function(e){i.postMessage({action:"videoTimingInfo",videoTimingInfo:{start:ce(e.start),end:ce(e.end)}})}),e.on("log",function(e){i.postMessage({action:"log",log:e})})},t.pushMp4Captions=function(e){this.captionParser||(this.captionParser=new Gt,this.captionParser.init());var t=new Uint8Array(e.data,e.byteOffset,e.byteLength),e=this.captionParser.parse(t,e.trackIds,e.timescales);this.self.postMessage({action:"mp4Captions",captions:e&&e.captions||[],logs:e&&e.logs||[],data:t.buffer},[t.buffer])},t.probeMp4StartTime=function(e){var t=e.timescales,e=e.data,t=Qt(t,e);this.self.postMessage({action:"probeMp4StartTime",startTime:t,data:e},[e.buffer])},t.probeMp4Tracks=function(e){var t=e.data,e=$t(t);this.self.postMessage({action:"probeMp4Tracks",tracks:e,data:t},[t.buffer])},t.probeTs=function(e){var t=e.data,i=e.baseStartTime,e="number"!=typeof i||isNaN(i)?void 
0:i*ue,i=ni(t,e),e=null;i&&((e={hasVideo:i.video&&2===i.video.length||!1,hasAudio:i.audio&&2===i.audio.length||!1}).hasVideo&&(e.videoStart=i.video[0].ptsTime),e.hasAudio&&(e.audioStart=i.audio[0].ptsTime)),this.self.postMessage({action:"probeTs",result:e,data:t},[t.buffer])},t.clearAllMp4Captions=function(){this.captionParser&&this.captionParser.clearAllCaptions()},t.clearParsedMp4Captions=function(){this.captionParser&&this.captionParser.clearParsedCaptions()},t.push=function(e){e=new Uint8Array(e.data,e.byteOffset,e.byteLength);this.transmuxer.push(e)},t.reset=function(){this.transmuxer.reset()},t.setTimestampOffset=function(e){e=e.timestampOffset||0;this.transmuxer.setBaseMediaDecodeTime(Math.round(le(e)))},t.setAudioAppendStart=function(e){this.transmuxer.setAudioAppendStart(Math.ceil(le(e.appendStart)))},t.setRemux=function(e){this.transmuxer.setRemux(e.remux)},t.flush=function(e){this.transmuxer.flush(),self.postMessage({action:"done",type:"transmuxed"})},t.endTimeline=function(){this.transmuxer.endTimeline(),self.postMessage({action:"endedtimeline",type:"transmuxed"})},t.alignGopsWith=function(e){this.transmuxer.alignGopsWith(e.gopsToAlignWith.slice())},e}();self.onmessage=function(e){"init"===e.data.action&&e.data.options?this.messageHandlers=new ri(self,e.data.options):(this.messageHandlers||(this.messageHandlers=new ri(self)),e.data&&e.data.action&&"init"!==e.data.action&&this.messageHandlers[e.data.action]&&this.messageHandlers[e.data.action](e.data))}}))),Cl=function(e){e.currentTransmux=null,e.transmuxQueue.length&&(e.currentTransmux=e.transmuxQueue.shift(),"function"==typeof e.currentTransmux?e.currentTransmux():Du(e.currentTransmux))},Il=function(e){Mu("reset",e)},xl=function(e){var t=new kl;t.currentTransmux=null,t.transmuxQueue=[];var i=t.terminate;return t.terminate=function(){return t.currentTransmux=null,t.transmuxQueue.length=0,i.call(t)},t.postMessage({action:"init",options:e}),t},Al=2,Pl=-101,Ll=-102,Dl=Ro("CodecUtils"),Ol=Ro("PlaylistSelector"),ar=function(){var e=this.useDevicePixelRatio&&window.devicePixelRatio||1;return il(this.playlists.master,this.systemBandwidth,parseInt(Zu(this.tech_.el(),"width"),10)*e,parseInt(Zu(this.tech_.el(),"height"),10)*e,this.limitRenditionByPlayerDimensions,this.masterPlaylistController_)},Ml=function(n){function e(e,t){var i=n.call(this)||this;if(!e)throw new TypeError("Initialization settings are required");if("function"!=typeof e.currentTime)throw new TypeError("No currentTime getter specified");if(!e.mediaSource)throw new TypeError("No MediaSource specified");return i.bandwidth=e.bandwidth,i.throughput={rate:0,count:0},i.roundTrip=NaN,i.resetStats_(),i.mediaIndex=null,i.partIndex=null,i.hasPlayed_=e.hasPlayed,i.currentTime_=e.currentTime,i.seekable_=e.seekable,i.seeking_=e.seeking,i.duration_=e.duration,i.mediaSource_=e.mediaSource,i.vhs_=e.vhs,i.loaderType_=e.loaderType,i.currentMediaInfo_=void 0,i.startingMediaInfo_=void 0,i.segmentMetadataTrack_=e.segmentMetadataTrack,i.goalBufferLength_=e.goalBufferLength,i.sourceType_=e.sourceType,i.sourceUpdater_=e.sourceUpdater,i.inbandTextTracks_=e.inbandTextTracks,i.state_="INIT",i.timelineChangeController_=e.timelineChangeController,i.shouldSaveSegmentTimingInfo_=!0,i.parse708captions_=e.parse708captions,i.useDtsForTimestampOffset_=e.useDtsForTimestampOffset,i.captionServices_=e.captionServices,i.experimentalExactManifestTimings=e.experimentalExactManifestTimings,i.checkBufferTimeout_=null,i.error_=void 
0,i.currentTimeline_=-1,i.pendingSegment_=null,i.xhrOptions_=null,i.pendingSegments_=[],i.audioDisabled_=!1,i.isPendingTimestampOffset_=!1,i.gopBuffer_=[],i.timeMapping_=0,i.safeAppend_=11<=tr.browser.IE_VERSION,i.appendInitSegment_={audio:!0,video:!0},i.playlistOfLastInitSegment_={audio:null,video:null},i.callQueue_=[],i.loadQueue_=[],i.metadataQueue_={id3:[],caption:[]},i.waitingOnRemove_=!1,i.quotaExceededErrorRetryTimeout_=null,i.activeInitSegmentId_=null,i.initSegments_={},i.cacheEncryptionKeys_=e.cacheEncryptionKeys,i.keyCache_={},i.decrypter_=e.decrypter,i.syncController_=e.syncController,i.syncPoint_={segmentIndex:0,time:0},i.transmuxer_=i.createTransmuxer_(),i.triggerSyncInfoUpdate_=function(){return i.trigger("syncinfoupdate")},i.syncController_.on("syncinfoupdate",i.triggerSyncInfoUpdate_),i.mediaSource_.addEventListener("sourceopen",function(){i.isEndOfStream_()||(i.ended_=!1)}),i.fetchAtBuffer_=!1,i.logger_=Ro("SegmentLoader["+i.loaderType_+"]"),Object.defineProperty(ft(i),"state",{get:function(){return this.state_},set:function(e){e!==this.state_&&(this.logger_(this.state_+" -> "+e),this.state_=e,this.trigger("statechange"))}}),i.sourceUpdater_.on("ready",function(){i.hasEnoughInfoToAppend_()&&i.processCallQueue_()}),"main"===i.loaderType_&&i.timelineChangeController_.on("pendingtimelinechange",function(){i.hasEnoughInfoToAppend_()&&i.processCallQueue_()}),"audio"===i.loaderType_&&i.timelineChangeController_.on("timelinechange",function(){i.hasEnoughInfoToLoad_()&&i.processLoadQueue_(),i.hasEnoughInfoToAppend_()&&i.processCallQueue_()}),i}mt(e,n);var t=e.prototype;return t.createTransmuxer_=function(){return xl({remux:!1,alignGopsAtEnd:this.safeAppend_,keepOriginalTimestamps:!0,parse708captions:this.parse708captions_,captionServices:this.captionServices_})},t.resetStats_=function(){this.mediaBytesTransferred=0,this.mediaRequests=0,this.mediaRequestsAborted=0,this.mediaRequestsTimedout=0,this.mediaRequestsErrored=0,this.mediaTransferDuration=0,this.mediaSecondsLoaded=0,this.mediaAppends=0},t.dispose=function(){this.trigger("dispose"),this.state="DISPOSED",this.pause(),this.abort_(),this.transmuxer_&&this.transmuxer_.terminate(),this.resetStats_(),this.checkBufferTimeout_&&window.clearTimeout(this.checkBufferTimeout_),this.syncController_&&this.triggerSyncInfoUpdate_&&this.syncController_.off("syncinfoupdate",this.triggerSyncInfoUpdate_),this.off()},t.setAudio=function(e){this.audioDisabled_=!e,e?this.appendInitSegment_.audio=!0:this.sourceUpdater_.removeAudio(0,this.duration_())},t.abort=function(){"WAITING"===this.state?(this.abort_(),this.state="READY",this.paused()||this.monitorBuffer_()):this.pendingSegment_&&(this.pendingSegment_=null)},t.abort_=function(){this.pendingSegment_&&this.pendingSegment_.abortRequests&&this.pendingSegment_.abortRequests(),this.pendingSegment_=null,this.callQueue_=[],this.loadQueue_=[],this.metadataQueue_.id3=[],this.metadataQueue_.caption=[],this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_),this.waitingOnRemove_=!1,window.clearTimeout(this.quotaExceededErrorRetryTimeout_),this.quotaExceededErrorRetryTimeout_=null},t.checkForAbort_=function(e){return"APPENDING"!==this.state||this.pendingSegment_?!this.pendingSegment_||this.pendingSegment_.requestId!==e:(this.state="READY",!0)},t.error=function(e){return"undefined"!=typeof e&&(this.logger_("error 
occurred:",e),this.error_=e),this.pendingSegment_=null,this.error_},t.endOfStream=function(){this.ended_=!0,this.transmuxer_&&Il(this.transmuxer_),this.gopBuffer_.length=0,this.pause(),this.trigger("ended")},t.buffered_=function(){var e=this.getMediaInfo_();if(!this.sourceUpdater_||!e)return tr.createTimeRanges();if("main"===this.loaderType_){var t=e.hasAudio,i=e.hasVideo,e=e.isMuxed;if(i&&t&&!this.audioDisabled_&&!e)return this.sourceUpdater_.buffered();if(i)return this.sourceUpdater_.videoBuffered()}return this.sourceUpdater_.audioBuffered()},t.initSegmentForMap=function(e,t){if(void 0===t&&(t=!1),!e)return null;var i=Su(e),n=this.initSegments_[i];return t&&!n&&e.bytes&&(this.initSegments_[i]=n={resolvedUri:e.resolvedUri,byterange:e.byterange,bytes:e.bytes,tracks:e.tracks,timescales:e.timescales}),n||e},t.segmentKey=function(e,t){if(void 0===t&&(t=!1),!e)return null;var i=wu(e),n=this.keyCache_[i];this.cacheEncryptionKeys_&&t&&!n&&e.bytes&&(this.keyCache_[i]=n={resolvedUri:e.resolvedUri,bytes:e.bytes});e={resolvedUri:(n||e).resolvedUri};return n&&(e.bytes=n.bytes),e},t.couldBeginLoading_=function(){return this.playlist_&&!this.paused()},t.load=function(){if(this.monitorBuffer_(),this.playlist_)return"INIT"===this.state&&this.couldBeginLoading_()?this.init_():void(!this.couldBeginLoading_()||"READY"!==this.state&&"INIT"!==this.state||(this.state="READY"))},t.init_=function(){return this.state="READY",this.resetEverything(),this.monitorBuffer_()},t.playlist=function(e,t){if(void 0===t&&(t={}),e){var i=this.playlist_,n=this.pendingSegment_;this.playlist_=e,this.xhrOptions_=t,"INIT"===this.state&&(e.syncInfo={mediaSequence:e.mediaSequence,time:0},"main"===this.loaderType_&&this.syncController_.setDateTimeMappingForStart(e));var r=null;if(i&&(i.id?r=i.id:i.uri&&(r=i.uri)),this.logger_("playlist update ["+r+" => "+(e.id||e.uri)+"]"),this.trigger("syncinfoupdate"),"INIT"===this.state&&this.couldBeginLoading_())return this.init_();if(!i||i.uri!==e.uri)return null!==this.mediaIndex&&(e.endList?this.resyncLoader():this.resetLoader()),this.currentMediaInfo_=void 0,void this.trigger("playlistupdate");t=e.mediaSequence-i.mediaSequence;this.logger_("live window shift ["+t+"]"),null!==this.mediaIndex&&(this.mediaIndex-=t,this.mediaIndex<0?(this.mediaIndex=null,this.partIndex=null):(r=this.playlist_.segments[this.mediaIndex],!this.partIndex||r.parts&&r.parts.length&&r.parts[this.partIndex]||(r=this.mediaIndex,this.logger_("currently processing part (index "+this.partIndex+") no longer exists."),this.resetLoader(),this.mediaIndex=r))),n&&(n.mediaIndex-=t,n.mediaIndex<0?(n.mediaIndex=null,n.partIndex=null):(0<=n.mediaIndex&&(n.segment=e.segments[n.mediaIndex]),0<=n.partIndex&&n.segment.parts&&(n.part=n.segment.parts[n.partIndex]))),this.syncController_.saveExpiredSegmentInfo(i,e)}},t.pause=function(){this.checkBufferTimeout_&&(window.clearTimeout(this.checkBufferTimeout_),this.checkBufferTimeout_=null)},t.paused=function(){return 
null===this.checkBufferTimeout_},t.resetEverything=function(e){this.ended_=!1,this.appendInitSegment_={audio:!0,video:!0},this.resetLoader(),this.remove(0,1/0,e),this.transmuxer_&&(this.transmuxer_.postMessage({action:"clearAllMp4Captions"}),this.transmuxer_.postMessage({action:"reset"}))},t.resetLoader=function(){this.fetchAtBuffer_=!1,this.resyncLoader()},t.resyncLoader=function(){this.transmuxer_&&Il(this.transmuxer_),this.mediaIndex=null,this.partIndex=null,this.syncPoint_=null,this.isPendingTimestampOffset_=!1,this.callQueue_=[],this.loadQueue_=[],this.metadataQueue_.id3=[],this.metadataQueue_.caption=[],this.abort(),this.transmuxer_&&this.transmuxer_.postMessage({action:"clearParsedMp4Captions"})},t.remove=function(e,t,i,n){if(void 0===i&&(i=function(){}),void 0===n&&(n=!1),(t=t===1/0?this.duration_():t)<=e)this.logger_("skipping remove because end ${end} is <= start ${start}");else if(this.sourceUpdater_&&this.getMediaInfo_()){var r,a=1,s=function(){0===--a&&i()};for(r in!n&&this.audioDisabled_||(a++,this.sourceUpdater_.removeAudio(e,t,s)),!n&&"main"!==this.loaderType_||(this.gopBuffer_=function(e,t,i,n){for(var r=Math.ceil((t-n)*hl),a=Math.ceil((i-n)*hl),n=e.slice(),s=e.length;s--&&!(e[s].pts<=a););if(-1===s)return n;for(var o=s+1;o--&&!(e[o].pts<=r););return o=Math.max(o,0),n.splice(o,s-o+1),n}(this.gopBuffer_,e,t,this.timeMapping_),a++,this.sourceUpdater_.removeVideo(e,t,s)),this.inbandTextTracks_)rl(e,t,this.inbandTextTracks_[r]);rl(e,t,this.segmentMetadataTrack_),s()}else this.logger_("skipping remove because no source updater or starting media info")},t.monitorBuffer_=function(){this.checkBufferTimeout_&&window.clearTimeout(this.checkBufferTimeout_),this.checkBufferTimeout_=window.setTimeout(this.monitorBufferTick_.bind(this),1)},t.monitorBufferTick_=function(){"READY"===this.state&&this.fillBuffer_(),this.checkBufferTimeout_&&window.clearTimeout(this.checkBufferTimeout_),this.checkBufferTimeout_=window.setTimeout(this.monitorBufferTick_.bind(this),500)},t.fillBuffer_=function(){var e;this.sourceUpdater_.updating()||(e=this.chooseNextRequest_())&&("number"==typeof e.timestampOffset&&(this.isPendingTimestampOffset_=!1,this.timelineChangeController_.pendingTimelineChange({type:this.loaderType_,from:this.currentTimeline_,to:e.timeline})),this.loadSegment_(e))},t.isEndOfStream_=function(e,t,i){if(void 0===e&&(e=this.mediaIndex),void 0===t&&(t=this.playlist_),void 0===i&&(i=this.partIndex),!t||!this.mediaSource_)return!1;var n="number"==typeof e&&t.segments[e],e=e+1===t.segments.length,n=!n||!n.parts||i+1===n.parts.length;return t.endList&&"open"===this.mediaSource_.readyState&&e&&n},t.chooseNextRequest_=function(){var e=this.buffered_(),t=Ho(e)||0,i=qo(e,this.currentTime_()),n=!this.hasPlayed_()&&1<=i,r=i>=this.goalBufferLength_(),e=this.playlist_.segments;if(!e.length||n||r)return null;this.syncPoint_=this.syncPoint_||this.syncController_.getSyncPoint(this.playlist_,this.duration_(),this.currentTimeline_,this.currentTime_());var a,n={partIndex:null,mediaIndex:null,startOfSegment:null,playlist:this.playlist_,isSyncRequest:Boolean(!this.syncPoint_)};n.isSyncRequest?n.mediaIndex=function(e,t,i){t=t||[];for(var n=[],r=0,a=0;a=e.length-1&&s&&!this.seeking_()?null:this.generateSegmentInfo_(n)},t.generateSegmentInfo_=function(e){var t=e.independent,i=e.playlist,n=e.mediaIndex,r=e.startOfSegment,a=e.isSyncRequest,s=e.partIndex,o=e.forceTimestampOffset,u=e.getMediaInfoForTime,l=i.segments[n],e="number"==typeof 
s&&l.parts[s],t={requestId:"segment-loader-"+Math.random(),uri:e&&e.resolvedUri||l.resolvedUri,mediaIndex:n,partIndex:e?s:null,isSyncRequest:a,startOfSegment:r,playlist:i,bytes:null,encryptedBytes:null,timestampOffset:null,timeline:l.timeline,duration:e&&e.duration||l.duration,segment:l,part:e,byteLength:0,transmuxer:this.transmuxer_,getMediaInfoForTime:u,independent:t},o="undefined"!=typeof o?o:this.isPendingTimestampOffset_;t.timestampOffset=this.timestampOffsetForSegment_({segmentTimeline:l.timeline,currentTimeline:this.currentTimeline_,startOfSegment:r,buffered:this.buffered_(),overrideCheck:o});o=Ho(this.sourceUpdater_.audioBuffered());return"number"==typeof o&&(t.audioAppendStart=o-this.sourceUpdater_.audioTimestampOffset()),this.sourceUpdater_.videoBuffered().length&&(t.gopsToAlignWith=function(e,t,i){if("undefined"==typeof t||null===t||!e.length)return[];for(var n=Math.ceil((t-i+3)*hl),r=0;rn);r++);return e.slice(r)}(this.gopBuffer_,this.currentTime_()-this.sourceUpdater_.videoTimestampOffset(),this.timeMapping_)),t},t.timestampOffsetForSegment_=function(e){return i=(t=e).segmentTimeline,n=t.currentTimeline,r=t.startOfSegment,e=t.buffered,t.overrideCheck||i!==n?!(i "+p+" for "+e),t=m,i=v.vhs_.tech_,t[n=e]||(i.trigger({type:"usage",name:"vhs-608"}),i.trigger({type:"usage",name:"hls-608"}),/^cc708_/.test(r=n)&&(r="SERVICE"+n.split("_")[1]),(o=i.textTracks().getTrackById(r))?t[n]=o:(s=a=n,d=!1,(o=(i.options_.vhs&&i.options_.vhs.captionServices||{})[r])&&(a=o.label,s=o.language,d=o.default),t[n]=i.addRemoteTextTrack({kind:"captions",id:r,default:d,label:a,language:s},!1).track)),rl(h,p,m[e]),l=(f={captionArray:f,inbandTextTracks:m,timestampOffset:g}).inbandTextTracks,m=f.captionArray,c=f.timestampOffset,m&&(u=window.WebKitDataCue||window.VTTCue,m.forEach(function(e){var t=e.stream;l[t].addCue(new u(e.startTime+c,e.endTime+c,e.text))}))}),this.transmuxer_&&this.transmuxer_.postMessage({action:"clearParsedMp4Captions"})):this.metadataQueue_.caption.push(this.handleCaptions_.bind(this,e,t)):this.logger_("SegmentLoader received no captions from a caption event"))},t.handleId3_=function(e,t,i){var n,r,a,s;this.earlyAbortWhenNeeded_(e.stats),this.checkForAbort_(e.requestId)||(this.pendingSegment_.hasAppendedData_?(n=null===this.sourceUpdater_.videoTimestampOffset()?this.sourceUpdater_.audioTimestampOffset():this.sourceUpdater_.videoTimestampOffset(),r=this.inbandTextTracks_,a=i,s=this.vhs_.tech_,r.metadataTrack_||(r.metadataTrack_=s.addRemoteTextTrack({kind:"metadata",label:"Timed Metadata"},!1).track,r.metadataTrack_.inBandMetadataTrackDispatchType=a),nl({inbandTextTracks:this.inbandTextTracks_,metadataArray:t,timestampOffset:n,videoDuration:this.duration_()})):this.metadataQueue_.id3.push(this.handleId3_.bind(this,e,t,i)))},t.processMetadataQueue_=function(){this.metadataQueue_.id3.forEach(function(e){return e()}),this.metadataQueue_.caption.forEach(function(e){return e()}),this.metadataQueue_.id3=[],this.metadataQueue_.caption=[]},t.processCallQueue_=function(){var e=this.callQueue_;this.callQueue_=[],e.forEach(function(e){return e()})},t.processLoadQueue_=function(){var e=this.loadQueue_;this.loadQueue_=[],e.forEach(function(e){return e()})},t.hasEnoughInfoToLoad_=function(){if("audio"!==this.loaderType_)return!0;var 
e=this.pendingSegment_;return!!e&&(!this.getCurrentMediaInfo_()||!ul({timelineChangeController:this.timelineChangeController_,currentTimeline:this.currentTimeline_,segmentTimeline:e.timeline,loaderType:this.loaderType_,audioDisabled:this.audioDisabled_}))},t.getCurrentMediaInfo_=function(e){return(e=void 0===e?this.pendingSegment_:e)&&e.trackInfo||this.currentMediaInfo_},t.getMediaInfo_=function(e){return void 0===e&&(e=this.pendingSegment_),this.getCurrentMediaInfo_(e)||this.startingMediaInfo_},t.hasEnoughInfoToAppend_=function(){if(!this.sourceUpdater_.ready())return!1;if(this.waitingOnRemove_||this.quotaExceededErrorRetryTimeout_)return!1;var e=this.pendingSegment_,t=this.getCurrentMediaInfo_();if(!e||!t)return!1;var i=t.hasAudio,n=t.hasVideo,t=t.isMuxed;return!(n&&!e.videoTimingInfo)&&(!(i&&!this.audioDisabled_&&!t&&!e.audioTimingInfo)&&!ul({timelineChangeController:this.timelineChangeController_,currentTimeline:this.currentTimeline_,segmentTimeline:e.timeline,loaderType:this.loaderType_,audioDisabled:this.audioDisabled_}))},t.handleData_=function(e,t){if(this.earlyAbortWhenNeeded_(e.stats),!this.checkForAbort_(e.requestId))if(!this.callQueue_.length&&this.hasEnoughInfoToAppend_()){var i,n=this.pendingSegment_;if(this.setTimeMapping_(n.timeline),this.updateMediaSecondsLoaded_(n.part||n.segment),"closed"!==this.mediaSource_.readyState){if(e.map&&(e.map=this.initSegmentForMap(e.map,!0),n.segment.map=e.map),e.key&&this.segmentKey(e.key,!0),n.isFmp4=e.isFmp4,n.timingInfo=n.timingInfo||{},n.isFmp4?(this.trigger("fmp4"),n.timingInfo.start=n[ol(t.type)].start):(i=this.getCurrentMediaInfo_(),(i="main"===this.loaderType_&&i&&i.hasVideo)&&(r=n.videoTimingInfo.start),n.timingInfo.start=this.trueSegmentStart_({currentStart:n.timingInfo.start,playlist:n.playlist,mediaIndex:n.mediaIndex,currentVideoTimestampOffset:this.sourceUpdater_.videoTimestampOffset(),useVideoTimingInfo:i,firstVideoFrameTimeForData:r,videoTimingInfo:n.videoTimingInfo,audioTimingInfo:n.audioTimingInfo})),this.updateAppendInitSegmentStatus(n,t.type),this.updateSourceBufferTimestampOffset_(n),n.isSyncRequest){this.updateTimingInfoEnd_(n),this.syncController_.saveSegmentTimingInfo({segmentInfo:n,shouldSaveTimelineMapping:"main"===this.loaderType_});var r=this.chooseNextRequest_();if(r.mediaIndex!==n.mediaIndex||r.partIndex!==n.partIndex)return void this.logger_("sync segment was incorrect, not appending");this.logger_("sync segment was correct, appending")}n.hasAppendedData_=!0,this.processMetadataQueue_(),this.appendData_(n,t)}}else this.callQueue_.push(this.handleData_.bind(this,e,t))},t.updateAppendInitSegmentStatus=function(e,t){"main"!==this.loaderType_||"number"!=typeof e.timestampOffset||e.changedTimestampOffset||(this.appendInitSegment_={audio:!0,video:!0}),this.playlistOfLastInitSegment_[t]!==e.playlist&&(this.appendInitSegment_[t]=!0)},t.getInitSegmentAndUpdateState_=function(e){var t=e.type,i=e.initSegment,n=e.map,r=e.playlist;if(n){e=Su(n);if(this.activeInitSegmentId_===e)return null;i=this.initSegmentForMap(n,!0).bytes,this.activeInitSegmentId_=e}return i&&this.appendInitSegment_[t]?(this.playlistOfLastInitSegment_[t]=r,this.appendInitSegment_[t]=!1,this.activeInitSegmentId_=null,i):null},t.handleQuotaExceededError_=function(e,t){var i=this,n=e.segmentInfo,r=e.type,a=e.bytes,s=this.sourceUpdater_.audioBuffered(),o=this.sourceUpdater_.videoBuffered();1=n);r++);return 
e.slice(0,r).concat(t)}(this.gopBuffer_,i.gopInfo,this.safeAppend_)),this.state="APPENDING",this.trigger("appending"),this.waitForAppendsToComplete_(e)}},t.setTimeMapping_=function(e){e=this.syncController_.mappingForTimeline(e);null!==e&&(this.timeMapping_=e)},t.updateMediaSecondsLoaded_=function(e){"number"==typeof e.start&&"number"==typeof e.end?this.mediaSecondsLoaded+=e.end-e.start:this.mediaSecondsLoaded+=e.duration},t.shouldUpdateTransmuxerTimestampOffset_=function(e){return null!==e&&("main"===this.loaderType_&&e!==this.sourceUpdater_.videoTimestampOffset()||!this.audioDisabled_&&e!==this.sourceUpdater_.audioTimestampOffset())},t.trueSegmentStart_=function(e){var t=e.currentStart,i=e.playlist,n=e.mediaIndex,r=e.firstVideoFrameTimeForData,a=e.currentVideoTimestampOffset,s=e.useVideoTimingInfo,o=e.videoTimingInfo,e=e.audioTimingInfo;if("undefined"!=typeof t)return t;if(!s)return e.start;i=i.segments[n-1];return 0!==n&&i&&"undefined"!=typeof i.start&&i.end===r+a?o.start:r},t.waitForAppendsToComplete_=function(e){var t=this.getCurrentMediaInfo_(e);if(!t)return this.error({message:"No starting media returned, likely due to an unsupported media format.",blacklistDuration:1/0}),void this.trigger("error");var i=t.hasAudio,n=t.hasVideo,t=t.isMuxed,n="main"===this.loaderType_&&n,t=!this.audioDisabled_&&i&&!t;if(e.waitingOnAppends=0,!e.hasAppendedData_)return e.timingInfo||"number"!=typeof e.timestampOffset||(this.isPendingTimestampOffset_=!0),e.timingInfo={start:0},e.waitingOnAppends++,this.isPendingTimestampOffset_||(this.updateSourceBufferTimestampOffset_(e),this.processMetadataQueue_()),void this.checkAppendsDone_(e);n&&e.waitingOnAppends++,t&&e.waitingOnAppends++,n&&this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this,e)),t&&this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this,e))},t.checkAppendsDone_=function(e){this.checkForAbort_(e.requestId)||(e.waitingOnAppends--,0===e.waitingOnAppends&&this.handleAppendsDone_())},t.checkForIllegalMediaSwitch=function(e){var t,i,e=(t=this.loaderType_,i=this.getCurrentMediaInfo_(),e=e,"main"===t&&i&&e?e.hasAudio||e.hasVideo?i.hasVideo&&!e.hasVideo?"Only audio found in segment when we expected video. We can't switch to audio only from a stream that had video. To get rid of this message, please add codec information to the manifest.":!i.hasVideo&&e.hasVideo?"Video found in segment when we expected only audio. We can't switch to a stream with video from an audio only stream. 
To get rid of this message, please add codec information to the manifest.":null:"Neither audio nor video found in segment.":null);return!!e&&(this.error({message:e,blacklistDuration:1/0}),this.trigger("error"),!0)},t.updateSourceBufferTimestampOffset_=function(e){var t;null===e.timestampOffset||"number"!=typeof e.timingInfo.start||e.changedTimestampOffset||"main"!==this.loaderType_||(t=!1,e.timestampOffset-=this.getSegmentStartTimeForTimestampOffsetCalculation_({videoTimingInfo:e.segment.videoTimingInfo,audioTimingInfo:e.segment.audioTimingInfo,timingInfo:e.timingInfo}),e.changedTimestampOffset=!0,e.timestampOffset!==this.sourceUpdater_.videoTimestampOffset()&&(this.sourceUpdater_.videoTimestampOffset(e.timestampOffset),t=!0),e.timestampOffset!==this.sourceUpdater_.audioTimestampOffset()&&(this.sourceUpdater_.audioTimestampOffset(e.timestampOffset),t=!0),t&&this.trigger("timestampoffset"))},t.getSegmentStartTimeForTimestampOffsetCalculation_=function(e){var t=e.videoTimingInfo,i=e.audioTimingInfo,e=e.timingInfo;return this.useDtsForTimestampOffset_?t&&"number"==typeof t.transmuxedDecodeStart?t.transmuxedDecodeStart:i&&"number"==typeof i.transmuxedDecodeStart?i.transmuxedDecodeStart:e.start:e.start},t.updateTimingInfoEnd_=function(e){e.timingInfo=e.timingInfo||{};var t=this.getMediaInfo_(),t="main"===this.loaderType_&&t&&t.hasVideo&&e.videoTimingInfo?e.videoTimingInfo:e.audioTimingInfo;t&&(e.timingInfo.end="number"==typeof t.end?t.end:t.start+e.duration)},t.handleAppendsDone_=function(){if(this.pendingSegment_&&this.trigger("appendsdone"),!this.pendingSegment_)return this.state="READY",void(this.paused()||this.monitorBuffer_());var e=this.pendingSegment_;this.updateTimingInfoEnd_(e),this.shouldSaveSegmentTimingInfo_&&this.syncController_.saveSegmentTimingInfo({segmentInfo:e,shouldSaveTimelineMapping:"main"===this.loaderType_});var t=cl(e,this.sourceType_);if(t&&("warn"===t.severity?tr.log.warn(t.message):this.logger_(t.message)),this.recordThroughput_(e),this.pendingSegment_=null,this.state="READY",!e.isSyncRequest||(this.trigger("syncinfoupdate"),e.hasAppendedData_)){this.logger_("Appended "+sl(e)),this.addSegmentMetadataCue_(e),this.fetchAtBuffer_=!0,this.currentTimeline_!==e.timeline&&(this.timelineChangeController_.lastTimelineChange({type:this.loaderType_,from:this.currentTimeline_,to:e.timeline}),"main"!==this.loaderType_||this.audioDisabled_||this.timelineChangeController_.lastTimelineChange({type:"audio",from:this.currentTimeline_,to:e.timeline})),this.currentTimeline_=e.timeline,this.trigger("syncinfoupdate");var i=e.segment,t=e.part,i=i.end&&this.currentTime_()-i.end>3*e.playlist.targetDuration,t=t&&t.end&&this.currentTime_()-t.end>3*e.playlist.partTargetDuration;if(i||t)return this.logger_("bad "+(i?"segment":"part")+" "+sl(e)),void this.resetEverything();null!==this.mediaIndex&&this.trigger("bandwidthupdate"),this.trigger("progress"),this.mediaIndex=e.mediaIndex,this.partIndex=e.partIndex,this.isEndOfStream_(e.mediaIndex,e.playlist,e.partIndex)&&this.endOfStream(),this.trigger("appended"),e.hasAppendedData_&&this.mediaAppends++,this.paused()||this.monitorBuffer_()}else this.logger_("Throwing away un-appended sync request "+sl(e))},t.recordThroughput_=function(e){var t,i;e.duration<1/60?this.logger_("Ignoring segment's throughput because its duration of "+e.duration+" is less than the min to record 
"+1/60):(t=this.throughput.rate,i=Date.now()-e.endOfAllRequests+1,i=Math.floor(e.byteLength/i*8*1e3),this.throughput.rate+=(i-t)/++this.throughput.count)},t.addSegmentMetadataCue_=function(e){var t,i,n,r;this.segmentMetadataTrack_&&(i=(t=e.segment).start,r=t.end,al(i)&&al(r)&&(rl(i,r,this.segmentMetadataTrack_),n=window.WebKitDataCue||window.VTTCue,e={custom:t.custom,dateTimeObject:t.dateTimeObject,dateTimeString:t.dateTimeString,bandwidth:e.playlist.attributes.BANDWIDTH,resolution:e.playlist.attributes.RESOLUTION,codecs:e.playlist.attributes.CODECS,byteLength:e.byteLength,uri:e.uri,timeline:e.timeline,playlist:e.playlist.id,start:i,end:r},(r=new n(i,r,JSON.stringify(e))).value=e,this.segmentMetadataTrack_.addCue(r)))},e}(tr.EventTarget);function Rl(){}function Nl(e){return"string"!=typeof e?e:e.replace(/./,function(e){return e.toUpperCase()})}function Ul(e,t){var i=t[e+"Buffer"];return i&&i.updating||t.queuePending[e]}function Bl(e,t){if(0!==t.queue.length){var i=0,n=t.queue[i];if("mediaSource"!==n.type){if("mediaSource"!==e&&t.ready()&&"closed"!==t.mediaSource.readyState&&!Ul(e,t)){if(n.type!==e){if(null===(i=function(e,t){for(var i=0;i=e.playlist.segments.length){e=null;break}e=this.generateSegmentInfo_({playlist:e.playlist,mediaIndex:e.mediaIndex+1,startOfSegment:e.startOfSegment+e.duration,isSyncRequest:e.isSyncRequest})}return e},t.stopForError=function(e){this.error(e),this.state="READY",this.pause(),this.trigger("error")},t.segmentRequestFinished_=function(e,t,i){var n=this;if(this.subtitlesTrack_){if(this.saveTransferStats_(t.stats),!this.pendingSegment_)return this.state="READY",void(this.mediaRequestsAborted+=1);if(e)return e.code===Pl&&this.handleTimeout_(),e.code===Ll?this.mediaRequestsAborted+=1:this.mediaRequestsErrored+=1,void this.stopForError(e);var r=this.pendingSegment_;this.saveBandwidthRelatedStats_(r.duration,t.stats),this.state="APPENDING",this.trigger("appending");var a=r.segment;if(a.map&&(a.map.bytes=t.map.bytes),r.bytes=t.bytes,"function"!=typeof window.WebVTT&&this.subtitlesTrack_&&this.subtitlesTrack_.tech_){var s=function(){n.subtitlesTrack_.tech_.off("vttjsloaded",o),n.stopForError({message:"Error loading vtt.js"})},o=function(){n.subtitlesTrack_.tech_.off("vttjserror",s),n.segmentRequestFinished_(e,t,i)};return this.state="WAITING_ON_VTTJS",this.subtitlesTrack_.tech_.one("vttjsloaded",o),void this.subtitlesTrack_.tech_.one("vttjserror",s)}a.requested=!0;try{this.parseVTTCues_(r)}catch(e){return void this.stopForError({message:e.message})}if(this.updateTimeMapping_(r,this.syncController_.timelines[r.timeline],this.playlist_),r.cues.length?r.timingInfo={start:r.cues[0].startTime,end:r.cues[r.cues.length-1].endTime}:r.timingInfo={start:r.startOfSegment,end:r.startOfSegment+r.duration},r.isSyncRequest)return this.trigger("syncinfoupdate"),this.pendingSegment_=null,void(this.state="READY");r.byteLength=r.bytes.byteLength,this.mediaSecondsLoaded+=a.duration,r.cues.forEach(function(e){n.subtitlesTrack_.addCue(n.featuresNativeTextTracks_?new window.VTTCue(e.startTime,e.endTime,e.text):e)}),function(t){var e=t.cues;if(e)for(var i=0;iu)&&(r=void 0,r=o<0?i.start-Qo({defaultDuration:t.targetDuration,durationList:t.segments,startIndex:e.mediaIndex,endIndex:a}):i.end+Qo({defaultDuration:t.targetDuration,durationList:t.segments,startIndex:e.mediaIndex+1,endIndex:a}),this.discontinuities[s]={time:r,accuracy:u})}},t.dispose=function(){this.trigger("dispose"),this.off()},e}(tr.EventTarget),pc=function(t){function e(){var e=t.call(this)||this;return 
e.pendingTimelineChanges_={},e.lastTimelineChanges_={},e}mt(e,t);var i=e.prototype;return i.clearPendingTimelineChange=function(e){this.pendingTimelineChanges_[e]=null,this.trigger("pendingtimelinechange")},i.pendingTimelineChange=function(e){var t=e.type,i=e.from,e=e.to;return"number"==typeof i&&"number"==typeof e&&(this.pendingTimelineChanges_[t]={type:t,from:i,to:e},this.trigger("pendingtimelinechange")),this.pendingTimelineChanges_[t]},i.lastTimelineChange=function(e){var t=e.type,i=e.from,e=e.to;return"number"==typeof i&&"number"==typeof e&&(this.lastTimelineChanges_[t]={type:t,from:i,to:e},delete this.pendingTimelineChanges_[t],this.trigger("timelinechange")),this.lastTimelineChanges_[t]},i.dispose=function(){this.trigger("dispose"),this.pendingTimelineChanges_={},this.lastTimelineChanges_={},this.off()},e}(tr.EventTarget),fc=x(U(W(function(){var e="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{};function t(e,t,i){return e(i={path:t,exports:{},require:function(e,t){return function(){throw new Error("Dynamic requires are not currently supported by @rollup/plugin-commonjs")}(null==t&&i.path)}},i.exports),i.exports}var i=t(function(e){function n(e,t){for(var i=0;i>7))^f]=f;for(e=t=0;!c[e];e^=i||1,t=p[t]||1)for(s=16843009*h[n=h[i=h[d[c[e]=r=(r=t^t<<1^t<<2^t<<3^t<<4)>>8^255&r^99]=e]]]^65537*n^257*i^16843008*e,a=257*h[r]^16843008*r,f=0;f<4;f++)u[f][e]=a=a<<24^a>>>8,l[f][r]=s=s<<24^s>>>8;for(f=0;f<5;f++)u[f]=u[f].slice(0),l[f]=l[f].slice(0);return o}(),this._tables=[[c[0][0].slice(),c[0][1].slice(),c[0][2].slice(),c[0][3].slice(),c[0][4].slice()],[c[1][0].slice(),c[1][1].slice(),c[1][2].slice(),c[1][3].slice(),c[1][4].slice()]];var r=this._tables[0][4],a=this._tables[1],s=e.length,o=1;if(4!==s&&6!==s&&8!==s)throw new Error("Invalid aes key size");var u=e.slice(0),l=[];for(this._key=[u,l],t=s;t<4*s+28;t++)n=u[t-1],(t%s==0||8===s&&t%s==4)&&(n=r[n>>>24]<<24^r[n>>16&255]<<16^r[n>>8&255]<<8^r[255&n],t%s==0&&(n=n<<8^n>>>24^o<<24,o=o<<1^283*(o>>7))),u[t]=u[t-s]^n;for(i=0;t;i++,t--)n=u[3&i?t:t-4],l[i]=t<=4||i<4?n:a[0][r[n>>>24]]^a[1][r[n>>16&255]]^a[2][r[n>>8&255]]^a[3][r[255&n]]}return e.prototype.decrypt=function(e,t,i,n,r,a){for(var s,o,u,l=this._key[1],c=e^l[0],d=n^l[1],h=i^l[2],p=t^l[3],f=l.length/4-2,m=4,t=this._tables[1],g=t[0],y=t[1],v=t[2],_=t[3],b=t[4],T=0;T>>24]^y[d>>16&255]^v[h>>8&255]^_[255&p]^l[m],o=g[d>>>24]^y[h>>16&255]^v[p>>8&255]^_[255&c]^l[m+1],u=g[h>>>24]^y[p>>16&255]^v[c>>8&255]^_[255&d]^l[m+2],p=g[p>>>24]^y[c>>16&255]^v[d>>8&255]^_[255&h]^l[m+3],m+=4,c=s,d=o,h=u;for(T=0;T<4;T++)r[(3&-T)+a]=b[c>>>24]<<24^b[d>>16&255]<<16^b[h>>8&255]<<8^b[255&p]^l[m++],s=c,c=d,d=h,h=p,p=s},e}(),l=function(t){function e(){var e=t.call(this,a)||this;return e.jobs=[],e.delay=1,e.timeout_=null,e}r(e,t);var i=e.prototype;return i.processJob_=function(){this.jobs.shift()(),this.jobs.length?this.timeout_=setTimeout(this.processJob_.bind(this),this.delay):this.timeout_=null},i.push=function(e){this.jobs.push(e),this.timeout_||(this.timeout_=setTimeout(this.processJob_.bind(this),this.delay))},e}(a),g=function(e){return e<<24|(65280&e)<<8|(16711680&e)>>8|e>>>24},s=function(){function u(e,t,i,n){var r=u.STEP,a=new Int32Array(e.buffer),s=new Uint8Array(e.byteLength),o=0;for(this.asyncStream_=new l,this.asyncStream_.push(this.decryptChunk_(a.subarray(o,o+r),t,i,s)),o=r;o>2),u=new m(Array.prototype.slice.call(t)),e=new Uint8Array(e.byteLength),l=new Int32Array(e.buffer),c=i[0],d=i[1],h=i[2],p=i[3],f=0;f "+n+" from 
"+t),this.tech_.trigger({type:"usage",name:"vhs-rendition-change-"+t})),this.masterPlaylistLoader_.media(e,i)},t.startABRTimer_=function(){var e=this;this.stopABRTimer_(),this.abrTimer_=window.setInterval(function(){return e.checkABR_()},250)},t.stopABRTimer_=function(){this.tech_.scrubbing&&this.tech_.scrubbing()||(window.clearInterval(this.abrTimer_),this.abrTimer_=null)},t.getAudioTrackPlaylists_=function(){var e=this.master(),t=e&&e.playlists||[];if(!e||!e.mediaGroups||!e.mediaGroups.AUDIO)return t;var i,n=e.mediaGroups.AUDIO,r=Object.keys(n);if(Object.keys(this.mediaTypes_.AUDIO.groups).length)i=this.mediaTypes_.AUDIO.activeTrack();else{var a,s=n.main||r.length&&n[r[0]];for(a in s)if(s[a].default){i={label:a};break}}if(!i)return t;var o,u=[];for(o in n)if(n[o][i.label]){var l=n[o][i.label];if(l.playlists&&l.playlists.length)u.push.apply(u,l.playlists);else if(l.uri)u.push(l);else if(e.playlists.length)for(var c=0;c "+r.id;if(!t)return l(c+" as current playlist is not set"),!0;if(r.id===t.id)return!1;e=Boolean(Uo(i,n).length);if(!t.endList)return e||"number"!=typeof t.partTargetDuration?(l(c+" as current playlist is live"),!0):(l("not "+c+" as current playlist is live llhls, but currentTime isn't in buffered."),!1);i=qo(i,n),n=u?El.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE:El.MAX_BUFFER_LOW_WATER_LINE;if(o= bufferLowWaterLine ("+i+" >= "+a+")";return u&&(a+=" and next bandwidth > current bandwidth ("+n+" > "+r+")"),l(a),!0}return l("not "+c+" as no switching criteria met"),!1}({buffered:this.tech_.buffered(),currentTime:i,currentPlaylist:t,nextPlaylist:e,bufferLowWaterLine:n,bufferHighWaterLine:r,duration:this.duration(),experimentalBufferBasedABR:this.experimentalBufferBasedABR,log:this.logger_})},t.setupSegmentLoaderListeners_=function(){var t=this;this.experimentalBufferBasedABR||(this.mainSegmentLoader_.on("bandwidthupdate",function(){var e=t.selectPlaylist();t.shouldSwitchToMedia_(e)&&t.switchMedia_(e,"bandwidthupdate"),t.tech_.trigger("bandwidthupdate")}),this.mainSegmentLoader_.on("progress",function(){t.trigger("progress")})),this.mainSegmentLoader_.on("error",function(){t.blacklistCurrentPlaylist(t.mainSegmentLoader_.error())}),this.mainSegmentLoader_.on("appenderror",function(){t.error=t.mainSegmentLoader_.error_,t.trigger("error")}),this.mainSegmentLoader_.on("syncinfoupdate",function(){t.onSyncInfoUpdate_()}),this.mainSegmentLoader_.on("timestampoffset",function(){t.tech_.trigger({type:"usage",name:"vhs-timestamp-offset"}),t.tech_.trigger({type:"usage",name:"hls-timestamp-offset"})}),this.audioSegmentLoader_.on("syncinfoupdate",function(){t.onSyncInfoUpdate_()}),this.audioSegmentLoader_.on("appenderror",function(){t.error=t.audioSegmentLoader_.error_,t.trigger("error")}),this.mainSegmentLoader_.on("ended",function(){t.logger_("main segment loader ended"),t.onEndOfStream()}),this.mainSegmentLoader_.on("earlyabort",function(e){t.experimentalBufferBasedABR||(t.delegateLoaders_("all",["abort"]),t.blacklistCurrentPlaylist({message:"Aborted early because there isn't enough bandwidth to complete the request without rebuffering."},120))});function e(){if(!t.sourceUpdater_.hasCreatedSourceBuffers())return t.tryToCreateSourceBuffers_();var 
e=t.getCodecsOrExclude_();e&&t.sourceUpdater_.addOrChangeSourceBuffers(e)}this.mainSegmentLoader_.on("trackinfo",e),this.audioSegmentLoader_.on("trackinfo",e),this.mainSegmentLoader_.on("fmp4",function(){t.triggeredFmp4Usage||(t.tech_.trigger({type:"usage",name:"vhs-fmp4"}),t.tech_.trigger({type:"usage",name:"hls-fmp4"}),t.triggeredFmp4Usage=!0)}),this.audioSegmentLoader_.on("fmp4",function(){t.triggeredFmp4Usage||(t.tech_.trigger({type:"usage",name:"vhs-fmp4"}),t.tech_.trigger({type:"usage",name:"hls-fmp4"}),t.triggeredFmp4Usage=!0)}),this.audioSegmentLoader_.on("ended",function(){t.logger_("audioSegmentLoader ended"),t.onEndOfStream()})},t.mediaSecondsLoaded_=function(){return Math.max(this.audioSegmentLoader_.mediaSecondsLoaded+this.mainSegmentLoader_.mediaSecondsLoaded)},t.load=function(){this.mainSegmentLoader_.load(),this.mediaTypes_.AUDIO.activePlaylistLoader&&this.audioSegmentLoader_.load(),this.mediaTypes_.SUBTITLES.activePlaylistLoader&&this.subtitleSegmentLoader_.load()},t.smoothQualityChange_=function(e){void 0===e&&(e=this.selectPlaylist()),this.fastQualityChange_(e)},t.fastQualityChange_=function(e){var t=this;(e=void 0===e?this.selectPlaylist():e)!==this.masterPlaylistLoader_.media()?(this.switchMedia_(e,"fast-quality"),this.mainSegmentLoader_.resetEverything(function(){tr.browser.IE_VERSION||tr.browser.IS_EDGE?t.tech_.setCurrentTime(t.tech_.currentTime()+.04):t.tech_.setCurrentTime(t.tech_.currentTime())})):this.logger_("skipping fastQualityChange because new media is same as old")},t.play=function(){if(!this.setupFirstPlay()){this.tech_.ended()&&this.tech_.setCurrentTime(0),this.hasPlayed_&&this.load();var e=this.tech_.seekable();return this.tech_.duration()===1/0&&this.tech_.currentTime()this.maxPlaylistRetries?1/0:Date.now()+1e3*t,i.excludeUntil=a,e.reason&&(i.lastExcludeReason_=e.reason),this.tech_.trigger("blacklistplaylist"),this.tech_.trigger({type:"usage",name:"vhs-rendition-blacklisted"}),this.tech_.trigger({type:"usage",name:"hls-rendition-blacklisted"});r=this.selectPlaylist();if(!r)return this.error="Playback cannot continue. No available working or supported playlists.",void this.trigger("error");t=e.internal?this.logger_:tr.log.warn,a=e.message?" 
"+e.message:"";t((e.internal?"Internal problem":"Problem")+" encountered with playlist "+i.id+"."+a+" Switching to playlist "+r.id+"."),r.attributes.AUDIO!==i.attributes.AUDIO&&this.delegateLoaders_("audio",["abort","pause"]),r.attributes.SUBTITLES!==i.attributes.SUBTITLES&&this.delegateLoaders_("subtitle",["abort","pause"]),this.delegateLoaders_("main",["abort","pause"]);a=r.targetDuration/2*1e3||5e3,a="number"==typeof r.lastRequest&&Date.now()-r.lastRequest<=a;return this.switchMedia_(r,"exclude",s||a)},t.pauseLoading=function(){this.delegateLoaders_("all",["abort","pause"]),this.stopABRTimer_()},t.delegateLoaders_=function(i,e){var n=this,r=[],t="all"===i;!t&&"main"!==i||r.push(this.masterPlaylistLoader_);var a=[];!t&&"audio"!==i||a.push("AUDIO"),!t&&"subtitle"!==i||(a.push("CLOSED-CAPTIONS"),a.push("SUBTITLES")),a.forEach(function(e){e=n.mediaTypes_[e]&&n.mediaTypes_[e].activePlaylistLoader;e&&r.push(e)}),["main","audio","subtitle"].forEach(function(e){var t=n[e+"SegmentLoader_"];!t||i!==e&&"all"!==i||r.push(t)}),r.forEach(function(t){return e.forEach(function(e){"function"==typeof t[e]&&t[e]()})})},t.setCurrentTime=function(e){var t=Uo(this.tech_.buffered(),e);return this.masterPlaylistLoader_&&this.masterPlaylistLoader_.media()&&this.masterPlaylistLoader_.media().segments?t&&t.length?e:(this.mainSegmentLoader_.resetEverything(),this.mainSegmentLoader_.abort(),this.mediaTypes_.AUDIO.activePlaylistLoader&&(this.audioSegmentLoader_.resetEverything(),this.audioSegmentLoader_.abort()),this.mediaTypes_.SUBTITLES.activePlaylistLoader&&(this.subtitleSegmentLoader_.resetEverything(),this.subtitleSegmentLoader_.abort()),void this.load()):0},t.duration=function(){if(!this.masterPlaylistLoader_)return 0;var e=this.masterPlaylistLoader_.media();return e?e.endList?this.mediaSource?this.mediaSource.duration:Ql.Playlist.duration(e):1/0:0},t.seekable=function(){return this.seekable_},t.onSyncInfoUpdate_=function(){var e;if(this.masterPlaylistLoader_){var t=this.masterPlaylistLoader_.media();if(t){var i=this.syncController_.getExpiredTime(t,this.duration());if(null!==i){var n,r,a=this.masterPlaylistLoader_.master,s=Ql.Playlist.seekable(t,i,Ql.Playlist.liveEdgeDelay(a,t));if(0!==s.length){if(this.mediaTypes_.AUDIO.activePlaylistLoader){if(t=this.mediaTypes_.AUDIO.activePlaylistLoader.media(),null===(i=this.syncController_.getExpiredTime(t,this.duration())))return;if(0===(e=Ql.Playlist.seekable(t,i,Ql.Playlist.liveEdgeDelay(a,t))).length)return}this.seekable_&&this.seekable_.length&&(n=this.seekable_.end(0),r=this.seekable_.start(0)),!e||e.start(0)>s.end(0)||s.start(0)>e.end(0)?this.seekable_=s:this.seekable_=tr.createTimeRanges([[(e.start(0)>s.start(0)?e:s).start(0),(e.end(0) "'+a[e]+'"')}),u.length)return void this.blacklistCurrentPlaylist({playlist:this.media(),message:"Codec switching not supported: "+u.join(", ")+".",blacklistDuration:1/0,internal:!0})}return a}t=Object.keys(o).reduce(function(e,t){return e&&(e+=", "),e+=t+' does not support codec(s): "'+o[t].join(",")+'"'},"")+".";this.blacklistCurrentPlaylist({playlist:this.media(),internal:!0,message:t,blacklistDuration:1/0})}else this.blacklistCurrentPlaylist({playlist:this.media(),message:"Could not determine codecs for playlist.",blacklistDuration:1/0})},t.tryToCreateSourceBuffers_=function(){var 
e;"open"!==this.mediaSource.readyState||this.sourceUpdater_.hasCreatedSourceBuffers()||!this.areMediaTypesKnown_()||(e=this.getCodecsOrExclude_())&&(this.sourceUpdater_.createSourceBuffers(e),e=[e.video,e.audio].filter(Boolean).join(","),this.excludeIncompatibleVariants_(e))},t.excludeUnsupportedVariants_=function(){var n=this,r=this.master().playlists,a=[];Object.keys(r).forEach(function(e){var t,i=r[e];-1===a.indexOf(i.id)&&(a.push(i.id),t=[],!(e=$u(n.master,i)).audio||yr(e.audio)||gr(e.audio)||t.push("audio codec "+e.audio),!e.video||yr(e.video)||gr(e.video)||t.push("video codec "+e.video),e.text&&"stpp.ttml.im1t"===e.text&&t.push("text codec "+e.text),t.length&&(i.excludeUntil=1/0,n.logger_("excluding "+i.id+" for unsupported: "+t.join(", "))))})},t.excludeIncompatibleVariants_=function(e){var r=this,a=[],s=this.master().playlists,e=Yu(pr(e)),o=Qu(e),u=e.video&&pr(e.video)[0]||null,l=e.audio&&pr(e.audio)[0]||null;Object.keys(s).forEach(function(e){var t,i,n=s[e];-1===a.indexOf(n.id)&&n.excludeUntil!==1/0&&(a.push(n.id),t=[],i=$u(r.masterPlaylistLoader_.master,n),e=Qu(i),(i.audio||i.video)&&(e!==o&&t.push('codec count "'+e+'" !== "'+o+'"'),r.sourceUpdater_.canChangeType()||(e=i.video&&pr(i.video)[0]||null,i=i.audio&&pr(i.audio)[0]||null,e&&u&&e.type.toLowerCase()!==u.type.toLowerCase()&&t.push('video codec "'+e.type+'" !== "'+u.type+'"'),i&&l&&i.type.toLowerCase()!==l.type.toLowerCase()&&t.push('audio codec "'+i.type+'" !== "'+l.type+'"')),t.length&&(n.excludeUntil=1/0,r.logger_("blacklisting "+n.id+": "+t.join(" && ")))))})},t.updateAdCues_=function(e){var t=0,i=this.seekable();i.length&&(t=i.start(0)),function(e,t,i){if(void 0===i&&(i=0),e.segments)for(var n=i,r=0;r=r.adStartTime&&t<=r.adEndTime)return r}return null}(t,n+u.duration/2)){if("cueIn"in u){o.endTime=n,o.adEndTime=n,n+=u.duration,o=null;continue}if(n=t.end(t.length-1)))return this.techWaiting_();5<=this.consecutiveUpdates&&e===this.lastRecordedTime?(this.consecutiveUpdates++,this.waiting_()):e===this.lastRecordedTime?this.consecutiveUpdates++:(this.consecutiveUpdates=0,this.lastRecordedTime=e)}},t.cancelTimer_=function(){this.consecutiveUpdates=0,this.timer_&&(this.logger_("cancelTimer_"),clearTimeout(this.timer_)),this.timer_=null},t.fixesBadSeeks_=function(){if(!this.tech_.seeking())return!1;var e,t=this.seekable(),i=this.tech_.currentTime();if(this.afterSeekableWindow_(t,i,this.media(),this.allowSeeksWithinUnsafeLiveWindow)&&(e=t.end(t.length-1)),"undefined"!=typeof(e=this.beforeSeekableWindow_(t,i)?(a=t.start(0))+(a===t.end(0)?0:.1):e))return this.logger_("Trying to seek outside of seekable at time "+i+" with seekable range "+Fo(t)+". Seeking to "+e+"."),this.tech_.setCurrentTime(e),!0;for(var n=this.masterPlaylistController_.sourceUpdater_,r=this.tech_.buffered(),a=n.audioBuffer?n.audioBuffered():null,t=n.videoBuffer?n.videoBuffered():null,n=this.media(),s=n.partTargetDuration||2*(n.targetDuration-fl),o=[a,t],u=0;u "+t.end(0)+"]. Attempting to resume playback by seeking to the current time."),this.tech_.trigger({type:"usage",name:"vhs-unknown-waiting"}),this.tech_.trigger({type:"usage",name:"hls-unknown-waiting"})))},t.techWaiting_=function(){var e=this.seekable(),t=this.tech_.currentTime();if(this.tech_.seeking()||null!==this.timer_)return!0;if(this.beforeSeekableWindow_(e,t)){var i=e.end(e.length-1);return this.logger_("Fell out of live window at time "+t+". 
Seeking to live point (seekable end) "+i),this.cancelTimer_(),this.tech_.setCurrentTime(i),this.tech_.trigger({type:"usage",name:"vhs-live-resync"}),this.tech_.trigger({type:"usage",name:"hls-live-resync"}),!0}e=this.tech_.vhs.masterPlaylistController_.sourceUpdater_,i=this.tech_.buffered();if(this.videoUnderflow_({audioBuffered:e.audioBuffered(),videoBuffered:e.videoBuffered(),currentTime:t}))return this.cancelTimer_(),this.tech_.setCurrentTime(t),this.tech_.trigger({type:"usage",name:"vhs-video-underflow"}),this.tech_.trigger({type:"usage",name:"hls-video-underflow"}),!0;e=Bo(i,t);if(0.vjs-icon-placeholder:before,.vjs-modal-dialog .vjs-modal-dialog-content{position:absolute;top:0;left:0;width:100%;height:100%}.video-js .vjs-big-play-button .vjs-icon-placeholder:before,.vjs-button>.vjs-icon-placeholder:before{text-align:center}@font-face{font-family:VideoJS;src:url(data:application/font-woff;charset=utf-8;base64,d09GRgABAAAAABDkAAsAAAAAG6gAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABHU1VCAAABCAAAADsAAABUIIslek9TLzIAAAFEAAAAPgAAAFZRiV3hY21hcAAAAYQAAADaAAADPv749/pnbHlmAAACYAAAC3AAABHQZg6OcWhlYWQAAA3QAAAAKwAAADYZw251aGhlYQAADfwAAAAdAAAAJA+RCLFobXR4AAAOHAAAABMAAACM744AAGxvY2EAAA4wAAAASAAAAEhF6kqubWF4cAAADngAAAAfAAAAIAE0AIFuYW1lAAAOmAAAASUAAAIK1cf1oHBvc3QAAA/AAAABJAAAAdPExYuNeJxjYGRgYOBiMGCwY2BycfMJYeDLSSzJY5BiYGGAAJA8MpsxJzM9kYEDxgPKsYBpDiBmg4gCACY7BUgAeJxjYGS7wTiBgZWBgaWQ5RkDA8MvCM0cwxDOeI6BgYmBlZkBKwhIc01hcPjI+FGJHcRdyA4RZgQRADK3CxEAAHic7dFZbsMgAEXRS0ycyZnnOeG7y+qC8pU1dHusIOXxuoxaOlwZYWQB0Aea4quIEN4E9LzKbKjzDeM6H/mua6Lmc/p8yhg0lvdYx15ZG8uOLQOGjMp3EzqmzJizYMmKNRu27Nhz4MiJMxeu3Ljz4Ekqm7T8P52G8PP3lnTOVk++Z6iN6QZzNN1F7ptuN7eGOjDUoaGODHVsuvU8MdTO9Hd5aqgzQ50b6sJQl4a6MtS1oW4MdWuoO0PdG+rBUI+GejLUs6FeDPVqqDdDvRvqw1CfhpqM9At0iFLaAAB4nJ1YDXBTVRZ+5/22TUlJ8we0pHlJm7RJf5O8F2j6EymlSPkpxaL8U2xpa3DKj0CBhc2IW4eWKSokIoLsuMqssM64f+jA4HSdWXXXscBq67IOs3FXZ1ZYWVyRFdo899yXtIBQZ90k7717zz3v3HPPOfd854YCCj9cL9dL0RQFOqCbGJnrHb5EayiKIWN8iA/hWBblo6hUWm8TtCDwE80WMJus/irwyxOdxeB0MDb14VNJHnXYoLLSl6FfCUYO9nYPTA8Epg9090LprfbBbZ2hY0UlJUXHQp3/vtWkS6EBv8+rPMq5u9692f/dNxJNiqwC1xPE9TCUgCsSdQWgE3XQD25lkG4CN2xmTcOXWBOyser6RN6KnGbKSbmQ3+d0OI1m2W8QzLLkI2sykrWAgJJEtA8vGGW/2Q+CmT3n8zS9wZwu2DCvtuZKZN3xkrLh36yCZuUomQSqGpY8t/25VfHVhw8z4ebGBtfLb0ya9PCaDc+8dGTvk2dsh6z7WzvowlXKUSWo9MJ15a3KrEP2loOr2Ojhw6iW6hf2BDdEccQvZGpaAy7YovSwq8kr7HGllxpd71rkS6G0Sf11sl9OvMK1+jwPPODxjUwkOim9CU3ix1wNjXDfmJSEn618Bs6lpWwUpU+8PCqLMY650zjq8VhCIP17NEKTx3eaLL+s5Pi6yJWaWjTHLR1jYzPSV9VF/6Ojdb/1kO3Mk3uhHC0x6gc1BjlKQ+nQFxTYdaJkZ7ySVxLBbhR1dsboNXp1tCYKW2LRaEzpYcIx2BKNxaL0ZaUnSqfFoiNhHKR/GkX6PWUSAaJelQaqZL1EpoHNsajSEyPSoJ9IjhIxTdjHLmwZvhRDOiFTY/YeQnvrVZmiTQtGncECXtFTBZLOVwwMRgoXHAkXzMzPn1nAJJ8jYSbMDaqN2waGLzNhih/bZynUBMpIWSg7VYi7DRx2m8ALkIdRCJwI6ArJx2EI8kaDWeTQKeAFk9fjl/1AvwktjQ1P7NjyMGQyfd4vjipX6M/i52D7Cq80kqlcxEcGXRr/FEcgs0u5uGgB4VWuMFfpdn2Re6Hi3PqzmxWKsz6+ae2Pn9hXXw/fqM859UiGC0oKYYILJBqJrsn1Z1E5qOs9rQCiUQRREjm8yJcbHF5cUJufX1vAHlefw0XgUoboS3ETfQlTxBC4SOtuE8VPRJTBSCQSjZCpk7Gqzu+masaZ2y7Zjehho4F3g82BNDkAHpORG4+OCS+f6JTPmtRn/PH1kch6d04sp7AQb25aQ/pqUyXeQ8vrebG8OYQdXOQ+585u0sdW9rqalzRURiJ+9F4MweRFrKUjl1GUYhH1A27WOHw5cTFSFPMo9EeUIGnQTZHIaJ7AHLaOKsOODaNF9jkBjYG2QEsQ2xjMUAx2bBEbeTBWMHwskBjngq56S/yfgkBnWBa4K9sqKtq2t1UI8S9He5XuBRbawAdatrQEAi30Aks2+LM8WeCbalVZkWNylvJ+dqJnzVb+OHlSoKW8nPCP7Rd+CcZ2DdWAGqJ2CBFOphgywFFCFBNtfAbGtNPBCwxvygHeYMZMY9ZboBqwq/pVrsbgN5tkv152ODlbMfiqwGMBgxa4Exz3QhovRIUp6acqZmQzRq0ypDXS2TPLT02YIkQETnOE445oOGxOmXAqUJNNG7XgupMjPq2ua9asrj5yY/yuKteO1Kx0YNJTufrirLe1mZnat7OL6rnUdCWenpW6I8mAnbsY8KWs1PuSovCW9A/Z25PQ24a7cNOqgmTkLmBMgh4THgc4b9k2IVv1/g/F5nGljwPLfOgHAzJzh45V/4+WenTzmMtR
5Z7us2Tys909UHqrPY7KbckoxRvRHhmVc3cJGE97uml0R1S0jdULVl7EvZtDFVBF35N9cEdjpgmAiOlFZ+Dtoh93+D3zzHr8RRNZQhnCNMNbcegOvpEwZoL+06cJQ07h+th3fZ/7PVbVC6ngTAV/KoLFuO6+2KFcU651gEb5ugPSIb1D+Xp8V4+k3sEIGnw5mYe4If4k1lFYr6SCzmM2EQ8iWtmwjnBI9kTwe1TlfAmXh7H02by9fW2gsjKwtv0aaURKil4OdV7rDL1MXIFNrhdxohcZXYTnq47WisrKitaObbf5+yvkLi5J6lCNZZ+B6GC38VNBZBDidSS/+mSvh6s+srgC8pyKMvDtt+de3c9fU76ZPfuM8ud4Kv0fyP/LqfepMT/3oZxSqpZaTa1DaQYLY8TFsHYbWYsPoRhRWfL5eSSQbhUGgGC3YLbVMk6PitTFNGpAsNrC6D1VNBKgBHMejaiuRWEWGgsSDBTJjqWIl8kJLlsaLJ2tXDr6xGfT85bM2Q06a46x2HTgvdnV8z5YDy/27J4zt6x2VtkzjoYpkq36kaBr4eQSg7tyiVweWubXZugtadl58ydapfbORfKsDTuZ0OBgx4cfdjCf5tbWNITnL120fdOi1RV1C3uKGzNdwYLcMvZ3BxoPyTOCD1XvXTp7U10gWCVmTV9b3r2z0SkGWovb2hp9I89O8a2smlyaO8muMU+dRmtzp60IzAoFpjLr1n388boLyf0dRvxhsHZ0qbWqDkwqvvpkj4l0fY6EIXRi5sQSrAvsVYwXRy4qJ2EVtD1AN7a0HWth9ymvL1xc3WTUKK/TAHA/bXDVtVWfOMfuGxGZv4Ln/jVr9jc3j1yMv0tndmyt9Vq88Y9gH1wtLX3KWjot5++jWHgAoZZkQ14wGQ20Fli71UmKJAy4xKMSTGbVdybW7FDDAut9XpD5AzWrYO7zQ8qffqF8+Ynd/clrHcdyxGy3a/3+mfNnzC/cBsveTjnTvXf1o6vzOlZw7WtqtdmPK/Errz/6NNtD72zmNOZfbmYdTGHfoofqI79Oc+R2n1lrnL6pOm0Up7kwxhTW12Amm7WYkXR2qYrF2AmgmbAsxZjwy1xpg/m1Je2vrp8v/nz2xpmlBg4E9hrMU341wVpTOh/OfmGvAnra8q6uctr60ZQHV3Q+WMQJykMj8ZsWn2QBOmmHMB+m5pDIpTFonYigiaKAhGEiAHF7EliVnQkjoLVIMPtJpBKHYd3A8GYH9jJzrWwmHx5Qjp7vDAX0suGRym1vtm/9W1/HyR8vczfMs6Sk8DSv855/5dlX9oQq52hT8syyp2rx5Id17IAyAM3wIjQPMOHzytEB64q6D5zT91yNbnx3V/nqnd017S9Y0605k3izoXLpsxde2n38yoOV9s1LcjwzNjbdX6asnBVaBj/6/DwKwPkpcqbDG7BnsXoSqWnUAmottYF6jMSdVyYZh3zVXCjwTiwwHH6sGuRiEHQGzuRX6whZkp123oy1BWE2mEfJ/tvIRtM4ZM5bDXiMsPMaAKOTyc5uL57rqyyc5y5JE5pm1i2S2iUX0CcaQ6lC6Zog7JqSqZmYlosl2K6pwNA84zRnQW6SaALYZQGW5lhCtU/W34N6o+bKfZ8cf3/Cl/+iTX3wBzpOY4mRkeNf3rptycGSshQWgGbYt5jFc2e0+DglIrwl6DVWQ7BuwaJ3Xk1J4VL5urnLl/Wf+gHU/hZoZdKNym6lG+I34FaNeZKcSpJIo2IeCVvpdsDGfKvzJnAwmeD37Ow65ZWwSowpgwX5T69s/rB55dP5BcpgDKFV8p7q2sn/1uc93bVzT/w6UrCqDTWvfCq/oCD/qZXNoUj8BL5Kp6GU017frfNXkAtiiyf/SOCEeLqnd8R/Ql9GlCRfctS6k5chvIBuQ1zCCjoCHL2DHNHIXxMJ3kQeO8lbsUXONeSfA5EjcG6/E+KdhN4bP04vBhdi883+BFBzQbxFbvZzQeY9LNBZc0FNfn5NwfDn6rCTnTw6R8o+gfpf5hCom33cRuiTlss3KHmZjD+BPN+5gXuA2ziS/Q73mLxUkpbKN/eqwz5uK0X9F3h2d1V4nGNgZGBgAOJd776+iue3+crAzc4AAje5Bfcg0xz9YHEOBiYQBQA8FQlFAHicY2BkYGBnAAGOPgaG//85+hkYGVCBMgBGGwNYAAAAeJxjYGBgYB8EmKOPgQEAQ04BfgAAAAAAAA4AaAB+AMwA4AECAUIBbAGYAcICGAJYArQC4AMwA7AD3gQwBJYE3AUkBWYFigYgBmYGtAbqB1gIEghYCG4IhAi2COh4nGNgZGBgUGYoZWBnAAEmIOYCQgaG/2A+AwAYCQG2AHicXZBNaoNAGIZfE5PQCKFQ2lUps2oXBfOzzAESyDKBQJdGR2NQR3QSSE/QE/QEPUUPUHqsvsrXjTMw83zPvPMNCuAWP3DQDAejdm1GjzwS7pMmwi75XngAD4/CQ/oX4TFe4Qt7uMMbOzjuDc0EmXCP/C7cJ38Iu+RP4QEe8CU8pP8WHmOPX2EPz87TPo202ey2OjlnQSXV/6arOjWFmvszMWtd6CqwOlKHq6ovycLaWMWVydXKFFZnmVFlZU46tP7R2nI5ncbi/dDkfDtFBA2DDXbYkhKc+V0Bqs5Zt9JM1HQGBRTm/EezTmZNKtpcAMs9Yu6AK9caF76zoLWIWcfMGOSkVduvSWechqZsz040Ib2PY3urxBJTzriT95lipz+TN1fmAAAAeJxtkMl2wjAMRfOAhABlKm2h80C3+ajgCKKDY6cegP59TYBzukAL+z1Zsq8ctaJTTKPrsUQLbXQQI0EXKXroY4AbDDHCGBNMcYsZ7nCPB8yxwCOe8IwXvOIN7/jAJ76wxHfUqWX+OzgumWAjJMV17i0Ndlr6irLKO+qftdT7i6y4uFSUvCknay+lFYZIZaQcmfH/xIFdYn98bqhra1aKTM/6lWMnyaYirx1rFUQZFBkb2zJUtoXeJCeg0WnLtHeSFc3OtrnozNwqi0TkSpBMDB1nSde5oJXW23hTS2/T0LilglXX7dmFVxLnq5U0vYATHFk3zX3BOisoQHNDFDeZnqKDy9hRNawN7Vh727hFzcJ5c8TILrKZfH7tIPxAFP0BpLeJPA==) format("woff");font-weight:400;font-style:normal}.video-js .vjs-big-play-button .vjs-icon-placeholder:before,.video-js .vjs-play-control .vjs-icon-placeholder,.vjs-icon-play{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-big-play-button .vjs-icon-placeholder:before,.video-js .vjs-play-control 
.vjs-icon-placeholder:before,.vjs-icon-play:before{content:"\f101"}.vjs-icon-play-circle{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-play-circle:before{content:"\f102"}.video-js .vjs-play-control.vjs-playing .vjs-icon-placeholder,.vjs-icon-pause{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-play-control.vjs-playing .vjs-icon-placeholder:before,.vjs-icon-pause:before{content:"\f103"}.video-js .vjs-mute-control.vjs-vol-0 .vjs-icon-placeholder,.vjs-icon-volume-mute{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-mute-control.vjs-vol-0 .vjs-icon-placeholder:before,.vjs-icon-volume-mute:before{content:"\f104"}.video-js .vjs-mute-control.vjs-vol-1 .vjs-icon-placeholder,.vjs-icon-volume-low{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-mute-control.vjs-vol-1 .vjs-icon-placeholder:before,.vjs-icon-volume-low:before{content:"\f105"}.video-js .vjs-mute-control.vjs-vol-2 .vjs-icon-placeholder,.vjs-icon-volume-mid{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-mute-control.vjs-vol-2 .vjs-icon-placeholder:before,.vjs-icon-volume-mid:before{content:"\f106"}.video-js .vjs-mute-control .vjs-icon-placeholder,.vjs-icon-volume-high{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-mute-control .vjs-icon-placeholder:before,.vjs-icon-volume-high:before{content:"\f107"}.video-js .vjs-fullscreen-control .vjs-icon-placeholder,.vjs-icon-fullscreen-enter{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-fullscreen-control .vjs-icon-placeholder:before,.vjs-icon-fullscreen-enter:before{content:"\f108"}.video-js.vjs-fullscreen .vjs-fullscreen-control .vjs-icon-placeholder,.vjs-icon-fullscreen-exit{font-family:VideoJS;font-weight:400;font-style:normal}.video-js.vjs-fullscreen .vjs-fullscreen-control .vjs-icon-placeholder:before,.vjs-icon-fullscreen-exit:before{content:"\f109"}.vjs-icon-square{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-square:before{content:"\f10a"}.vjs-icon-spinner{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-spinner:before{content:"\f10b"}.video-js .vjs-subs-caps-button .vjs-icon-placeholder,.video-js .vjs-subtitles-button .vjs-icon-placeholder,.video-js.video-js:lang(en-AU) .vjs-subs-caps-button .vjs-icon-placeholder,.video-js.video-js:lang(en-GB) .vjs-subs-caps-button .vjs-icon-placeholder,.video-js.video-js:lang(en-IE) .vjs-subs-caps-button .vjs-icon-placeholder,.video-js.video-js:lang(en-NZ) .vjs-subs-caps-button .vjs-icon-placeholder,.vjs-icon-subtitles{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-subs-caps-button .vjs-icon-placeholder:before,.video-js .vjs-subtitles-button .vjs-icon-placeholder:before,.video-js.video-js:lang(en-AU) .vjs-subs-caps-button .vjs-icon-placeholder:before,.video-js.video-js:lang(en-GB) .vjs-subs-caps-button .vjs-icon-placeholder:before,.video-js.video-js:lang(en-IE) .vjs-subs-caps-button .vjs-icon-placeholder:before,.video-js.video-js:lang(en-NZ) .vjs-subs-caps-button .vjs-icon-placeholder:before,.vjs-icon-subtitles:before{content:"\f10c"}.video-js .vjs-captions-button .vjs-icon-placeholder,.video-js:lang(en) .vjs-subs-caps-button .vjs-icon-placeholder,.video-js:lang(fr-CA) .vjs-subs-caps-button .vjs-icon-placeholder,.vjs-icon-captions{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-captions-button .vjs-icon-placeholder:before,.video-js:lang(en) .vjs-subs-caps-button .vjs-icon-placeholder:before,.video-js:lang(fr-CA) .vjs-subs-caps-button 
.vjs-icon-placeholder:before,.vjs-icon-captions:before{content:"\f10d"}.video-js .vjs-chapters-button .vjs-icon-placeholder,.vjs-icon-chapters{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-chapters-button .vjs-icon-placeholder:before,.vjs-icon-chapters:before{content:"\f10e"}.vjs-icon-share{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-share:before{content:"\f10f"}.vjs-icon-cog{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-cog:before{content:"\f110"}.video-js .vjs-play-progress,.video-js .vjs-volume-level,.vjs-icon-circle,.vjs-seek-to-live-control .vjs-icon-placeholder{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-play-progress:before,.video-js .vjs-volume-level:before,.vjs-icon-circle:before,.vjs-seek-to-live-control .vjs-icon-placeholder:before{content:"\f111"}.vjs-icon-circle-outline{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-circle-outline:before{content:"\f112"}.vjs-icon-circle-inner-circle{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-circle-inner-circle:before{content:"\f113"}.vjs-icon-hd{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-hd:before{content:"\f114"}.video-js .vjs-control.vjs-close-button .vjs-icon-placeholder,.vjs-icon-cancel{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-control.vjs-close-button .vjs-icon-placeholder:before,.vjs-icon-cancel:before{content:"\f115"}.video-js .vjs-play-control.vjs-ended .vjs-icon-placeholder,.vjs-icon-replay{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-play-control.vjs-ended .vjs-icon-placeholder:before,.vjs-icon-replay:before{content:"\f116"}.vjs-icon-facebook{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-facebook:before{content:"\f117"}.vjs-icon-gplus{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-gplus:before{content:"\f118"}.vjs-icon-linkedin{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-linkedin:before{content:"\f119"}.vjs-icon-twitter{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-twitter:before{content:"\f11a"}.vjs-icon-tumblr{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-tumblr:before{content:"\f11b"}.vjs-icon-pinterest{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-pinterest:before{content:"\f11c"}.video-js .vjs-descriptions-button .vjs-icon-placeholder,.vjs-icon-audio-description{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-descriptions-button .vjs-icon-placeholder:before,.vjs-icon-audio-description:before{content:"\f11d"}.video-js .vjs-audio-button .vjs-icon-placeholder,.vjs-icon-audio{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-audio-button .vjs-icon-placeholder:before,.vjs-icon-audio:before{content:"\f11e"}.vjs-icon-next-item{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-next-item:before{content:"\f11f"}.vjs-icon-previous-item{font-family:VideoJS;font-weight:400;font-style:normal}.vjs-icon-previous-item:before{content:"\f120"}.video-js .vjs-picture-in-picture-control .vjs-icon-placeholder,.vjs-icon-picture-in-picture-enter{font-family:VideoJS;font-weight:400;font-style:normal}.video-js .vjs-picture-in-picture-control .vjs-icon-placeholder:before,.vjs-icon-picture-in-picture-enter:before{content:"\f121"}.video-js.vjs-picture-in-picture .vjs-picture-in-picture-control 
.vjs-icon-placeholder,.vjs-icon-picture-in-picture-exit{font-family:VideoJS;font-weight:400;font-style:normal}.video-js.vjs-picture-in-picture .vjs-picture-in-picture-control .vjs-icon-placeholder:before,.vjs-icon-picture-in-picture-exit:before{content:"\f122"}.video-js{display:block;vertical-align:top;box-sizing:border-box;color:#fff;background-color:#000;position:relative;padding:0;font-size:10px;line-height:1;font-weight:400;font-style:normal;font-family:Arial,Helvetica,sans-serif;word-break:initial}.video-js:-moz-full-screen{position:absolute}.video-js:-webkit-full-screen{width:100%!important;height:100%!important}.video-js[tabindex="-1"]{outline:0}.video-js *,.video-js :after,.video-js :before{box-sizing:inherit}.video-js ul{font-family:inherit;font-size:inherit;line-height:inherit;list-style-position:outside;margin-left:0;margin-right:0;margin-top:0;margin-bottom:0}.video-js.vjs-16-9,.video-js.vjs-4-3,.video-js.vjs-fluid{width:100%;max-width:100%;height:0}.video-js.vjs-16-9{padding-top:56.25%}.video-js.vjs-4-3{padding-top:75%}.video-js.vjs-fill{width:100%;height:100%}.video-js .vjs-tech{position:absolute;top:0;left:0;width:100%;height:100%}body.vjs-full-window{padding:0;margin:0;height:100%}.vjs-full-window .video-js.vjs-fullscreen{position:fixed;overflow:hidden;z-index:1000;left:0;top:0;bottom:0;right:0}.video-js.vjs-fullscreen{width:100%!important;height:100%!important;padding-top:0!important}.video-js.vjs-fullscreen.vjs-user-inactive{cursor:none}.vjs-hidden{display:none!important}.vjs-disabled{opacity:.5;cursor:default}.video-js .vjs-offscreen{height:1px;left:-9999px;position:absolute;top:0;width:1px}.vjs-lock-showing{display:block!important;opacity:1;visibility:visible}.vjs-no-js{padding:20px;color:#fff;background-color:#000;font-size:18px;font-family:Arial,Helvetica,sans-serif;text-align:center;width:300px;height:150px;margin:0 auto}.vjs-no-js a,.vjs-no-js a:visited{color:#66a8cc}.video-js .vjs-big-play-button{font-size:3em;line-height:1.5em;height:1.63332em;width:3em;display:block;position:absolute;top:10px;left:10px;padding:0;cursor:pointer;opacity:1;border:.06666em solid #fff;background-color:#2b333f;background-color:rgba(43,51,63,.7);border-radius:.3em;transition:all .4s}.vjs-big-play-centered .vjs-big-play-button{top:50%;left:50%;margin-top:-.81666em;margin-left:-1.5em}.video-js .vjs-big-play-button:focus,.video-js:hover .vjs-big-play-button{border-color:#fff;background-color:#73859f;background-color:rgba(115,133,159,.5);transition:all 0s}.vjs-controls-disabled .vjs-big-play-button,.vjs-error .vjs-big-play-button,.vjs-has-started .vjs-big-play-button,.vjs-using-native-controls .vjs-big-play-button{display:none}.vjs-has-started.vjs-paused.vjs-show-big-play-button-on-pause .vjs-big-play-button{display:block}.video-js button{background:0 0;border:none;color:inherit;display:inline-block;font-size:inherit;line-height:inherit;text-transform:none;text-decoration:none;transition:none;-webkit-appearance:none;-moz-appearance:none;appearance:none}.vjs-control .vjs-button{width:100%;height:100%}.video-js .vjs-control.vjs-close-button{cursor:pointer;height:3em;position:absolute;right:0;top:.5em;z-index:2}.video-js .vjs-modal-dialog{background:rgba(0,0,0,.8);background:linear-gradient(180deg,rgba(0,0,0,.8),rgba(255,255,255,0));overflow:auto}.video-js .vjs-modal-dialog>*{box-sizing:border-box}.vjs-modal-dialog .vjs-modal-dialog-content{font-size:1.2em;line-height:1.5;padding:20px 24px;z-index:1}.vjs-menu-button{cursor:pointer}.vjs-menu-button.vjs-disabled{cursor:default}.vjs-workinghover 
.vjs-menu-button.vjs-disabled:hover .vjs-menu{display:none}.vjs-menu .vjs-menu-content{display:block;padding:0;margin:0;font-family:Arial,Helvetica,sans-serif;overflow:auto}.vjs-menu .vjs-menu-content>*{box-sizing:border-box}.vjs-scrubbing .vjs-control.vjs-menu-button:hover .vjs-menu{display:none}.vjs-menu li{list-style:none;margin:0;padding:.2em 0;line-height:1.4em;font-size:1.2em;text-align:center;text-transform:lowercase}.js-focus-visible .vjs-menu li.vjs-menu-item:hover,.vjs-menu li.vjs-menu-item:focus,.vjs-menu li.vjs-menu-item:hover{background-color:#73859f;background-color:rgba(115,133,159,.5)}.js-focus-visible .vjs-menu li.vjs-selected:hover,.vjs-menu li.vjs-selected,.vjs-menu li.vjs-selected:focus,.vjs-menu li.vjs-selected:hover{background-color:#fff;color:#2b333f}.vjs-menu li.vjs-menu-title{text-align:center;text-transform:uppercase;font-size:1em;line-height:2em;padding:0;margin:0 0 .3em 0;font-weight:700;cursor:default}.vjs-menu-button-popup .vjs-menu{display:none;position:absolute;bottom:0;width:10em;left:-3em;height:0;margin-bottom:1.5em;border-top-color:rgba(43,51,63,.7)}.vjs-menu-button-popup .vjs-menu .vjs-menu-content{background-color:#2b333f;background-color:rgba(43,51,63,.7);position:absolute;width:100%;bottom:1.5em;max-height:15em}.vjs-layout-tiny .vjs-menu-button-popup .vjs-menu .vjs-menu-content,.vjs-layout-x-small .vjs-menu-button-popup .vjs-menu .vjs-menu-content{max-height:5em}.vjs-layout-small .vjs-menu-button-popup .vjs-menu .vjs-menu-content{max-height:10em}.vjs-layout-medium .vjs-menu-button-popup .vjs-menu .vjs-menu-content{max-height:14em}.vjs-layout-huge .vjs-menu-button-popup .vjs-menu .vjs-menu-content,.vjs-layout-large .vjs-menu-button-popup .vjs-menu .vjs-menu-content,.vjs-layout-x-large .vjs-menu-button-popup .vjs-menu .vjs-menu-content{max-height:25em}.vjs-menu-button-popup .vjs-menu.vjs-lock-showing,.vjs-workinghover .vjs-menu-button-popup.vjs-hover .vjs-menu{display:block}.video-js .vjs-menu-button-inline{transition:all .4s;overflow:hidden}.video-js .vjs-menu-button-inline:before{width:2.222222222em}.video-js .vjs-menu-button-inline.vjs-slider-active,.video-js .vjs-menu-button-inline:focus,.video-js .vjs-menu-button-inline:hover,.video-js.vjs-no-flex .vjs-menu-button-inline{width:12em}.vjs-menu-button-inline .vjs-menu{opacity:0;height:100%;width:auto;position:absolute;left:4em;top:0;padding:0;margin:0;transition:all .4s}.vjs-menu-button-inline.vjs-slider-active .vjs-menu,.vjs-menu-button-inline:focus .vjs-menu,.vjs-menu-button-inline:hover .vjs-menu{display:block;opacity:1}.vjs-no-flex .vjs-menu-button-inline .vjs-menu{display:block;opacity:1;position:relative;width:auto}.vjs-no-flex .vjs-menu-button-inline.vjs-slider-active .vjs-menu,.vjs-no-flex .vjs-menu-button-inline:focus .vjs-menu,.vjs-no-flex .vjs-menu-button-inline:hover .vjs-menu{width:auto}.vjs-menu-button-inline .vjs-menu-content{width:auto;height:100%;margin:0;overflow:hidden}.video-js .vjs-control-bar{display:none;width:100%;position:absolute;bottom:0;left:0;right:0;height:3em;background-color:#2b333f;background-color:rgba(43,51,63,.7)}.vjs-has-started .vjs-control-bar{display:flex;visibility:visible;opacity:1;transition:visibility .1s,opacity .1s}.vjs-has-started.vjs-user-inactive.vjs-playing .vjs-control-bar{visibility:visible;opacity:0;transition:visibility 1s,opacity 1s}.vjs-controls-disabled .vjs-control-bar,.vjs-error .vjs-control-bar,.vjs-using-native-controls .vjs-control-bar{display:none!important}.vjs-audio.vjs-has-started.vjs-user-inactive.vjs-playing 
.vjs-control-bar{opacity:1;visibility:visible}.vjs-has-started.vjs-no-flex .vjs-control-bar{display:table}.video-js .vjs-control{position:relative;text-align:center;margin:0;padding:0;height:100%;width:4em;flex:none}.vjs-button>.vjs-icon-placeholder:before{font-size:1.8em;line-height:1.67}.video-js .vjs-control:focus,.video-js .vjs-control:focus:before,.video-js .vjs-control:hover:before{text-shadow:0 0 1em #fff}.video-js .vjs-control-text{border:0;clip:rect(0 0 0 0);height:1px;overflow:hidden;padding:0;position:absolute;width:1px}.vjs-no-flex .vjs-control{display:table-cell;vertical-align:middle}.video-js .vjs-custom-control-spacer{display:none}.video-js .vjs-progress-control{cursor:pointer;flex:auto;display:flex;align-items:center;min-width:4em;touch-action:none}.video-js .vjs-progress-control.disabled{cursor:default}.vjs-live .vjs-progress-control{display:none}.vjs-liveui .vjs-progress-control{display:flex;align-items:center}.vjs-no-flex .vjs-progress-control{width:auto}.video-js .vjs-progress-holder{flex:auto;transition:all .2s;height:.3em}.video-js .vjs-progress-control .vjs-progress-holder{margin:0 10px}.video-js .vjs-progress-control:hover .vjs-progress-holder{font-size:1.6666666667em}.video-js .vjs-progress-control:hover .vjs-progress-holder.disabled{font-size:1em}.video-js .vjs-progress-holder .vjs-load-progress,.video-js .vjs-progress-holder .vjs-load-progress div,.video-js .vjs-progress-holder .vjs-play-progress{position:absolute;display:block;height:100%;margin:0;padding:0;width:0}.video-js .vjs-play-progress{background-color:#fff}.video-js .vjs-play-progress:before{font-size:.9em;position:absolute;right:-.5em;top:-.3333333333em;z-index:1}.video-js .vjs-load-progress{background:rgba(115,133,159,.5)}.video-js .vjs-load-progress div{background:rgba(115,133,159,.75)}.video-js .vjs-time-tooltip{background-color:#fff;background-color:rgba(255,255,255,.8);border-radius:.3em;color:#000;float:right;font-family:Arial,Helvetica,sans-serif;font-size:1em;padding:6px 8px 8px 8px;pointer-events:none;position:absolute;top:-3.4em;visibility:hidden;z-index:1}.video-js .vjs-progress-holder:focus .vjs-time-tooltip{display:none}.video-js .vjs-progress-control:hover .vjs-progress-holder:focus .vjs-time-tooltip,.video-js .vjs-progress-control:hover .vjs-time-tooltip{display:block;font-size:.6em;visibility:visible}.video-js .vjs-progress-control.disabled:hover .vjs-time-tooltip{font-size:1em}.video-js .vjs-progress-control .vjs-mouse-display{display:none;position:absolute;width:1px;height:100%;background-color:#000;z-index:1}.vjs-no-flex .vjs-progress-control .vjs-mouse-display{z-index:0}.video-js .vjs-progress-control:hover .vjs-mouse-display{display:block}.video-js.vjs-user-inactive .vjs-progress-control .vjs-mouse-display{visibility:hidden;opacity:0;transition:visibility 1s,opacity 1s}.video-js.vjs-user-inactive.vjs-no-flex .vjs-progress-control .vjs-mouse-display{display:none}.vjs-mouse-display .vjs-time-tooltip{color:#fff;background-color:#000;background-color:rgba(0,0,0,.8)}.video-js .vjs-slider{position:relative;cursor:pointer;padding:0;margin:0 .45em 0 .45em;-webkit-touch-callout:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;background-color:#73859f;background-color:rgba(115,133,159,.5)}.video-js .vjs-slider.disabled{cursor:default}.video-js .vjs-slider:focus{text-shadow:0 0 1em #fff;box-shadow:0 0 1em #fff}.video-js .vjs-mute-control{cursor:pointer;flex:none}.video-js .vjs-volume-control{cursor:pointer;margin-right:1em;display:flex}.video-js 
.vjs-volume-control.vjs-volume-horizontal{width:5em}.video-js .vjs-volume-panel .vjs-volume-control{visibility:visible;opacity:0;width:1px;height:1px;margin-left:-1px}.video-js .vjs-volume-panel{transition:width 1s}.video-js .vjs-volume-panel .vjs-volume-control.vjs-slider-active,.video-js .vjs-volume-panel .vjs-volume-control:active,.video-js .vjs-volume-panel.vjs-hover .vjs-mute-control~.vjs-volume-control,.video-js .vjs-volume-panel.vjs-hover .vjs-volume-control,.video-js .vjs-volume-panel:active .vjs-volume-control,.video-js .vjs-volume-panel:focus .vjs-volume-control{visibility:visible;opacity:1;position:relative;transition:visibility .1s,opacity .1s,height .1s,width .1s,left 0s,top 0s}.video-js .vjs-volume-panel .vjs-volume-control.vjs-slider-active.vjs-volume-horizontal,.video-js .vjs-volume-panel .vjs-volume-control:active.vjs-volume-horizontal,.video-js .vjs-volume-panel.vjs-hover .vjs-mute-control~.vjs-volume-control.vjs-volume-horizontal,.video-js .vjs-volume-panel.vjs-hover .vjs-volume-control.vjs-volume-horizontal,.video-js .vjs-volume-panel:active .vjs-volume-control.vjs-volume-horizontal,.video-js .vjs-volume-panel:focus .vjs-volume-control.vjs-volume-horizontal{width:5em;height:3em;margin-right:0}.video-js .vjs-volume-panel .vjs-volume-control.vjs-slider-active.vjs-volume-vertical,.video-js .vjs-volume-panel .vjs-volume-control:active.vjs-volume-vertical,.video-js .vjs-volume-panel.vjs-hover .vjs-mute-control~.vjs-volume-control.vjs-volume-vertical,.video-js .vjs-volume-panel.vjs-hover .vjs-volume-control.vjs-volume-vertical,.video-js .vjs-volume-panel:active .vjs-volume-control.vjs-volume-vertical,.video-js .vjs-volume-panel:focus .vjs-volume-control.vjs-volume-vertical{left:-3.5em;transition:left 0s}.video-js .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-hover,.video-js .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active,.video-js .vjs-volume-panel.vjs-volume-panel-horizontal:active{width:10em;transition:width .1s}.video-js .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-mute-toggle-only{width:4em}.video-js .vjs-volume-panel .vjs-volume-control.vjs-volume-vertical{height:8em;width:3em;left:-3000em;transition:visibility 1s,opacity 1s,height 1s 1s,width 1s 1s,left 1s 1s,top 1s 1s}.video-js .vjs-volume-panel .vjs-volume-control.vjs-volume-horizontal{transition:visibility 1s,opacity 1s,height 1s 1s,width 1s,left 1s 1s,top 1s 1s}.video-js.vjs-no-flex .vjs-volume-panel .vjs-volume-control.vjs-volume-horizontal{width:5em;height:3em;visibility:visible;opacity:1;position:relative;transition:none}.video-js.vjs-no-flex .vjs-volume-control.vjs-volume-vertical,.video-js.vjs-no-flex .vjs-volume-panel .vjs-volume-control.vjs-volume-vertical{position:absolute;bottom:3em;left:.5em}.video-js .vjs-volume-panel{display:flex}.video-js .vjs-volume-bar{margin:1.35em .45em}.vjs-volume-bar.vjs-slider-horizontal{width:5em;height:.3em}.vjs-volume-bar.vjs-slider-vertical{width:.3em;height:5em;margin:1.35em auto}.video-js .vjs-volume-level{position:absolute;bottom:0;left:0;background-color:#fff}.video-js .vjs-volume-level:before{position:absolute;font-size:.9em}.vjs-slider-vertical .vjs-volume-level{width:.3em}.vjs-slider-vertical .vjs-volume-level:before{top:-.5em;left:-.3em}.vjs-slider-horizontal .vjs-volume-level{height:.3em}.vjs-slider-horizontal .vjs-volume-level:before{top:-.3em;right:-.5em}.video-js .vjs-volume-panel.vjs-volume-panel-vertical{width:4em}.vjs-volume-bar.vjs-slider-vertical .vjs-volume-level{height:100%}.vjs-volume-bar.vjs-slider-horizontal 
.vjs-volume-level{width:100%}.video-js .vjs-volume-vertical{width:3em;height:8em;bottom:8em;background-color:#2b333f;background-color:rgba(43,51,63,.7)}.video-js .vjs-volume-horizontal .vjs-menu{left:-2em}.vjs-poster{display:inline-block;vertical-align:middle;background-repeat:no-repeat;background-position:50% 50%;background-size:contain;background-color:#000;cursor:pointer;margin:0;padding:0;position:absolute;top:0;right:0;bottom:0;left:0;height:100%}.vjs-has-started .vjs-poster{display:none}.vjs-audio.vjs-has-started .vjs-poster{display:block}.vjs-using-native-controls .vjs-poster{display:none}.video-js .vjs-live-control{display:flex;align-items:flex-start;flex:auto;font-size:1em;line-height:3em}.vjs-no-flex .vjs-live-control{display:table-cell;width:auto;text-align:left}.video-js.vjs-liveui .vjs-live-control,.video-js:not(.vjs-live) .vjs-live-control{display:none}.video-js .vjs-seek-to-live-control{cursor:pointer;flex:none;display:inline-flex;height:100%;padding-left:.5em;padding-right:.5em;font-size:1em;line-height:3em;width:auto;min-width:4em}.vjs-no-flex .vjs-seek-to-live-control{display:table-cell;width:auto;text-align:left}.video-js.vjs-live:not(.vjs-liveui) .vjs-seek-to-live-control,.video-js:not(.vjs-live) .vjs-seek-to-live-control{display:none}.vjs-seek-to-live-control.vjs-control.vjs-at-live-edge{cursor:auto}.vjs-seek-to-live-control .vjs-icon-placeholder{margin-right:.5em;color:#888}.vjs-seek-to-live-control.vjs-control.vjs-at-live-edge .vjs-icon-placeholder{color:red}.video-js .vjs-time-control{flex:none;font-size:1em;line-height:3em;min-width:2em;width:auto;padding-left:1em;padding-right:1em}.vjs-live .vjs-time-control{display:none}.video-js .vjs-current-time,.vjs-no-flex .vjs-current-time{display:none}.video-js .vjs-duration,.vjs-no-flex .vjs-duration{display:none}.vjs-time-divider{display:none;line-height:3em}.vjs-live .vjs-time-divider{display:none}.video-js .vjs-play-control{cursor:pointer}.video-js .vjs-play-control .vjs-icon-placeholder{flex:none}.vjs-text-track-display{position:absolute;bottom:3em;left:0;right:0;top:0;pointer-events:none}.video-js.vjs-user-inactive.vjs-playing .vjs-text-track-display{bottom:1em}.video-js .vjs-text-track{font-size:1.4em;text-align:center;margin-bottom:.1em}.vjs-subtitles{color:#fff}.vjs-captions{color:#fc6}.vjs-tt-cue{display:block}video::-webkit-media-text-track-display{transform:translateY(-3em)}.video-js.vjs-user-inactive.vjs-playing video::-webkit-media-text-track-display{transform:translateY(-1.5em)}.video-js .vjs-picture-in-picture-control{cursor:pointer;flex:none}.video-js .vjs-fullscreen-control{cursor:pointer;flex:none}.vjs-playback-rate .vjs-playback-rate-value,.vjs-playback-rate>.vjs-menu-button{position:absolute;top:0;left:0;width:100%;height:100%}.vjs-playback-rate .vjs-playback-rate-value{pointer-events:none;font-size:1.5em;line-height:2;text-align:center}.vjs-playback-rate .vjs-menu{width:4em;left:0}.vjs-error .vjs-error-display .vjs-modal-dialog-content{font-size:1.4em;text-align:center}.vjs-error .vjs-error-display:before{color:#fff;content:"X";font-family:Arial,Helvetica,sans-serif;font-size:4em;left:0;line-height:1;margin-top:-.5em;position:absolute;text-shadow:.05em .05em .1em #000;text-align:center;top:50%;vertical-align:middle;width:100%}.vjs-loading-spinner{display:none;position:absolute;top:50%;left:50%;margin:-25px 0 0 -25px;opacity:.85;text-align:left;border:6px solid rgba(43,51,63,.7);box-sizing:border-box;background-clip:padding-box;width:50px;height:50px;border-radius:25px;visibility:hidden}.vjs-seeking 
.vjs-loading-spinner,.vjs-waiting .vjs-loading-spinner{display:block;-webkit-animation:vjs-spinner-show 0s linear .3s forwards;animation:vjs-spinner-show 0s linear .3s forwards}.vjs-loading-spinner:after,.vjs-loading-spinner:before{content:"";position:absolute;margin:-6px;box-sizing:inherit;width:inherit;height:inherit;border-radius:inherit;opacity:1;border:inherit;border-color:transparent;border-top-color:#fff}.vjs-seeking .vjs-loading-spinner:after,.vjs-seeking .vjs-loading-spinner:before,.vjs-waiting .vjs-loading-spinner:after,.vjs-waiting .vjs-loading-spinner:before{-webkit-animation:vjs-spinner-spin 1.1s cubic-bezier(.6,.2,0,.8) infinite,vjs-spinner-fade 1.1s linear infinite;animation:vjs-spinner-spin 1.1s cubic-bezier(.6,.2,0,.8) infinite,vjs-spinner-fade 1.1s linear infinite}.vjs-seeking .vjs-loading-spinner:before,.vjs-waiting .vjs-loading-spinner:before{border-top-color:#fff}.vjs-seeking .vjs-loading-spinner:after,.vjs-waiting .vjs-loading-spinner:after{border-top-color:#fff;-webkit-animation-delay:.44s;animation-delay:.44s}@keyframes vjs-spinner-show{to{visibility:visible}}@-webkit-keyframes vjs-spinner-show{to{visibility:visible}}@keyframes vjs-spinner-spin{100%{transform:rotate(360deg)}}@-webkit-keyframes vjs-spinner-spin{100%{-webkit-transform:rotate(360deg)}}@keyframes vjs-spinner-fade{0%{border-top-color:#73859f}20%{border-top-color:#73859f}35%{border-top-color:#fff}60%{border-top-color:#73859f}100%{border-top-color:#73859f}}@-webkit-keyframes vjs-spinner-fade{0%{border-top-color:#73859f}20%{border-top-color:#73859f}35%{border-top-color:#fff}60%{border-top-color:#73859f}100%{border-top-color:#73859f}}.vjs-chapters-button .vjs-menu ul{width:24em}.video-js .vjs-subs-caps-button+.vjs-menu .vjs-captions-menu-item .vjs-menu-item-text .vjs-icon-placeholder{vertical-align:middle;display:inline-block;margin-bottom:-.1em}.video-js .vjs-subs-caps-button+.vjs-menu .vjs-captions-menu-item .vjs-menu-item-text .vjs-icon-placeholder:before{font-family:VideoJS;content:"";font-size:1.5em;line-height:inherit}.video-js .vjs-audio-button+.vjs-menu .vjs-main-desc-menu-item .vjs-menu-item-text .vjs-icon-placeholder{vertical-align:middle;display:inline-block;margin-bottom:-.1em}.video-js .vjs-audio-button+.vjs-menu .vjs-main-desc-menu-item .vjs-menu-item-text .vjs-icon-placeholder:before{font-family:VideoJS;content:" ";font-size:1.5em;line-height:inherit}.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-audio-button,.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-captions-button,.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-chapters-button,.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-current-time,.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-descriptions-button,.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-duration,.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-playback-rate,.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-remaining-time,.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-subtitles-button,.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-time-divider,.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-volume-control,.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-audio-button,.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-captions-button,.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-chapters-button,.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-current-time,.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-descriptions-button,.video-js:not(.vjs-fullscreen).vjs-layout-tiny 
.vjs-duration,.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-playback-rate,.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-remaining-time,.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-subtitles-button,.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-time-divider,.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-volume-control,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-audio-button,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-captions-button,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-chapters-button,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-current-time,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-descriptions-button,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-duration,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-playback-rate,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-remaining-time,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-subtitles-button,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-time-divider,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-volume-control{display:none}.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active,.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal:active,.video-js:not(.vjs-fullscreen).vjs-layout-small .vjs-volume-panel.vjs-volume-panel-horizontal:hover,.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active,.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-volume-panel.vjs-volume-panel-horizontal:active,.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-volume-panel.vjs-volume-panel-horizontal:hover,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal.vjs-slider-active,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal:active,.video-js:not(.vjs-fullscreen).vjs-layout-x-small .vjs-volume-panel.vjs-volume-panel-horizontal:hover{width:auto;width:initial}.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-subs-caps-button,.video-js:not(.vjs-fullscreen).vjs-layout-x-small:not(.vjs-live) .vjs-subs-caps-button,.video-js:not(.vjs-fullscreen).vjs-layout-x-small:not(.vjs-liveui) .vjs-subs-caps-button{display:none}.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-custom-control-spacer,.video-js:not(.vjs-fullscreen).vjs-layout-x-small.vjs-liveui .vjs-custom-control-spacer{flex:auto;display:block}.video-js:not(.vjs-fullscreen).vjs-layout-tiny.vjs-no-flex .vjs-custom-control-spacer,.video-js:not(.vjs-fullscreen).vjs-layout-x-small.vjs-liveui.vjs-no-flex .vjs-custom-control-spacer{width:auto}.video-js:not(.vjs-fullscreen).vjs-layout-tiny .vjs-progress-control,.video-js:not(.vjs-fullscreen).vjs-layout-x-small.vjs-liveui .vjs-progress-control{display:none}.vjs-modal-dialog.vjs-text-track-settings{background-color:#2b333f;background-color:rgba(43,51,63,.75);color:#fff;height:70%}.vjs-text-track-settings .vjs-modal-dialog-content{display:table}.vjs-text-track-settings .vjs-track-settings-colors,.vjs-text-track-settings .vjs-track-settings-controls,.vjs-text-track-settings .vjs-track-settings-font{display:table-cell}.vjs-text-track-settings .vjs-track-settings-controls{text-align:right;vertical-align:bottom}@supports (display:grid){.vjs-text-track-settings .vjs-modal-dialog-content{display:grid;grid-template-columns:1fr 1fr;grid-template-rows:1fr;padding:20px 24px 0 24px}.vjs-track-settings-controls 
.vjs-default-button{margin-bottom:20px}.vjs-text-track-settings .vjs-track-settings-controls{grid-column:1/-1}.vjs-layout-small .vjs-text-track-settings .vjs-modal-dialog-content,.vjs-layout-tiny .vjs-text-track-settings .vjs-modal-dialog-content,.vjs-layout-x-small .vjs-text-track-settings .vjs-modal-dialog-content{grid-template-columns:1fr}}.vjs-track-setting>select{margin-right:1em;margin-bottom:.5em}.vjs-text-track-settings fieldset{margin:5px;padding:3px;border:none}.vjs-text-track-settings fieldset span{display:inline-block}.vjs-text-track-settings fieldset span>select{max-width:7.3em}.vjs-text-track-settings legend{color:#fff;margin:0 0 5px 0}.vjs-text-track-settings .vjs-label{position:absolute;clip:rect(1px 1px 1px 1px);clip:rect(1px,1px,1px,1px);display:block;margin:0 0 5px 0;padding:0;border:0;height:1px;width:1px;overflow:hidden}.vjs-track-settings-controls button:active,.vjs-track-settings-controls button:focus{outline-style:solid;outline-width:medium;background-image:linear-gradient(0deg,#fff 88%,#73859f 100%)}.vjs-track-settings-controls button:hover{color:rgba(43,51,63,.75)}.vjs-track-settings-controls button{background-color:#fff;background-image:linear-gradient(-180deg,#fff 88%,#73859f 100%);color:#2b333f;cursor:pointer;border-radius:2px}.vjs-track-settings-controls .vjs-default-button{margin-right:1em}@media print{.video-js>:not(.vjs-tech):not(.vjs-poster){visibility:hidden}}.vjs-resize-manager{position:absolute;top:0;left:0;width:100%;height:100%;border:none;z-index:-1000}.js-focus-visible .video-js :focus:not(.focus-visible){outline:0;background:0 0}.video-js .vjs-menu :focus:not(:focus-visible),.video-js :focus:not(:focus-visible){outline:0;background:0 0}
\ No newline at end of file
diff --git a/frontend/src/static/lib/video-js/7.7.5/video.min.js b/frontend/src/static/lib/video-js/7.7.5/video.min.js
deleted file mode 100755
index 4739b98..0000000
--- a/frontend/src/static/lib/video-js/7.7.5/video.min.js
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * @license
- * Video.js 7.7.5
- * Copyright Brightcove, Inc.
- * Available under Apache License Version 2.0
- *
- *
- * Includes vtt.js
- * Available under Apache License Version 2.0
- *
- */
-!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t(require("global/window"),require("global/document")):"function"==typeof define&&define.amd?define(["global/window","global/document"],t):(e=e||self).videojs=t(e.window,e.document)}(this,function(y,h){"use strict";y=y&&y.hasOwnProperty("default")?y.default:y,h=h&&h.hasOwnProperty("default")?h.default:h;var d="7.7.5",l=[],e=function(o,u){return function(e,t,i){var n=u.levels[t],r=new RegExp("^("+n+")$");if("log"!==e&&i.unshift(e.toUpperCase()+":"),i.unshift(o+":"),l){l.push([].concat(i));var s=l.length-1e3;l.splice(0,0',i=n.firstChild,n.setAttribute("style","display:none; position:absolute;"),h.body.appendChild(n));for(var s={},a=0;ax',e=t.firstChild.href}return e}function Mt(e){if("string"==typeof e){var t=/^(\/?)([\s\S]*?)((?:\.{1,2}|[^\/]+?)(\.([^\.\/\?]+)))(?:[\/]*|[\?].*)$/.exec(e);if(t)return t.pop().toLowerCase()}return""}function Nt(e,t){void 0===t&&(t=y.location);var i=Rt(e);return(":"===i.protocol?t.protocol:i.protocol)+i.host!==t.protocol+t.host}var Bt=function(n){function e(e){var t;void 0===e&&(e=[]);for(var i=e.length-1;0<=i;i--)if(e[i].enabled){Ot(e,e[i]);break}return(t=n.call(this,e)||this).changing_=!1,t}He(e,n);var t=e.prototype;return t.addTrack=function(e){var t=this;e.enabled&&Ot(this,e),n.prototype.addTrack.call(this,e),e.addEventListener&&(e.enabledChange_=function(){t.changing_||(t.changing_=!0,Ot(t,e),t.changing_=!1,t.trigger("change"))},e.addEventListener("enabledchange",e.enabledChange_))},t.removeTrack=function(e){n.prototype.removeTrack.call(this,e),e.removeEventListener&&e.enabledChange_&&(e.removeEventListener("enabledchange",e.enabledChange_),e.enabledChange_=null)},e}(It),jt=function(n){function e(e){var t;void 0===e&&(e=[]);for(var i=e.length-1;0<=i;i--)if(e[i].selected){Dt(e,e[i]);break}return(t=n.call(this,e)||this).changing_=!1,Object.defineProperty(Me(t),"selectedIndex",{get:function(){for(var e=0;e
- * Copyright (c) 2014 David Björklund
- * Available under the MIT license
- *
- */,$t=Object.prototype.toString;var Qt=function(e){var r={};return e&&e.trim().split("\n").forEach(function(e){var t=e.indexOf(":"),i=e.slice(0,t).trim().toLowerCase(),n=e.slice(t+1).trim();"undefined"==typeof r[i]?r[i]=n:Array.isArray(r[i])?r[i].push(n):r[i]=[r[i],n]}),r},Jt=ti,Zt=ti;function ei(e,t,i){var n=e;return Yt(t)?(i=t,"string"==typeof e&&(n={uri:e})):n=r({},t,{uri:e}),n.callback=i,n}function ti(e,t,i){return ii(t=ei(e,t,i))}function ii(n){if("undefined"==typeof n.callback)throw new Error("callback argument missing");var r=!1,s=function(e,t,i){r||(r=!0,n.callback(e,t,i))};function t(e){return clearTimeout(o),e instanceof Error||(e=new Error(""+(e||"Unknown XMLHttpRequest Error"))),e.statusCode=0,s(e,m)}function e(){if(!a){var e;clearTimeout(o),e=n.useXDR&&void 0===u.status?200:1223===u.status?204:u.status;var t=m,i=null;return 0!==e?(t={body:function(){var e=void 0;if(e=u.response?u.response:u.responseText||function(e){try{if("document"===e.responseType)return e.responseXML;var t=e.responseXML&&"parsererror"===e.responseXML.documentElement.nodeName;if(""===e.responseType&&!t)return e.responseXML}catch(e){}return null}(u),f)try{e=JSON.parse(e)}catch(e){}return e}(),statusCode:e,method:c,headers:{},url:l,rawRequest:u},u.getAllResponseHeaders&&(t.headers=Qt(u.getAllResponseHeaders()))):i=new Error("Internal XMLHttpRequest Error"),s(i,t,t.body)}}var i,a,o,u=n.xhr||null,l=(u=u||(n.cors||n.useXDR?new ti.XDomainRequest:new ti.XMLHttpRequest)).url=n.uri||n.url,c=u.method=n.method||"GET",h=n.body||n.data,d=u.headers=n.headers||{},p=!!n.sync,f=!1,m={body:void 0,headers:{},statusCode:0,method:c,url:l,rawRequest:u};if("json"in n&&!1!==n.json&&(f=!0,d.accept||d.Accept||(d.Accept="application/json"),"GET"!==c&&"HEAD"!==c&&(d["content-type"]||d["Content-Type"]||(d["Content-Type"]="application/json"),h=JSON.stringify(!0===n.json?h:n.json))),u.onreadystatechange=function(){4===u.readyState&&setTimeout(e,0)},u.onload=e,u.onerror=t,u.onprogress=function(){},u.onabort=function(){a=!0},u.ontimeout=t,u.open(c,l,!p,n.username,n.password),p||(u.withCredentials=!!n.withCredentials),!p&&0=e?t.push(r):r.startTime===r.endTime&&r.startTime<=e&&r.startTime+.5>=e&&t.push(r)}if(o=!1,t.length!==this.activeCues_.length)o=!0;else for(var s=0;s]*>?)?/);return e=t[1]?t[1]:t[2],i=i.substr(e.length),e}function t(e,t){var i=bi[e];if(!i)return null;var n=s.document.createElement(i),r=Si[e];return r&&t&&(n[r]=t.trim()),n}for(var n,r,a,o,u=s.document.createElement("div"),l=u,c=[];null!==(n=e());)if("<"!==n[0])l.appendChild(s.document.createTextNode((r=n,_i.innerHTML=r,r=_i.textContent,_i.textContent="",r)));else{if("/"===n[1]){c.length&&c[c.length-1]===n.substr(2).replace(">","")&&(c.pop(),l=l.parentNode);continue}var h,d=mi(n.substr(1,n.length-2));if(d){h=s.document.createProcessingInstruction("timestamp",d),l.appendChild(h);continue}var p=n.match(/^<([^.\s/0-9>]+)(\.[^\s\\>]+)?([^>\\]+)?(\\?)>?$/);if(!p)continue;if(!(h=t(p[1],p[3])))continue;if(a=l,ki[(o=h).localName]&&ki[o.localName]!==a.localName)continue;if(p[2]){var f=p[2].split(".");f.forEach(function(e){var t=/^bg_/.test(e),i=t?e.slice(3):e;if(Ti.hasOwnProperty(i)){var n=t?"background-color":"color",r=Ti[i];h.style[n]=r}}),h.className=f.join(" ")}c.push(p[1]),l.appendChild(h),l=h}return u}var 
Ei=[[1470,1470],[1472,1472],[1475,1475],[1478,1478],[1488,1514],[1520,1524],[1544,1544],[1547,1547],[1549,1549],[1563,1563],[1566,1610],[1645,1647],[1649,1749],[1765,1766],[1774,1775],[1786,1805],[1807,1808],[1810,1839],[1869,1957],[1969,1969],[1984,2026],[2036,2037],[2042,2042],[2048,2069],[2074,2074],[2084,2084],[2088,2088],[2096,2110],[2112,2136],[2142,2142],[2208,2208],[2210,2220],[8207,8207],[64285,64285],[64287,64296],[64298,64310],[64312,64316],[64318,64318],[64320,64321],[64323,64324],[64326,64449],[64467,64829],[64848,64911],[64914,64967],[65008,65020],[65136,65140],[65142,65276],[67584,67589],[67592,67592],[67594,67637],[67639,67640],[67644,67644],[67647,67669],[67671,67679],[67840,67867],[67872,67897],[67903,67903],[67968,68023],[68030,68031],[68096,68096],[68112,68115],[68117,68119],[68121,68147],[68160,68167],[68176,68184],[68192,68223],[68352,68405],[68416,68437],[68440,68466],[68472,68479],[68608,68680],[126464,126467],[126469,126495],[126497,126498],[126500,126500],[126503,126503],[126505,126514],[126516,126519],[126521,126521],[126523,126523],[126530,126530],[126535,126535],[126537,126537],[126539,126539],[126541,126543],[126545,126546],[126548,126548],[126551,126551],[126553,126553],[126555,126555],[126557,126557],[126559,126559],[126561,126562],[126564,126564],[126567,126570],[126572,126578],[126580,126583],[126585,126588],[126590,126590],[126592,126601],[126603,126619],[126625,126627],[126629,126633],[126635,126651],[1114109,1114109]];function wi(e){for(var t=0;t=i[0]&&e<=i[1])return!0}return!1}function Li(){}function Pi(e,t,i){Li.call(this),this.cue=t,this.cueDiv=Ci(e,t.text);var n={color:"rgba(255, 255, 255, 1)",backgroundColor:"rgba(0, 0, 0, 0.8)",position:"relative",left:0,right:0,top:0,bottom:0,display:"inline",writingMode:""===t.vertical?"horizontal-tb":"lr"===t.vertical?"vertical-lr":"vertical-rl",unicodeBidi:"plaintext"};this.applyStyles(n,this.cueDiv),this.div=e.document.createElement("div"),n={direction:function(e){var t=[],i="";if(!e||!e.childNodes)return"ltr";function r(e,t){for(var i=t.childNodes.length-1;0<=i;i--)e.push(t.childNodes[i])}function s(e){if(!e||!e.length)return null;var t=e.pop(),i=t.textContent||t.innerText;if(i){var n=i.match(/^.*(\n|\r)/);return n?n[e.length=0]:i}return"ruby"===t.tagName?s(e):t.childNodes?(r(e,t),s(e)):void 0}for(r(t,e);i=s(t);)for(var n=0;nh&&(c=c<0?-1:1,c*=Math.ceil(h/l)*l),r<0&&(c+=""===n.vertical?o.height:o.width,s=s.reverse()),i.move(d,c)}else{var p=i.lineHeight/o.height*100;switch(n.lineAlign){case"center":r-=p/2;break;case"end":r-=p}switch(n.vertical){case"":t.applyStyles({top:t.formatStyle(r,"%")});break;case"rl":t.applyStyles({left:t.formatStyle(r,"%")});break;case"lr":t.applyStyles({right:t.formatStyle(r,"%")})}s=["+y","-x","+x","-y"],i=new Ai(t)}var f=function(e,t){for(var i,n=new Ai(e),r=1,s=0;se.left&&this.tope.top},Ai.prototype.overlapsAny=function(e){for(var t=0;t=e.top&&this.bottom<=e.bottom&&this.left>=e.left&&this.right<=e.right},Ai.prototype.overlapsOppositeAxis=function(e,t){switch(t){case"+x":return this.lefte.right;case"+y":return this.tope.bottom}},Ai.prototype.intersectPercentage=function(e){return Math.max(0,Math.min(this.right,e.right)-Math.max(this.left,e.left))*Math.max(0,Math.min(this.bottom,e.bottom)-Math.max(this.top,e.top))/(this.height*this.width)},Ai.prototype.toCSSCompatValues=function(e){return{top:this.top-e.top,bottom:e.bottom-this.bottom,left:this.left-e.left,right:e.right-this.right,height:this.height,width:this.width}},Ai.getSimpleBoxPosition=function(e){var 
t=e.div?e.div.offsetHeight:e.tagName?e.offsetHeight:0,i=e.div?e.div.offsetWidth:e.tagName?e.offsetWidth:0,n=e.div?e.div.offsetTop:e.tagName?e.offsetTop:0;return{left:(e=e.div?e.div.getBoundingClientRect():e.tagName?e.getBoundingClientRect():e).left,right:e.right,top:e.top||n,height:e.height||t,bottom:e.bottom||n+(e.height||t),width:e.width||i}},xi.StringDecoder=function(){return{decode:function(e){if(!e)return"";if("string"!=typeof e)throw new Error("Error - expected string data.");return decodeURIComponent(encodeURIComponent(e))}}},xi.convertCueToDOMTree=function(e,t){return e&&t?Ci(e,t):null};xi.processCues=function(n,r,e){if(!n||!r||!e)return null;for(;e.firstChild;)e.removeChild(e.firstChild);var s=n.document.createElement("div");if(s.style.position="absolute",s.style.left="0",s.style.right="0",s.style.top="0",s.style.bottom="0",s.style.margin="1.5%",e.appendChild(s),function(e){for(var t=0;t',className:this.buildCSSClass(),tabIndex:0},t),"button"===e&&p.error("Creating a ClickableComponent with an HTML element of "+e+" is not supported; use a Button instead."),i=m({role:"button"},i),this.tabIndex_=t.tabIndex;var n=r.prototype.createEl.call(this,e,t,i);return this.createControlTextEl(n),n},t.dispose=function(){this.controlTextEl_=null,r.prototype.dispose.call(this)},t.createControlTextEl=function(e){return this.controlTextEl_=S("span",{className:"vjs-control-text"},{"aria-live":"polite"}),e&&e.appendChild(this.controlTextEl_),this.controlText(this.controlText_,e),this.controlTextEl_},t.controlText=function(e,t){if(void 0===t&&(t=this.el()),void 0===e)return this.controlText_||"Need Text";var i=this.localize(e);this.controlText_=e,k(this.controlTextEl_,i),this.nonIconControl||t.setAttribute("title",i)},t.buildCSSClass=function(){return"vjs-control vjs-button "+r.prototype.buildCSSClass.call(this)},t.enable=function(){this.enabled_||(this.enabled_=!0,this.removeClass("vjs-disabled"),this.el_.setAttribute("aria-disabled","false"),"undefined"!=typeof this.tabIndex_&&this.el_.setAttribute("tabIndex",this.tabIndex_),this.on(["tap","click"],this.handleClick),this.on("keydown",this.handleKeyDown))},t.disable=function(){this.enabled_=!1,this.addClass("vjs-disabled"),this.el_.setAttribute("aria-disabled","true"),"undefined"!=typeof this.tabIndex_&&this.el_.removeAttribute("tabIndex"),this.off("mouseover",this.handleMouseOver),this.off("mouseout",this.handleMouseOut),this.off(["tap","click"],this.handleClick),this.off("keydown",this.handleKeyDown)},t.handleClick=function(e){this.options_.clickHandler&&this.options_.clickHandler.call(this,arguments)},t.handleKeyDown=function(e){Lt.isEventKey(e,"Space")||Lt.isEventKey(e,"Enter")?(e.preventDefault(),e.stopPropagation(),this.trigger("click")):r.prototype.handleKeyDown.call(this,e)},e}(Ue);Ue.registerComponent("ClickableComponent",rn);var sn=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).update(),e.on("posterchange",ge(Me(i),i.update)),i}He(e,n);var t=e.prototype;return t.dispose=function(){this.player().off("posterchange",this.update),n.prototype.dispose.call(this)},t.createEl=function(){return S("div",{className:"vjs-poster",tabIndex:-1})},t.update=function(e){var t=this.player().poster();this.setSrc(t),t?this.show():this.hide()},t.setSrc=function(e){var 
t="";e&&(t='url("'+e+'")'),this.el_.style.backgroundImage=t},t.handleClick=function(e){this.player_.controls()&&(this.player_.tech(!0)&&this.player_.tech(!0).focus(),this.player_.paused()?kt(this.player_.play()):this.player_.pause())},e}(rn);Ue.registerComponent("PosterImage",sn);var an="#222",on={monospace:"monospace",sansSerif:"sans-serif",serif:"serif",monospaceSansSerif:'"Andale Mono", "Lucida Console", monospace',monospaceSerif:'"Courier New", monospace',proportionalSansSerif:"sans-serif",proportionalSerif:"serif",casual:'"Comic Sans MS", Impact, fantasy',script:'"Monotype Corsiva", cursive',smallcaps:'"Andale Mono", "Lucida Console", monospace, sans-serif'};function un(e,t){var i;if(4===e.length)i=e[1]+e[1]+e[2]+e[2]+e[3]+e[3];else{if(7!==e.length)throw new Error("Invalid color code provided, "+e+"; must be formatted as e.g. #f0e or #f604e2.");i=e.slice(1)}return"rgba("+parseInt(i.slice(0,2),16)+","+parseInt(i.slice(2,4),16)+","+parseInt(i.slice(4,6),16)+","+t+")"}function ln(e,t,i){try{e.style[t]=i}catch(e){return}}var cn=function(s){function e(i,e,t){var n;n=s.call(this,i,e,t)||this;var r=ge(Me(n),n.updateDisplay);return i.on("loadstart",ge(Me(n),n.toggleDisplay)),i.on("texttrackchange",r),i.on("loadedmetadata",ge(Me(n),n.preselectTrack)),i.ready(ge(Me(n),function(){if(i.tech_&&i.tech_.featuresNativeTextTracks)this.hide();else{i.on("fullscreenchange",r),i.on("playerresize",r),y.addEventListener("orientationchange",r),i.on("dispose",function(){return y.removeEventListener("orientationchange",r)});for(var e=this.options_.playerOptions.tracks||[],t=0;t',className:this.buildCSSClass()},t),i=m({type:"button"},i);var n=Ue.prototype.createEl.call(this,"button",t,i);return this.createControlTextEl(n),n},i.addChild=function(e,t){void 0===t&&(t={});var i=this.constructor.name;return p.warn("Adding an actionable (user controllable) child to a Button ("+i+") is not supported; use a ClickableComponent instead."),Ue.prototype.addChild.call(this,e,t)},i.enable=function(){t.prototype.enable.call(this),this.el_.removeAttribute("disabled")},i.disable=function(){t.prototype.disable.call(this),this.el_.setAttribute("disabled","disabled")},i.handleKeyDown=function(e){Lt.isEventKey(e,"Space")||Lt.isEventKey(e,"Enter")?e.stopPropagation():t.prototype.handleKeyDown.call(this,e)},e}(rn);Ue.registerComponent("Button",dn);var pn=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).mouseused_=!1,i.on("mousedown",i.handleMouseDown),i}He(e,n);var t=e.prototype;return t.buildCSSClass=function(){return"vjs-big-play-button"},t.handleClick=function(e){var t=this.player_.play();if(this.mouseused_&&e.clientX&&e.clientY){var i=this.player_.usingPlugin("eme")&&this.player_.eme.sessions&&0'+this.localize(this.labelText_)+" "});return this.contentEl_=S("span",{className:e+"-display"},{"aria-live":"off",role:"presentation"}),t.appendChild(this.contentEl_),t},t.dispose=function(){this.contentEl_=null,this.textNode_=null,n.prototype.dispose.call(this)},t.updateTextNode_=function(e){var t=this;void 0===e&&(e=0),e=yn(e),this.formattedTime_!==e&&(this.formattedTime_=e,this.requestAnimationFrame(function(){if(t.contentEl_){var e=t.textNode_;t.textNode_=h.createTextNode(t.formattedTime_),t.textNode_&&(e?t.contentEl_.replaceChild(t.textNode_,e):t.contentEl_.appendChild(t.textNode_))}}))},t.updateContent=function(e){},e}(Ue);_n.prototype.labelText_="Time",_n.prototype.controlText_="Time",Ue.registerComponent("TimeDisplay",_n);var bn=function(e){function t(){return e.apply(this,arguments)||this}He(t,e);var 
i=t.prototype;return i.buildCSSClass=function(){return"vjs-current-time"},i.updateContent=function(e){var t;t=this.player_.ended()?this.player_.duration():this.player_.scrubbing()?this.player_.getCache().currentTime:this.player_.currentTime(),this.updateTextNode_(t)},t}(_n);bn.prototype.labelText_="Current Time",bn.prototype.controlText_="Current Time",Ue.registerComponent("CurrentTimeDisplay",bn);var Tn=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).on(e,"durationchange",i.updateContent),i.on(e,"loadstart",i.updateContent),i.on(e,"loadedmetadata",i.updateContent),i}He(e,n);var t=e.prototype;return t.buildCSSClass=function(){return"vjs-duration"},t.updateContent=function(e){var t=this.player_.duration();this.updateTextNode_(t)},e}(_n);Tn.prototype.labelText_="Duration",Tn.prototype.controlText_="Duration",Ue.registerComponent("DurationDisplay",Tn);var Sn=function(e){function t(){return e.apply(this,arguments)||this}return He(t,e),t.prototype.createEl=function(){return e.prototype.createEl.call(this,"div",{className:"vjs-time-control vjs-time-divider",innerHTML:"/
"},{"aria-hidden":!0})},t}(Ue);Ue.registerComponent("TimeDivider",Sn);var kn=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).on(e,"durationchange",i.updateContent),i}He(e,n);var t=e.prototype;return t.buildCSSClass=function(){return"vjs-remaining-time"},t.createEl=function(){var e=n.prototype.createEl.call(this);return e.insertBefore(S("span",{},{"aria-hidden":!0},"-"),this.contentEl_),e},t.updateContent=function(e){var t;"number"==typeof this.player_.duration()&&(t=this.player_.ended()?0:this.player_.remainingTimeDisplay?this.player_.remainingTimeDisplay():this.player_.remainingTime(),this.updateTextNode_(t))},e}(_n);kn.prototype.labelText_="Remaining Time",kn.prototype.controlText_="Remaining Time",Ue.registerComponent("RemainingTimeDisplay",kn);var Cn=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).updateShowing(),i.on(i.player(),"durationchange",i.updateShowing),i}He(e,n);var t=e.prototype;return t.createEl=function(){var e=n.prototype.createEl.call(this,"div",{className:"vjs-live-control vjs-control"});return this.contentEl_=S("div",{className:"vjs-live-display",innerHTML:''+this.localize("Stream Type")+" "+this.localize("LIVE")},{"aria-live":"off"}),e.appendChild(this.contentEl_),e},t.dispose=function(){this.contentEl_=null,n.prototype.dispose.call(this)},t.updateShowing=function(e){this.player().duration()===1/0?this.show():this.hide()},e}(Ue);Ue.registerComponent("LiveDisplay",Cn);var En=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).updateLiveEdgeStatus(),i.player_.liveTracker&&i.on(i.player_.liveTracker,"liveedgechange",i.updateLiveEdgeStatus),i}He(e,n);var t=e.prototype;return t.createEl=function(){var e=n.prototype.createEl.call(this,"button",{className:"vjs-seek-to-live-control vjs-control"});return this.textEl_=S("span",{className:"vjs-seek-to-live-text",innerHTML:this.localize("LIVE")},{"aria-hidden":"true"}),e.appendChild(this.textEl_),e},t.updateLiveEdgeStatus=function(e){!this.player_.liveTracker||this.player_.liveTracker.atLiveEdge()?(this.setAttribute("aria-disabled",!0),this.addClass("vjs-at-live-edge"),this.controlText("Seek to live, currently playing live")):(this.setAttribute("aria-disabled",!1),this.removeClass("vjs-at-live-edge"),this.controlText("Seek to live, currently behind live"))},t.handleClick=function(){this.player_.liveTracker.seekToLiveEdge()},t.dispose=function(){this.player_.liveTracker&&this.off(this.player_.liveTracker,"liveedgechange",this.updateLiveEdgeStatus),this.textEl_=null,n.prototype.dispose.call(this)},e}(dn);En.prototype.controlText_="Seek to live, currently playing live",Ue.registerComponent("SeekToLive",En);function wn(e,t,i){return e=Number(e),Math.min(i,Math.max(t,isNaN(e)?t:e))}var Ln=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).bar=i.getChild(i.options_.barName),i.vertical(!!i.options_.vertical),i.enable(),i}He(e,n);var t=e.prototype;return t.enabled=function(){return this.enabled_},t.enable=function(){this.enabled()||(this.on("mousedown",this.handleMouseDown),this.on("touchstart",this.handleMouseDown),this.on("keydown",this.handleKeyDown),this.on("click",this.handleClick),this.on(this.player_,"controlsvisible",this.update),this.playerEvent&&this.on(this.player_,this.playerEvent,this.update),this.removeClass("disabled"),this.setAttribute("tabindex",0),this.enabled_=!0)},t.disable=function(){if(this.enabled()){var 
e=this.bar.el_.ownerDocument;this.off("mousedown",this.handleMouseDown),this.off("touchstart",this.handleMouseDown),this.off("keydown",this.handleKeyDown),this.off("click",this.handleClick),this.off(this.player_,"controlsvisible",this.update),this.off(e,"mousemove",this.handleMouseMove),this.off(e,"mouseup",this.handleMouseUp),this.off(e,"touchmove",this.handleMouseMove),this.off(e,"touchend",this.handleMouseUp),this.removeAttribute("tabindex"),this.addClass("disabled"),this.playerEvent&&this.off(this.player_,this.playerEvent,this.update),this.enabled_=!1}},t.createEl=function(e,t,i){return void 0===t&&(t={}),void 0===i&&(i={}),t.className=t.className+" vjs-slider",t=m({tabIndex:0},t),i=m({role:"slider","aria-valuenow":0,"aria-valuemin":0,"aria-valuemax":100,tabIndex:0},i),n.prototype.createEl.call(this,e,t,i)},t.handleMouseDown=function(e){var t=this.bar.el_.ownerDocument;"mousedown"===e.type&&e.preventDefault(),"touchstart"!==e.type||Je||e.preventDefault(),R(),this.addClass("vjs-sliding"),this.trigger("slideractive"),this.on(t,"mousemove",this.handleMouseMove),this.on(t,"mouseup",this.handleMouseUp),this.on(t,"touchmove",this.handleMouseMove),this.on(t,"touchend",this.handleMouseUp),this.handleMouseMove(e)},t.handleMouseMove=function(e){},t.handleMouseUp=function(){var e=this.bar.el_.ownerDocument;U(),this.removeClass("vjs-sliding"),this.trigger("sliderinactive"),this.off(e,"mousemove",this.handleMouseMove),this.off(e,"mouseup",this.handleMouseUp),this.off(e,"touchmove",this.handleMouseMove),this.off(e,"touchend",this.handleMouseUp),this.update()},t.update=function(){var t=this;if(this.el_&&this.bar){var i=this.getProgress();return i===this.progress_||(this.progress_=i,this.requestAnimationFrame(function(){var e=t.vertical()?"height":"width";t.bar.el().style[e]=(100*i).toFixed(2)+"%"})),i}},t.getProgress=function(){return Number(wn(this.getPercent(),0,1).toFixed(4))},t.calculateDistance=function(e){var t=B(this.el_,e);return this.vertical()?t.y:t.x},t.handleKeyDown=function(e){Lt.isEventKey(e,"Left")||Lt.isEventKey(e,"Down")?(e.preventDefault(),e.stopPropagation(),this.stepBack()):Lt.isEventKey(e,"Right")||Lt.isEventKey(e,"Up")?(e.preventDefault(),e.stopPropagation(),this.stepForward()):n.prototype.handleKeyDown.call(this,e)},t.handleClick=function(e){e.stopPropagation(),e.preventDefault()},t.vertical=function(e){if(void 0===e)return this.vertical_||!1;this.vertical_=!!e,this.vertical_?this.addClass("vjs-slider-vertical"):this.addClass("vjs-slider-horizontal")},e}(Ue);Ue.registerComponent("Slider",Ln);function Pn(e,t){return wn(e/t*100,0,100).toFixed(2)+"%"}var An=function(r){function e(e,t){var i;return(i=r.call(this,e,t)||this).partEls_=[],i.on(e,"progress",i.update),i}He(e,r);var t=e.prototype;return t.createEl=function(){var e=r.prototype.createEl.call(this,"div",{className:"vjs-load-progress"}),t=S("span",{className:"vjs-control-text"}),i=S("span",{textContent:this.localize("Loaded")}),n=h.createTextNode(": ");return this.percentageEl_=S("span",{className:"vjs-control-text-loaded-percentage",textContent:"0%"}),e.appendChild(t),t.appendChild(i),t.appendChild(n),t.appendChild(this.percentageEl_),e},t.dispose=function(){this.partEls_=null,this.percentageEl_=null,r.prototype.dispose.call(this)},t.update=function(e){var h=this;this.requestAnimationFrame(function(){var 
e=h.player_.liveTracker,t=h.player_.buffered(),i=e&&e.isLive()?e.seekableEnd():h.player_.duration(),n=h.player_.bufferedEnd(),r=h.partEls_,s=Pn(n,i);h.percent_!==s&&(h.el_.style.width=s,k(h.percentageEl_,s),h.percent_=s);for(var a=0;at.length;c--)h.el_.removeChild(r[c-1]);r.length=t.length})},e}(Ue);Ue.registerComponent("LoadProgressBar",An);var In=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).update=ve(ge(Me(i),i.update),30),i}He(e,n);var t=e.prototype;return t.createEl=function(){return n.prototype.createEl.call(this,"div",{className:"vjs-time-tooltip"},{"aria-hidden":"true"})},t.update=function(e,t,i){var n=M(this.el_),r=M(this.player_.el()),s=e.width*t;if(r&&n){var a=e.left-r.left+s,o=e.width-s+(r.right-e.right),u=n.width/2;an.width&&(u=n.width),this.el_.style.right="-"+u+"px",this.write(i)}},t.write=function(e){k(this.el_,e)},t.updateTime=function(r,s,a,o){var u=this;this.rafId_&&this.cancelAnimationFrame(this.rafId_),this.rafId_=this.requestAnimationFrame(function(){var e,t=u.player_.duration();if(u.player_.liveTracker&&u.player_.liveTracker.isLive()){var i=u.player_.liveTracker.liveWindow(),n=i-s*i;e=(n<1?"":"-")+yn(n,i)}else e=yn(a,t);u.update(r,s,e),o&&o()})},e}(Ue);Ue.registerComponent("TimeTooltip",In);var xn=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).update=ve(ge(Me(i),i.update),30),i}He(e,n);var t=e.prototype;return t.createEl=function(){return n.prototype.createEl.call(this,"div",{className:"vjs-play-progress vjs-slider-bar"},{"aria-hidden":"true"})},t.update=function(e,t){var i=this.getChild("timeTooltip");if(i){var n=this.player_.scrubbing()?this.player_.getCache().currentTime:this.player_.currentTime();i.updateTime(e,t,n)}},e}(Ue);xn.prototype.options_={children:[]},at||Ke||xn.prototype.options_.children.push("timeTooltip"),Ue.registerComponent("PlayProgressBar",xn);var On=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).update=ve(ge(Me(i),i.update),30),i}He(e,n);var t=e.prototype;return t.createEl=function(){return n.prototype.createEl.call(this,"div",{className:"vjs-mouse-display"})},t.update=function(e,t){var i=this,n=t*this.player_.duration();this.getChild("timeTooltip").updateTime(e,t,n,function(){i.el_.style.left=e.width*t+"px"})},e}(Ue);On.prototype.options_={children:["timeTooltip"]},Ue.registerComponent("MouseTimeDisplay",On);var Dn=function(s){function e(e,t){var i;return(i=s.call(this,e,t)||this).setEventHandlers_(),i}He(e,s);var t=e.prototype;return t.setEventHandlers_=function(){this.update_=ge(this,this.update),this.update=ve(this.update_,30),this.on(this.player_,["ended","durationchange","timeupdate"],this.update),this.player_.liveTracker&&this.on(this.player_.liveTracker,"liveedgechange",this.update),this.updateInterval=null,this.on(this.player_,["playing"],this.enableInterval_),this.on(this.player_,["ended","pause","waiting"],this.disableInterval_),"hidden"in h&&"visibilityState"in h&&this.on(h,"visibilitychange",this.toggleVisibility_)},t.toggleVisibility_=function(e){h.hidden?this.disableInterval_(e):(this.enableInterval_(),this.update())},t.enableInterval_=function(){this.updateInterval||(this.updateInterval=this.setInterval(this.update,30))},t.disableInterval_=function(e){this.player_.liveTracker&&this.player_.liveTracker.isLive()&&"ended"!==e.type||this.updateInterval&&(this.clearInterval(this.updateInterval),this.updateInterval=null)},t.createEl=function(){return s.prototype.createEl.call(this,"div",{className:"vjs-progress-holder"},{"aria-label":this.localize("Progress 
Bar")})},t.update=function(e){var n=this,r=s.prototype.update.call(this);return this.requestAnimationFrame(function(){var e=n.player_.ended()?n.player_.duration():n.getCurrentTime_(),t=n.player_.liveTracker,i=n.player_.duration();t&&t.isLive()&&(i=n.player_.liveTracker.liveCurrentTime()),n.percent_!==r&&(n.el_.setAttribute("aria-valuenow",(100*r).toFixed(2)),n.percent_=r),n.currentTime_===e&&n.duration_===i||(n.el_.setAttribute("aria-valuetext",n.localize("progress bar timing: currentTime={1} duration={2}",[yn(e,i),yn(i,i)],"{1} of {2}")),n.currentTime_=e,n.duration_=i),n.bar&&n.bar.update(M(n.el()),n.getProgress())}),r},t.getCurrentTime_=function(){return this.player_.scrubbing()?this.player_.getCache().currentTime:this.player_.currentTime()},t.getPercent=function(){var e,t=this.getCurrentTime_(),i=this.player_.liveTracker;return i&&i.isLive()?(e=(t-i.seekableStart())/i.liveWindow(),i.atLiveEdge()&&(e=1)):e=t/this.player_.duration(),e},t.handleMouseDown=function(e){W(e)&&(e.stopPropagation(),this.player_.scrubbing(!0),this.videoWasPlaying=!this.player_.paused(),this.player_.pause(),s.prototype.handleMouseDown.call(this,e))},t.handleMouseMove=function(e){if(W(e)){var t,i=this.calculateDistance(e),n=this.player_.liveTracker;if(n&&n.isLive()){var r=n.seekableStart(),s=n.liveCurrentTime();if(s<=(t=r+i*n.liveWindow())&&(t=s),t<=r&&(t=r+.1),t===1/0)return}else(t=i*this.player_.duration())===this.player_.duration()&&(t-=.1);this.player_.currentTime(t)}},t.enable=function(){s.prototype.enable.call(this);var e=this.getChild("mouseTimeDisplay");e&&e.show()},t.disable=function(){s.prototype.disable.call(this);var e=this.getChild("mouseTimeDisplay");e&&e.hide()},t.handleMouseUp=function(e){s.prototype.handleMouseUp.call(this,e),e&&e.stopPropagation(),this.player_.scrubbing(!1),this.player_.trigger({type:"timeupdate",target:this,manuallyTriggered:!0}),this.videoWasPlaying?kt(this.player_.play()):this.update_()},t.stepForward=function(){this.player_.currentTime(this.player_.currentTime()+5)},t.stepBack=function(){this.player_.currentTime(this.player_.currentTime()-5)},t.handleAction=function(e){this.player_.paused()?this.player_.play():this.player_.pause()},t.handleKeyDown=function(e){if(Lt.isEventKey(e,"Space")||Lt.isEventKey(e,"Enter"))e.preventDefault(),e.stopPropagation(),this.handleAction(e);else if(Lt.isEventKey(e,"Home"))e.preventDefault(),e.stopPropagation(),this.player_.currentTime(0);else if(Lt.isEventKey(e,"End"))e.preventDefault(),e.stopPropagation(),this.player_.currentTime(this.player_.duration());else if(/^[0-9]$/.test(Lt(e))){e.preventDefault(),e.stopPropagation();var t=10*(Lt.codes[Lt(e)]-Lt.codes[0])/100;this.player_.currentTime(this.player_.duration()*t)}else Lt.isEventKey(e,"PgDn")?(e.preventDefault(),e.stopPropagation(),this.player_.currentTime(this.player_.currentTime()-60)):Lt.isEventKey(e,"PgUp")?(e.preventDefault(),e.stopPropagation(),this.player_.currentTime(this.player_.currentTime()+60)):s.prototype.handleKeyDown.call(this,e)},e}(Ln);Dn.prototype.options_={children:["loadProgressBar","playProgressBar"],barName:"playProgressBar"},at||Ke||Dn.prototype.options_.children.splice(1,0,"mouseTimeDisplay"),Ue.registerComponent("SeekBar",Dn);var Rn=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).handleMouseMove=ve(ge(Me(i),i.handleMouseMove),30),i.throttledHandleMouseSeek=ve(ge(Me(i),i.handleMouseSeek),30),i.enable(),i}He(e,n);var t=e.prototype;return t.createEl=function(){return n.prototype.createEl.call(this,"div",{className:"vjs-progress-control 
vjs-control"})},t.handleMouseMove=function(e){var t=this.getChild("seekBar");if(t){var i=t.getChild("playProgressBar"),n=t.getChild("mouseTimeDisplay");if(i||n){var r=t.el(),s=M(r),a=B(r,e).x;a=wn(0,1,a),n&&n.update(s,a),i&&i.update(s,t.getProgress())}}},t.handleMouseSeek=function(e){var t=this.getChild("seekBar");t&&t.handleMouseMove(e)},t.enabled=function(){return this.enabled_},t.disable=function(){this.children().forEach(function(e){return e.disable&&e.disable()}),this.enabled()&&(this.off(["mousedown","touchstart"],this.handleMouseDown),this.off(this.el_,"mousemove",this.handleMouseMove),this.handleMouseUp(),this.addClass("disabled"),this.enabled_=!1)},t.enable=function(){this.children().forEach(function(e){return e.enable&&e.enable()}),this.enabled()||(this.on(["mousedown","touchstart"],this.handleMouseDown),this.on(this.el_,"mousemove",this.handleMouseMove),this.removeClass("disabled"),this.enabled_=!0)},t.handleMouseDown=function(e){var t=this.el_.ownerDocument,i=this.getChild("seekBar");i&&i.handleMouseDown(e),this.on(t,"mousemove",this.throttledHandleMouseSeek),this.on(t,"touchmove",this.throttledHandleMouseSeek),this.on(t,"mouseup",this.handleMouseUp),this.on(t,"touchend",this.handleMouseUp)},t.handleMouseUp=function(e){var t=this.el_.ownerDocument,i=this.getChild("seekBar");i&&i.handleMouseUp(e),this.off(t,"mousemove",this.throttledHandleMouseSeek),this.off(t,"touchmove",this.throttledHandleMouseSeek),this.off(t,"mouseup",this.handleMouseUp),this.off(t,"touchend",this.handleMouseUp)},e}(Ue);Rn.prototype.options_={children:["seekBar"]},Ue.registerComponent("ProgressControl",Rn);var Un=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).on(e,["enterpictureinpicture","leavepictureinpicture"],i.handlePictureInPictureChange),h.pictureInPictureEnabled||i.disable(),i}He(e,n);var t=e.prototype;return t.buildCSSClass=function(){return"vjs-picture-in-picture-control "+n.prototype.buildCSSClass.call(this)},t.handlePictureInPictureChange=function(e){this.player_.isInPictureInPicture()?this.controlText("Exit Picture-in-Picture"):this.controlText("Picture-in-Picture")},t.handleClick=function(e){this.player_.isInPictureInPicture()?this.player_.exitPictureInPicture():this.player_.requestPictureInPicture()},e}(dn);Un.prototype.controlText_="Picture-in-Picture",Ue.registerComponent("PictureInPictureToggle",Un);var Mn=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).on(e,"fullscreenchange",i.handleFullscreenChange),!1===h[e.fsApi_.fullscreenEnabled]&&i.disable(),i}He(e,n);var t=e.prototype;return t.buildCSSClass=function(){return"vjs-fullscreen-control "+n.prototype.buildCSSClass.call(this)},t.handleFullscreenChange=function(e){this.player_.isFullscreen()?this.controlText("Non-Fullscreen"):this.controlText("Fullscreen")},t.handleClick=function(e){this.player_.isFullscreen()?this.player_.exitFullscreen():this.player_.requestFullscreen()},e}(dn);Mn.prototype.controlText_="Fullscreen",Ue.registerComponent("FullscreenToggle",Mn);var Nn=function(e){function t(){return e.apply(this,arguments)||this}return He(t,e),t.prototype.createEl=function(){return e.prototype.createEl.call(this,"div",{className:"vjs-volume-level",innerHTML:' '})},t}(Ue);Ue.registerComponent("VolumeLevel",Nn);var Bn=function(n){function e(e,t){var i;return(i=n.call(this,e,t)||this).on("slideractive",i.updateLastVolume_),i.on(e,"volumechange",i.updateARIAAttributes),e.ready(function(){return i.updateARIAAttributes()}),i}He(e,n);var t=e.prototype;return t.createEl=function(){return 
n.prototype.createEl.call(this,"div",{className:"vjs-volume-bar vjs-slider-bar"},{"aria-label":this.localize("Volume Level"),"aria-live":"polite"})},t.handleMouseDown=function(e){W(e)&&n.prototype.handleMouseDown.call(this,e)},t.handleMouseMove=function(e){W(e)&&(this.checkMuted(),this.player_.volume(this.calculateDistance(e)))},t.checkMuted=function(){this.player_.muted()&&this.player_.muted(!1)},t.getPercent=function(){return this.player_.muted()?0:this.player_.volume()},t.stepForward=function(){this.checkMuted(),this.player_.volume(this.player_.volume()+.1)},t.stepBack=function(){this.checkMuted(),this.player_.volume(this.player_.volume()-.1)},t.updateARIAAttributes=function(e){var t=this.player_.muted()?0:this.volumeAsPercentage_();this.el_.setAttribute("aria-valuenow",t),this.el_.setAttribute("aria-valuetext",t+"%")},t.volumeAsPercentage_=function(){return Math.round(100*this.player_.volume())},t.updateLastVolume_=function(){var e=this,t=this.player_.volume();this.one("sliderinactive",function(){0===e.player_.volume()&&e.player_.lastVolume_(t)})},e}(Ln);Bn.prototype.options_={children:["volumeLevel"],barName:"volumeLevel"},Bn.prototype.playerEvent="volumechange",Ue.registerComponent("VolumeBar",Bn);var jn=function(n){function e(e,t){var i;return void 0===t&&(t={}),t.vertical=t.vertical||!1,"undefined"!=typeof t.volumeBar&&!u(t.volumeBar)||(t.volumeBar=t.volumeBar||{},t.volumeBar.vertical=t.vertical),i=n.call(this,e,t)||this,function(e,t){t.tech_&&!t.tech_.featuresVolumeControl&&e.addClass("vjs-hidden"),e.on(t,"loadstart",function(){t.tech_.featuresVolumeControl?e.removeClass("vjs-hidden"):e.addClass("vjs-hidden")})}(Me(i),e),i.throttledHandleMouseMove=ve(ge(Me(i),i.handleMouseMove),30),i.on("mousedown",i.handleMouseDown),i.on("touchstart",i.handleMouseDown),i.on(i.volumeBar,["focus","slideractive"],function(){i.volumeBar.addClass("vjs-slider-active"),i.addClass("vjs-slider-active"),i.trigger("slideractive")}),i.on(i.volumeBar,["blur","sliderinactive"],function(){i.volumeBar.removeClass("vjs-slider-active"),i.removeClass("vjs-slider-active"),i.trigger("sliderinactive")}),i}He(e,n);var t=e.prototype;return t.createEl=function(){var e="vjs-volume-horizontal";return this.options_.vertical&&(e="vjs-volume-vertical"),n.prototype.createEl.call(this,"div",{className:"vjs-volume-control vjs-control "+e})},t.handleMouseDown=function(e){var t=this.el_.ownerDocument;this.on(t,"mousemove",this.throttledHandleMouseMove),this.on(t,"touchmove",this.throttledHandleMouseMove),this.on(t,"mouseup",this.handleMouseUp),this.on(t,"touchend",this.handleMouseUp)},t.handleMouseUp=function(e){var t=this.el_.ownerDocument;this.off(t,"mousemove",this.throttledHandleMouseMove),this.off(t,"touchmove",this.throttledHandleMouseMove),this.off(t,"mouseup",this.handleMouseUp),this.off(t,"touchend",this.handleMouseUp)},t.handleMouseMove=function(e){this.volumeBar.handleMouseMove(e)},e}(Ue);jn.prototype.options_={children:["volumeBar"]},Ue.registerComponent("VolumeControl",jn);var Fn=function(n){function e(e,t){var i;return i=n.call(this,e,t)||this,function(e,t){t.tech_&&!t.tech_.featuresMuteControl&&e.addClass("vjs-hidden"),e.on(t,"loadstart",function(){t.tech_.featuresMuteControl?e.removeClass("vjs-hidden"):e.addClass("vjs-hidden")})}(Me(i),e),i.on(e,["loadstart","volumechange"],i.update),i}He(e,n);var t=e.prototype;return t.buildCSSClass=function(){return"vjs-mute-control "+n.prototype.buildCSSClass.call(this)},t.handleClick=function(e){var t=this.player_.volume(),i=this.player_.lastVolume_();if(0===t){var 
n=i<.1?.1:i;this.player_.volume(n),this.player_.muted(!1)}else this.player_.muted(!this.player_.muted())},t.update=function(e){this.updateIcon_(),this.updateControlText_()},t.updateIcon_=function(){var e=this.player_.volume(),t=3;at&&this.player_.tech_&&this.player_.tech_.el_&&this.player_.muted(this.player_.tech_.el_.muted),0===e||this.player_.muted()?t=0:e<.33?t=1:e<.67&&(t=2);for(var i=0;i<4;i++)L(this.el_,"vjs-vol-"+i);w(this.el_,"vjs-vol-"+t)},t.updateControlText_=function(){var e=this.player_.muted()||0===this.player_.volume()?"Unmute":"Mute";this.controlText()!==e&&this.controlText(e)},e}(dn);Fn.prototype.controlText_="Mute",Ue.registerComponent("MuteToggle",Fn);var Hn=function(n){function e(e,t){var i;return void 0===t&&(t={}),"undefined"!=typeof t.inline?t.inline=t.inline:t.inline=!0,"undefined"!=typeof t.volumeControl&&!u(t.volumeControl)||(t.volumeControl=t.volumeControl||{},t.volumeControl.vertical=!t.inline),(i=n.call(this,e,t)||this).on(e,["loadstart"],i.volumePanelState_),i.on(i.muteToggle,"keyup",i.handleKeyPress),i.on(i.volumeControl,"keyup",i.handleVolumeControlKeyUp),i.on("keydown",i.handleKeyPress),i.on("mouseover",i.handleMouseOver),i.on("mouseout",i.handleMouseOut),i.on(i.volumeControl,["slideractive"],i.sliderActive_),i.on(i.volumeControl,["sliderinactive"],i.sliderInactive_),i}He(e,n);var t=e.prototype;return t.sliderActive_=function(){this.addClass("vjs-slider-active")},t.sliderInactive_=function(){this.removeClass("vjs-slider-active")},t.volumePanelState_=function(){this.volumeControl.hasClass("vjs-hidden")&&this.muteToggle.hasClass("vjs-hidden")&&this.addClass("vjs-hidden"),this.volumeControl.hasClass("vjs-hidden")&&!this.muteToggle.hasClass("vjs-hidden")&&this.addClass("vjs-mute-toggle-only")},t.createEl=function(){var e="vjs-volume-panel-horizontal";return this.options_.inline||(e="vjs-volume-panel-vertical"),n.prototype.createEl.call(this,"div",{className:"vjs-volume-panel vjs-control "+e})},t.dispose=function(){this.handleMouseOut(),n.prototype.dispose.call(this)},t.handleVolumeControlKeyUp=function(e){Lt.isEventKey(e,"Esc")&&this.muteToggle.focus()},t.handleMouseOver=function(e){this.addClass("vjs-hover"),he(h,"keyup",ge(this,this.handleKeyPress))},t.handleMouseOut=function(e){this.removeClass("vjs-hover"),de(h,"keyup",ge(this,this.handleKeyPress))},t.handleKeyPress=function(e){Lt.isEventKey(e,"Esc")&&this.handleMouseOut()},e}(Ue);Hn.prototype.options_={children:["muteToggle","volumeControl"]},Ue.registerComponent("VolumePanel",Hn);var Vn=function(n){function e(e,t){var i;return i=n.call(this,e,t)||this,t&&(i.menuButton_=t.menuButton),i.focusedChild_=-1,i.on("keydown",i.handleKeyDown),i.boundHandleBlur_=ge(Me(i),i.handleBlur),i.boundHandleTapClick_=ge(Me(i),i.handleTapClick),i}He(e,n);var t=e.prototype;return t.addEventListenerForItem=function(e){e instanceof Ue&&(this.on(e,"blur",this.boundHandleBlur_),this.on(e,["tap","click"],this.boundHandleTapClick_))},t.removeEventListenerForItem=function(e){e instanceof Ue&&(this.off(e,"blur",this.boundHandleBlur_),this.off(e,["tap","click"],this.boundHandleTapClick_))},t.removeChild=function(e){"string"==typeof e&&(e=this.getChild(e)),this.removeEventListenerForItem(e),n.prototype.removeChild.call(this,e)},t.addItem=function(e){var t=this.addChild(e);t&&this.addEventListenerForItem(t)},t.createEl=function(){var e=this.options_.contentElType||"ul";this.contentEl_=S(e,{className:"vjs-menu-content"}),this.contentEl_.setAttribute("role","menu");var 
t=n.prototype.createEl.call(this,"div",{append:this.contentEl_,className:"vjs-menu"});return t.appendChild(this.contentEl_),he(t,"click",function(e){e.preventDefault(),e.stopImmediatePropagation()}),t},t.dispose=function(){this.contentEl_=null,this.boundHandleBlur_=null,this.boundHandleTapClick_=null,n.prototype.dispose.call(this)},t.handleBlur=function(e){var t=e.relatedTarget||h.activeElement;if(!this.children().some(function(e){return e.el()===t})){var i=this.menuButton_;i&&i.buttonPressed_&&t!==i.el().firstChild&&i.unpressButton()}},t.handleTapClick=function(t){if(this.menuButton_){this.menuButton_.unpressButton();var e=this.children();if(!Array.isArray(e))return;var i=e.filter(function(e){return e.el()===t.target})[0];if(!i)return;"CaptionSettingsMenuItem"!==i.name()&&this.menuButton_.focus()}},t.handleKeyDown=function(e){Lt.isEventKey(e,"Left")||Lt.isEventKey(e,"Down")?(e.preventDefault(),e.stopPropagation(),this.stepForward()):(Lt.isEventKey(e,"Right")||Lt.isEventKey(e,"Up"))&&(e.preventDefault(),e.stopPropagation(),this.stepBack())},t.stepForward=function(){var e=0;void 0!==this.focusedChild_&&(e=this.focusedChild_+1),this.focus(e)},t.stepBack=function(){var e=0;void 0!==this.focusedChild_&&(e=this.focusedChild_-1),this.focus(e)},t.focus=function(e){void 0===e&&(e=0);var t=this.children().slice();t.length&&t[0].className&&/vjs-menu-title/.test(t[0].className)&&t.shift(),0=t.length&&(e=t.length-1),t[this.focusedChild_=e].el_.focus())},e}(Ue);Ue.registerComponent("Menu",Vn);var qn=function(r){function e(e,t){var i;void 0===t&&(t={}),(i=r.call(this,e,t)||this).menuButton_=new dn(e,t),i.menuButton_.controlText(i.controlText_),i.menuButton_.el_.setAttribute("aria-haspopup","true");var n=dn.prototype.buildCSSClass();return i.menuButton_.el_.className=i.buildCSSClass()+" "+n,i.menuButton_.removeClass("vjs-control"),i.addChild(i.menuButton_),i.update(),i.enabled_=!0,i.on(i.menuButton_,"tap",i.handleClick),i.on(i.menuButton_,"click",i.handleClick),i.on(i.menuButton_,"keydown",i.handleKeyDown),i.on(i.menuButton_,"mouseenter",function(){i.addClass("vjs-hover"),i.menu.show(),he(h,"keyup",ge(Me(i),i.handleMenuKeyUp))}),i.on("mouseleave",i.handleMouseLeave),i.on("keydown",i.handleSubmenuKeyDown),i}He(e,r);var t=e.prototype;return t.update=function(){var e=this.createMenu();this.menu&&(this.menu.dispose(),this.removeChild(this.menu)),this.menu=e,this.addChild(e),this.buttonPressed_=!1,this.menuButton_.el_.setAttribute("aria-expanded","false"),this.items&&this.items.length<=this.hideThreshold_?this.hide():this.show()},t.createMenu=function(){var e=new Vn(this.player_,{menuButton:this});if(this.hideThreshold_=0,this.options_.title){var t=S("li",{className:"vjs-menu-title",innerHTML:De(this.options_.title),tabIndex:-1});this.hideThreshold_+=1;var i=new Ue(this.player_,{el:t});e.addItem(i)}if(this.items=this.createItems(),this.items)for(var n=0;n