Source: lib/media/media_source_engine.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.media.MediaSourceEngine');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.log');
  9. goog.require('shaka.config.CodecSwitchingStrategy');
  10. goog.require('shaka.media.Capabilities');
  11. goog.require('shaka.media.ContentWorkarounds');
  12. goog.require('shaka.media.ClosedCaptionParser');
  13. goog.require('shaka.media.IClosedCaptionParser');
  14. goog.require('shaka.media.ManifestParser');
  15. goog.require('shaka.media.SegmentReference');
  16. goog.require('shaka.media.TimeRangesUtils');
  17. goog.require('shaka.text.TextEngine');
  18. goog.require('shaka.transmuxer.TransmuxerEngine');
  19. goog.require('shaka.util.BufferUtils');
  20. goog.require('shaka.util.Destroyer');
  21. goog.require('shaka.util.Dom');
  22. goog.require('shaka.util.Error');
  23. goog.require('shaka.util.EventManager');
  24. goog.require('shaka.util.FakeEvent');
  25. goog.require('shaka.util.Functional');
  26. goog.require('shaka.util.IDestroyable');
  27. goog.require('shaka.util.Id3Utils');
  28. goog.require('shaka.util.ManifestParserUtils');
  29. goog.require('shaka.util.MimeUtils');
  30. goog.require('shaka.util.Mp4BoxParsers');
  31. goog.require('shaka.util.Mp4Parser');
  32. goog.require('shaka.util.Platform');
  33. goog.require('shaka.util.PublicPromise');
  34. goog.require('shaka.util.StreamUtils');
  35. goog.require('shaka.util.TsParser');
  36. goog.require('shaka.lcevc.Dec');
  37. /**
  38. * @summary
  39. * MediaSourceEngine wraps all operations on MediaSource and SourceBuffers.
  40. * All asynchronous operations return a Promise, and all operations are
  41. * internally synchronized and serialized as needed. Operations that can
  42. * be done in parallel will be done in parallel.
  43. *
  44. * @implements {shaka.util.IDestroyable}
  45. */
  46. shaka.media.MediaSourceEngine = class {
  /**
   * @param {HTMLMediaElement} video The video element, whose source is tied to
   *   MediaSource during the lifetime of the MediaSourceEngine.
   * @param {!shaka.extern.TextDisplayer} textDisplayer
   *    The text displayer that will be used with the text engine.
   *    MediaSourceEngine takes ownership of the displayer. When
   *    MediaSourceEngine is destroyed, it will destroy the displayer.
   * @param {!shaka.media.MediaSourceEngine.PlayerInterface} playerInterface
   *        Interface for common player methods.
   * @param {?shaka.lcevc.Dec} [lcevcDec] Optional - LCEVC Decoder Object
   */
  constructor(video, textDisplayer, playerInterface, lcevcDec) {
    /** @private {HTMLMediaElement} */
    this.video_ = video;

    /** @private {?shaka.media.MediaSourceEngine.PlayerInterface} */
    this.playerInterface_ = playerInterface;

    // Set via configure(); must be non-null before init() is called.
    /** @private {?shaka.extern.MediaSourceConfiguration} */
    this.config_ = null;

    /** @private {shaka.extern.TextDisplayer} */
    this.textDisplayer_ = textDisplayer;

    /**
     * One SourceBuffer per non-text content type, keyed by type.
     * @private {!Object<shaka.util.ManifestParserUtils.ContentType,
     *                   SourceBuffer>}
     */
    this.sourceBuffers_ = {};

    /**
     * The full MIME type each SourceBuffer was created with.
     * @private {!Object<shaka.util.ManifestParserUtils.ContentType,
     *                   string>}
     */
    this.sourceBufferTypes_ = {};

    /**
     * Whether encrypted content is expected, per content type (derived from
     * the stream's drmInfos in initSourceBuffer_).
     * @private {!Object<shaka.util.ManifestParserUtils.ContentType,
     *                   boolean>}
     */
    this.expectedEncryption_ = {};

    // Created lazily by reinitText(); handles all text content.
    /** @private {shaka.text.TextEngine} */
    this.textEngine_ = null;

    /** @private {boolean} */
    this.segmentRelativeVttTiming_ = false;

    /** @private {?shaka.lcevc.Dec} */
    this.lcevcDec_ = lcevcDec || null;

    /**
     * Pending operations, serialized per content type.
     * @private {!Object<string,
     *                   !Array<shaka.media.MediaSourceEngine.Operation>>}
     */
    this.queues_ = {};

    /** @private {shaka.util.EventManager} */
    this.eventManager_ = new shaka.util.EventManager();

    /** @private {!Object<string, !shaka.extern.Transmuxer>} */
    this.transmuxers_ = {};

    /** @private {?shaka.media.IClosedCaptionParser} */
    this.captionParser_ = null;

    // Resolved once the MediaSource fires 'sourceopen'; see onSourceOpen_().
    /** @private {!shaka.util.PublicPromise} */
    this.mediaSourceOpen_ = new shaka.util.PublicPromise();

    // Object URL used to attach the MediaSource; revoked after 'sourceopen'.
    /** @private {string} */
    this.url_ = '';

    /** @private {boolean} */
    this.playbackHasBegun_ = false;

    // Toggled by ManagedMediaSource start/endstreaming events; always true
    // with a plain MediaSource.
    /** @private {boolean} */
    this.streamingAllowed_ = true;

    /** @private {boolean} */
    this.usingRemotePlayback_ = false;

    /** @private {HTMLSourceElement} */
    this.source_ = null;

    /**
     * Fallback source element with direct media URI, used for casting
     * purposes.
     * @private {HTMLSourceElement}
     */
    this.secondarySource_ = null;

    /** @private {MediaSource} */
    this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);

    /** @private {boolean} */
    this.reloadingMediaSource_ = false;

    /** @type {!shaka.util.Destroyer} */
    this.destroyer_ = new shaka.util.Destroyer(() => this.doDestroy_());

    /** @private {boolean} */
    this.sequenceMode_ = false;

    /** @private {string} */
    this.manifestType_ = shaka.media.ManifestParser.UNKNOWN;

    /** @private {boolean} */
    this.ignoreManifestTimestampsInSegmentsMode_ = false;

    /** @private {boolean} */
    this.attemptTimestampOffsetCalculation_ = false;

    /** @private {!shaka.util.PublicPromise<number>} */
    this.textSequenceModeOffset_ = new shaka.util.PublicPromise();

    /** @private {boolean} */
    this.needSplitMuxedContent_ = false;

    /** @private {?number} */
    this.lastDuration_ = null;

    /**
     * TS parsers used for timestamp extraction, keyed by content type.
     * @private {!Object<shaka.util.ManifestParserUtils.ContentType,
     *                   !shaka.util.TsParser>}
     */
    this.tsParsers_ = {};

    /** @private {?number} */
    this.firstVideoTimestamp_ = null;

    /** @private {?number} */
    this.firstVideoReferenceStartTime_ = null;

    /** @private {?number} */
    this.firstAudioTimestamp_ = null;

    /** @private {?number} */
    this.firstAudioReferenceStartTime_ = null;

    /** @private {!shaka.util.PublicPromise<number>} */
    this.audioCompensation_ = new shaka.util.PublicPromise();

    // Track the Remote Playback state (if the API is available) so that
    // operations can be skipped while playback is remote.
    if (this.video_.remote) {
      this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';

      this.eventManager_.listen(this.video_.remote, 'connect', () => {
        this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
      });

      this.eventManager_.listen(this.video_.remote, 'connecting', () => {
        this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
      });

      this.eventManager_.listen(this.video_.remote, 'disconnect', () => {
        this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
      });
    }
  }
  /**
   * Create a MediaSource object, attach it to the video element, and return
   * it. Resolves the given promise when the MediaSource is ready.
   *
   * Replaced by unit tests.
   *
   * @param {!shaka.util.PublicPromise} p
   * @return {!MediaSource}
   */
  createMediaSource(p) {
    this.streamingAllowed_ = true;
    /** @type {!MediaSource} */
    let mediaSource;
    // Prefer ManagedMediaSource when the platform provides it; it manages
    // buffering itself and signals when appending is (dis)allowed.
    if (window.ManagedMediaSource) {
      if (!this.secondarySource_) {
        // NOTE(review): remote playback appears to be re-enabled only when a
        // secondary (direct-URI) source is attached; see addSecondarySource().
        this.video_.disableRemotePlayback = true;
      }
      mediaSource = new ManagedMediaSource();

      this.eventManager_.listen(
          mediaSource, 'startstreaming', () => {
            shaka.log.info('MMS startstreaming');
            this.streamingAllowed_ = true;
          });

      this.eventManager_.listen(
          mediaSource, 'endstreaming', () => {
            shaka.log.info('MMS endstreaming');
            this.streamingAllowed_ = false;
          });
    } else {
      mediaSource = new MediaSource();
    }

    // Set up MediaSource on the video element.
    this.eventManager_.listenOnce(
        mediaSource, 'sourceopen', () => this.onSourceOpen_(p));

    // Correctly set when playback has begun.
    this.eventManager_.listenOnce(this.video_, 'playing', () => {
      this.playbackHasBegun_ = true;
    });

    // Store the object URL for releasing it later.
    this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource);

    // Attach via a <source> child element (not video.src) so that a
    // secondary fallback <source> can coexist with the MediaSource one.
    this.video_.removeAttribute('src');
    if (this.source_) {
      this.video_.removeChild(this.source_);
    }
    if (this.secondarySource_) {
      this.video_.removeChild(this.secondarySource_);
    }
    this.source_ = shaka.util.Dom.createSourceElement(this.url_);
    this.video_.appendChild(this.source_);
    if (this.secondarySource_) {
      // Re-append so the secondary source stays after the primary one.
      this.video_.appendChild(this.secondarySource_);
    }
    this.video_.load();

    return mediaSource;
  }
  220. /**
  221. * @param {string} uri
  222. * @param {string} mimeType
  223. */
  224. addSecondarySource(uri, mimeType) {
  225. if (!this.video_ || !window.ManagedMediaSource || !this.mediaSource_) {
  226. shaka.log.warning(
  227. 'Secondary source is used only with ManagedMediaSource');
  228. return;
  229. }
  230. if (this.secondarySource_) {
  231. this.video_.removeChild(this.secondarySource_);
  232. }
  233. this.secondarySource_ = shaka.util.Dom.createSourceElement(uri, mimeType);
  234. this.video_.appendChild(this.secondarySource_);
  235. this.video_.disableRemotePlayback = false;
  236. }
  237. /**
  238. * @param {shaka.util.PublicPromise} p
  239. * @private
  240. */
  241. onSourceOpen_(p) {
  242. goog.asserts.assert(this.url_, 'Must have object URL');
  243. // Release the object URL that was previously created, to prevent memory
  244. // leak.
  245. // createObjectURL creates a strong reference to the MediaSource object
  246. // inside the browser. Setting the src of the video then creates another
  247. // reference within the video element. revokeObjectURL will remove the
  248. // strong reference to the MediaSource object, and allow it to be
  249. // garbage-collected later.
  250. URL.revokeObjectURL(this.url_);
  251. p.resolve();
  252. }
  /**
   * Checks if a certain type is supported.
   *
   * @param {shaka.extern.Stream} stream
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @return {!Promise<boolean>}
   */
  static async isStreamSupported(stream, contentType) {
    // The segment index may need to be created before per-segment mime types
    // and codecs can be inspected.
    if (stream.createSegmentIndex) {
      await stream.createSegmentIndex();
    }
    if (!stream.segmentIndex) {
      return false;
    }
    // An empty index has nothing that could be unsupported.
    if (stream.segmentIndex.isEmpty()) {
      return true;
    }
    const MimeUtils = shaka.util.MimeUtils;
    const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    const StreamUtils = shaka.util.StreamUtils;
    const seenCombos = new Set();

    // Check each combination of mimeType and codecs within the segment index.
    // Unfortunately we cannot use fullMimeTypes, as we ALSO need to check the
    // getFullTypeWithAllCodecs (for the sake of the transmuxer) and we have no
    // way of going from a full mimeType to a full mimeType with all codecs.
    // As this function is only called in debug mode, a little inefficiency is
    // acceptable.
    for (const ref of stream.segmentIndex) {
      const mimeType = ref.mimeType || stream.mimeType || '';
      let codecs = ref.codecs || stream.codecs || '';

      // Optimization for the case where the codecs and mimetype of the stream
      // match the reference.
      if (mimeType == stream.mimeType && codecs == stream.codecs) {
        continue;
      }

      // Don't check the same combination of mimetype + codecs twice.
      const combo = mimeType + ':' + codecs;
      if (seenCombos.has(combo)) {
        continue;
      }
      seenCombos.add(combo);

      if (contentType == ContentType.TEXT) {
        const fullMimeType = MimeUtils.getFullType(mimeType, codecs);
        if (!shaka.text.TextEngine.isTypeSupported(fullMimeType)) {
          return false;
        }
      } else {
        // Normalize codecs before probing support.
        if (contentType == ContentType.VIDEO) {
          codecs = StreamUtils.getCorrectVideoCodecs(codecs);
        } else if (contentType == ContentType.AUDIO) {
          codecs = StreamUtils.getCorrectAudioCodecs(codecs, mimeType);
        }
        const extendedMimeType = MimeUtils.getExtendedType(
            stream, mimeType, codecs);
        const fullMimeType = MimeUtils.getFullTypeWithAllCodecs(
            mimeType, codecs);
        // Supported if playable natively by MSE OR convertible by a
        // transmuxer.
        if (!shaka.media.Capabilities.isTypeSupported(extendedMimeType) &&
            !TransmuxerEngine.isSupported(fullMimeType, stream.type)) {
          return false;
        }
      }
    }
    return true;
  }
  318. /**
  319. * Returns a map of MediaSource support for well-known types.
  320. *
  321. * @return {!Object<string, boolean>}
  322. */
  323. static probeSupport() {
  324. const testMimeTypes = [
  325. // MP4 types
  326. 'video/mp4; codecs="avc1.42E01E"',
  327. 'video/mp4; codecs="avc3.42E01E"',
  328. 'video/mp4; codecs="hev1.1.6.L93.90"',
  329. 'video/mp4; codecs="hvc1.1.6.L93.90"',
  330. 'video/mp4; codecs="hev1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  331. 'video/mp4; codecs="hvc1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  332. 'video/mp4; codecs="vp9"',
  333. 'video/mp4; codecs="vp09.00.10.08"',
  334. 'video/mp4; codecs="av01.0.01M.08"',
  335. 'video/mp4; codecs="dvh1.05.01"',
  336. 'video/mp4; codecs="dvh1.20.01"',
  337. 'audio/mp4; codecs="mp4a.40.2"',
  338. 'audio/mp4; codecs="ac-3"',
  339. 'audio/mp4; codecs="ec-3"',
  340. 'audio/mp4; codecs="ac-4.02.01.01"',
  341. 'audio/mp4; codecs="opus"',
  342. 'audio/mp4; codecs="flac"',
  343. 'audio/mp4; codecs="dtsc"', // DTS Digital Surround
  344. 'audio/mp4; codecs="dtse"', // DTS Express
  345. 'audio/mp4; codecs="dtsx"', // DTS:X
  346. // WebM types
  347. 'video/webm; codecs="vp8"',
  348. 'video/webm; codecs="vp9"',
  349. 'video/webm; codecs="vp09.00.10.08"',
  350. 'audio/webm; codecs="vorbis"',
  351. 'audio/webm; codecs="opus"',
  352. // MPEG2 TS types (video/ is also used for audio: https://bit.ly/TsMse)
  353. 'video/mp2t; codecs="avc1.42E01E"',
  354. 'video/mp2t; codecs="avc3.42E01E"',
  355. 'video/mp2t; codecs="hvc1.1.6.L93.90"',
  356. 'video/mp2t; codecs="mp4a.40.2"',
  357. 'video/mp2t; codecs="ac-3"',
  358. 'video/mp2t; codecs="ec-3"',
  359. // WebVTT types
  360. 'text/vtt',
  361. 'application/mp4; codecs="wvtt"',
  362. // TTML types
  363. 'application/ttml+xml',
  364. 'application/mp4; codecs="stpp"',
  365. // Containerless types
  366. ...shaka.util.MimeUtils.RAW_FORMATS,
  367. ];
  368. const support = {};
  369. for (const type of testMimeTypes) {
  370. if (shaka.text.TextEngine.isTypeSupported(type)) {
  371. support[type] = true;
  372. } else if (shaka.util.Platform.supportsMediaSource()) {
  373. support[type] = shaka.media.Capabilities.isTypeSupported(type) ||
  374. shaka.transmuxer.TransmuxerEngine.isSupported(type);
  375. } else {
  376. support[type] = shaka.util.Platform.supportsMediaType(type);
  377. }
  378. const basicType = type.split(';')[0];
  379. support[basicType] = support[basicType] || support[type];
  380. }
  381. return support;
  382. }
  /** @override */
  destroy() {
    // Destruction is funneled through the Destroyer (created in the
    // constructor), which invokes doDestroy_() to do the actual teardown.
    return this.destroyer_.destroy();
  }
  /**
   * Performs the actual teardown: drains/rejects pending operation queues,
   * destroys owned engines and transmuxers, detaches from the video element,
   * and nulls out references.  Invoked via the Destroyer from destroy().
   * @private
   */
  async doDestroy_() {
    const Functional = shaka.util.Functional;
    const cleanup = [];

    for (const contentType in this.queues_) {
      // Make a local copy of the queue and the first item.
      const q = this.queues_[contentType];
      const inProgress = q[0];

      // Drop everything else out of the original queue.
      this.queues_[contentType] = q.slice(0, 1);

      // We will wait for this item to complete/fail.
      if (inProgress) {
        cleanup.push(inProgress.p.catch(Functional.noop));
      }

      // The rest will be rejected silently if possible.
      for (const item of q.slice(1)) {
        item.p.reject(shaka.util.Destroyer.destroyedError());
      }
    }

    if (this.textEngine_) {
      cleanup.push(this.textEngine_.destroy());
    }

    await Promise.all(cleanup);

    for (const contentType in this.transmuxers_) {
      this.transmuxers_[contentType].destroy();
    }

    if (this.eventManager_) {
      this.eventManager_.release();
      this.eventManager_ = null;
    }

    if (this.video_ && this.secondarySource_) {
      this.video_.removeChild(this.secondarySource_);
    }
    if (this.video_ && this.source_) {
      // "unload" the video element.
      this.video_.removeChild(this.source_);
      this.video_.load();
      this.video_.disableRemotePlayback = false;
    }

    this.video_ = null;
    this.source_ = null;
    this.secondarySource_ = null;
    this.config_ = null;
    this.mediaSource_ = null;
    this.textEngine_ = null;
    this.textDisplayer_ = null;
    this.sourceBuffers_ = {};
    this.transmuxers_ = {};
    this.captionParser_ = null;

    if (goog.DEBUG) {
      // Sanity check: no operation should have been queued during teardown.
      for (const contentType in this.queues_) {
        goog.asserts.assert(
            this.queues_[contentType].length == 0,
            contentType + ' queue should be empty after destroy!');
      }
    }

    this.queues_ = {};

    // This object is owned by Player
    this.lcevcDec_ = null;

    this.tsParsers_ = {};
    this.playerInterface_ = null;
  }
  /**
   * @return {!Promise} Resolved when MediaSource is open and attached to the
   *   media element.  This process is actually initiated by the constructor.
   *   Note that init() may replace this promise if the media source has to be
   *   reopened.
   */
  open() {
    return this.mediaSourceOpen_;
  }
  /**
   * Initialize MediaSourceEngine.
   *
   * Note that it is not valid to call this multiple times, except to add or
   * reinitialize text streams.
   *
   * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
   *              shaka.extern.Stream>} streamsByType
   *   A map of content types to streams.  All streams must be supported
   *   according to MediaSourceEngine.isStreamSupported.
   * @param {boolean=} sequenceMode
   *   If true, the media segments are appended to the SourceBuffer in strict
   *   sequence.
   * @param {string=} manifestType
   *   Indicates the type of the manifest.
   * @param {boolean=} ignoreManifestTimestampsInSegmentsMode
   *   If true, don't adjust the timestamp offset to account for manifest
   *   segment durations being out of sync with segment durations.  In other
   *   words, assume that there are no gaps in the segments when appending
   *   to the SourceBuffer, even if the manifest and segment times disagree.
   *
   * @return {!Promise}
   */
  async init(streamsByType, sequenceMode=false,
      manifestType=shaka.media.ManifestParser.UNKNOWN,
      ignoreManifestTimestampsInSegmentsMode=false) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;

    await this.mediaSourceOpen_;
    // The media source may have ended or closed while we waited (e.g. after
    // an earlier teardown); recreate it and wait for it to open again.
    if (this.ended() || this.closed()) {
      shaka.log.alwaysError('Expected MediaSource to be open during init(); ' +
          'reopening the media source.');
      this.mediaSourceOpen_ = new shaka.util.PublicPromise();
      this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
      await this.mediaSourceOpen_;
    }

    this.sequenceMode_ = sequenceMode;
    this.manifestType_ = manifestType;
    this.ignoreManifestTimestampsInSegmentsMode_ =
        ignoreManifestTimestampsInSegmentsMode;

    // Timestamp-offset calculation only applies to HLS in segments mode when
    // manifest timestamps are being honored.
    this.attemptTimestampOffsetCalculation_ = !this.sequenceMode_ &&
        this.manifestType_ == shaka.media.ManifestParser.HLS &&
        !this.ignoreManifestTimestampsInSegmentsMode_;

    // Reset per-presentation timestamp state.
    this.tsParsers_ = {};
    this.firstVideoTimestamp_ = null;
    this.firstVideoReferenceStartTime_ = null;
    this.firstAudioTimestamp_ = null;
    this.firstAudioReferenceStartTime_ = null;
    this.audioCompensation_ = new shaka.util.PublicPromise();

    for (const contentType of streamsByType.keys()) {
      const stream = streamsByType.get(contentType);
      // Source buffers must be created one at a time; creation may also set
      // needSplitMuxedContent_ as a side effect.
      // eslint-disable-next-line no-await-in-loop
      await this.initSourceBuffer_(contentType, stream, stream.codecs);
      if (this.needSplitMuxedContent_) {
        // Muxed content is split into separate audio and video queues.
        this.queues_[ContentType.AUDIO] = [];
        this.queues_[ContentType.VIDEO] = [];
      } else {
        this.queues_[contentType] = [];
      }
    }
    const audio = streamsByType.get(ContentType.AUDIO);
    if (audio && audio.isAudioMuxedInVideo) {
      this.needSplitMuxedContent_ = true;
    }
  }
  /**
   * Initialize a specific SourceBuffer.
   *
   * For muxed content containing both audio and video codecs, this recurses
   * once per content type and sets needSplitMuxedContent_.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {shaka.extern.Stream} stream
   * @param {string} codecs
   * @return {!Promise}
   * @private
   */
  async initSourceBuffer_(contentType, stream, codecs) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;

    goog.asserts.assert(
        await shaka.media.MediaSourceEngine.isStreamSupported(
            stream, contentType),
        'Type negotiation should happen before MediaSourceEngine.init!');

    if (contentType == ContentType.AUDIO && codecs) {
      codecs = shaka.util.StreamUtils.getCorrectAudioCodecs(
          codecs, stream.mimeType);
    }

    let mimeType = shaka.util.MimeUtils.getFullType(
        stream.mimeType, codecs);
    if (contentType == ContentType.TEXT) {
      // Text content never uses a SourceBuffer; it goes to the TextEngine.
      this.reinitText(mimeType, this.sequenceMode_, stream.external);
    } else {
      let needTransmux = this.config_.forceTransmux;
      // Transmux when MSE cannot play the type natively, or for raw
      // (containerless) formats outside sequence mode.
      if (!shaka.media.Capabilities.isTypeSupported(mimeType) ||
          (!this.sequenceMode_ &&
          shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType))) {
        needTransmux = true;
      }
      const mimeTypeWithAllCodecs =
          shaka.util.MimeUtils.getFullTypeWithAllCodecs(
              stream.mimeType, codecs);
      if (needTransmux) {
        const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
            ContentType.AUDIO, (codecs || '').split(','));
        const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
            ContentType.VIDEO, (codecs || '').split(','));
        if (audioCodec && videoCodec) {
          // Both codecs present: this is muxed content.  Create one
          // SourceBuffer per type instead.
          this.needSplitMuxedContent_ = true;
          await this.initSourceBuffer_(ContentType.AUDIO, stream, audioCodec);
          await this.initSourceBuffer_(ContentType.VIDEO, stream, videoCodec);
          return;
        }
        const transmuxerPlugin = shaka.transmuxer.TransmuxerEngine
            .findTransmuxer(mimeTypeWithAllCodecs);
        if (transmuxerPlugin) {
          const transmuxer = transmuxerPlugin();
          this.transmuxers_[contentType] = transmuxer;
          // The SourceBuffer must be created with the transmuxer's OUTPUT
          // type, not the original input type.
          mimeType =
              transmuxer.convertCodecs(contentType, mimeTypeWithAllCodecs);
        }
      }
      const type = this.addExtraFeaturesToMimeType_(mimeType);

      this.destroyer_.ensureNotDestroyed();

      let sourceBuffer;
      try {
        sourceBuffer = this.mediaSource_.addSourceBuffer(type);
      } catch (exception) {
        // addSourceBuffer throws if the MediaSource is not 'open' or the type
        // is rejected; surface this as a critical media error.
        throw new shaka.util.Error(
            shaka.util.Error.Severity.CRITICAL,
            shaka.util.Error.Category.MEDIA,
            shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
            exception,
            'The mediaSource_ status was ' + this.mediaSource_.readyState +
            ' expected \'open\'',
            null);
      }

      if (this.sequenceMode_) {
        sourceBuffer.mode =
            shaka.media.MediaSourceEngine.SourceBufferMode_.SEQUENCE;
      }

      this.eventManager_.listen(
          sourceBuffer, 'error',
          () => this.onError_(contentType));
      this.eventManager_.listen(
          sourceBuffer, 'updateend',
          () => this.onUpdateEnd_(contentType));
      this.sourceBuffers_[contentType] = sourceBuffer;
      this.sourceBufferTypes_[contentType] = mimeType;

      this.expectedEncryption_[contentType] = !!stream.drmInfos.length;
    }
  }
  604. /**
  605. * Called by the Player to provide an updated configuration any time it
  606. * changes. Must be called at least once before init().
  607. *
  608. * @param {shaka.extern.MediaSourceConfiguration} config
  609. */
  610. configure(config) {
  611. this.config_ = config;
  612. if (this.textEngine_) {
  613. this.textEngine_.setModifyCueCallback(config.modifyCueCallback);
  614. }
  615. }
  616. /**
  617. * Indicate if the streaming is allowed by MediaSourceEngine.
  618. * If we using MediaSource we always returns true.
  619. *
  620. * @return {boolean}
  621. */
  622. isStreamingAllowed() {
  623. return this.streamingAllowed_ && !this.usingRemotePlayback_ &&
  624. !this.reloadingMediaSource_;
  625. }
  626. /**
  627. * Reinitialize the TextEngine for a new text type.
  628. * @param {string} mimeType
  629. * @param {boolean} sequenceMode
  630. * @param {boolean} external
  631. */
  632. reinitText(mimeType, sequenceMode, external) {
  633. if (!this.textEngine_) {
  634. this.textEngine_ = new shaka.text.TextEngine(this.textDisplayer_);
  635. if (this.textEngine_) {
  636. this.textEngine_.setModifyCueCallback(this.config_.modifyCueCallback);
  637. }
  638. }
  639. this.textEngine_.initParser(mimeType, sequenceMode,
  640. external || this.segmentRelativeVttTiming_, this.manifestType_);
  641. }
  642. /**
  643. * @return {boolean} True if the MediaSource is in an "ended" state, or if the
  644. * object has been destroyed.
  645. */
  646. ended() {
  647. if (this.reloadingMediaSource_) {
  648. return false;
  649. }
  650. return this.mediaSource_ ? this.mediaSource_.readyState == 'ended' : true;
  651. }
  652. /**
  653. * @return {boolean} True if the MediaSource is in an "closed" state, or if
  654. * the object has been destroyed.
  655. */
  656. closed() {
  657. if (this.reloadingMediaSource_) {
  658. return false;
  659. }
  660. return this.mediaSource_ ? this.mediaSource_.readyState == 'closed' : true;
  661. }
  662. /**
  663. * Gets the first timestamp in buffer for the given content type.
  664. *
  665. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  666. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  667. */
  668. bufferStart(contentType) {
  669. if (!Object.keys(this.sourceBuffers_).length) {
  670. return null;
  671. }
  672. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  673. if (contentType == ContentType.TEXT) {
  674. return this.textEngine_.bufferStart();
  675. }
  676. return shaka.media.TimeRangesUtils.bufferStart(
  677. this.getBuffered_(contentType));
  678. }
  679. /**
  680. * Gets the last timestamp in buffer for the given content type.
  681. *
  682. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  683. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  684. */
  685. bufferEnd(contentType) {
  686. if (!Object.keys(this.sourceBuffers_).length) {
  687. return null;
  688. }
  689. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  690. if (contentType == ContentType.TEXT) {
  691. return this.textEngine_.bufferEnd();
  692. }
  693. return shaka.media.TimeRangesUtils.bufferEnd(
  694. this.getBuffered_(contentType));
  695. }
  696. /**
  697. * Determines if the given time is inside the buffered range of the given
  698. * content type.
  699. *
  700. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  701. * @param {number} time Playhead time
  702. * @return {boolean}
  703. */
  704. isBuffered(contentType, time) {
  705. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  706. if (contentType == ContentType.TEXT) {
  707. return this.textEngine_.isBuffered(time);
  708. } else {
  709. const buffered = this.getBuffered_(contentType);
  710. return shaka.media.TimeRangesUtils.isBuffered(buffered, time);
  711. }
  712. }
  713. /**
  714. * Computes how far ahead of the given timestamp is buffered for the given
  715. * content type.
  716. *
  717. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  718. * @param {number} time
  719. * @return {number} The amount of time buffered ahead in seconds.
  720. */
  721. bufferedAheadOf(contentType, time) {
  722. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  723. if (contentType == ContentType.TEXT) {
  724. return this.textEngine_.bufferedAheadOf(time);
  725. } else {
  726. const buffered = this.getBuffered_(contentType);
  727. return shaka.media.TimeRangesUtils.bufferedAheadOf(buffered, time);
  728. }
  729. }
  730. /**
  731. * Returns info about what is currently buffered.
  732. * @return {shaka.extern.BufferedInfo}
  733. */
  734. getBufferedInfo() {
  735. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  736. const TimeRangesUtils = shaka.media.TimeRangesUtils;
  737. const info = {
  738. total: this.reloadingMediaSource_ ? [] :
  739. TimeRangesUtils.getBufferedInfo(this.video_.buffered),
  740. audio:
  741. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.AUDIO)),
  742. video:
  743. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.VIDEO)),
  744. text: [],
  745. };
  746. if (this.textEngine_) {
  747. const start = this.textEngine_.bufferStart();
  748. const end = this.textEngine_.bufferEnd();
  749. if (start != null && end != null) {
  750. info.text.push({start: start, end: end});
  751. }
  752. }
  753. return info;
  754. }
  755. /**
  756. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  757. * @return {TimeRanges} The buffered ranges for the given content type, or
  758. * null if the buffered ranges could not be obtained.
  759. * @private
  760. */
  761. getBuffered_(contentType) {
  762. if (this.reloadingMediaSource_ || this.usingRemotePlayback_) {
  763. return null;
  764. }
  765. try {
  766. return this.sourceBuffers_[contentType].buffered;
  767. } catch (exception) {
  768. if (contentType in this.sourceBuffers_) {
  769. // Note: previous MediaSource errors may cause access to |buffered| to
  770. // throw.
  771. shaka.log.error('failed to get buffered range for ' + contentType,
  772. exception);
  773. }
  774. return null;
  775. }
  776. }
  777. /**
  778. * Create a new closed caption parser. This will ONLY be replaced by tests as
  779. * a way to inject fake closed caption parser instances.
  780. *
  781. * @param {string} mimeType
  782. * @return {!shaka.media.IClosedCaptionParser}
  783. */
  784. getCaptionParser(mimeType) {
  785. return new shaka.media.ClosedCaptionParser(mimeType);
  786. }
  787. /**
  788. * This method is only public for testing.
  789. *
  790. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  791. * @param {!BufferSource} data
  792. * @param {!shaka.media.SegmentReference} reference The segment reference
  793. * we are appending
  794. * @param {shaka.extern.Stream} stream
  795. * @param {!string} mimeType
  796. * @return {{timestamp: ?number, metadata: !Array<shaka.extern.ID3Metadata>}}
  797. */
  getTimestampAndDispatchMetadata(contentType, data, reference, stream,
      mimeType) {
    let timestamp = null;
    let metadata = [];
    const uint8ArrayData = shaka.util.BufferUtils.toUint8(data);
    if (shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType)) {
      // Containerless (raw) formats: the only timing information available is
      // carried in ID3 frames embedded in the segment itself.
      const frames = shaka.util.Id3Utils.getID3Frames(uint8ArrayData);
      if (frames.length && reference) {
        // Apple-defined ID3 frame carrying the MPEG-2 transport stream
        // timestamp for this segment, in milliseconds.
        const metadataTimestamp = frames.find((frame) => {
          return frame.description ===
              'com.apple.streaming.transportStreamTimestamp';
        });
        if (metadataTimestamp) {
          // Convert milliseconds to seconds.
          timestamp = Math.round(metadataTimestamp.data) / 1000;
        }
        /** @private {shaka.extern.ID3Metadata} */
        const id3Metadata = {
          cueTime: reference.startTime,
          data: uint8ArrayData,
          frames: frames,
          dts: reference.startTime,
          pts: reference.startTime,
        };
        this.playerInterface_.onMetadata(
            [id3Metadata], /* offset= */ 0, reference.endTime);
      }
    } else if (mimeType.includes('/mp4') &&
        reference &&
        reference.initSegmentReference &&
        reference.initSegmentReference.timescale) {
      // MP4 container: extract the media decode time from the 'tfdt' box and
      // optionally dispatch 'emsg'/'prft' boxes along the way.
      const timescale = reference.initSegmentReference.timescale;
      if (!isNaN(timescale)) {
        const hasEmsg = ((stream.emsgSchemeIdUris != null &&
            stream.emsgSchemeIdUris.length > 0) ||
            this.config_.dispatchAllEmsgBoxes);
        const Mp4Parser = shaka.util.Mp4Parser;
        let startTime = 0;
        let parsedMedia = false;
        const parser = new Mp4Parser();
        if (hasEmsg) {
          parser.fullBox('emsg', (box) =>
            this.parseEMSG_(reference, stream.emsgSchemeIdUris, box));
        }
        parser.fullBox('prft', (box) => this.parsePrft_(timescale, box))
            .box('moof', Mp4Parser.children)
            .box('traf', Mp4Parser.children)
            .fullBox('tfdt', (box) => {
              // Only the first tfdt is needed for the segment start time.
              if (!parsedMedia) {
                goog.asserts.assert(
                    box.version == 0 || box.version == 1,
                    'TFDT version can only be 0 or 1');
                const parsed = shaka.util.Mp4BoxParsers.parseTFDTInaccurate(
                    box.reader, box.version);
                startTime = parsed.baseMediaDecodeTime / timescale;
                parsedMedia = true;
                // Keep parsing only if we still need to find emsg boxes.
                if (!hasEmsg) {
                  box.parser.stop();
                }
              }
            }).parse(data, /* partialOkay= */ true);
        if (parsedMedia && reference.timestampOffset == 0) {
          timestamp = startTime;
        }
      }
    } else if (!mimeType.includes('/mp4') && !mimeType.includes('/webm') &&
        shaka.util.TsParser.probe(uint8ArrayData)) {
      // MPEG-2 TS container: reuse a per-content-type parser instance so that
      // parsing state carries across segments.
      if (!this.tsParsers_[contentType]) {
        this.tsParsers_[contentType] = new shaka.util.TsParser();
      } else {
        this.tsParsers_[contentType].clearData();
      }
      const tsParser = this.tsParsers_[contentType].parse(uint8ArrayData);
      const startTime = tsParser.getStartTime(contentType);
      if (startTime != null) {
        timestamp = startTime;
      }
      metadata = tsParser.getMetadata();
    }
    return {timestamp, metadata};
  }
  878. /**
  879. * Parse the EMSG box from a MP4 container.
  880. *
  881. * @param {!shaka.media.SegmentReference} reference
  882. * @param {?Array<string>} emsgSchemeIdUris Array of emsg
  883. * scheme_id_uri for which emsg boxes should be parsed.
  884. * @param {!shaka.extern.ParsedBox} box
  885. * @private
  886. * https://dashif-documents.azurewebsites.net/Events/master/event.html#emsg-format
  887. * aligned(8) class DASHEventMessageBox
  888. * extends FullBox(‘emsg’, version, flags = 0){
  889. * if (version==0) {
  890. * string scheme_id_uri;
  891. * string value;
  892. * unsigned int(32) timescale;
  893. * unsigned int(32) presentation_time_delta;
  894. * unsigned int(32) event_duration;
  895. * unsigned int(32) id;
  896. * } else if (version==1) {
  897. * unsigned int(32) timescale;
  898. * unsigned int(64) presentation_time;
  899. * unsigned int(32) event_duration;
  900. * unsigned int(32) id;
  901. * string scheme_id_uri;
  902. * string value;
  903. * }
  904. * unsigned int(8) message_data[];
  905. */
  parseEMSG_(reference, emsgSchemeIdUris, box) {
    let timescale;
    let id;
    let eventDuration;
    let schemeId;
    let startTime;
    let presentationTimeDelta;
    let value;
    // Field order differs between box versions; the reads below must match
    // the layout documented above exactly.
    if (box.version === 0) {
      schemeId = box.reader.readTerminatedString();
      value = box.reader.readTerminatedString();
      timescale = box.reader.readUint32();
      presentationTimeDelta = box.reader.readUint32();
      eventDuration = box.reader.readUint32();
      id = box.reader.readUint32();
      // Version 0 carries a delta from the segment start, in timescale units.
      startTime = reference.startTime + (presentationTimeDelta / timescale);
    } else {
      timescale = box.reader.readUint32();
      const pts = box.reader.readUint64();
      // Version 1 carries an absolute presentation time in timescale units.
      startTime = (pts / timescale) + reference.timestampOffset;
      presentationTimeDelta = startTime - reference.startTime;
      eventDuration = box.reader.readUint32();
      id = box.reader.readUint32();
      schemeId = box.reader.readTerminatedString();
      value = box.reader.readTerminatedString();
    }
    // Everything remaining in the box is the raw message payload.
    const messageData = box.reader.readBytes(
        box.reader.getLength() - box.reader.getPosition());
    // See DASH sec. 5.10.3.3.1
    // If a DASH client detects an event message box with a scheme that is not
    // defined in MPD, the client is expected to ignore it.
    if ((emsgSchemeIdUris && emsgSchemeIdUris.includes(schemeId)) ||
        this.config_.dispatchAllEmsgBoxes) {
      // See DASH sec. 5.10.4.1
      // A special scheme in DASH used to signal manifest updates.
      if (schemeId == 'urn:mpeg:dash:event:2012') {
        this.playerInterface_.onManifestUpdate();
      } else {
        // All other schemes are dispatched as a general 'emsg' event.
        const endTime = startTime + (eventDuration / timescale);
        /** @type {shaka.extern.EmsgInfo} */
        const emsg = {
          startTime: startTime,
          endTime: endTime,
          schemeIdUri: schemeId,
          value: value,
          timescale: timescale,
          presentationTimeDelta: presentationTimeDelta,
          eventDuration: eventDuration,
          id: id,
          messageData: messageData,
        };
        // Dispatch an event to notify the application about the emsg box.
        const eventName = shaka.util.FakeEvent.EventName.Emsg;
        const data = (new Map()).set('detail', emsg);
        const event = new shaka.util.FakeEvent(eventName, data);
        // A user can call preventDefault() on a cancelable event.
        event.cancelable = true;
        this.playerInterface_.onEvent(event);
        if (event.defaultPrevented) {
          // If the caller uses preventDefault() on the 'emsg' event, don't
          // process any further, and don't generate an ID3 'metadata' event
          // for the same data.
          return;
        }
        // Additionally, ID3 events generate a 'metadata' event. This is a
        // pre-parsed version of the metadata blob already dispatched in the
        // 'emsg' event.
        if (schemeId == 'https://aomedia.org/emsg/ID3' ||
            schemeId == 'https://developer.apple.com/streaming/emsg-id3') {
          // See https://aomediacodec.github.io/id3-emsg/
          const frames = shaka.util.Id3Utils.getID3Frames(messageData);
          if (frames.length) {
            /** @private {shaka.extern.ID3Metadata} */
            const metadata = {
              cueTime: startTime,
              data: messageData,
              frames: frames,
              dts: startTime,
              pts: startTime,
            };
            this.playerInterface_.onMetadata(
                [metadata], /* offset= */ 0, endTime);
          }
        }
      }
    }
  }
  994. /**
  995. * Parse PRFT box.
  996. * @param {number} timescale
  997. * @param {!shaka.extern.ParsedBox} box
  998. * @private
  999. */
  1000. parsePrft_(timescale, box) {
  1001. goog.asserts.assert(
  1002. box.version == 0 || box.version == 1,
  1003. 'PRFT version can only be 0 or 1');
  1004. const parsed = shaka.util.Mp4BoxParsers.parsePRFTInaccurate(
  1005. box.reader, box.version);
  1006. const wallClockTime = this.convertNtp_(parsed.ntpTimestamp);
  1007. const programStartDate = new Date(wallClockTime -
  1008. (parsed.mediaTime / timescale) * 1000);
  1009. /** @type {shaka.extern.ProducerReferenceTime} */
  1010. const prftInfo = {
  1011. wallClockTime,
  1012. programStartDate,
  1013. };
  1014. const eventName = shaka.util.FakeEvent.EventName.Prft;
  1015. const data = (new Map()).set('detail', prftInfo);
  1016. const event = new shaka.util.FakeEvent(
  1017. eventName, data);
  1018. this.playerInterface_.onEvent(event);
  1019. }
  1020. /**
  1021. * Convert Ntp ntpTimeStamp to UTC Time
  1022. *
  1023. * @param {number} ntpTimeStamp
  1024. * @return {number} utcTime
  1025. * @private
  1026. */
  1027. convertNtp_(ntpTimeStamp) {
  1028. const start = new Date(Date.UTC(1900, 0, 1, 0, 0, 0));
  1029. return new Date(start.getTime() + ntpTimeStamp).getTime();
  1030. }
  1031. /**
  1032. * Enqueue an operation to append data to the SourceBuffer.
  1033. * Start and end times are needed for TextEngine, but not for MediaSource.
  1034. * Start and end times may be null for initialization segments; if present
  1035. * they are relative to the presentation timeline.
  1036. *
  1037. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1038. * @param {!BufferSource} data
  1039. * @param {?shaka.media.SegmentReference} reference The segment reference
  1040. * we are appending, or null for init segments
  1041. * @param {shaka.extern.Stream} stream
  1042. * @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
  1043. * captions
  1044. * @param {boolean=} seeked True if we just seeked
  1045. * @param {boolean=} adaptation True if we just automatically switched active
  1046. * variant(s).
  1047. * @param {boolean=} isChunkedData True if we add to the buffer from the
  1048. * partial read of the segment.
  1049. * @return {!Promise}
  1050. */
  1051. async appendBuffer(
  1052. contentType, data, reference, stream, hasClosedCaptions, seeked = false,
  1053. adaptation = false, isChunkedData = false, fromSplit = false) {
  1054. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1055. if (contentType == ContentType.TEXT) {
  1056. if (this.manifestType_ == shaka.media.ManifestParser.HLS) {
  1057. // This won't be known until the first video segment is appended.
  1058. const offset = await this.textSequenceModeOffset_;
  1059. this.textEngine_.setTimestampOffset(offset);
  1060. }
  1061. await this.textEngine_.appendBuffer(
  1062. data,
  1063. reference ? reference.startTime : null,
  1064. reference ? reference.endTime : null,
  1065. reference ? reference.getUris()[0] : null);
  1066. return;
  1067. }
  1068. if (!fromSplit && this.needSplitMuxedContent_) {
  1069. await this.appendBuffer(ContentType.AUDIO, data, reference, stream,
  1070. hasClosedCaptions, seeked, adaptation, isChunkedData,
  1071. /* fromSplit= */ true);
  1072. await this.appendBuffer(ContentType.VIDEO, data, reference, stream,
  1073. hasClosedCaptions, seeked, adaptation, isChunkedData,
  1074. /* fromSplit= */ true);
  1075. return;
  1076. }
  1077. if (!this.sourceBuffers_[contentType]) {
  1078. shaka.log.warning('Attempted to restore a non-existent source buffer');
  1079. return;
  1080. }
  1081. let timestampOffset = this.sourceBuffers_[contentType].timestampOffset;
  1082. let mimeType = this.sourceBufferTypes_[contentType];
  1083. if (this.transmuxers_[contentType]) {
  1084. mimeType = this.transmuxers_[contentType].getOriginalMimeType();
  1085. }
  1086. if (reference) {
  1087. const {timestamp, metadata} = this.getTimestampAndDispatchMetadata(
  1088. contentType, data, reference, stream, mimeType);
  1089. if (timestamp != null) {
  1090. if (this.firstVideoTimestamp_ == null &&
  1091. contentType == ContentType.VIDEO) {
  1092. this.firstVideoTimestamp_ = timestamp;
  1093. this.firstVideoReferenceStartTime_ = reference.startTime;
  1094. if (this.firstAudioTimestamp_ != null) {
  1095. let compensation = 0;
  1096. // Only apply compensation if video and audio segment startTime
  1097. // match, to avoid introducing sync issues.
  1098. if (this.firstVideoReferenceStartTime_ ==
  1099. this.firstAudioReferenceStartTime_) {
  1100. compensation =
  1101. this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
  1102. }
  1103. this.audioCompensation_.resolve(compensation);
  1104. }
  1105. }
  1106. if (this.firstAudioTimestamp_ == null &&
  1107. contentType == ContentType.AUDIO) {
  1108. this.firstAudioTimestamp_ = timestamp;
  1109. this.firstAudioReferenceStartTime_ = reference.startTime;
  1110. if (this.firstVideoTimestamp_ != null) {
  1111. let compensation = 0;
  1112. // Only apply compensation if video and audio segment startTime
  1113. // match, to avoid introducing sync issues.
  1114. if (this.firstVideoReferenceStartTime_ ==
  1115. this.firstAudioReferenceStartTime_) {
  1116. compensation =
  1117. this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
  1118. }
  1119. this.audioCompensation_.resolve(compensation);
  1120. }
  1121. }
  1122. let realTimestamp = timestamp;
  1123. const RAW_FORMATS = shaka.util.MimeUtils.RAW_FORMATS;
  1124. // For formats without containers and using segments mode, we need to
  1125. // adjust TimestampOffset relative to 0 because segments do not have
  1126. // any timestamp information.
  1127. if (!this.sequenceMode_ &&
  1128. RAW_FORMATS.includes(this.sourceBufferTypes_[contentType])) {
  1129. realTimestamp = 0;
  1130. }
  1131. const calculatedTimestampOffset = reference.startTime - realTimestamp;
  1132. const timestampOffsetDifference =
  1133. Math.abs(timestampOffset - calculatedTimestampOffset);
  1134. if ((timestampOffsetDifference >= 0.001 || seeked || adaptation) &&
  1135. (!isChunkedData || calculatedTimestampOffset > 0 ||
  1136. !timestampOffset)) {
  1137. timestampOffset = calculatedTimestampOffset;
  1138. if (this.attemptTimestampOffsetCalculation_) {
  1139. this.enqueueOperation_(
  1140. contentType,
  1141. () => this.abort_(contentType),
  1142. null);
  1143. this.enqueueOperation_(
  1144. contentType,
  1145. () => this.setTimestampOffset_(contentType, timestampOffset),
  1146. null);
  1147. }
  1148. }
  1149. // Timestamps can only be reliably extracted from video, not audio.
  1150. // Packed audio formats do not have internal timestamps at all.
  1151. // Prefer video for this when available.
  1152. const isBestSourceBufferForTimestamps =
  1153. contentType == ContentType.VIDEO ||
  1154. !(ContentType.VIDEO in this.sourceBuffers_);
  1155. if (isBestSourceBufferForTimestamps) {
  1156. this.textSequenceModeOffset_.resolve(timestampOffset);
  1157. }
  1158. }
  1159. if (metadata.length) {
  1160. this.playerInterface_.onMetadata(metadata, timestampOffset,
  1161. reference ? reference.endTime : null);
  1162. }
  1163. }
  1164. if (hasClosedCaptions && contentType == ContentType.VIDEO) {
  1165. if (!this.textEngine_) {
  1166. this.reinitText(shaka.util.MimeUtils.CEA608_CLOSED_CAPTION_MIMETYPE,
  1167. this.sequenceMode_, /* external= */ false);
  1168. }
  1169. if (!this.captionParser_) {
  1170. const basicType = mimeType.split(';', 1)[0];
  1171. this.captionParser_ = this.getCaptionParser(basicType);
  1172. }
  1173. // If it is the init segment for closed captions, initialize the closed
  1174. // caption parser.
  1175. if (!reference) {
  1176. this.captionParser_.init(data, adaptation);
  1177. } else {
  1178. const closedCaptions = this.captionParser_.parseFrom(data);
  1179. if (closedCaptions.length) {
  1180. this.textEngine_.storeAndAppendClosedCaptions(
  1181. closedCaptions,
  1182. reference.startTime,
  1183. reference.endTime,
  1184. timestampOffset);
  1185. }
  1186. }
  1187. }
  1188. if (this.transmuxers_[contentType]) {
  1189. data = await this.transmuxers_[contentType].transmux(
  1190. data, stream, reference, this.mediaSource_.duration, contentType);
  1191. }
  1192. data = this.workAroundBrokenPlatforms_(
  1193. data, reference ? reference.startTime : null, contentType,
  1194. reference ? reference.getUris()[0] : null);
  1195. if (reference && this.sequenceMode_ && contentType != ContentType.TEXT) {
  1196. // In sequence mode, for non-text streams, if we just cleared the buffer
  1197. // and are either performing an unbuffered seek or handling an automatic
  1198. // adaptation, we need to set a new timestampOffset on the sourceBuffer.
  1199. if (seeked || adaptation) {
  1200. let timestampOffset = reference.startTime;
  1201. // Audio and video may not be aligned, so we will compensate for audio
  1202. // if necessary.
  1203. if (this.manifestType_ == shaka.media.ManifestParser.HLS &&
  1204. !this.needSplitMuxedContent_ &&
  1205. contentType == ContentType.AUDIO &&
  1206. this.sourceBuffers_[ContentType.VIDEO]) {
  1207. const compensation = await this.audioCompensation_;
  1208. // Only apply compensation if the difference is greater than 100ms
  1209. if (Math.abs(compensation) > 0.1) {
  1210. timestampOffset -= compensation;
  1211. }
  1212. }
  1213. // The logic to call abort() before setting the timestampOffset is
  1214. // extended during unbuffered seeks or automatic adaptations; it is
  1215. // possible for the append state to be PARSING_MEDIA_SEGMENT from the
  1216. // previous SourceBuffer#appendBuffer() call.
  1217. this.enqueueOperation_(
  1218. contentType,
  1219. () => this.abort_(contentType),
  1220. null);
  1221. this.enqueueOperation_(
  1222. contentType,
  1223. () => this.setTimestampOffset_(contentType, timestampOffset),
  1224. null);
  1225. }
  1226. }
  1227. let bufferedBefore = null;
  1228. await this.enqueueOperation_(contentType, () => {
  1229. if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
  1230. bufferedBefore = this.getBuffered_(contentType);
  1231. }
  1232. this.append_(contentType, data, timestampOffset);
  1233. }, reference ? reference.getUris()[0] : null);
  1234. if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
  1235. const bufferedAfter = this.getBuffered_(contentType);
  1236. const newBuffered = shaka.media.TimeRangesUtils.computeAddedRange(
  1237. bufferedBefore, bufferedAfter);
  1238. if (newBuffered) {
  1239. const segmentDuration = reference.endTime - reference.startTime;
  1240. const timeAdded = newBuffered.end - newBuffered.start;
  1241. // Check end times instead of start times. We may be overwriting a
  1242. // buffer and only the end changes, and that would be fine.
  1243. // Also, exclude tiny segments. Sometimes alignment segments as small
  1244. // as 33ms are seen in Google DAI content. For such tiny segments,
  1245. // half a segment duration would be no issue.
  1246. const offset = Math.abs(newBuffered.end - reference.endTime);
  1247. if (segmentDuration > 0.100 && (offset > segmentDuration / 2 ||
  1248. Math.abs(segmentDuration - timeAdded) > 0.030)) {
  1249. shaka.log.error('Possible encoding problem detected!',
  1250. 'Unexpected buffered range for reference', reference,
  1251. 'from URIs', reference.getUris(),
  1252. 'should be', {start: reference.startTime, end: reference.endTime},
  1253. 'but got', newBuffered);
  1254. }
  1255. }
  1256. }
  1257. }
  1258. /**
  1259. * Set the selected closed captions Id and language.
  1260. *
  1261. * @param {string} id
  1262. */
  1263. setSelectedClosedCaptionId(id) {
  1264. const VIDEO = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  1265. const videoBufferEndTime = this.bufferEnd(VIDEO) || 0;
  1266. this.textEngine_.setSelectedClosedCaptionId(id, videoBufferEndTime);
  1267. }
  1268. /** Disable embedded closed captions. */
  1269. clearSelectedClosedCaptionId() {
  1270. if (this.textEngine_) {
  1271. this.textEngine_.setSelectedClosedCaptionId('', 0);
  1272. }
  1273. }
  1274. /**
  1275. * Enqueue an operation to remove data from the SourceBuffer.
  1276. *
  1277. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1278. * @param {number} startTime relative to the start of the presentation
  1279. * @param {number} endTime relative to the start of the presentation
  1280. * @return {!Promise}
  1281. */
  1282. async remove(contentType, startTime, endTime) {
  1283. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1284. if (contentType == ContentType.TEXT) {
  1285. await this.textEngine_.remove(startTime, endTime);
  1286. } else {
  1287. await this.enqueueOperation_(
  1288. contentType,
  1289. () => this.remove_(contentType, startTime, endTime),
  1290. null);
  1291. if (this.needSplitMuxedContent_) {
  1292. await this.enqueueOperation_(
  1293. ContentType.AUDIO,
  1294. () => this.remove_(ContentType.AUDIO, startTime, endTime),
  1295. null);
  1296. }
  1297. }
  1298. }
  1299. /**
  1300. * Enqueue an operation to clear the SourceBuffer.
  1301. *
  1302. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1303. * @return {!Promise}
  1304. */
  1305. async clear(contentType) {
  1306. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1307. if (contentType == ContentType.TEXT) {
  1308. if (!this.textEngine_) {
  1309. return;
  1310. }
  1311. await this.textEngine_.remove(0, Infinity);
  1312. } else {
  1313. // Note that not all platforms allow clearing to Infinity.
  1314. await this.enqueueOperation_(
  1315. contentType,
  1316. () => this.remove_(contentType, 0, this.mediaSource_.duration),
  1317. null);
  1318. if (this.needSplitMuxedContent_) {
  1319. await this.enqueueOperation_(
  1320. ContentType.AUDIO,
  1321. () => this.remove_(
  1322. ContentType.AUDIO, 0, this.mediaSource_.duration),
  1323. null);
  1324. }
  1325. }
  1326. }
  1327. /**
  1328. * Fully reset the state of the caption parser owned by MediaSourceEngine.
  1329. */
  1330. resetCaptionParser() {
  1331. if (this.captionParser_) {
  1332. this.captionParser_.reset();
  1333. }
  1334. }
  1335. /**
  1336. * Enqueue an operation to flush the SourceBuffer.
  1337. * This is a workaround for what we believe is a Chromecast bug.
  1338. *
  1339. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1340. * @return {!Promise}
  1341. */
  1342. async flush(contentType) {
  1343. // Flush the pipeline. Necessary on Chromecast, even though we have removed
  1344. // everything.
  1345. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1346. if (contentType == ContentType.TEXT) {
  1347. // Nothing to flush for text.
  1348. return;
  1349. }
  1350. await this.enqueueOperation_(
  1351. contentType,
  1352. () => this.flush_(contentType),
  1353. null);
  1354. if (this.needSplitMuxedContent_) {
  1355. await this.enqueueOperation_(
  1356. ContentType.AUDIO,
  1357. () => this.flush_(ContentType.AUDIO),
  1358. null);
  1359. }
  1360. }
  1361. /**
  1362. * Sets the timestamp offset and append window end for the given content type.
  1363. *
  1364. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1365. * @param {number} timestampOffset The timestamp offset. Segments which start
  1366. * at time t will be inserted at time t + timestampOffset instead. This
  1367. * value does not affect segments which have already been inserted.
  1368. * @param {number} appendWindowStart The timestamp to set the append window
  1369. * start to. For future appends, frames/samples with timestamps less than
  1370. * this value will be dropped.
  1371. * @param {number} appendWindowEnd The timestamp to set the append window end
  1372. * to. For future appends, frames/samples with timestamps greater than this
  1373. * value will be dropped.
  1374. * @param {boolean} ignoreTimestampOffset If true, the timestampOffset will
  1375. * not be applied in this step.
  1376. * @param {string} mimeType
  1377. * @param {string} codecs
  1378. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  1379. * shaka.extern.Stream>} streamsByType
  1380. * A map of content types to streams. All streams must be supported
  1381. * according to MediaSourceEngine.isStreamSupported.
  1382. *
  1383. * @return {!Promise}
  1384. */
  1385. async setStreamProperties(
  1386. contentType, timestampOffset, appendWindowStart, appendWindowEnd,
  1387. ignoreTimestampOffset, mimeType, codecs, streamsByType) {
  1388. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1389. if (contentType == ContentType.TEXT) {
  1390. if (!ignoreTimestampOffset) {
  1391. this.textEngine_.setTimestampOffset(timestampOffset);
  1392. }
  1393. this.textEngine_.setAppendWindow(appendWindowStart, appendWindowEnd);
  1394. return;
  1395. }
  1396. const operations = [];
  1397. const hasChangedCodecs = await this.codecSwitchIfNecessary_(
  1398. contentType, mimeType, codecs, streamsByType);
  1399. if (!hasChangedCodecs) {
  1400. // Queue an abort() to help MSE splice together overlapping segments.
  1401. // We set appendWindowEnd when we change periods in DASH content, and the
  1402. // period transition may result in overlap.
  1403. //
  1404. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1405. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1406. // timestamp offset. By calling abort(), we reset the state so we can
  1407. // set it.
  1408. operations.push(this.enqueueOperation_(
  1409. contentType,
  1410. () => this.abort_(contentType),
  1411. null));
  1412. if (this.needSplitMuxedContent_) {
  1413. operations.push(this.enqueueOperation_(
  1414. ContentType.AUDIO,
  1415. () => this.abort_(ContentType.AUDIO),
  1416. null));
  1417. }
  1418. }
  1419. if (!ignoreTimestampOffset) {
  1420. operations.push(this.enqueueOperation_(
  1421. contentType,
  1422. () => this.setTimestampOffset_(contentType, timestampOffset),
  1423. null));
  1424. if (this.needSplitMuxedContent_) {
  1425. operations.push(this.enqueueOperation_(
  1426. ContentType.AUDIO,
  1427. () => this.setTimestampOffset_(
  1428. ContentType.AUDIO, timestampOffset),
  1429. null));
  1430. }
  1431. }
  1432. if (appendWindowStart != 0 || appendWindowEnd != Infinity) {
  1433. operations.push(this.enqueueOperation_(
  1434. contentType,
  1435. () => this.setAppendWindow_(
  1436. contentType, appendWindowStart, appendWindowEnd),
  1437. null));
  1438. if (this.needSplitMuxedContent_) {
  1439. operations.push(this.enqueueOperation_(
  1440. ContentType.AUDIO,
  1441. () => this.setAppendWindow_(
  1442. ContentType.AUDIO, appendWindowStart, appendWindowEnd),
  1443. null));
  1444. }
  1445. }
  1446. if (operations.length) {
  1447. await Promise.all(operations);
  1448. }
  1449. }
  1450. /**
  1451. * Adjust timestamp offset to maintain AV sync across discontinuities.
  1452. *
  1453. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1454. * @param {number} timestampOffset
  1455. * @return {!Promise}
  1456. */
  1457. async resync(contentType, timestampOffset) {
  1458. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1459. if (contentType == ContentType.TEXT) {
  1460. // This operation is for audio and video only.
  1461. return;
  1462. }
  1463. // Reset the promise in case the timestamp offset changed during
  1464. // a period/discontinuity transition.
  1465. if (contentType == ContentType.VIDEO) {
  1466. this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  1467. }
  1468. if (!this.sequenceMode_) {
  1469. return;
  1470. }
  1471. // Avoid changing timestampOffset when the difference is less than 100 ms
  1472. // from the end of the current buffer.
  1473. const bufferEnd = this.bufferEnd(contentType);
  1474. if (bufferEnd && Math.abs(bufferEnd - timestampOffset) < 0.1) {
  1475. return;
  1476. }
  1477. // Queue an abort() to help MSE splice together overlapping segments.
  1478. // We set appendWindowEnd when we change periods in DASH content, and the
  1479. // period transition may result in overlap.
  1480. //
  1481. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1482. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1483. // timestamp offset. By calling abort(), we reset the state so we can
  1484. // set it.
  1485. this.enqueueOperation_(
  1486. contentType,
  1487. () => this.abort_(contentType),
  1488. null);
  1489. if (this.needSplitMuxedContent_) {
  1490. this.enqueueOperation_(
  1491. ContentType.AUDIO,
  1492. () => this.abort_(ContentType.AUDIO),
  1493. null);
  1494. }
  1495. await this.enqueueOperation_(
  1496. contentType,
  1497. () => this.setTimestampOffset_(contentType, timestampOffset),
  1498. null);
  1499. if (this.needSplitMuxedContent_) {
  1500. await this.enqueueOperation_(
  1501. ContentType.AUDIO,
  1502. () => this.setTimestampOffset_(ContentType.AUDIO, timestampOffset),
  1503. null);
  1504. }
  1505. }
  1506. /**
  1507. * @param {string=} reason Valid reasons are 'network' and 'decode'.
  1508. * @return {!Promise}
  1509. * @see http://w3c.github.io/media-source/#idl-def-EndOfStreamError
  1510. */
  1511. async endOfStream(reason) {
  1512. await this.enqueueBlockingOperation_(() => {
  1513. // If endOfStream() has already been called on the media source,
  1514. // don't call it again. Also do not call if readyState is
  1515. // 'closed' (not attached to video element) since it is not a
  1516. // valid operation.
  1517. if (this.ended() || this.closed()) {
  1518. return;
  1519. }
  1520. // Tizen won't let us pass undefined, but it will let us omit the
  1521. // argument.
  1522. if (reason) {
  1523. this.mediaSource_.endOfStream(reason);
  1524. } else {
  1525. this.mediaSource_.endOfStream();
  1526. }
  1527. });
  1528. }
  1529. /**
  1530. * @param {number} duration
  1531. * @return {!Promise}
  1532. */
  1533. async setDuration(duration) {
  1534. await this.enqueueBlockingOperation_(() => {
  1535. // Reducing the duration causes the MSE removal algorithm to run, which
  1536. // triggers an 'updateend' event to fire. To handle this scenario, we
  1537. // have to insert a dummy operation into the beginning of each queue,
  1538. // which the 'updateend' handler will remove.
  1539. if (duration < this.mediaSource_.duration) {
  1540. for (const contentType in this.sourceBuffers_) {
  1541. const dummyOperation = {
  1542. start: () => {},
  1543. p: new shaka.util.PublicPromise(),
  1544. uri: null,
  1545. };
  1546. this.queues_[contentType].unshift(dummyOperation);
  1547. }
  1548. }
  1549. this.mediaSource_.duration = duration;
  1550. this.lastDuration_ = duration;
  1551. });
  1552. }
  1553. /**
  1554. * Get the current MediaSource duration.
  1555. *
  1556. * @return {number}
  1557. */
  1558. getDuration() {
  1559. return this.mediaSource_.duration;
  1560. }
  1561. /**
  1562. * Updates the live seekable range.
  1563. *
  1564. * @param {number} startTime
  1565. * @param {number} endTime
  1566. */
  1567. async setLiveSeekableRange(startTime, endTime) {
  1568. goog.asserts.assert('setLiveSeekableRange' in this.mediaSource_,
  1569. 'Using setLiveSeekableRange on not supported platform');
  1570. if (this.ended() || this.closed()) {
  1571. return;
  1572. }
  1573. await this.enqueueBlockingOperation_(() => {
  1574. if (this.ended() || this.closed()) {
  1575. return;
  1576. }
  1577. this.mediaSource_.setLiveSeekableRange(startTime, endTime);
  1578. });
  1579. }
  1580. /**
  1581. * Clear the current live seekable range.
  1582. */
  1583. async clearLiveSeekableRange() {
  1584. goog.asserts.assert('clearLiveSeekableRange' in this.mediaSource_,
  1585. 'Using clearLiveSeekableRange on not supported platform');
  1586. if (this.ended() || this.closed()) {
  1587. return;
  1588. }
  1589. await this.enqueueBlockingOperation_(() => {
  1590. if (this.ended() || this.closed()) {
  1591. return;
  1592. }
  1593. this.mediaSource_.clearLiveSeekableRange();
  1594. });
  1595. }
  1596. /**
  1597. * Append data to the SourceBuffer.
  1598. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1599. * @param {BufferSource} data
  1600. * @param {number} timestampOffset
  1601. * @private
  1602. */
  1603. append_(contentType, data, timestampOffset) {
  1604. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1605. // Append only video data to the LCEVC Dec.
  1606. if (contentType == ContentType.VIDEO && this.lcevcDec_) {
  1607. // Append video buffers to the LCEVC Dec for parsing and storing
  1608. // of LCEVC data.
  1609. this.lcevcDec_.appendBuffer(data, timestampOffset);
  1610. }
  1611. // This will trigger an 'updateend' event.
  1612. this.sourceBuffers_[contentType].appendBuffer(data);
  1613. }
  1614. /**
  1615. * Remove data from the SourceBuffer.
  1616. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1617. * @param {number} startTime relative to the start of the presentation
  1618. * @param {number} endTime relative to the start of the presentation
  1619. * @private
  1620. */
  1621. remove_(contentType, startTime, endTime) {
  1622. if (endTime <= startTime) {
  1623. // Ignore removal of inverted or empty ranges.
  1624. // Fake 'updateend' event to resolve the operation.
  1625. this.onUpdateEnd_(contentType);
  1626. return;
  1627. }
  1628. // This will trigger an 'updateend' event.
  1629. this.sourceBuffers_[contentType].remove(startTime, endTime);
  1630. }
  1631. /**
  1632. * Call abort() on the SourceBuffer.
  1633. * This resets MSE's last_decode_timestamp on all track buffers, which should
  1634. * trigger the splicing logic for overlapping segments.
  1635. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1636. * @private
  1637. */
  1638. abort_(contentType) {
  1639. // Save the append window, which is reset on abort().
  1640. const appendWindowStart =
  1641. this.sourceBuffers_[contentType].appendWindowStart;
  1642. const appendWindowEnd = this.sourceBuffers_[contentType].appendWindowEnd;
  1643. // This will not trigger an 'updateend' event, since nothing is happening.
  1644. // This is only to reset MSE internals, not to abort an actual operation.
  1645. this.sourceBuffers_[contentType].abort();
  1646. // Restore the append window.
  1647. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  1648. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  1649. // Fake an 'updateend' event to resolve the operation.
  1650. this.onUpdateEnd_(contentType);
  1651. }
  1652. /**
  1653. * Nudge the playhead to force the media pipeline to be flushed.
  1654. * This seems to be necessary on Chromecast to get new content to replace old
  1655. * content.
  1656. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1657. * @private
  1658. */
  1659. flush_(contentType) {
  1660. // Never use flush_ if there's data. It causes a hiccup in playback.
  1661. goog.asserts.assert(
  1662. this.video_.buffered.length == 0, 'MediaSourceEngine.flush_ should ' +
  1663. 'only be used after clearing all data!');
  1664. // Seeking forces the pipeline to be flushed.
  1665. this.video_.currentTime -= 0.001;
  1666. // Fake an 'updateend' event to resolve the operation.
  1667. this.onUpdateEnd_(contentType);
  1668. }
  1669. /**
  1670. * Set the SourceBuffer's timestamp offset.
  1671. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1672. * @param {number} timestampOffset
  1673. * @private
  1674. */
  1675. setTimestampOffset_(contentType, timestampOffset) {
  1676. // Work around for
  1677. // https://github.com/shaka-project/shaka-player/issues/1281:
  1678. // TODO(https://bit.ly/2ttKiBU): follow up when this is fixed in Edge
  1679. if (timestampOffset < 0) {
  1680. // Try to prevent rounding errors in Edge from removing the first
  1681. // keyframe.
  1682. timestampOffset += 0.001;
  1683. }
  1684. this.sourceBuffers_[contentType].timestampOffset = timestampOffset;
  1685. // Fake an 'updateend' event to resolve the operation.
  1686. this.onUpdateEnd_(contentType);
  1687. }
  1688. /**
  1689. * Set the SourceBuffer's append window end.
  1690. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1691. * @param {number} appendWindowStart
  1692. * @param {number} appendWindowEnd
  1693. * @private
  1694. */
  1695. setAppendWindow_(contentType, appendWindowStart, appendWindowEnd) {
  1696. // You can't set start > end, so first set start to 0, then set the new
  1697. // end, then set the new start. That way, there are no intermediate
  1698. // states which are invalid.
  1699. this.sourceBuffers_[contentType].appendWindowStart = 0;
  1700. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  1701. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  1702. // Fake an 'updateend' event to resolve the operation.
  1703. this.onUpdateEnd_(contentType);
  1704. }
  1705. /**
  1706. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1707. * @private
  1708. */
  1709. onError_(contentType) {
  1710. const operation = this.queues_[contentType][0];
  1711. goog.asserts.assert(operation, 'Spurious error event!');
  1712. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  1713. 'SourceBuffer should not be updating on error!');
  1714. const code = this.video_.error ? this.video_.error.code : 0;
  1715. operation.p.reject(new shaka.util.Error(
  1716. shaka.util.Error.Severity.CRITICAL,
  1717. shaka.util.Error.Category.MEDIA,
  1718. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_FAILED,
  1719. code, operation.uri));
  1720. // Do not pop from queue. An 'updateend' event will fire next, and to
  1721. // avoid synchronizing these two event handlers, we will allow that one to
  1722. // pop from the queue as normal. Note that because the operation has
  1723. // already been rejected, the call to resolve() in the 'updateend' handler
  1724. // will have no effect.
  1725. }
  1726. /**
  1727. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1728. * @private
  1729. */
  1730. onUpdateEnd_(contentType) {
  1731. if (this.reloadingMediaSource_) {
  1732. return;
  1733. }
  1734. const operation = this.queues_[contentType][0];
  1735. goog.asserts.assert(operation, 'Spurious updateend event!');
  1736. if (!operation) {
  1737. return;
  1738. }
  1739. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  1740. 'SourceBuffer should not be updating on updateend!');
  1741. operation.p.resolve();
  1742. this.popFromQueue_(contentType);
  1743. }
  1744. /**
  1745. * Enqueue an operation and start it if appropriate.
  1746. *
  1747. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1748. * @param {function()} start
  1749. * @param {?string} uri
  1750. * @return {!Promise}
  1751. * @private
  1752. */
  1753. enqueueOperation_(contentType, start, uri) {
  1754. this.destroyer_.ensureNotDestroyed();
  1755. const operation = {
  1756. start: start,
  1757. p: new shaka.util.PublicPromise(),
  1758. uri,
  1759. };
  1760. this.queues_[contentType].push(operation);
  1761. if (this.queues_[contentType].length == 1) {
  1762. this.startOperation_(contentType);
  1763. }
  1764. return operation.p;
  1765. }
  1766. /**
  1767. * Enqueue an operation which must block all other operations on all
  1768. * SourceBuffers.
  1769. *
  1770. * @param {function():(Promise|undefined)} run
  1771. * @return {!Promise}
  1772. * @private
  1773. */
  1774. async enqueueBlockingOperation_(run) {
  1775. this.destroyer_.ensureNotDestroyed();
  1776. /** @type {!Array<!shaka.util.PublicPromise>} */
  1777. const allWaiters = [];
  1778. /** @type {!Array<!shaka.util.ManifestParserUtils.ContentType>} */
  1779. const contentTypes = Object.keys(this.sourceBuffers_);
  1780. // Enqueue a 'wait' operation onto each queue.
  1781. // This operation signals its readiness when it starts.
  1782. // When all wait operations are ready, the real operation takes place.
  1783. for (const contentType of contentTypes) {
  1784. const ready = new shaka.util.PublicPromise();
  1785. const operation = {
  1786. start: () => ready.resolve(),
  1787. p: ready,
  1788. uri: null,
  1789. };
  1790. this.queues_[contentType].push(operation);
  1791. allWaiters.push(ready);
  1792. if (this.queues_[contentType].length == 1) {
  1793. operation.start();
  1794. }
  1795. }
  1796. // Return a Promise to the real operation, which waits to begin until
  1797. // there are no other in-progress operations on any SourceBuffers.
  1798. try {
  1799. await Promise.all(allWaiters);
  1800. } catch (error) {
  1801. // One of the waiters failed, which means we've been destroyed.
  1802. goog.asserts.assert(
  1803. this.destroyer_.destroyed(), 'Should be destroyed by now');
  1804. // We haven't popped from the queue. Canceled waiters have been removed
  1805. // by destroy. What's left now should just be resolved waiters. In
  1806. // uncompiled mode, we will maintain good hygiene and make sure the
  1807. // assert at the end of destroy passes. In compiled mode, the queues
  1808. // are wiped in destroy.
  1809. if (goog.DEBUG) {
  1810. for (const contentType of contentTypes) {
  1811. if (this.queues_[contentType].length) {
  1812. goog.asserts.assert(
  1813. this.queues_[contentType].length == 1,
  1814. 'Should be at most one item in queue!');
  1815. goog.asserts.assert(
  1816. allWaiters.includes(this.queues_[contentType][0].p),
  1817. 'The item in queue should be one of our waiters!');
  1818. this.queues_[contentType].shift();
  1819. }
  1820. }
  1821. }
  1822. throw error;
  1823. }
  1824. if (goog.DEBUG) {
  1825. // If we did it correctly, nothing is updating.
  1826. for (const contentType of contentTypes) {
  1827. goog.asserts.assert(
  1828. this.sourceBuffers_[contentType].updating == false,
  1829. 'SourceBuffers should not be updating after a blocking op!');
  1830. }
  1831. }
  1832. // Run the real operation, which can be asynchronous.
  1833. try {
  1834. await run();
  1835. } catch (exception) {
  1836. throw new shaka.util.Error(
  1837. shaka.util.Error.Severity.CRITICAL,
  1838. shaka.util.Error.Category.MEDIA,
  1839. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1840. exception,
  1841. this.video_.error || 'No error in the media element',
  1842. null);
  1843. } finally {
  1844. // Unblock the queues.
  1845. for (const contentType of contentTypes) {
  1846. this.popFromQueue_(contentType);
  1847. }
  1848. }
  1849. }
  1850. /**
  1851. * Pop from the front of the queue and start a new operation.
  1852. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1853. * @private
  1854. */
  1855. popFromQueue_(contentType) {
  1856. goog.asserts.assert(this.queues_[contentType], 'Queue should exist');
  1857. // Remove the in-progress operation, which is now complete.
  1858. this.queues_[contentType].shift();
  1859. this.startOperation_(contentType);
  1860. }
  1861. /**
  1862. * Starts the next operation in the queue.
  1863. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1864. * @private
  1865. */
  1866. startOperation_(contentType) {
  1867. // Retrieve the next operation, if any, from the queue and start it.
  1868. const next = this.queues_[contentType][0];
  1869. if (next) {
  1870. try {
  1871. next.start();
  1872. } catch (exception) {
  1873. if (exception.name == 'QuotaExceededError') {
  1874. next.p.reject(new shaka.util.Error(
  1875. shaka.util.Error.Severity.CRITICAL,
  1876. shaka.util.Error.Category.MEDIA,
  1877. shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR,
  1878. contentType));
  1879. } else {
  1880. next.p.reject(new shaka.util.Error(
  1881. shaka.util.Error.Severity.CRITICAL,
  1882. shaka.util.Error.Category.MEDIA,
  1883. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1884. exception,
  1885. this.video_.error || 'No error in the media element',
  1886. next.uri));
  1887. }
  1888. this.popFromQueue_(contentType);
  1889. }
  1890. }
  1891. }
  1892. /**
  1893. * @return {!shaka.extern.TextDisplayer}
  1894. */
  1895. getTextDisplayer() {
  1896. goog.asserts.assert(
  1897. this.textDisplayer_,
  1898. 'TextDisplayer should only be null when this is destroyed');
  1899. return this.textDisplayer_;
  1900. }
  1901. /**
  1902. * @param {!shaka.extern.TextDisplayer} textDisplayer
  1903. */
  1904. setTextDisplayer(textDisplayer) {
  1905. this.textDisplayer_ = textDisplayer;
  1906. if (this.textEngine_) {
  1907. this.textEngine_.setDisplayer(textDisplayer);
  1908. }
  1909. }
  1910. /**
  1911. * @param {boolean} segmentRelativeVttTiming
  1912. */
  1913. setSegmentRelativeVttTiming(segmentRelativeVttTiming) {
  1914. this.segmentRelativeVttTiming_ = segmentRelativeVttTiming;
  1915. }
  1916. /**
  1917. * Apply platform-specific transformations to this segment to work around
  1918. * issues in the platform.
  1919. *
  1920. * @param {!BufferSource} segment
  1921. * @param {?number} startTime
  1922. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1923. * @param {?string} uri
  1924. * @return {!BufferSource}
  1925. * @private
  1926. */
  1927. workAroundBrokenPlatforms_(segment, startTime, contentType, uri) {
  1928. const Platform = shaka.util.Platform;
  1929. const isInitSegment = startTime == null;
  1930. const encryptionExpected = this.expectedEncryption_[contentType];
  1931. const keySystem = this.playerInterface_.getKeySystem();
  1932. // If:
  1933. // 1. the configuration tells to insert fake encryption,
  1934. // 2. and this is an init segment,
  1935. // 3. and encryption is expected,
  1936. // 4. and the platform requires encryption in all init segments,
  1937. // 5. and the content is MP4 (mimeType == "video/mp4" or "audio/mp4"),
  1938. // then insert fake encryption metadata for init segments that lack it.
  1939. // The MP4 requirement is because we can currently only do this
  1940. // transformation on MP4 containers.
  1941. // See: https://github.com/shaka-project/shaka-player/issues/2759
  1942. if (this.config_.insertFakeEncryptionInInit &&
  1943. isInitSegment &&
  1944. encryptionExpected &&
  1945. Platform.requiresEncryptionInfoInAllInitSegments(keySystem) &&
  1946. shaka.util.MimeUtils.getContainerType(
  1947. this.sourceBufferTypes_[contentType]) == 'mp4') {
  1948. shaka.log.debug('Forcing fake encryption information in init segment.');
  1949. segment = shaka.media.ContentWorkarounds.fakeEncryption(segment, uri);
  1950. }
  1951. if (isInitSegment &&
  1952. Platform.requiresEC3InitSegments() &&
  1953. shaka.util.MimeUtils.getContainerType(
  1954. this.sourceBufferTypes_[contentType]) == 'mp4') {
  1955. shaka.log.debug('Forcing fake EC-3 information in init segment.');
  1956. segment = shaka.media.ContentWorkarounds.fakeEC3(segment);
  1957. }
  1958. return segment;
  1959. }
  1960. /**
  1961. * Prepare the SourceBuffer to parse a potentially new type or codec.
  1962. *
  1963. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1964. * @param {string} mimeType
  1965. * @param {?shaka.extern.Transmuxer} transmuxer
  1966. * @private
  1967. */
  1968. change_(contentType, mimeType, transmuxer) {
  1969. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1970. if (contentType === ContentType.TEXT) {
  1971. shaka.log.debug(`Change not supported for ${contentType}`);
  1972. return;
  1973. }
  1974. shaka.log.debug(
  1975. `Change Type: ${this.sourceBufferTypes_[contentType]} -> ${mimeType}`);
  1976. if (shaka.media.Capabilities.isChangeTypeSupported()) {
  1977. if (this.transmuxers_[contentType]) {
  1978. this.transmuxers_[contentType].destroy();
  1979. delete this.transmuxers_[contentType];
  1980. }
  1981. if (transmuxer) {
  1982. this.transmuxers_[contentType] = transmuxer;
  1983. }
  1984. const type = this.addExtraFeaturesToMimeType_(mimeType);
  1985. this.sourceBuffers_[contentType].changeType(type);
  1986. this.sourceBufferTypes_[contentType] = mimeType;
  1987. } else {
  1988. shaka.log.debug('Change Type not supported');
  1989. }
  1990. // Fake an 'updateend' event to resolve the operation.
  1991. this.onUpdateEnd_(contentType);
  1992. }
  1993. /**
  1994. * Enqueue an operation to prepare the SourceBuffer to parse a potentially new
  1995. * type or codec.
  1996. *
  1997. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1998. * @param {string} mimeType
  1999. * @param {?shaka.extern.Transmuxer} transmuxer
  2000. * @return {!Promise}
  2001. */
  2002. changeType(contentType, mimeType, transmuxer) {
  2003. return this.enqueueOperation_(
  2004. contentType,
  2005. () => this.change_(contentType, mimeType, transmuxer),
  2006. null);
  2007. }
  2008. /**
  2009. * Resets the MediaSource and re-adds source buffers due to codec mismatch
  2010. *
  2011. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  2012. * shaka.extern.Stream>} streamsByType
  2013. * @private
  2014. */
  2015. async reset_(streamsByType) {
  2016. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2017. this.reloadingMediaSource_ = true;
  2018. this.needSplitMuxedContent_ = false;
  2019. const currentTime = this.video_.currentTime;
  2020. // When codec switching if the user is currently paused we don't want
  2021. // to trigger a play when switching codec.
  2022. // Playing can also end up in a paused state after a codec switch
  2023. // so we need to remember the current states.
  2024. const previousAutoPlayState = this.video_.autoplay;
  2025. const previousPausedState = this.video_.paused;
  2026. if (this.playbackHasBegun_) {
  2027. // Only set autoplay to false if the video playback has already begun.
  2028. // When a codec switch happens before playback has begun this can cause
  2029. // autoplay not to work as expected.
  2030. this.video_.autoplay = false;
  2031. }
  2032. try {
  2033. this.eventManager_.removeAll();
  2034. for (const contentType in this.transmuxers_) {
  2035. this.transmuxers_[contentType].destroy();
  2036. }
  2037. for (const contentType in this.sourceBuffers_) {
  2038. const sourceBuffer = this.sourceBuffers_[contentType];
  2039. try {
  2040. this.mediaSource_.removeSourceBuffer(sourceBuffer);
  2041. } catch (e) {
  2042. shaka.log.debug('Exception on removeSourceBuffer', e);
  2043. }
  2044. }
  2045. this.transmuxers_ = {};
  2046. this.sourceBuffers_ = {};
  2047. const previousDuration = this.mediaSource_.duration;
  2048. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  2049. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  2050. await this.mediaSourceOpen_;
  2051. if (!isNaN(previousDuration) && previousDuration) {
  2052. this.mediaSource_.duration = previousDuration;
  2053. } else if (!isNaN(this.lastDuration_) && this.lastDuration_) {
  2054. this.mediaSource_.duration = this.lastDuration_;
  2055. }
  2056. const sourceBufferAdded = new shaka.util.PublicPromise();
  2057. const sourceBuffers =
  2058. /** @type {EventTarget} */(this.mediaSource_.sourceBuffers);
  2059. const totalOfBuffers = streamsByType.size;
  2060. let numberOfSourceBufferAdded = 0;
  2061. const onSourceBufferAdded = () => {
  2062. numberOfSourceBufferAdded++;
  2063. if (numberOfSourceBufferAdded === totalOfBuffers) {
  2064. sourceBufferAdded.resolve();
  2065. this.eventManager_.unlisten(sourceBuffers, 'addsourcebuffer',
  2066. onSourceBufferAdded);
  2067. }
  2068. };
  2069. this.eventManager_.listen(sourceBuffers, 'addsourcebuffer',
  2070. onSourceBufferAdded);
  2071. for (const contentType of streamsByType.keys()) {
  2072. const stream = streamsByType.get(contentType);
  2073. // eslint-disable-next-line no-await-in-loop
  2074. await this.initSourceBuffer_(contentType, stream, stream.codecs);
  2075. }
  2076. const audio = streamsByType.get(ContentType.AUDIO);
  2077. if (audio && audio.isAudioMuxedInVideo) {
  2078. this.needSplitMuxedContent_ = true;
  2079. }
  2080. if (this.needSplitMuxedContent_ && !this.queues_[ContentType.AUDIO]) {
  2081. this.queues_[ContentType.AUDIO] = [];
  2082. }
  2083. // Fake a seek to catchup the playhead.
  2084. this.video_.currentTime = currentTime;
  2085. await sourceBufferAdded;
  2086. } finally {
  2087. this.reloadingMediaSource_ = false;
  2088. this.destroyer_.ensureNotDestroyed();
  2089. this.eventManager_.listenOnce(this.video_, 'canplaythrough', () => {
  2090. // Don't use ensureNotDestroyed() from this event listener, because
  2091. // that results in an uncaught exception. Instead, just check the
  2092. // flag.
  2093. if (this.destroyer_.destroyed()) {
  2094. return;
  2095. }
  2096. this.video_.autoplay = previousAutoPlayState;
  2097. if (!previousPausedState) {
  2098. this.video_.play();
  2099. }
  2100. });
  2101. }
  2102. }
  2103. /**
  2104. * Resets the Media Source
  2105. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  2106. * shaka.extern.Stream>} streamsByType
  2107. * @return {!Promise}
  2108. */
  2109. reset(streamsByType) {
  2110. return this.enqueueBlockingOperation_(
  2111. () => this.reset_(streamsByType));
  2112. }
  2113. /**
  2114. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2115. * @param {string} mimeType
  2116. * @param {string} codecs
  2117. * @return {{transmuxer: ?shaka.extern.Transmuxer,
  2118. * transmuxerMuxed: boolean, basicType: string, codec: string,
  2119. * mimeType: string}}
  2120. * @private
  2121. */
  2122. getRealInfo_(contentType, mimeType, codecs) {
  2123. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2124. const MimeUtils = shaka.util.MimeUtils;
  2125. /** @type {?shaka.extern.Transmuxer} */
  2126. let transmuxer;
  2127. let transmuxerMuxed = false;
  2128. const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  2129. ContentType.AUDIO, (codecs || '').split(','));
  2130. const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  2131. ContentType.VIDEO, (codecs || '').split(','));
  2132. let codec = videoCodec;
  2133. if (contentType == ContentType.AUDIO) {
  2134. codec = audioCodec;
  2135. }
  2136. if (!codec) {
  2137. codec = codecs;
  2138. }
  2139. let newMimeType = shaka.util.MimeUtils.getFullType(mimeType, codec);
  2140. let needTransmux = this.config_.forceTransmux;
  2141. if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
  2142. (!this.sequenceMode_ &&
  2143. shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
  2144. needTransmux = true;
  2145. }
  2146. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  2147. if (needTransmux) {
  2148. const newMimeTypeWithAllCodecs =
  2149. shaka.util.MimeUtils.getFullTypeWithAllCodecs(mimeType, codec);
  2150. const transmuxerPlugin =
  2151. TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
  2152. if (transmuxerPlugin) {
  2153. transmuxer = transmuxerPlugin();
  2154. if (audioCodec && videoCodec) {
  2155. transmuxerMuxed = true;
  2156. }
  2157. newMimeType =
  2158. transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
  2159. }
  2160. }
  2161. const newCodec = MimeUtils.getNormalizedCodec(
  2162. MimeUtils.getCodecs(newMimeType));
  2163. const newBasicType = MimeUtils.getBasicType(newMimeType);
  2164. return {
  2165. transmuxer,
  2166. transmuxerMuxed,
  2167. basicType: newBasicType,
  2168. codec: newCodec,
  2169. mimeType: newMimeType,
  2170. };
  2171. }
  2172. /**
  2173. * Codec switch if necessary, this will not resolve until the codec
  2174. * switch is over.
  2175. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2176. * @param {string} mimeType
  2177. * @param {string} codecs
  2178. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  2179. * shaka.extern.Stream>} streamsByType
  2180. * @return {!Promise<boolean>} true if there was a codec switch,
  2181. * false otherwise.
  2182. * @private
  2183. */
  2184. async codecSwitchIfNecessary_(contentType, mimeType, codecs, streamsByType) {
  2185. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2186. if (contentType == ContentType.TEXT) {
  2187. return false;
  2188. }
  2189. const MimeUtils = shaka.util.MimeUtils;
  2190. const currentCodec = MimeUtils.getNormalizedCodec(
  2191. MimeUtils.getCodecs(this.sourceBufferTypes_[contentType]));
  2192. const currentBasicType = MimeUtils.getBasicType(
  2193. this.sourceBufferTypes_[contentType]);
  2194. const realInfo = this.getRealInfo_(contentType, mimeType, codecs);
  2195. const transmuxer = realInfo.transmuxer;
  2196. const transmuxerMuxed = realInfo.transmuxerMuxed;
  2197. const newBasicType = realInfo.basicType;
  2198. const newCodec = realInfo.codec;
  2199. const newMimeType = realInfo.mimeType;
  2200. let muxedContentCheck = true;
  2201. if (transmuxerMuxed) {
  2202. const muxedRealInfo =
  2203. this.getRealInfo_(ContentType.AUDIO, mimeType, codecs);
  2204. const muxedCurrentCodec = MimeUtils.getNormalizedCodec(
  2205. MimeUtils.getCodecs(this.sourceBufferTypes_[ContentType.AUDIO]));
  2206. const muxedCurrentBasicType = MimeUtils.getBasicType(
  2207. this.sourceBufferTypes_[ContentType.AUDIO]);
  2208. muxedContentCheck = muxedCurrentCodec == muxedRealInfo.codec &&
  2209. muxedCurrentBasicType == muxedRealInfo.basicType;
  2210. if (muxedRealInfo.transmuxer) {
  2211. muxedRealInfo.transmuxer.destroy();
  2212. }
  2213. }
  2214. // Current/new codecs base and basic type match then no need to switch
  2215. if (currentCodec === newCodec && currentBasicType === newBasicType &&
  2216. muxedContentCheck) {
  2217. if (this.transmuxers_[contentType] && !transmuxer) {
  2218. this.transmuxers_[contentType].destroy();
  2219. delete this.transmuxers_[contentType];
  2220. } else if (!this.transmuxers_[contentType] && transmuxer) {
  2221. this.transmuxers_[contentType] = transmuxer;
  2222. } else if (transmuxer) {
  2223. // Compare if the transmuxer is different
  2224. if (this.transmuxers_[contentType] &&
  2225. this.transmuxers_[contentType].transmux != transmuxer.transmux) {
  2226. this.transmuxers_[contentType].destroy();
  2227. delete this.transmuxers_[contentType];
  2228. this.transmuxers_[contentType] = transmuxer;
  2229. } else {
  2230. transmuxer.destroy();
  2231. }
  2232. }
  2233. return false;
  2234. }
  2235. let allowChangeType = true;
  2236. if (this.needSplitMuxedContent_ || (transmuxerMuxed &&
  2237. transmuxer && !this.transmuxers_[contentType])) {
  2238. allowChangeType = false;
  2239. }
  2240. if (allowChangeType && this.config_.codecSwitchingStrategy ===
  2241. shaka.config.CodecSwitchingStrategy.SMOOTH &&
  2242. shaka.media.Capabilities.isChangeTypeSupported()) {
  2243. await this.changeType(contentType, newMimeType, transmuxer);
  2244. } else {
  2245. if (transmuxer) {
  2246. transmuxer.destroy();
  2247. }
  2248. await this.reset(streamsByType);
  2249. }
  2250. return true;
  2251. }
  2252. /**
  2253. * Returns true if it's necessary codec switch to load the new stream.
  2254. *
  2255. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2256. * @param {string} refMimeType
  2257. * @param {string} refCodecs
  2258. * @return {boolean}
  2259. * @private
  2260. */
  2261. isCodecSwitchNecessary_(contentType, refMimeType, refCodecs) {
  2262. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  2263. return false;
  2264. }
  2265. const MimeUtils = shaka.util.MimeUtils;
  2266. const currentCodec = MimeUtils.getNormalizedCodec(
  2267. MimeUtils.getCodecs(this.sourceBufferTypes_[contentType]));
  2268. const currentBasicType = MimeUtils.getBasicType(
  2269. this.sourceBufferTypes_[contentType]);
  2270. let newMimeType = shaka.util.MimeUtils.getFullType(refMimeType, refCodecs);
  2271. let needTransmux = this.config_.forceTransmux;
  2272. if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
  2273. (!this.sequenceMode_ &&
  2274. shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
  2275. needTransmux = true;
  2276. }
  2277. const newMimeTypeWithAllCodecs =
  2278. shaka.util.MimeUtils.getFullTypeWithAllCodecs(
  2279. refMimeType, refCodecs);
  2280. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  2281. if (needTransmux) {
  2282. const transmuxerPlugin =
  2283. TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
  2284. if (transmuxerPlugin) {
  2285. const transmuxer = transmuxerPlugin();
  2286. newMimeType =
  2287. transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
  2288. transmuxer.destroy();
  2289. }
  2290. }
  2291. const newCodec = MimeUtils.getNormalizedCodec(
  2292. MimeUtils.getCodecs(newMimeType));
  2293. const newBasicType = MimeUtils.getBasicType(newMimeType);
  2294. return currentCodec !== newCodec || currentBasicType !== newBasicType;
  2295. }
  2296. /**
  2297. * Returns true if it's necessary reset the media source to load the
  2298. * new stream.
  2299. *
  2300. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2301. * @param {string} mimeType
  2302. * @param {string} codecs
  2303. * @return {boolean}
  2304. */
  2305. isResetMediaSourceNecessary(contentType, mimeType, codecs) {
  2306. if (!this.isCodecSwitchNecessary_(contentType, mimeType, codecs)) {
  2307. return false;
  2308. }
  2309. return this.config_.codecSwitchingStrategy !==
  2310. shaka.config.CodecSwitchingStrategy.SMOOTH ||
  2311. !shaka.media.Capabilities.isChangeTypeSupported() ||
  2312. this.needSplitMuxedContent_;
  2313. }
  2314. /**
  2315. * Update LCEVC Decoder object when ready for LCEVC Decode.
  2316. * @param {?shaka.lcevc.Dec} lcevcDec
  2317. */
  2318. updateLcevcDec(lcevcDec) {
  2319. this.lcevcDec_ = lcevcDec;
  2320. }
  2321. /**
  2322. * @param {string} mimeType
  2323. * @return {string}
  2324. * @private
  2325. */
  2326. addExtraFeaturesToMimeType_(mimeType) {
  2327. const extraFeatures = this.config_.addExtraFeaturesToSourceBuffer(mimeType);
  2328. const extendedType = mimeType + extraFeatures;
  2329. shaka.log.debug('Using full mime type', extendedType);
  2330. return extendedType;
  2331. }
  2332. };
  2333. /**
  2334. * Internal reference to window.URL.createObjectURL function to avoid
  2335. * compatibility issues with other libraries and frameworks such as React
  2336. * Native. For use in unit tests only, not meant for external use.
  2337. *
  2338. * @type {function(?):string}
  2339. */
  2340. shaka.media.MediaSourceEngine.createObjectURL = window.URL.createObjectURL;
  2341. /**
  2342. * @typedef {{
  2343. * start: function(),
  2344. * p: !shaka.util.PublicPromise,
  2345. * uri: ?string
  2346. * }}
  2347. *
  2348. * @summary An operation in queue.
  2349. * @property {function()} start
  2350. * The function which starts the operation.
  2351. * @property {!shaka.util.PublicPromise} p
  2352. * The PublicPromise which is associated with this operation.
  2353. * @property {?string} uri
  2354. * A segment URI (if any) associated with this operation.
  2355. */
  2356. shaka.media.MediaSourceEngine.Operation;
  2357. /**
  2358. * @enum {string}
  2359. * @private
  2360. */
  2361. shaka.media.MediaSourceEngine.SourceBufferMode_ = {
  2362. SEQUENCE: 'sequence',
  2363. SEGMENTS: 'segments',
  2364. };
  2365. /**
  2366. * @typedef {{
  2367. * getKeySystem: function():?string,
  2368. * onMetadata: function(!Array<shaka.extern.ID3Metadata>, number, ?number),
  2369. * onEvent: function(!Event),
  2370. * onManifestUpdate: function()
  2371. * }}
  2372. *
  2373. * @summary Player interface
  2374. * @property {function():?string} getKeySystem
  2375. * Gets currently used key system or null if not used.
  2376. * @property {function(
  2377. * !Array<shaka.extern.ID3Metadata>, number, ?number)} onMetadata
  2378. * Callback to use when metadata arrives.
  2379. * @property {function(!Event)} onEvent
  2380. * Called when an event occurs that should be sent to the app.
  2381. * @property {function()} onManifestUpdate
  2382. * Called when an embedded 'emsg' box should trigger a manifest update.
  2383. */
  2384. shaka.media.MediaSourceEngine.PlayerInterface;