Source: lib/media/media_source_engine.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.media.MediaSourceEngine');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.log');
  9. goog.require('shaka.config.CodecSwitchingStrategy');
  10. goog.require('shaka.media.Capabilities');
  11. goog.require('shaka.media.ContentWorkarounds');
  12. goog.require('shaka.media.ClosedCaptionParser');
  13. goog.require('shaka.media.IClosedCaptionParser');
  14. goog.require('shaka.media.ManifestParser');
  15. goog.require('shaka.media.SegmentReference');
  16. goog.require('shaka.media.TimeRangesUtils');
  17. goog.require('shaka.text.TextEngine');
  18. goog.require('shaka.transmuxer.TransmuxerEngine');
  19. goog.require('shaka.util.BufferUtils');
  20. goog.require('shaka.util.Destroyer');
  21. goog.require('shaka.util.Error');
  22. goog.require('shaka.util.EventManager');
  23. goog.require('shaka.util.Functional');
  24. goog.require('shaka.util.IDestroyable');
  25. goog.require('shaka.util.Id3Utils');
  26. goog.require('shaka.util.ManifestParserUtils');
  27. goog.require('shaka.util.MimeUtils');
  28. goog.require('shaka.util.Mp4BoxParsers');
  29. goog.require('shaka.util.Mp4Parser');
  30. goog.require('shaka.util.Platform');
  31. goog.require('shaka.util.PublicPromise');
  32. goog.require('shaka.util.StreamUtils');
  33. goog.require('shaka.util.TsParser');
  34. goog.require('shaka.lcevc.Dec');
  35. /**
  36. * @summary
  37. * MediaSourceEngine wraps all operations on MediaSource and SourceBuffers.
  38. * All asynchronous operations return a Promise, and all operations are
  39. * internally synchronized and serialized as needed. Operations that can
  40. * be done in parallel will be done in parallel.
  41. *
  42. * @implements {shaka.util.IDestroyable}
  43. */
  44. shaka.media.MediaSourceEngine = class {
  45. /**
  46. * @param {HTMLMediaElement} video The video element, whose source is tied to
  47. * MediaSource during the lifetime of the MediaSourceEngine.
  48. * @param {!shaka.extern.TextDisplayer} textDisplayer
  49. * The text displayer that will be used with the text engine.
  50. * MediaSourceEngine takes ownership of the displayer. When
  51. * MediaSourceEngine is destroyed, it will destroy the displayer.
  52. * @param {!shaka.media.MediaSourceEngine.PlayerInterface} playerInterface
  53. * Interface for common player methods.
  54. * @param {?shaka.lcevc.Dec} [lcevcDec] Optional - LCEVC Decoder Object
  55. */
  56. constructor(video, textDisplayer, playerInterface, lcevcDec) {
  57. /** @private {HTMLMediaElement} */
  58. this.video_ = video;
  59. /** @private {?shaka.media.MediaSourceEngine.PlayerInterface} */
  60. this.playerInterface_ = playerInterface;
  61. /** @private {?shaka.extern.MediaSourceConfiguration} */
  62. this.config_ = null;
  63. /** @private {shaka.extern.TextDisplayer} */
  64. this.textDisplayer_ = textDisplayer;
  65. /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
  66. SourceBuffer>} */
  67. this.sourceBuffers_ = {};
  68. /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
  69. string>} */
  70. this.sourceBufferTypes_ = {};
  71. /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
  72. boolean>} */
  73. this.expectedEncryption_ = {};
  74. /** @private {shaka.text.TextEngine} */
  75. this.textEngine_ = null;
  76. /** @private {boolean} */
  77. this.segmentRelativeVttTiming_ = false;
  78. /** @private {?shaka.lcevc.Dec} */
  79. this.lcevcDec_ = lcevcDec || null;
  80. /**
  81. * @private {!Object.<string,
  82. * !Array.<shaka.media.MediaSourceEngine.Operation>>}
  83. */
  84. this.queues_ = {};
  85. /** @private {shaka.util.EventManager} */
  86. this.eventManager_ = new shaka.util.EventManager();
  87. /** @private {!Object.<string, !shaka.extern.Transmuxer>} */
  88. this.transmuxers_ = {};
  89. /** @private {?shaka.media.IClosedCaptionParser} */
  90. this.captionParser_ = null;
  91. /** @private {!shaka.util.PublicPromise} */
  92. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  93. /** @private {string} */
  94. this.url_ = '';
  95. /** @private {boolean} */
  96. this.playbackHasBegun_ = false;
  97. /** @private {MediaSource} */
  98. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  99. /** @private {boolean} */
  100. this.reloadingMediaSource_ = false;
  101. /** @type {!shaka.util.Destroyer} */
  102. this.destroyer_ = new shaka.util.Destroyer(() => this.doDestroy_());
  103. /** @private {boolean} */
  104. this.sequenceMode_ = false;
  105. /** @private {string} */
  106. this.manifestType_ = shaka.media.ManifestParser.UNKNOWN;
  107. /** @private {boolean} */
  108. this.ignoreManifestTimestampsInSegmentsMode_ = false;
  109. /** @private {boolean} */
  110. this.attemptTimestampOffsetCalculation_ = false;
  111. /** @private {!shaka.util.PublicPromise.<number>} */
  112. this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  113. /** @private {boolean} */
  114. this.needSplitMuxedContent_ = false;
  115. /** @private {boolean} */
  116. this.streamingAllowed_ = true;
  117. /** @private {?number} */
  118. this.lastDuration_ = null;
  119. /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
  120. !shaka.util.TsParser>} */
  121. this.tsParsers_ = {};
  122. /** @private {?number} */
  123. this.firstVideoTimestamp_ = null;
  124. /** @private {?number} */
  125. this.firstVideoReferenceStartTime_ = null;
  126. /** @private {?number} */
  127. this.firstAudioTimestamp_ = null;
  128. /** @private {?number} */
  129. this.firstAudioReferenceStartTime_ = null;
  130. /** @private {!shaka.util.PublicPromise.<number>} */
  131. this.audioCompensation_ = new shaka.util.PublicPromise();
  132. }
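// Usage sketch: constructing and preparing an engine. The videoElement,
// textDisplayer, playerInterface, and mediaSourceConfig values below are
// hypothetical placeholders supplied by the embedding player; mse is reused
// in the later sketches.
//
//   const mse = new shaka.media.MediaSourceEngine(
//       videoElement, textDisplayer, playerInterface, /* lcevcDec= */ null);
//   mse.configure(mediaSourceConfig);  // Must happen before init().
//   await mse.open();  // Resolves once the MediaSource is open and attached.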
  133. /**
  134. * Create a MediaSource object, attach it to the video element, and return it.
  135. * Resolves the given promise when the MediaSource is ready.
  136. *
  137. * Replaced by unit tests.
  138. *
  139. * @param {!shaka.util.PublicPromise} p
  140. * @return {!MediaSource}
  141. */
  142. createMediaSource(p) {
  143. this.streamingAllowed_ = true;
  144. /** @type {!MediaSource} */
  145. let mediaSource;
  146. if (window.ManagedMediaSource) {
  147. this.video_.disableRemotePlayback = true;
  148. mediaSource = new ManagedMediaSource();
  149. this.eventManager_.listen(
  150. mediaSource, 'startstreaming', () => {
  151. shaka.log.info('MMS startstreaming');
  152. this.streamingAllowed_ = true;
  153. });
  154. this.eventManager_.listen(
  155. mediaSource, 'endstreaming', () => {
  156. shaka.log.info('MMS endstreaming');
  157. this.streamingAllowed_ = false;
  158. });
  159. } else {
  160. mediaSource = new MediaSource();
  161. }
  162. // Set up MediaSource on the video element.
  163. this.eventManager_.listenOnce(
  164. mediaSource, 'sourceopen', () => this.onSourceOpen_(p));
  165. // Correctly set when playback has begun.
  166. this.eventManager_.listenOnce(this.video_, 'playing', () => {
  167. this.playbackHasBegun_ = true;
  168. });
  169. // Store the object URL for releasing it later.
  170. this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource);
  171. this.video_.src = this.url_;
  172. return mediaSource;
  173. }
  174. /**
  175. * @param {shaka.util.PublicPromise} p
  176. * @private
  177. */
  178. onSourceOpen_(p) {
  179. goog.asserts.assert(this.url_, 'Must have object URL');
  180. // Release the object URL that was previously created, to prevent memory
  181. // leak.
  182. // createObjectURL creates a strong reference to the MediaSource object
  183. // inside the browser. Setting the src of the video then creates another
  184. // reference within the video element. revokeObjectURL will remove the
  185. // strong reference to the MediaSource object, and allow it to be
  186. // garbage-collected later.
  187. URL.revokeObjectURL(this.url_);
  188. p.resolve();
  189. }
  190. /**
  191. * Checks if a certain type is supported.
  192. *
  193. * @param {shaka.extern.Stream} stream
  194. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  195. * @return {!Promise.<boolean>}
  196. */
  197. static async isStreamSupported(stream, contentType) {
  198. if (stream.createSegmentIndex) {
  199. await stream.createSegmentIndex();
  200. }
  201. if (!stream.segmentIndex) {
  202. return false;
  203. }
  204. if (stream.segmentIndex.isEmpty()) {
  205. return true;
  206. }
  207. const MimeUtils = shaka.util.MimeUtils;
  208. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  209. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  210. const StreamUtils = shaka.util.StreamUtils;
  211. const seenCombos = new Set();
  212. // Check each combination of mimeType and codecs within the segment index.
  213. // Unfortunately we cannot use fullMimeTypes, as we ALSO need to check the
  214. // getFullTypeWithAllCodecs (for the sake of the transmuxer) and we have no
  215. // way of going from a full mimeType to a full mimeType with all codecs.
  216. // As this function is only called in debug mode, a little inefficiency is
  217. // acceptable.
  218. for (const ref of stream.segmentIndex) {
  219. const mimeType = ref.mimeType || stream.mimeType || '';
  220. let codecs = ref.codecs || stream.codecs || '';
  221. // Optimization for the case where the codecs and mimetype of the stream
  222. // match the reference.
  223. if (mimeType == stream.mimeType && codecs == stream.codecs) {
  224. continue;
  225. }
  226. // Don't check the same combination of mimetype + codecs twice.
  227. const combo = mimeType + ':' + codecs;
  228. if (seenCombos.has(combo)) {
  229. continue;
  230. }
  231. seenCombos.add(combo);
  232. if (contentType == ContentType.TEXT) {
  233. const fullMimeType = MimeUtils.getFullType(mimeType, codecs);
  234. if (!shaka.text.TextEngine.isTypeSupported(fullMimeType)) {
  235. return false;
  236. }
  237. } else {
  238. if (contentType == ContentType.VIDEO) {
  239. codecs = StreamUtils.getCorrectVideoCodecs(codecs);
  240. } else if (contentType == ContentType.AUDIO) {
  241. codecs = StreamUtils.getCorrectAudioCodecs(codecs, mimeType);
  242. }
  243. const extendedMimeType = MimeUtils.getExtendedType(
  244. stream, mimeType, codecs);
  245. const fullMimeType = MimeUtils.getFullTypeWithAllCodecs(
  246. mimeType, codecs);
  247. if (!shaka.media.Capabilities.isTypeSupported(extendedMimeType) &&
  248. !TransmuxerEngine.isSupported(fullMimeType, stream.type)) {
  249. return false;
  250. }
  251. }
  252. }
  253. return true;
  254. }
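// Usage sketch (debug-time filtering): stream stands for any
// shaka.extern.Stream produced by a manifest parser.
//
//   const ContentType = shaka.util.ManifestParserUtils.ContentType;
//   const ok = await shaka.media.MediaSourceEngine.isStreamSupported(
//       stream, ContentType.VIDEO);
//   if (!ok) {
//     // Filter the stream out before it reaches init().
//   }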
  255. /**
  256. * Returns a map of MediaSource support for well-known types.
  257. *
  258. * @return {!Object.<string, boolean>}
  259. */
  260. static probeSupport() {
  261. const testMimeTypes = [
  262. // MP4 types
  263. 'video/mp4; codecs="avc1.42E01E"',
  264. 'video/mp4; codecs="avc3.42E01E"',
  265. 'video/mp4; codecs="hev1.1.6.L93.90"',
  266. 'video/mp4; codecs="hvc1.1.6.L93.90"',
  267. 'video/mp4; codecs="hev1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  268. 'video/mp4; codecs="hvc1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  269. 'video/mp4; codecs="vp9"',
  270. 'video/mp4; codecs="vp09.00.10.08"',
  271. 'video/mp4; codecs="av01.0.01M.08"',
  272. 'video/mp4; codecs="dvh1.20.01"',
  273. 'audio/mp4; codecs="mp4a.40.2"',
  274. 'audio/mp4; codecs="ac-3"',
  275. 'audio/mp4; codecs="ec-3"',
  276. 'audio/mp4; codecs="ac-4.02.01.01"',
  277. 'audio/mp4; codecs="opus"',
  278. 'audio/mp4; codecs="flac"',
  279. 'audio/mp4; codecs="dtsc"', // DTS Digital Surround
  280. 'audio/mp4; codecs="dtse"', // DTS Express
  281. 'audio/mp4; codecs="dtsx"', // DTS:X
  282. // WebM types
  283. 'video/webm; codecs="vp8"',
  284. 'video/webm; codecs="vp9"',
  285. 'video/webm; codecs="vp09.00.10.08"',
  286. 'audio/webm; codecs="vorbis"',
  287. 'audio/webm; codecs="opus"',
  288. // MPEG2 TS types (video/ is also used for audio: https://bit.ly/TsMse)
  289. 'video/mp2t; codecs="avc1.42E01E"',
  290. 'video/mp2t; codecs="avc3.42E01E"',
  291. 'video/mp2t; codecs="hvc1.1.6.L93.90"',
  292. 'video/mp2t; codecs="mp4a.40.2"',
  293. 'video/mp2t; codecs="ac-3"',
  294. 'video/mp2t; codecs="ec-3"',
  295. // WebVTT types
  296. 'text/vtt',
  297. 'application/mp4; codecs="wvtt"',
  298. // TTML types
  299. 'application/ttml+xml',
  300. 'application/mp4; codecs="stpp"',
  301. // Containerless types
  302. ...shaka.util.MimeUtils.RAW_FORMATS,
  303. ];
  304. const support = {};
  305. for (const type of testMimeTypes) {
  306. if (shaka.text.TextEngine.isTypeSupported(type)) {
  307. support[type] = true;
  308. } else if (shaka.util.Platform.supportsMediaSource()) {
  309. support[type] = shaka.media.Capabilities.isTypeSupported(type) ||
  310. shaka.transmuxer.TransmuxerEngine.isSupported(type);
  311. } else {
  312. support[type] = shaka.util.Platform.supportsMediaType(type);
  313. }
  314. const basicType = type.split(';')[0];
  315. support[basicType] = support[basicType] || support[type];
  316. }
  317. return support;
  318. }
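// Usage sketch: the returned map contains both the full test strings and
// aggregated basic types.
//
//   const support = shaka.media.MediaSourceEngine.probeSupport();
//   const canPlayH264 = support['video/mp4; codecs="avc1.42E01E"'];
//   const canPlayAnyMp4Video = support['video/mp4'];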
  319. /** @override */
  320. destroy() {
  321. return this.destroyer_.destroy();
  322. }
  323. /** @private */
  324. async doDestroy_() {
  325. const Functional = shaka.util.Functional;
  326. const cleanup = [];
  327. for (const contentType in this.queues_) {
  328. // Make a local copy of the queue and the first item.
  329. const q = this.queues_[contentType];
  330. const inProgress = q[0];
  331. // Drop everything else out of the original queue.
  332. this.queues_[contentType] = q.slice(0, 1);
  333. // We will wait for this item to complete/fail.
  334. if (inProgress) {
  335. cleanup.push(inProgress.p.catch(Functional.noop));
  336. }
  337. // The rest will be rejected silently if possible.
  338. for (const item of q.slice(1)) {
  339. item.p.reject(shaka.util.Destroyer.destroyedError());
  340. }
  341. }
  342. if (this.textEngine_) {
  343. cleanup.push(this.textEngine_.destroy());
  344. }
  345. if (this.textDisplayer_) {
  346. cleanup.push(this.textDisplayer_.destroy());
  347. }
  348. for (const contentType in this.transmuxers_) {
  349. cleanup.push(this.transmuxers_[contentType].destroy());
  350. }
  351. await Promise.all(cleanup);
  352. if (this.eventManager_) {
  353. this.eventManager_.release();
  354. this.eventManager_ = null;
  355. }
  356. if (this.video_) {
  357. // "unload" the video element.
  358. this.video_.removeAttribute('src');
  359. this.video_.load();
  360. this.video_ = null;
  361. }
  362. this.config_ = null;
  363. this.mediaSource_ = null;
  364. this.textEngine_ = null;
  365. this.textDisplayer_ = null;
  366. this.sourceBuffers_ = {};
  367. this.transmuxers_ = {};
  368. this.captionParser_ = null;
  369. if (goog.DEBUG) {
  370. for (const contentType in this.queues_) {
  371. goog.asserts.assert(
  372. this.queues_[contentType].length == 0,
  373. contentType + ' queue should be empty after destroy!');
  374. }
  375. }
  376. this.queues_ = {};
  377. // This object is owned by Player
  378. this.lcevcDec_ = null;
  379. this.tsParsers_ = {};
  380. this.playerInterface_ = null;
  381. }
  382. /**
  383. * @return {!Promise} Resolved when MediaSource is open and attached to the
  384. * media element. This process is actually initiated by the constructor.
  385. */
  386. open() {
  387. return this.mediaSourceOpen_;
  388. }
  389. /**
  390. * Initialize MediaSourceEngine.
  391. *
  392. * Note that it is not valid to call this multiple times, except to add or
  393. * reinitialize text streams.
  394. *
  395. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  396. * shaka.extern.Stream>} streamsByType
  397. * A map of content types to streams. All streams must be supported
  398. * according to MediaSourceEngine.isStreamSupported.
  399. * @param {boolean=} sequenceMode
  400. * If true, the media segments are appended to the SourceBuffer in strict
  401. * sequence.
  402. * @param {string=} manifestType
  403. * Indicates the type of the manifest.
  404. * @param {boolean=} ignoreManifestTimestampsInSegmentsMode
  405. * If true, don't adjust the timestamp offset to account for manifest
  406. * segment durations being out of sync with segment durations. In other
  407. * words, assume that there are no gaps in the segments when appending
  408. * to the SourceBuffer, even if the manifest and segment times disagree.
  410. *
  411. * @return {!Promise}
  412. */
  413. async init(streamsByType, sequenceMode=false,
  414. manifestType=shaka.media.ManifestParser.UNKNOWN,
  415. ignoreManifestTimestampsInSegmentsMode=false) {
  416. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  417. await this.mediaSourceOpen_;
  418. if (this.ended() || this.closed()) {
  419. shaka.log.alwaysError('Expected MediaSource to be open during init(); ' +
  420. 'reopening the media source.');
  421. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  422. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  423. await this.mediaSourceOpen_;
  424. }
  425. this.sequenceMode_ = sequenceMode;
  426. this.manifestType_ = manifestType;
  427. this.ignoreManifestTimestampsInSegmentsMode_ =
  428. ignoreManifestTimestampsInSegmentsMode;
  429. this.attemptTimestampOffsetCalculation_ = !this.sequenceMode_ &&
  430. this.manifestType_ == shaka.media.ManifestParser.HLS &&
  431. !this.ignoreManifestTimestampsInSegmentsMode_;
  432. this.tsParsers_ = {};
  433. this.firstVideoTimestamp_ = null;
  434. this.firstVideoReferenceStartTime_ = null;
  435. this.firstAudioTimestamp_ = null;
  436. this.firstAudioReferenceStartTime_ = null;
  437. this.audioCompensation_ = new shaka.util.PublicPromise();
  438. for (const contentType of streamsByType.keys()) {
  439. const stream = streamsByType.get(contentType);
  440. // eslint-disable-next-line no-await-in-loop
  441. await this.initSourceBuffer_(contentType, stream, stream.codecs);
  442. if (this.needSplitMuxedContent_) {
  443. this.queues_[ContentType.AUDIO] = [];
  444. this.queues_[ContentType.VIDEO] = [];
  445. } else {
  446. this.queues_[contentType] = [];
  447. }
  448. }
  449. }
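// Usage sketch: videoStream and audioStream are hypothetical
// shaka.extern.Stream objects that already passed isStreamSupported().
//
//   const ContentType = shaka.util.ManifestParserUtils.ContentType;
//   const streamsByType = new Map();
//   streamsByType.set(ContentType.VIDEO, videoStream);
//   streamsByType.set(ContentType.AUDIO, audioStream);
//   await mse.init(streamsByType, /* sequenceMode= */ false,
//       shaka.media.ManifestParser.HLS,
//       /* ignoreManifestTimestampsInSegmentsMode= */ false);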
  450. /**
  451. * Initialize a specific SourceBuffer.
  452. *
  453. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  454. * @param {shaka.extern.Stream} stream
  455. * @param {string} codecs
  456. * @return {!Promise}
  457. * @private
  458. */
  459. async initSourceBuffer_(contentType, stream, codecs) {
  460. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  461. goog.asserts.assert(
  462. await shaka.media.MediaSourceEngine.isStreamSupported(
  463. stream, contentType),
  464. 'Type negotiation should happen before MediaSourceEngine.init!');
  465. let mimeType = shaka.util.MimeUtils.getFullType(
  466. stream.mimeType, codecs);
  467. if (contentType == ContentType.TEXT) {
  468. this.reinitText(mimeType, this.sequenceMode_, stream.external);
  469. } else {
  470. let needTransmux = this.config_.forceTransmux;
  471. if (!shaka.media.Capabilities.isTypeSupported(mimeType) ||
  472. (!this.sequenceMode_ &&
  473. shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType))) {
  474. needTransmux = true;
  475. }
  476. const mimeTypeWithAllCodecs =
  477. shaka.util.MimeUtils.getFullTypeWithAllCodecs(
  478. stream.mimeType, codecs);
  479. if (needTransmux) {
  480. const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  481. ContentType.AUDIO, (codecs || '').split(','));
  482. const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  483. ContentType.VIDEO, (codecs || '').split(','));
  484. if (audioCodec && videoCodec) {
  485. this.needSplitMuxedContent_ = true;
  486. await this.initSourceBuffer_(ContentType.AUDIO, stream, audioCodec);
  487. await this.initSourceBuffer_(ContentType.VIDEO, stream, videoCodec);
  488. return;
  489. }
  490. const transmuxerPlugin = shaka.transmuxer.TransmuxerEngine
  491. .findTransmuxer(mimeTypeWithAllCodecs);
  492. if (transmuxerPlugin) {
  493. const transmuxer = transmuxerPlugin();
  494. this.transmuxers_[contentType] = transmuxer;
  495. mimeType =
  496. transmuxer.convertCodecs(contentType, mimeTypeWithAllCodecs);
  497. }
  498. }
  499. const type = this.addExtraFeaturesToMimeType_(mimeType);
  500. this.destroyer_.ensureNotDestroyed();
  501. let sourceBuffer;
  502. try {
  503. sourceBuffer = this.mediaSource_.addSourceBuffer(type);
  504. } catch (exception) {
  505. throw new shaka.util.Error(
  506. shaka.util.Error.Severity.CRITICAL,
  507. shaka.util.Error.Category.MEDIA,
  508. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  509. exception,
  510. 'The mediaSource_ status was ' + this.mediaSource_.readyState +
  511. ' expected \'open\'',
  512. null);
  513. }
  514. if (this.sequenceMode_) {
  515. sourceBuffer.mode =
  516. shaka.media.MediaSourceEngine.SourceBufferMode_.SEQUENCE;
  517. }
  518. this.eventManager_.listen(
  519. sourceBuffer, 'error',
  520. () => this.onError_(contentType));
  521. this.eventManager_.listen(
  522. sourceBuffer, 'updateend',
  523. () => this.onUpdateEnd_(contentType));
  524. this.sourceBuffers_[contentType] = sourceBuffer;
  525. this.sourceBufferTypes_[contentType] = mimeType;
  526. this.expectedEncryption_[contentType] = !!stream.drmInfos.length;
  527. }
  528. }
  529. /**
  530. * Called by the Player to provide an updated configuration any time it
  531. * changes. Must be called at least once before init().
  532. *
  533. * @param {shaka.extern.MediaSourceConfiguration} config
  534. */
  535. configure(config) {
  536. this.config_ = config;
  537. if (this.textEngine_) {
  538. this.textEngine_.setModifyCueCallback(config.modifyCueCallback);
  539. }
  540. }
  541. /**
542. * Indicate if streaming is currently allowed by MediaSourceEngine.
543. * When using a plain MediaSource, this always returns true.
  544. *
  545. * @return {boolean}
  546. */
  547. isStreamingAllowed() {
  548. return this.streamingAllowed_;
  549. }
  550. /**
  551. * Reinitialize the TextEngine for a new text type.
  552. * @param {string} mimeType
  553. * @param {boolean} sequenceMode
  554. * @param {boolean} external
  555. */
  556. reinitText(mimeType, sequenceMode, external) {
  557. if (!this.textEngine_) {
  558. this.textEngine_ = new shaka.text.TextEngine(this.textDisplayer_);
  559. if (this.textEngine_) {
  560. this.textEngine_.setModifyCueCallback(this.config_.modifyCueCallback);
  561. }
  562. }
  563. this.textEngine_.initParser(mimeType, sequenceMode,
  564. external || this.segmentRelativeVttTiming_, this.manifestType_);
  565. }
  566. /**
  567. * @return {boolean} True if the MediaSource is in an "ended" state, or if the
  568. * object has been destroyed.
  569. */
  570. ended() {
  571. if (this.reloadingMediaSource_) {
  572. return false;
  573. }
  574. return this.mediaSource_ ? this.mediaSource_.readyState == 'ended' : true;
  575. }
  576. /**
577. * @return {boolean} True if the MediaSource is in a "closed" state, or if
  578. * the object has been destroyed.
  579. */
  580. closed() {
  581. if (this.reloadingMediaSource_) {
  582. return false;
  583. }
  584. return this.mediaSource_ ? this.mediaSource_.readyState == 'closed' : true;
  585. }
  586. /**
  587. * Gets the first timestamp in buffer for the given content type.
  588. *
  589. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  590. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  591. */
  592. bufferStart(contentType) {
  593. if (!Object.keys(this.sourceBuffers_).length) {
  594. return null;
  595. }
  596. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  597. if (contentType == ContentType.TEXT) {
  598. return this.textEngine_.bufferStart();
  599. }
  600. return shaka.media.TimeRangesUtils.bufferStart(
  601. this.getBuffered_(contentType));
  602. }
  603. /**
  604. * Gets the last timestamp in buffer for the given content type.
  605. *
  606. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  607. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  608. */
  609. bufferEnd(contentType) {
  610. if (!Object.keys(this.sourceBuffers_).length) {
  611. return null;
  612. }
  613. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  614. if (contentType == ContentType.TEXT) {
  615. return this.textEngine_.bufferEnd();
  616. }
  617. return shaka.media.TimeRangesUtils.bufferEnd(
  618. this.getBuffered_(contentType));
  619. }
  620. /**
  621. * Determines if the given time is inside the buffered range of the given
  622. * content type.
  623. *
  624. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  625. * @param {number} time Playhead time
  626. * @return {boolean}
  627. */
  628. isBuffered(contentType, time) {
  629. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  630. if (contentType == ContentType.TEXT) {
  631. return this.textEngine_.isBuffered(time);
  632. } else {
  633. const buffered = this.getBuffered_(contentType);
  634. return shaka.media.TimeRangesUtils.isBuffered(buffered, time);
  635. }
  636. }
  637. /**
638. * Computes how much content is buffered ahead of the given timestamp for
639. * the given content type.
  640. *
  641. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  642. * @param {number} time
  643. * @return {number} The amount of time buffered ahead in seconds.
  644. */
  645. bufferedAheadOf(contentType, time) {
  646. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  647. if (contentType == ContentType.TEXT) {
  648. return this.textEngine_.bufferedAheadOf(time);
  649. } else {
  650. const buffered = this.getBuffered_(contentType);
  651. return shaka.media.TimeRangesUtils.bufferedAheadOf(buffered, time);
  652. }
  653. }
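// Usage sketch: a streaming loop can use this to decide when to fetch more
// data. rebufferingGoal is a hypothetical threshold in seconds.
//
//   const ContentType = shaka.util.ManifestParserUtils.ContentType;
//   const ahead = mse.bufferedAheadOf(
//       ContentType.VIDEO, videoElement.currentTime);
//   if (ahead < rebufferingGoal) {
//     // Fetch and append the next segment.
//   }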
  654. /**
  655. * Returns info about what is currently buffered.
  656. * @return {shaka.extern.BufferedInfo}
  657. */
  658. getBufferedInfo() {
  659. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  660. const TimeRangesUtils = shaka.media.TimeRangesUtils;
  661. const info = {
  662. total: this.reloadingMediaSource_ ? [] :
  663. TimeRangesUtils.getBufferedInfo(this.video_.buffered),
  664. audio:
  665. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.AUDIO)),
  666. video:
  667. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.VIDEO)),
  668. text: [],
  669. };
  670. if (this.textEngine_) {
  671. const start = this.textEngine_.bufferStart();
  672. const end = this.textEngine_.bufferEnd();
  673. if (start != null && end != null) {
  674. info.text.push({start: start, end: end});
  675. }
  676. }
  677. return info;
  678. }
  679. /**
  680. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  681. * @return {TimeRanges} The buffered ranges for the given content type, or
  682. * null if the buffered ranges could not be obtained.
  683. * @private
  684. */
  685. getBuffered_(contentType) {
  686. if (this.reloadingMediaSource_) {
  687. return null;
  688. }
  689. try {
  690. return this.sourceBuffers_[contentType].buffered;
  691. } catch (exception) {
  692. if (contentType in this.sourceBuffers_) {
  693. // Note: previous MediaSource errors may cause access to |buffered| to
  694. // throw.
  695. shaka.log.error('failed to get buffered range for ' + contentType,
  696. exception);
  697. }
  698. return null;
  699. }
  700. }
  701. /**
  702. * Create a new closed caption parser. This will ONLY be replaced by tests as
  703. * a way to inject fake closed caption parser instances.
  704. *
  705. * @param {string} mimeType
  706. * @return {!shaka.media.IClosedCaptionParser}
  707. */
  708. getCaptionParser(mimeType) {
  709. return new shaka.media.ClosedCaptionParser(mimeType);
  710. }
  711. /**
  712. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  713. * @param {!BufferSource} data
  714. * @param {?shaka.media.SegmentReference} reference The segment reference
  715. * we are appending, or null for init segments
  716. * @param {!string} mimeType
  717. * @return {{timestamp: ?number, metadata: !Array.<shaka.extern.ID3Metadata>}}
  718. * @private
  719. */
  720. getTimestampAndDispatchMetadata_(contentType, data, reference, mimeType) {
  721. let timestamp = null;
  722. let metadata = [];
  723. const uint8ArrayData = shaka.util.BufferUtils.toUint8(data);
  724. if (shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType)) {
  725. const frames = shaka.util.Id3Utils.getID3Frames(uint8ArrayData);
  726. if (frames.length && reference) {
  727. const metadataTimestamp = frames.find((frame) => {
  728. return frame.description ===
  729. 'com.apple.streaming.transportStreamTimestamp';
  730. });
  731. if (metadataTimestamp) {
  732. timestamp = Math.round(metadataTimestamp.data) / 1000;
  733. }
  734. /** @private {shaka.extern.ID3Metadata} */
  735. const id3Metadata = {
  736. cueTime: reference.startTime,
  737. data: uint8ArrayData,
  738. frames: frames,
  739. dts: reference.startTime,
  740. pts: reference.startTime,
  741. };
  742. this.playerInterface_.onMetadata(
  743. [id3Metadata], /* offset= */ 0, reference.endTime);
  744. }
  745. } else if (mimeType.includes('/mp4') &&
  746. reference && reference.timestampOffset == 0 &&
  747. reference.initSegmentReference &&
  748. reference.initSegmentReference.timescale) {
  749. const timescale = reference.initSegmentReference.timescale;
  750. if (!isNaN(timescale)) {
  751. const Mp4Parser = shaka.util.Mp4Parser;
  752. let startTime = 0;
  753. let parsedMedia = false;
  754. new Mp4Parser()
  755. .box('moof', Mp4Parser.children)
  756. .box('traf', Mp4Parser.children)
  757. .fullBox('tfdt', (box) => {
  758. goog.asserts.assert(
  759. box.version == 0 || box.version == 1,
  760. 'TFDT version can only be 0 or 1');
  761. const parsed = shaka.util.Mp4BoxParsers.parseTFDTInaccurate(
  762. box.reader, box.version);
  763. startTime = parsed.baseMediaDecodeTime / timescale;
  764. parsedMedia = true;
  765. box.parser.stop();
  766. }).parse(data, /* partialOkay= */ true);
  767. if (parsedMedia) {
  768. timestamp = startTime;
  769. }
  770. }
  771. } else if (!mimeType.includes('/mp4') && !mimeType.includes('/webm') &&
  772. shaka.util.TsParser.probe(uint8ArrayData)) {
  773. if (!this.tsParsers_[contentType]) {
  774. this.tsParsers_[contentType] = new shaka.util.TsParser();
  775. } else {
  776. this.tsParsers_[contentType].clearData();
  777. }
  778. const tsParser = this.tsParsers_[contentType].parse(uint8ArrayData);
  779. const startTime = tsParser.getStartTime(contentType);
  780. if (startTime != null) {
  781. timestamp = startTime;
  782. }
  783. metadata = tsParser.getMetadata();
  784. }
  785. return {timestamp, metadata};
  786. }
  787. /**
  788. * Enqueue an operation to append data to the SourceBuffer.
  789. * Start and end times are needed for TextEngine, but not for MediaSource.
  790. * Start and end times may be null for initialization segments; if present
  791. * they are relative to the presentation timeline.
  792. *
  793. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  794. * @param {!BufferSource} data
  795. * @param {?shaka.media.SegmentReference} reference The segment reference
  796. * we are appending, or null for init segments
  797. * @param {shaka.extern.Stream} stream
  798. * @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
  799. * captions
  800. * @param {boolean=} seeked True if we just seeked
  801. * @param {boolean=} adaptation True if we just automatically switched active
  802. * variant(s).
  803. * @param {boolean=} isChunkedData True if we add to the buffer from the
  804. * partial read of the segment.
* @param {boolean=} fromSplit True if this call was made internally while
*     splitting muxed content into separate audio and video appends.
805. * @return {!Promise}
  806. */
  807. async appendBuffer(
  808. contentType, data, reference, stream, hasClosedCaptions, seeked = false,
  809. adaptation = false, isChunkedData = false, fromSplit = false) {
  810. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  811. if (contentType == ContentType.TEXT) {
  812. if (this.manifestType_ == shaka.media.ManifestParser.HLS) {
  813. // This won't be known until the first video segment is appended.
  814. const offset = await this.textSequenceModeOffset_;
  815. this.textEngine_.setTimestampOffset(offset);
  816. }
  817. await this.textEngine_.appendBuffer(
  818. data,
  819. reference ? reference.startTime : null,
  820. reference ? reference.endTime : null,
  821. reference ? reference.getUris()[0] : null);
  822. return;
  823. }
  824. if (!fromSplit && this.needSplitMuxedContent_) {
  825. await this.appendBuffer(ContentType.AUDIO, data, reference, stream,
  826. hasClosedCaptions, seeked, adaptation, isChunkedData,
  827. /* fromSplit= */ true);
  828. await this.appendBuffer(ContentType.VIDEO, data, reference, stream,
  829. hasClosedCaptions, seeked, adaptation, isChunkedData,
  830. /* fromSplit= */ true);
  831. return;
  832. }
  833. if (!this.sourceBuffers_[contentType]) {
  834. shaka.log.warning('Attempted to restore a non-existent source buffer');
  835. return;
  836. }
  837. let timestampOffset = this.sourceBuffers_[contentType].timestampOffset;
  838. let mimeType = this.sourceBufferTypes_[contentType];
  839. if (this.transmuxers_[contentType]) {
  840. mimeType = this.transmuxers_[contentType].getOriginalMimeType();
  841. }
  842. if (reference) {
  843. const {timestamp, metadata} = this.getTimestampAndDispatchMetadata_(
  844. contentType, data, reference, mimeType);
  845. if (timestamp != null) {
  846. if (this.firstVideoTimestamp_ == null &&
  847. contentType == ContentType.VIDEO) {
  848. this.firstVideoTimestamp_ = timestamp;
  849. this.firstVideoReferenceStartTime_ = reference.startTime;
  850. if (this.firstAudioTimestamp_ != null) {
  851. let compensation = 0;
  852. // Only apply compensation if video and audio segment startTime
  853. // match, to avoid introducing sync issues.
  854. if (this.firstVideoReferenceStartTime_ ==
  855. this.firstAudioReferenceStartTime_) {
  856. compensation =
  857. this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
  858. }
  859. this.audioCompensation_.resolve(compensation);
  860. }
  861. }
  862. if (this.firstAudioTimestamp_ == null &&
  863. contentType == ContentType.AUDIO) {
  864. this.firstAudioTimestamp_ = timestamp;
  865. this.firstAudioReferenceStartTime_ = reference.startTime;
  866. if (this.firstVideoTimestamp_ != null) {
  867. let compensation = 0;
  868. // Only apply compensation if video and audio segment startTime
  869. // match, to avoid introducing sync issues.
  870. if (this.firstVideoReferenceStartTime_ ==
  871. this.firstAudioReferenceStartTime_) {
  872. compensation =
  873. this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
  874. }
  875. this.audioCompensation_.resolve(compensation);
  876. }
  877. }
  878. let realTimestamp = timestamp;
  879. const RAW_FORMATS = shaka.util.MimeUtils.RAW_FORMATS;
  880. // For formats without containers and using segments mode, we need to
  881. // adjust TimestampOffset relative to 0 because segments do not have
  882. // any timestamp information.
  883. if (!this.sequenceMode_ &&
  884. RAW_FORMATS.includes(this.sourceBufferTypes_[contentType])) {
  885. realTimestamp = 0;
  886. }
  887. const calculatedTimestampOffset = reference.startTime - realTimestamp;
  888. const timestampOffsetDifference =
  889. Math.abs(timestampOffset - calculatedTimestampOffset);
  890. if ((timestampOffsetDifference >= 0.001 || seeked || adaptation) &&
  891. (!isChunkedData || calculatedTimestampOffset > 0 ||
  892. !timestampOffset)) {
  893. timestampOffset = calculatedTimestampOffset;
  894. if (this.attemptTimestampOffsetCalculation_) {
  895. this.enqueueOperation_(
  896. contentType,
  897. () => this.abort_(contentType),
  898. null);
  899. this.enqueueOperation_(
  900. contentType,
  901. () => this.setTimestampOffset_(contentType, timestampOffset),
  902. null);
  903. }
  904. }
  905. // Timestamps can only be reliably extracted from video, not audio.
  906. // Packed audio formats do not have internal timestamps at all.
  907. // Prefer video for this when available.
  908. const isBestSourceBufferForTimestamps =
  909. contentType == ContentType.VIDEO ||
  910. !(ContentType.VIDEO in this.sourceBuffers_);
  911. if (isBestSourceBufferForTimestamps) {
  912. this.textSequenceModeOffset_.resolve(timestampOffset);
  913. }
  914. }
  915. if (metadata.length) {
  916. this.playerInterface_.onMetadata(metadata, timestampOffset,
  917. reference ? reference.endTime : null);
  918. }
  919. }
  920. if (hasClosedCaptions && contentType == ContentType.VIDEO) {
  921. if (!this.textEngine_) {
  922. this.reinitText(shaka.util.MimeUtils.CEA608_CLOSED_CAPTION_MIMETYPE,
  923. this.sequenceMode_, /* external= */ false);
  924. }
  925. if (!this.captionParser_) {
  926. const basicType = mimeType.split(';', 1)[0];
  927. this.captionParser_ = this.getCaptionParser(basicType);
  928. }
  929. // If it is the init segment for closed captions, initialize the closed
  930. // caption parser.
  931. if (!reference) {
  932. this.captionParser_.init(data, adaptation);
  933. } else {
  934. const closedCaptions = this.captionParser_.parseFrom(data);
  935. if (closedCaptions.length) {
  936. this.textEngine_.storeAndAppendClosedCaptions(
  937. closedCaptions,
  938. reference.startTime,
  939. reference.endTime,
  940. timestampOffset);
  941. }
  942. }
  943. }
  944. if (this.transmuxers_[contentType]) {
  945. data = await this.transmuxers_[contentType].transmux(
  946. data, stream, reference, this.mediaSource_.duration, contentType);
  947. }
  948. data = this.workAroundBrokenPlatforms_(
  949. data, reference ? reference.startTime : null, contentType,
  950. reference ? reference.getUris()[0] : null);
  951. if (reference && this.sequenceMode_ && contentType != ContentType.TEXT) {
  952. // In sequence mode, for non-text streams, if we just cleared the buffer
  953. // and are either performing an unbuffered seek or handling an automatic
  954. // adaptation, we need to set a new timestampOffset on the sourceBuffer.
  955. if (seeked || adaptation) {
  956. let timestampOffset = reference.startTime;
  957. // Audio and video may not be aligned, so we will compensate for audio
  958. // if necessary.
  959. if (this.manifestType_ == shaka.media.ManifestParser.HLS &&
  960. !this.needSplitMuxedContent_ &&
  961. contentType == ContentType.AUDIO &&
  962. this.sourceBuffers_[ContentType.VIDEO]) {
  963. const compensation = await this.audioCompensation_;
  964. // Only apply compensation if the difference is greater than 100ms
  965. if (Math.abs(compensation) > 0.1) {
  966. timestampOffset -= compensation;
  967. }
  968. }
  969. // The logic to call abort() before setting the timestampOffset is
  970. // extended during unbuffered seeks or automatic adaptations; it is
  971. // possible for the append state to be PARSING_MEDIA_SEGMENT from the
  972. // previous SourceBuffer#appendBuffer() call.
  973. this.enqueueOperation_(
  974. contentType,
  975. () => this.abort_(contentType),
  976. null);
  977. this.enqueueOperation_(
  978. contentType,
  979. () => this.setTimestampOffset_(contentType, timestampOffset),
  980. null);
  981. }
  982. }
  983. let bufferedBefore = null;
  984. await this.enqueueOperation_(contentType, () => {
  985. if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
  986. bufferedBefore = this.getBuffered_(contentType);
  987. }
  988. this.append_(contentType, data, timestampOffset);
  989. }, reference ? reference.getUris()[0] : null);
  990. if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
  991. const bufferedAfter = this.getBuffered_(contentType);
  992. const newBuffered = shaka.media.TimeRangesUtils.computeAddedRange(
  993. bufferedBefore, bufferedAfter);
  994. if (newBuffered) {
  995. const segmentDuration = reference.endTime - reference.startTime;
  996. const timeAdded = newBuffered.end - newBuffered.start;
  997. // Check end times instead of start times. We may be overwriting a
  998. // buffer and only the end changes, and that would be fine.
  999. // Also, exclude tiny segments. Sometimes alignment segments as small
  1000. // as 33ms are seen in Google DAI content. For such tiny segments,
  1001. // half a segment duration would be no issue.
  1002. const offset = Math.abs(newBuffered.end - reference.endTime);
  1003. if (segmentDuration > 0.100 && (offset > segmentDuration / 2 ||
  1004. Math.abs(segmentDuration - timeAdded) > 0.030)) {
  1005. shaka.log.error('Possible encoding problem detected!',
  1006. 'Unexpected buffered range for reference', reference,
  1007. 'from URIs', reference.getUris(),
  1008. 'should be', {start: reference.startTime, end: reference.endTime},
  1009. 'but got', newBuffered);
  1010. }
  1011. }
  1012. }
  1013. }
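// Usage sketch: append an init segment (null reference) followed by a media
// segment. initData, segmentData, reference, and stream are hypothetical
// values obtained from the streaming engine.
//
//   const ContentType = shaka.util.ManifestParserUtils.ContentType;
//   await mse.appendBuffer(ContentType.VIDEO, initData,
//       /* reference= */ null, stream, /* hasClosedCaptions= */ false);
//   await mse.appendBuffer(ContentType.VIDEO, segmentData,
//       reference, stream, /* hasClosedCaptions= */ false);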
  1014. /**
  1015. * Set the selected closed captions Id and language.
  1016. *
  1017. * @param {string} id
  1018. */
  1019. setSelectedClosedCaptionId(id) {
  1020. const VIDEO = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  1021. const videoBufferEndTime = this.bufferEnd(VIDEO) || 0;
  1022. this.textEngine_.setSelectedClosedCaptionId(id, videoBufferEndTime);
  1023. }
  1024. /** Disable embedded closed captions. */
  1025. clearSelectedClosedCaptionId() {
  1026. if (this.textEngine_) {
  1027. this.textEngine_.setSelectedClosedCaptionId('', 0);
  1028. }
  1029. }
  1030. /**
  1031. * Enqueue an operation to remove data from the SourceBuffer.
  1032. *
  1033. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1034. * @param {number} startTime relative to the start of the presentation
  1035. * @param {number} endTime relative to the start of the presentation
  1036. * @return {!Promise}
  1037. */
  1038. async remove(contentType, startTime, endTime) {
  1039. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1040. if (contentType == ContentType.TEXT) {
  1041. await this.textEngine_.remove(startTime, endTime);
  1042. } else {
  1043. await this.enqueueOperation_(
  1044. contentType,
  1045. () => this.remove_(contentType, startTime, endTime),
  1046. null);
  1047. if (this.needSplitMuxedContent_) {
  1048. await this.enqueueOperation_(
  1049. ContentType.AUDIO,
  1050. () => this.remove_(ContentType.AUDIO, startTime, endTime),
  1051. null);
  1052. }
  1053. }
  1054. }
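// Usage sketch: evict buffered media that has fallen far behind the
// playhead. The 30-second margin is an arbitrary example value.
//
//   const ContentType = shaka.util.ManifestParserUtils.ContentType;
//   const start = mse.bufferStart(ContentType.VIDEO);
//   const playhead = videoElement.currentTime;
//   if (start != null && playhead - start > 30) {
//     await mse.remove(ContentType.VIDEO, start, playhead - 30);
//   }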
  1055. /**
  1056. * Enqueue an operation to clear the SourceBuffer.
  1057. *
  1058. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1059. * @return {!Promise}
  1060. */
  1061. async clear(contentType) {
  1062. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1063. if (contentType == ContentType.TEXT) {
  1064. if (!this.textEngine_) {
  1065. return;
  1066. }
  1067. await this.textEngine_.remove(0, Infinity);
  1068. } else {
  1069. // Note that not all platforms allow clearing to Infinity.
  1070. await this.enqueueOperation_(
  1071. contentType,
  1072. () => this.remove_(contentType, 0, this.mediaSource_.duration),
  1073. null);
  1074. if (this.needSplitMuxedContent_) {
  1075. await this.enqueueOperation_(
  1076. ContentType.AUDIO,
  1077. () => this.remove_(
  1078. ContentType.AUDIO, 0, this.mediaSource_.duration),
  1079. null);
  1080. }
  1081. }
  1082. }
  1083. /**
  1084. * Fully reset the state of the caption parser owned by MediaSourceEngine.
  1085. */
  1086. resetCaptionParser() {
  1087. if (this.captionParser_) {
  1088. this.captionParser_.reset();
  1089. }
  1090. }
  1091. /**
  1092. * Enqueue an operation to flush the SourceBuffer.
  1093. * This is a workaround for what we believe is a Chromecast bug.
  1094. *
  1095. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1096. * @return {!Promise}
  1097. */
  1098. async flush(contentType) {
  1099. // Flush the pipeline. Necessary on Chromecast, even though we have removed
  1100. // everything.
  1101. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1102. if (contentType == ContentType.TEXT) {
  1103. // Nothing to flush for text.
  1104. return;
  1105. }
  1106. await this.enqueueOperation_(
  1107. contentType,
  1108. () => this.flush_(contentType),
  1109. null);
  1110. if (this.needSplitMuxedContent_) {
  1111. await this.enqueueOperation_(
  1112. ContentType.AUDIO,
  1113. () => this.flush_(ContentType.AUDIO),
  1114. null);
  1115. }
  1116. }
  1117. /**
1118. * Sets the timestamp offset and append window start and end for the given
* content type.
  1119. *
  1120. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1121. * @param {number} timestampOffset The timestamp offset. Segments which start
  1122. * at time t will be inserted at time t + timestampOffset instead. This
  1123. * value does not affect segments which have already been inserted.
  1124. * @param {number} appendWindowStart The timestamp to set the append window
  1125. * start to. For future appends, frames/samples with timestamps less than
  1126. * this value will be dropped.
  1127. * @param {number} appendWindowEnd The timestamp to set the append window end
  1128. * to. For future appends, frames/samples with timestamps greater than this
  1129. * value will be dropped.
  1130. * @param {boolean} ignoreTimestampOffset If true, the timestampOffset will
  1131. * not be applied in this step.
  1132. * @param {string} mimeType
  1133. * @param {string} codecs
  1134. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  1135. * shaka.extern.Stream>} streamsByType
  1136. * A map of content types to streams. All streams must be supported
  1137. * according to MediaSourceEngine.isStreamSupported.
  1138. *
  1139. * @return {!Promise}
  1140. */
  1141. async setStreamProperties(
  1142. contentType, timestampOffset, appendWindowStart, appendWindowEnd,
  1143. ignoreTimestampOffset, mimeType, codecs, streamsByType) {
  1144. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1145. if (contentType == ContentType.TEXT) {
  1146. if (!ignoreTimestampOffset) {
  1147. this.textEngine_.setTimestampOffset(timestampOffset);
  1148. }
  1149. this.textEngine_.setAppendWindow(appendWindowStart, appendWindowEnd);
  1150. return;
  1151. }
  1152. const operations = [];
  1153. const hasChangedCodecs = await this.codecSwitchIfNecessary_(
  1154. contentType, mimeType, codecs, streamsByType);
  1155. if (!hasChangedCodecs) {
  1156. // Queue an abort() to help MSE splice together overlapping segments.
  1157. // We set appendWindowEnd when we change periods in DASH content, and the
  1158. // period transition may result in overlap.
  1159. //
  1160. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1161. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1162. // timestamp offset. By calling abort(), we reset the state so we can
  1163. // set it.
  1164. operations.push(this.enqueueOperation_(
  1165. contentType,
  1166. () => this.abort_(contentType),
  1167. null));
  1168. if (this.needSplitMuxedContent_) {
  1169. operations.push(this.enqueueOperation_(
  1170. ContentType.AUDIO,
  1171. () => this.abort_(ContentType.AUDIO),
  1172. null));
  1173. }
  1174. }
  1175. if (!ignoreTimestampOffset) {
  1176. operations.push(this.enqueueOperation_(
  1177. contentType,
  1178. () => this.setTimestampOffset_(contentType, timestampOffset),
  1179. null));
  1180. if (this.needSplitMuxedContent_) {
  1181. operations.push(this.enqueueOperation_(
  1182. ContentType.AUDIO,
  1183. () => this.setTimestampOffset_(
  1184. ContentType.AUDIO, timestampOffset),
  1185. null));
  1186. }
  1187. }
  1188. operations.push(this.enqueueOperation_(
  1189. contentType,
  1190. () => this.setAppendWindow_(
  1191. contentType, appendWindowStart, appendWindowEnd),
  1192. null));
  1193. if (this.needSplitMuxedContent_) {
  1194. operations.push(this.enqueueOperation_(
  1195. ContentType.AUDIO,
  1196. () => this.setAppendWindow_(
  1197. ContentType.AUDIO, appendWindowStart, appendWindowEnd),
  1198. null));
  1199. }
  1200. await Promise.all(operations);
  1201. }
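// Usage sketch: typical values at a DASH period transition. periodStart and
// periodEnd are hypothetical presentation times for the new period.
//
//   const ContentType = shaka.util.ManifestParserUtils.ContentType;
//   await mse.setStreamProperties(ContentType.VIDEO,
//       /* timestampOffset= */ periodStart,
//       /* appendWindowStart= */ periodStart,
//       /* appendWindowEnd= */ periodEnd,
//       /* ignoreTimestampOffset= */ false,
//       stream.mimeType, stream.codecs, streamsByType);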
  1202. /**
  1203. * Adjust timestamp offset to maintain AV sync across discontinuities.
  1204. *
  1205. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1206. * @param {number} timestampOffset
  1207. * @return {!Promise}
  1208. */
  1209. async resync(contentType, timestampOffset) {
  1210. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1211. if (contentType == ContentType.TEXT) {
  1212. // This operation is for audio and video only.
  1213. return;
  1214. }
  1215. // Reset the promise in case the timestamp offset changed during
  1216. // a period/discontinuity transition.
  1217. if (contentType == ContentType.VIDEO) {
  1218. this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  1219. }
  1220. if (!this.sequenceMode_) {
  1221. return;
  1222. }
  1223. // Avoid changing timestampOffset when the difference is less than 100 ms
  1224. // from the end of the current buffer.
  1225. const bufferEnd = this.bufferEnd(contentType);
  1226. if (bufferEnd && Math.abs(bufferEnd - timestampOffset) < 0.1) {
  1227. return;
  1228. }
  1229. // Queue an abort() to help MSE splice together overlapping segments.
  1230. // We set appendWindowEnd when we change periods in DASH content, and the
  1231. // period transition may result in overlap.
  1232. //
  1233. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1234. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1235. // timestamp offset. By calling abort(), we reset the state so we can
  1236. // set it.
  1237. this.enqueueOperation_(
  1238. contentType,
  1239. () => this.abort_(contentType),
  1240. null);
  1241. if (this.needSplitMuxedContent_) {
  1242. this.enqueueOperation_(
  1243. ContentType.AUDIO,
  1244. () => this.abort_(ContentType.AUDIO),
  1245. null);
  1246. }
  1247. await this.enqueueOperation_(
  1248. contentType,
  1249. () => this.setTimestampOffset_(contentType, timestampOffset),
  1250. null);
  1251. if (this.needSplitMuxedContent_) {
  1252. await this.enqueueOperation_(
  1253. ContentType.AUDIO,
  1254. () => this.setTimestampOffset_(ContentType.AUDIO, timestampOffset),
  1255. null);
  1256. }
  1257. }
  1258. /**
  1259. * @param {string=} reason Valid reasons are 'network' and 'decode'.
  1260. * @return {!Promise}
  1261. * @see http://w3c.github.io/media-source/#idl-def-EndOfStreamError
  1262. */
  1263. async endOfStream(reason) {
  1264. await this.enqueueBlockingOperation_(() => {
  1265. // If endOfStream() has already been called on the media source,
  1266. // don't call it again. Also do not call if readyState is
  1267. // 'closed' (not attached to video element) since it is not a
  1268. // valid operation.
  1269. if (this.ended() || this.closed()) {
  1270. return;
  1271. }
  1272. // Tizen won't let us pass undefined, but it will let us omit the
  1273. // argument.
  1274. if (reason) {
  1275. this.mediaSource_.endOfStream(reason);
  1276. } else {
  1277. this.mediaSource_.endOfStream();
  1278. }
  1279. });
  1280. }
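// Usage sketch: signal the end of a VOD presentation after the last segments
// have been appended, or report a fatal error condition.
//
//   await mse.endOfStream();           // Normal end of stream.
//   await mse.endOfStream('network');  // e.g. an unrecoverable fetch failure.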
  1281. /**
  1282. * @param {number} duration
  1283. * @return {!Promise}
  1284. */
  1285. async setDuration(duration) {
  1286. await this.enqueueBlockingOperation_(() => {
  1287. // Reducing the duration causes the MSE removal algorithm to run, which
  1288. // triggers an 'updateend' event to fire. To handle this scenario, we
  1289. // have to insert a dummy operation into the beginning of each queue,
  1290. // which the 'updateend' handler will remove.
  1291. if (duration < this.mediaSource_.duration) {
  1292. for (const contentType in this.sourceBuffers_) {
  1293. const dummyOperation = {
  1294. start: () => {},
  1295. p: new shaka.util.PublicPromise(),
  1296. uri: null,
  1297. };
  1298. this.queues_[contentType].unshift(dummyOperation);
  1299. }
  1300. }
  1301. this.mediaSource_.duration = duration;
  1302. this.lastDuration_ = duration;
  1303. });
  1304. }
  1305. /**
  1306. * Get the current MediaSource duration.
  1307. *
  1308. * @return {number}
  1309. */
  1310. getDuration() {
  1311. return this.mediaSource_.duration;
  1312. }
  1313. /**
  1314. * Append data to the SourceBuffer.
  1315. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1316. * @param {BufferSource} data
  1317. * @param {number} timestampOffset
  1318. * @private
  1319. */
  1320. append_(contentType, data, timestampOffset) {
  1321. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1322. // Append only video data to the LCEVC Dec.
  1323. if (contentType == ContentType.VIDEO && this.lcevcDec_) {
  1324. // Append video buffers to the LCEVC Dec for parsing and storing
  1325. // of LCEVC data.
  1326. this.lcevcDec_.appendBuffer(data, timestampOffset);
  1327. }
  1328. // This will trigger an 'updateend' event.
  1329. this.sourceBuffers_[contentType].appendBuffer(data);
  1330. }
  1331. /**
  1332. * Remove data from the SourceBuffer.
  1333. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1334. * @param {number} startTime relative to the start of the presentation
  1335. * @param {number} endTime relative to the start of the presentation
  1336. * @private
  1337. */
  1338. remove_(contentType, startTime, endTime) {
  1339. if (endTime <= startTime) {
  1340. // Ignore removal of inverted or empty ranges.
  1341. // Fake 'updateend' event to resolve the operation.
  1342. this.onUpdateEnd_(contentType);
  1343. return;
  1344. }
  1345. // This will trigger an 'updateend' event.
  1346. this.sourceBuffers_[contentType].remove(startTime, endTime);
  1347. }
  1348. /**
  1349. * Call abort() on the SourceBuffer.
  1350. * This resets MSE's last_decode_timestamp on all track buffers, which should
  1351. * trigger the splicing logic for overlapping segments.
  1352. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1353. * @private
  1354. */
  1355. abort_(contentType) {
  1356. // Save the append window, which is reset on abort().
  1357. const appendWindowStart =
  1358. this.sourceBuffers_[contentType].appendWindowStart;
  1359. const appendWindowEnd = this.sourceBuffers_[contentType].appendWindowEnd;
  1360. // This will not trigger an 'updateend' event, since nothing is happening.
  1361. // This is only to reset MSE internals, not to abort an actual operation.
  1362. this.sourceBuffers_[contentType].abort();
  1363. // Restore the append window.
  1364. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  1365. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  1366. // Fake an 'updateend' event to resolve the operation.
  1367. this.onUpdateEnd_(contentType);
  1368. }
  1369. /**
  1370. * Nudge the playhead to force the media pipeline to be flushed.
  1371. * This seems to be necessary on Chromecast to get new content to replace old
  1372. * content.
  1373. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1374. * @private
  1375. */
  1376. flush_(contentType) {
  1377. // Never use flush_ if there's data. It causes a hiccup in playback.
  1378. goog.asserts.assert(
  1379. this.video_.buffered.length == 0, 'MediaSourceEngine.flush_ should ' +
  1380. 'only be used after clearing all data!');
  1381. // Seeking forces the pipeline to be flushed.
  1382. this.video_.currentTime -= 0.001;
  1383. // Fake an 'updateend' event to resolve the operation.
  1384. this.onUpdateEnd_(contentType);
  1385. }
  1386. /**
  1387. * Set the SourceBuffer's timestamp offset.
  1388. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1389. * @param {number} timestampOffset
  1390. * @private
  1391. */
  1392. setTimestampOffset_(contentType, timestampOffset) {
1393. // Workaround for
  1394. // https://github.com/shaka-project/shaka-player/issues/1281:
  1395. // TODO(https://bit.ly/2ttKiBU): follow up when this is fixed in Edge
  1396. if (timestampOffset < 0) {
  1397. // Try to prevent rounding errors in Edge from removing the first
  1398. // keyframe.
  1399. timestampOffset += 0.001;
  1400. }
  1401. this.sourceBuffers_[contentType].timestampOffset = timestampOffset;
  1402. // Fake an 'updateend' event to resolve the operation.
  1403. this.onUpdateEnd_(contentType);
  1404. }
  1405. /**
1406. * Set the SourceBuffer's append window start and end.
  1407. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1408. * @param {number} appendWindowStart
  1409. * @param {number} appendWindowEnd
  1410. * @private
  1411. */
  1412. setAppendWindow_(contentType, appendWindowStart, appendWindowEnd) {
  1413. // You can't set start > end, so first set start to 0, then set the new
  1414. // end, then set the new start. That way, there are no intermediate
  1415. // states which are invalid.
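// For example (hypothetical values): moving the window from [10, 20] to
// [30, 40] by setting the new start first would momentarily request
// appendWindowStart = 30 while appendWindowEnd is still 20, which the
// SourceBuffer rejects.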
  1416. this.sourceBuffers_[contentType].appendWindowStart = 0;
  1417. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  1418. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  1419. // Fake an 'updateend' event to resolve the operation.
  1420. this.onUpdateEnd_(contentType);
  1421. }
  1422. /**
  1423. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1424. * @private
  1425. */
  1426. onError_(contentType) {
  1427. const operation = this.queues_[contentType][0];
  1428. goog.asserts.assert(operation, 'Spurious error event!');
  1429. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  1430. 'SourceBuffer should not be updating on error!');
  1431. const code = this.video_.error ? this.video_.error.code : 0;
  1432. operation.p.reject(new shaka.util.Error(
  1433. shaka.util.Error.Severity.CRITICAL,
  1434. shaka.util.Error.Category.MEDIA,
  1435. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_FAILED,
  1436. code, operation.uri));
  1437. // Do not pop from queue. An 'updateend' event will fire next, and to
  1438. // avoid synchronizing these two event handlers, we will allow that one to
  1439. // pop from the queue as normal. Note that because the operation has
  1440. // already been rejected, the call to resolve() in the 'updateend' handler
  1441. // will have no effect.
  1442. }
  1443. /**
  1444. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1445. * @private
  1446. */
  1447. onUpdateEnd_(contentType) {
  1448. if (this.reloadingMediaSource_) {
  1449. return;
  1450. }
  1451. const operation = this.queues_[contentType][0];
  1452. goog.asserts.assert(operation, 'Spurious updateend event!');
  1453. if (!operation) {
  1454. return;
  1455. }
  1456. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  1457. 'SourceBuffer should not be updating on updateend!');
  1458. operation.p.resolve();
  1459. this.popFromQueue_(contentType);
  1460. }
  1461. /**
  1462. * Enqueue an operation and start it if appropriate.
  1463. *
  1464. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1465. * @param {function()} start
  1466. * @param {?string} uri
  1467. * @return {!Promise}
  1468. * @private
  1469. */
  1470. enqueueOperation_(contentType, start, uri) {
  1471. this.destroyer_.ensureNotDestroyed();
  1472. const operation = {
  1473. start: start,
  1474. p: new shaka.util.PublicPromise(),
  1475. uri,
  1476. };
  1477. this.queues_[contentType].push(operation);
  1478. if (this.queues_[contentType].length == 1) {
  1479. this.startOperation_(contentType);
  1480. }
  1481. return operation.p;
  1482. }
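// A rough usage sketch (see changeType() below for a real instance in this
// class): the start callback performs the SourceBuffer mutation, and the
// returned Promise resolves on the matching 'updateend' event, real or faked.
// For example:
//
//   return this.enqueueOperation_(
//       contentType,
//       () => this.setTimestampOffset_(contentType, timestampOffset),
//       null);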
  1483. /**
  1484. * Enqueue an operation which must block all other operations on all
  1485. * SourceBuffers.
  1486. *
  1487. * @param {function():(Promise|undefined)} run
  1488. * @return {!Promise}
  1489. * @private
  1490. */
  1491. async enqueueBlockingOperation_(run) {
  1492. this.destroyer_.ensureNotDestroyed();
  1493. /** @type {!Array.<!shaka.util.PublicPromise>} */
  1494. const allWaiters = [];
  1495. // Enqueue a 'wait' operation onto each queue.
  1496. // This operation signals its readiness when it starts.
  1497. // When all wait operations are ready, the real operation takes place.
  1498. for (const contentType in this.sourceBuffers_) {
  1499. const ready = new shaka.util.PublicPromise();
  1500. const operation = {
  1501. start: () => ready.resolve(),
  1502. p: ready,
  1503. uri: null,
  1504. };
  1505. this.queues_[contentType].push(operation);
  1506. allWaiters.push(ready);
  1507. if (this.queues_[contentType].length == 1) {
  1508. operation.start();
  1509. }
  1510. }
1511. // Wait until there are no other in-progress operations on any SourceBuffer
1512. // before starting the real operation.
  1513. try {
  1514. await Promise.all(allWaiters);
  1515. } catch (error) {
  1516. // One of the waiters failed, which means we've been destroyed.
  1517. goog.asserts.assert(
  1518. this.destroyer_.destroyed(), 'Should be destroyed by now');
  1519. // We haven't popped from the queue. Canceled waiters have been removed
  1520. // by destroy. What's left now should just be resolved waiters. In
  1521. // uncompiled mode, we will maintain good hygiene and make sure the
  1522. // assert at the end of destroy passes. In compiled mode, the queues
  1523. // are wiped in destroy.
  1524. if (goog.DEBUG) {
  1525. for (const contentType in this.sourceBuffers_) {
  1526. if (this.queues_[contentType].length) {
  1527. goog.asserts.assert(
  1528. this.queues_[contentType].length == 1,
  1529. 'Should be at most one item in queue!');
  1530. goog.asserts.assert(
  1531. allWaiters.includes(this.queues_[contentType][0].p),
  1532. 'The item in queue should be one of our waiters!');
  1533. this.queues_[contentType].shift();
  1534. }
  1535. }
  1536. }
  1537. throw error;
  1538. }
  1539. if (goog.DEBUG) {
  1540. // If we did it correctly, nothing is updating.
  1541. for (const contentType in this.sourceBuffers_) {
  1542. goog.asserts.assert(
  1543. this.sourceBuffers_[contentType].updating == false,
  1544. 'SourceBuffers should not be updating after a blocking op!');
  1545. }
  1546. }
  1547. // Run the real operation, which can be asynchronous.
  1548. try {
  1549. await run();
  1550. } catch (exception) {
  1551. throw new shaka.util.Error(
  1552. shaka.util.Error.Severity.CRITICAL,
  1553. shaka.util.Error.Category.MEDIA,
  1554. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1555. exception,
  1556. this.video_.error || 'No error in the media element',
  1557. null);
  1558. } finally {
  1559. // Unblock the queues.
  1560. for (const contentType in this.sourceBuffers_) {
  1561. this.popFromQueue_(contentType);
  1562. }
  1563. }
  1564. }
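// A minimal usage sketch: reset() below wraps reset_() in a blocking
// operation so that no per-type queue can touch its SourceBuffer while the
// MediaSource is torn down and rebuilt:
//
//   return this.enqueueBlockingOperation_(() => this.reset_(streamsByType));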
  1565. /**
  1566. * Pop from the front of the queue and start a new operation.
  1567. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1568. * @private
  1569. */
  1570. popFromQueue_(contentType) {
  1571. // Remove the in-progress operation, which is now complete.
  1572. this.queues_[contentType].shift();
  1573. this.startOperation_(contentType);
  1574. }
  1575. /**
  1576. * Starts the next operation in the queue.
  1577. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1578. * @private
  1579. */
  1580. startOperation_(contentType) {
  1581. // Retrieve the next operation, if any, from the queue and start it.
  1582. const next = this.queues_[contentType][0];
  1583. if (next) {
  1584. try {
  1585. next.start();
  1586. } catch (exception) {
  1587. if (exception.name == 'QuotaExceededError') {
  1588. next.p.reject(new shaka.util.Error(
  1589. shaka.util.Error.Severity.CRITICAL,
  1590. shaka.util.Error.Category.MEDIA,
  1591. shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR,
  1592. contentType));
  1593. } else {
  1594. next.p.reject(new shaka.util.Error(
  1595. shaka.util.Error.Severity.CRITICAL,
  1596. shaka.util.Error.Category.MEDIA,
  1597. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1598. exception,
  1599. this.video_.error || 'No error in the media element',
  1600. next.uri));
  1601. }
  1602. this.popFromQueue_(contentType);
  1603. }
  1604. }
  1605. }
  1606. /**
  1607. * @return {!shaka.extern.TextDisplayer}
  1608. */
  1609. getTextDisplayer() {
  1610. goog.asserts.assert(
  1611. this.textDisplayer_,
  1612. 'TextDisplayer should only be null when this is destroyed');
  1613. return this.textDisplayer_;
  1614. }
  1615. /**
  1616. * @param {!shaka.extern.TextDisplayer} textDisplayer
  1617. */
  1618. setTextDisplayer(textDisplayer) {
  1619. const oldTextDisplayer = this.textDisplayer_;
  1620. this.textDisplayer_ = textDisplayer;
  1621. if (oldTextDisplayer) {
  1622. textDisplayer.setTextVisibility(oldTextDisplayer.isTextVisible());
  1623. oldTextDisplayer.destroy();
  1624. }
  1625. if (this.textEngine_) {
  1626. this.textEngine_.setDisplayer(textDisplayer);
  1627. }
  1628. }
  1629. /**
  1630. * @param {boolean} segmentRelativeVttTiming
  1631. */
  1632. setSegmentRelativeVttTiming(segmentRelativeVttTiming) {
  1633. this.segmentRelativeVttTiming_ = segmentRelativeVttTiming;
  1634. }
  1635. /**
  1636. * Apply platform-specific transformations to this segment to work around
  1637. * issues in the platform.
  1638. *
  1639. * @param {!BufferSource} segment
  1640. * @param {?number} startTime
  1641. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1642. * @param {?string} uri
  1643. * @return {!BufferSource}
  1644. * @private
  1645. */
  1646. workAroundBrokenPlatforms_(segment, startTime, contentType, uri) {
  1647. const Platform = shaka.util.Platform;
  1648. const isInitSegment = startTime == null;
  1649. const encryptionExpected = this.expectedEncryption_[contentType];
  1650. const keySystem = this.playerInterface_.getKeySystem();
  1651. // If:
1652. // 1. the configuration tells us to insert fake encryption,
  1653. // 2. and this is an init segment,
  1654. // 3. and encryption is expected,
  1655. // 4. and the platform requires encryption in all init segments,
  1656. // 5. and the content is MP4 (mimeType == "video/mp4" or "audio/mp4"),
  1657. // then insert fake encryption metadata for init segments that lack it.
  1658. // The MP4 requirement is because we can currently only do this
  1659. // transformation on MP4 containers.
  1660. // See: https://github.com/shaka-project/shaka-player/issues/2759
  1661. if (this.config_.insertFakeEncryptionInInit &&
  1662. isInitSegment &&
  1663. encryptionExpected &&
  1664. Platform.requiresEncryptionInfoInAllInitSegments(keySystem) &&
  1665. shaka.util.MimeUtils.getContainerType(
  1666. this.sourceBufferTypes_[contentType]) == 'mp4') {
  1667. shaka.log.debug('Forcing fake encryption information in init segment.');
  1668. segment = shaka.media.ContentWorkarounds.fakeEncryption(segment, uri);
  1669. }
  1670. return segment;
  1671. }
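// Illustrative only: a call such as
//   workAroundBrokenPlatforms_(initSegment, /* startTime= */ null,
//       ContentType.VIDEO, uri)
// treats the segment as an init segment (startTime == null) and returns it
// with fake encryption metadata inserted when the five conditions checked
// above are all met; otherwise the segment is returned unchanged.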
  1672. /**
  1673. * Prepare the SourceBuffer to parse a potentially new type or codec.
  1674. *
  1675. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1676. * @param {string} mimeType
  1677. * @param {?shaka.extern.Transmuxer} transmuxer
  1678. * @private
  1679. */
  1680. change_(contentType, mimeType, transmuxer) {
  1681. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1682. if (contentType === ContentType.TEXT) {
  1683. shaka.log.debug(`Change not supported for ${contentType}`);
  1684. return;
  1685. }
  1686. shaka.log.debug(
  1687. `Change Type: ${this.sourceBufferTypes_[contentType]} -> ${mimeType}`);
  1688. if (shaka.media.Capabilities.isChangeTypeSupported()) {
  1689. if (this.transmuxers_[contentType]) {
  1690. this.transmuxers_[contentType].destroy();
  1691. delete this.transmuxers_[contentType];
  1692. }
  1693. if (transmuxer) {
  1694. this.transmuxers_[contentType] = transmuxer;
  1695. }
  1696. const type = this.addExtraFeaturesToMimeType_(mimeType);
  1697. this.sourceBuffers_[contentType].changeType(type);
  1698. this.sourceBufferTypes_[contentType] = mimeType;
  1699. } else {
  1700. shaka.log.debug('Change Type not supported');
  1701. }
  1702. // Fake an 'updateend' event to resolve the operation.
  1703. this.onUpdateEnd_(contentType);
  1704. }
  1705. /**
  1706. * Enqueue an operation to prepare the SourceBuffer to parse a potentially new
  1707. * type or codec.
  1708. *
  1709. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1710. * @param {string} mimeType
  1711. * @param {?shaka.extern.Transmuxer} transmuxer
  1712. * @return {!Promise}
  1713. */
  1714. changeType(contentType, mimeType, transmuxer) {
  1715. return this.enqueueOperation_(
  1716. contentType,
  1717. () => this.change_(contentType, mimeType, transmuxer),
  1718. null);
  1719. }
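// Usage sketch: when the codec switching strategy is SMOOTH and changeType()
// is supported by the platform, codecSwitchIfNecessary_ below performs the
// switch with a single queued operation:
//
//   await this.changeType(contentType, newMimeType, transmuxer);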
  1720. /**
  1721. * Returns the source buffer parameters
  1722. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1723. * @return {?shaka.media.MediaSourceEngine.SourceBufferParams}
  1724. * @private
  1725. */
  1726. getSourceBufferParams_(contentType) {
  1727. if (!this.sourceBuffers_[contentType]) {
  1728. return null;
  1729. }
  1730. return {
  1731. timestampOffset: this.sourceBuffers_[contentType].timestampOffset,
  1732. appendWindowStart: this.sourceBuffers_[contentType].appendWindowStart,
  1733. appendWindowEnd: this.sourceBuffers_[contentType].appendWindowEnd,
  1734. };
  1735. }
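// The returned object matches the SourceBufferParams typedef at the bottom of
// this file, e.g. (illustrative values only):
//
//   {timestampOffset: 0, appendWindowStart: 0, appendWindowEnd: Infinity}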
  1736. /**
  1737. * Restore source buffer parameters
  1738. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1739. * @param {?shaka.media.MediaSourceEngine.SourceBufferParams} params
  1740. * @private
  1741. */
  1742. restoreSourceBufferParams_(contentType, params) {
  1743. if (!params) {
  1744. return;
  1745. }
  1746. if (!this.sourceBuffers_[contentType]) {
  1747. shaka.log.warning('Attempted to restore a non-existent source buffer');
  1748. return;
  1749. }
  1750. this.sourceBuffers_[contentType].timestampOffset =
  1751. params.timestampOffset;
  1752. // `end` needs to be set before `start`
  1753. this.sourceBuffers_[contentType].appendWindowEnd =
  1754. params.appendWindowEnd;
  1755. this.sourceBuffers_[contentType].appendWindowStart =
  1756. params.appendWindowStart;
  1757. }
  1758. /**
  1759. * Resets the MediaSource and re-adds source buffers due to codec mismatch
  1760. *
  1761. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  1762. * shaka.extern.Stream>} streamsByType
  1763. * @private
  1764. */
  1765. async reset_(streamsByType) {
  1766. const Functional = shaka.util.Functional;
  1767. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1768. this.reloadingMediaSource_ = true;
  1769. this.needSplitMuxedContent_ = false;
  1770. const currentTime = this.video_.currentTime;
1771. // When switching codecs, if the user is currently paused we don't want
1772. // to trigger a play after the switch.
1773. // Playback can also end up in a paused state after a codec switch,
1774. // so we need to remember the current states.
  1775. const previousAutoPlayState = this.video_.autoplay;
  1776. const previousPausedState = this.video_.paused;
  1777. if (this.playbackHasBegun_) {
  1778. // Only set autoplay to false if the video playback has already begun.
1779. // Setting autoplay to false before playback has begun can cause
1780. // autoplay not to work as expected after the switch.
  1781. this.video_.autoplay = false;
  1782. }
  1783. try {
  1784. this.eventManager_.removeAll();
  1785. const cleanup = [];
  1786. for (const contentType in this.transmuxers_) {
  1787. cleanup.push(this.transmuxers_[contentType].destroy());
  1788. }
  1789. for (const contentType in this.queues_) {
  1790. // Make a local copy of the queue and the first item.
  1791. const q = this.queues_[contentType];
  1792. const inProgress = q[0];
  1793. // Drop everything else out of the original queue.
  1794. this.queues_[contentType] = q.slice(0, 1);
  1795. // We will wait for this item to complete/fail.
  1796. if (inProgress) {
  1797. cleanup.push(inProgress.p.catch(Functional.noop));
  1798. }
  1799. // The rest will be rejected silently if possible.
  1800. for (const item of q.slice(1)) {
  1801. item.p.reject(shaka.util.Destroyer.destroyedError());
  1802. }
  1803. }
  1804. for (const contentType in this.sourceBuffers_) {
  1805. const sourceBuffer = this.sourceBuffers_[contentType];
  1806. try {
  1807. this.mediaSource_.removeSourceBuffer(sourceBuffer);
  1808. } catch (e) {}
  1809. }
  1810. await Promise.all(cleanup);
  1811. this.transmuxers_ = {};
  1812. const previousDuration = this.mediaSource_.duration;
  1813. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  1814. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  1815. await this.mediaSourceOpen_;
  1816. if (!isNaN(previousDuration) && previousDuration) {
  1817. this.mediaSource_.duration = previousDuration;
  1818. } else if (!isNaN(this.lastDuration_) && this.lastDuration_) {
  1819. this.mediaSource_.duration = this.lastDuration_;
  1820. }
  1821. const sourceBufferAdded = new shaka.util.PublicPromise();
  1822. const sourceBuffers =
  1823. /** @type {EventTarget} */(this.mediaSource_.sourceBuffers);
  1824. const totalOfBuffers = streamsByType.size;
  1825. let numberOfSourceBufferAdded = 0;
  1826. const onSourceBufferAdded = () => {
  1827. numberOfSourceBufferAdded++;
  1828. if (numberOfSourceBufferAdded === totalOfBuffers) {
  1829. sourceBufferAdded.resolve();
  1830. this.eventManager_.unlisten(sourceBuffers, 'addsourcebuffer',
  1831. onSourceBufferAdded);
  1832. }
  1833. };
  1834. this.eventManager_.listen(sourceBuffers, 'addsourcebuffer',
  1835. onSourceBufferAdded);
  1836. for (const contentType of streamsByType.keys()) {
  1837. const previousParams = this.getSourceBufferParams_(contentType);
  1838. const stream = streamsByType.get(contentType);
  1839. // eslint-disable-next-line no-await-in-loop
  1840. await this.initSourceBuffer_(contentType, stream, stream.codecs);
  1841. if (this.needSplitMuxedContent_) {
  1842. this.queues_[ContentType.AUDIO] = [];
  1843. this.queues_[ContentType.VIDEO] = [];
  1844. } else {
  1845. this.queues_[contentType] = [];
  1846. }
  1847. this.restoreSourceBufferParams_(contentType, previousParams);
  1848. }
1849. // Fake a seek to catch up the playhead.
  1850. this.video_.currentTime = currentTime;
  1851. await sourceBufferAdded;
  1852. } finally {
  1853. this.reloadingMediaSource_ = false;
  1854. this.destroyer_.ensureNotDestroyed();
  1855. this.eventManager_.listenOnce(this.video_, 'canplaythrough', () => {
  1856. // Don't use ensureNotDestroyed() from this event listener, because
  1857. // that results in an uncaught exception. Instead, just check the
  1858. // flag.
  1859. if (this.destroyer_.destroyed()) {
  1860. return;
  1861. }
  1862. this.video_.autoplay = previousAutoPlayState;
  1863. if (!previousPausedState) {
  1864. this.video_.play();
  1865. }
  1866. });
  1867. }
  1868. }
  1869. /**
1870. * Resets the MediaSource.
  1871. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  1872. * shaka.extern.Stream>} streamsByType
  1873. * @return {!Promise}
  1874. */
  1875. reset(streamsByType) {
  1876. return this.enqueueBlockingOperation_(
  1877. () => this.reset_(streamsByType));
  1878. }
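// Note: this is a blocking operation across every SourceBuffer (see
// enqueueBlockingOperation_ above). codecSwitchIfNecessary_ below falls back
// to it when a smooth changeType() switch is not possible:
//
//   await this.reset(streamsByType);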
  1879. /**
  1880. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1881. * @param {string} mimeType
  1882. * @param {string} codecs
  1883. * @return {{transmuxer: ?shaka.extern.Transmuxer,
  1884. * transmuxerMuxed: boolean, basicType: string, codec: string,
  1885. * mimeType: string}}
  1886. * @private
  1887. */
  1888. getRealInfo_(contentType, mimeType, codecs) {
  1889. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1890. const MimeUtils = shaka.util.MimeUtils;
  1891. /** @type {?shaka.extern.Transmuxer} */
  1892. let transmuxer;
  1893. let transmuxerMuxed = false;
  1894. const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1895. ContentType.AUDIO, (codecs || '').split(','));
  1896. const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1897. ContentType.VIDEO, (codecs || '').split(','));
  1898. let codec = videoCodec;
  1899. if (contentType == ContentType.AUDIO) {
  1900. codec = audioCodec;
  1901. }
  1902. if (!codec) {
  1903. codec = codecs;
  1904. }
  1905. let newMimeType = shaka.util.MimeUtils.getFullType(mimeType, codec);
  1906. let needTransmux = this.config_.forceTransmux;
  1907. if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
  1908. (!this.sequenceMode_ &&
  1909. shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
  1910. needTransmux = true;
  1911. }
  1912. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  1913. if (needTransmux) {
  1914. const newMimeTypeWithAllCodecs =
  1915. shaka.util.MimeUtils.getFullTypeWithAllCodecs(mimeType, codec);
  1916. const transmuxerPlugin =
  1917. TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
  1918. if (transmuxerPlugin) {
  1919. transmuxer = transmuxerPlugin();
  1920. if (audioCodec && videoCodec) {
  1921. transmuxerMuxed = true;
  1922. }
  1923. newMimeType =
  1924. transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
  1925. }
  1926. }
  1927. const newCodec = MimeUtils.getNormalizedCodec(
  1928. MimeUtils.getCodecs(newMimeType));
  1929. const newBasicType = MimeUtils.getBasicType(newMimeType);
  1930. return {
  1931. transmuxer,
  1932. transmuxerMuxed,
  1933. basicType: newBasicType,
  1934. codec: newCodec,
  1935. mimeType: newMimeType,
  1936. };
  1937. }
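// Illustrative only: for mimeType 'video/mp2t' and codecs 'avc1.42E01E' on a
// platform without native TS support, this would typically return a
// transmuxer plugin instance plus a converted mimeType along the lines of
// 'video/mp4; codecs="avc1.42E01E"', with basicType 'video/mp4' and the
// normalized codec 'avc'.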
  1938. /**
1939. * Performs a codec switch if necessary; the returned Promise will not
1940. * resolve until the codec switch is over.
  1941. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1942. * @param {string} mimeType
  1943. * @param {string} codecs
  1944. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  1945. * shaka.extern.Stream>} streamsByType
  1946. * @return {!Promise.<boolean>} true if there was a codec switch,
  1947. * false otherwise.
  1948. * @private
  1949. */
  1950. async codecSwitchIfNecessary_(contentType, mimeType, codecs, streamsByType) {
  1951. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1952. if (contentType == ContentType.TEXT) {
  1953. return false;
  1954. }
  1955. const MimeUtils = shaka.util.MimeUtils;
  1956. const currentCodec = MimeUtils.getNormalizedCodec(
  1957. MimeUtils.getCodecs(this.sourceBufferTypes_[contentType]));
  1958. const currentBasicType = MimeUtils.getBasicType(
  1959. this.sourceBufferTypes_[contentType]);
  1960. const realInfo = this.getRealInfo_(contentType, mimeType, codecs);
  1961. const transmuxer = realInfo.transmuxer;
  1962. const transmuxerMuxed = realInfo.transmuxerMuxed;
  1963. const newBasicType = realInfo.basicType;
  1964. const newCodec = realInfo.codec;
  1965. const newMimeType = realInfo.mimeType;
  1966. let muxedContentCheck = true;
  1967. if (transmuxerMuxed) {
  1968. const muxedRealInfo =
  1969. this.getRealInfo_(ContentType.AUDIO, mimeType, codecs);
  1970. const muxedCurrentCodec = MimeUtils.getNormalizedCodec(
  1971. MimeUtils.getCodecs(this.sourceBufferTypes_[ContentType.AUDIO]));
  1972. const muxedCurrentBasicType = MimeUtils.getBasicType(
  1973. this.sourceBufferTypes_[ContentType.AUDIO]);
  1974. muxedContentCheck = muxedCurrentCodec == muxedRealInfo.codec &&
  1975. muxedCurrentBasicType == muxedRealInfo.basicType;
  1976. if (muxedRealInfo.transmuxer) {
  1977. muxedRealInfo.transmuxer.destroy();
  1978. }
  1979. }
1980. // If the current and new codec base and basic type match, no switch is needed.
  1981. if (currentCodec === newCodec && currentBasicType === newBasicType &&
  1982. muxedContentCheck) {
  1983. if (this.transmuxers_[contentType] && !transmuxer) {
  1984. this.transmuxers_[contentType].destroy();
  1985. delete this.transmuxers_[contentType];
  1986. } else if (!this.transmuxers_[contentType] && transmuxer) {
  1987. this.transmuxers_[contentType] = transmuxer;
  1988. } else if (transmuxer) {
1989. // Check whether the new transmuxer differs from the current one.
  1990. if (this.transmuxers_[contentType] &&
  1991. this.transmuxers_[contentType].transmux != transmuxer.transmux) {
  1992. this.transmuxers_[contentType].destroy();
  1993. delete this.transmuxers_[contentType];
  1994. this.transmuxers_[contentType] = transmuxer;
  1995. } else {
  1996. transmuxer.destroy();
  1997. }
  1998. }
  1999. return false;
  2000. }
  2001. let allowChangeType = true;
  2002. if (this.needSplitMuxedContent_ || (transmuxerMuxed &&
  2003. transmuxer && !this.transmuxers_[contentType])) {
  2004. allowChangeType = false;
  2005. }
  2006. if (allowChangeType && this.config_.codecSwitchingStrategy ===
  2007. shaka.config.CodecSwitchingStrategy.SMOOTH &&
  2008. shaka.media.Capabilities.isChangeTypeSupported()) {
  2009. await this.changeType(contentType, newMimeType, transmuxer);
  2010. } else {
  2011. if (transmuxer) {
  2012. transmuxer.destroy();
  2013. }
  2014. await this.reset(streamsByType);
  2015. }
  2016. return true;
  2017. }
  2018. /**
2019. * Returns true if a codec switch is necessary to load the new stream.
  2020. *
  2021. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2022. * @param {shaka.extern.Stream} stream
  2023. * @param {string} refMimeType
  2024. * @param {string} refCodecs
  2025. * @return {boolean}
  2026. * @private
  2027. */
  2028. isCodecSwitchNecessary_(contentType, stream, refMimeType, refCodecs) {
  2029. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  2030. return false;
  2031. }
  2032. const MimeUtils = shaka.util.MimeUtils;
  2033. const currentCodec = MimeUtils.getNormalizedCodec(
  2034. MimeUtils.getCodecs(this.sourceBufferTypes_[contentType]));
  2035. const currentBasicType = MimeUtils.getBasicType(
  2036. this.sourceBufferTypes_[contentType]);
  2037. let newMimeType = shaka.util.MimeUtils.getFullType(refMimeType, refCodecs);
  2038. let needTransmux = this.config_.forceTransmux;
  2039. if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
  2040. (!this.sequenceMode_ &&
  2041. shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
  2042. needTransmux = true;
  2043. }
  2044. const newMimeTypeWithAllCodecs =
  2045. shaka.util.MimeUtils.getFullTypeWithAllCodecs(
  2046. refMimeType, refCodecs);
  2047. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  2048. if (needTransmux) {
  2049. const transmuxerPlugin =
  2050. TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
  2051. if (transmuxerPlugin) {
  2052. const transmuxer = transmuxerPlugin();
  2053. newMimeType =
  2054. transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
  2055. transmuxer.destroy();
  2056. }
  2057. }
  2058. const newCodec = MimeUtils.getNormalizedCodec(
  2059. MimeUtils.getCodecs(newMimeType));
  2060. const newBasicType = MimeUtils.getBasicType(newMimeType);
  2061. return currentCodec !== newCodec || currentBasicType !== newBasicType;
  2062. }
  2063. /**
2064. * Returns true if it's necessary to reset the MediaSource to load the
2065. * new stream.
  2066. *
  2067. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2068. * @param {shaka.extern.Stream} stream
  2069. * @param {string} mimeType
  2070. * @param {string} codecs
  2071. * @return {boolean}
  2072. */
  2073. isResetMediaSourceNecessary(contentType, stream, mimeType, codecs) {
  2074. if (!this.isCodecSwitchNecessary_(contentType, stream, mimeType, codecs)) {
  2075. return false;
  2076. }
  2077. return this.config_.codecSwitchingStrategy !==
  2078. shaka.config.CodecSwitchingStrategy.SMOOTH ||
  2079. !shaka.media.Capabilities.isChangeTypeSupported() ||
  2080. this.needSplitMuxedContent_;
  2081. }
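// Hypothetical example: switching from 'video/mp4; codecs="avc1.4d401f"' to
// 'video/mp4; codecs="hvc1.1.6.L93.90"' changes the normalized codec, so this
// returns true (a full reset) unless the codec switching strategy is SMOOTH,
// changeType() is supported, and the content does not need split muxed
// handling.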
  2082. /**
  2083. * Update LCEVC Decoder object when ready for LCEVC Decode.
  2084. * @param {?shaka.lcevc.Dec} lcevcDec
  2085. */
  2086. updateLcevcDec(lcevcDec) {
  2087. this.lcevcDec_ = lcevcDec;
  2088. }
  2089. /**
  2090. * @param {string} mimeType
  2091. * @return {string}
  2092. * @private
  2093. */
  2094. addExtraFeaturesToMimeType_(mimeType) {
  2095. const extraFeatures = this.config_.addExtraFeaturesToSourceBuffer(mimeType);
  2096. const extendedType = mimeType + extraFeatures;
  2097. shaka.log.debug('Using full mime type', extendedType);
  2098. return extendedType;
  2099. }
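// Illustrative only (hypothetical callback output): if
// config.addExtraFeaturesToSourceBuffer('video/mp4; codecs="avc1.42E01E"')
// returned '; width=3840; height=2160', the SourceBuffer would be created or
// changed with that suffix appended to the MIME type.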
  2100. };
  2101. /**
  2102. * Internal reference to window.URL.createObjectURL function to avoid
  2103. * compatibility issues with other libraries and frameworks such as React
  2104. * Native. For use in unit tests only, not meant for external use.
  2105. *
  2106. * @type {function(?):string}
  2107. */
  2108. shaka.media.MediaSourceEngine.createObjectURL = window.URL.createObjectURL;
  2109. /**
  2110. * @typedef {{
  2111. * start: function(),
  2112. * p: !shaka.util.PublicPromise,
  2113. * uri: ?string
  2114. * }}
  2115. *
2116. * @summary An operation in the queue.
  2117. * @property {function()} start
  2118. * The function which starts the operation.
  2119. * @property {!shaka.util.PublicPromise} p
  2120. * The PublicPromise which is associated with this operation.
  2121. * @property {?string} uri
  2122. * A segment URI (if any) associated with this operation.
  2123. */
  2124. shaka.media.MediaSourceEngine.Operation;
  2125. /**
  2126. * @enum {string}
  2127. * @private
  2128. */
  2129. shaka.media.MediaSourceEngine.SourceBufferMode_ = {
  2130. SEQUENCE: 'sequence',
  2131. SEGMENTS: 'segments',
  2132. };
  2133. /**
  2134. * @typedef {{
  2135. * getKeySystem: function():?string,
  2136. * onMetadata: function(!Array<shaka.extern.ID3Metadata>, number, ?number)
  2137. * }}
  2138. *
  2139. * @summary Player interface
  2140. * @property {function():?string} getKeySystem
  2141. * Gets currently used key system or null if not used.
  2142. * @property {function(
  2143. * !Array<shaka.extern.ID3Metadata>, number, ?number)} onMetadata
  2144. * Callback to use when metadata arrives.
  2145. */
  2146. shaka.media.MediaSourceEngine.PlayerInterface;
  2147. /**
  2148. * @typedef {{
  2149. * timestampOffset: number,
  2150. * appendWindowStart: number,
  2151. * appendWindowEnd: number
  2152. * }}
  2153. */
  2154. shaka.media.MediaSourceEngine.SourceBufferParams;