Source: lib/media/media_source_engine.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.media.MediaSourceEngine');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.log');
  9. goog.require('shaka.config.CodecSwitchingStrategy');
  10. goog.require('shaka.media.Capabilities');
  11. goog.require('shaka.media.ContentWorkarounds');
  12. goog.require('shaka.media.ClosedCaptionParser');
  13. goog.require('shaka.media.IClosedCaptionParser');
  14. goog.require('shaka.media.ManifestParser');
  15. goog.require('shaka.media.SegmentReference');
  16. goog.require('shaka.media.TimeRangesUtils');
  17. goog.require('shaka.text.TextEngine');
  18. goog.require('shaka.transmuxer.TransmuxerEngine');
  19. goog.require('shaka.util.BufferUtils');
  20. goog.require('shaka.util.Destroyer');
  21. goog.require('shaka.util.Dom');
  22. goog.require('shaka.util.Error');
  23. goog.require('shaka.util.EventManager');
  24. goog.require('shaka.util.FakeEvent');
  25. goog.require('shaka.util.Functional');
  26. goog.require('shaka.util.IDestroyable');
  27. goog.require('shaka.util.Id3Utils');
  28. goog.require('shaka.util.ManifestParserUtils');
  29. goog.require('shaka.util.MimeUtils');
  30. goog.require('shaka.util.Mp4BoxParsers');
  31. goog.require('shaka.util.Mp4Parser');
  32. goog.require('shaka.util.Platform');
  33. goog.require('shaka.util.PublicPromise');
  34. goog.require('shaka.util.StreamUtils');
  35. goog.require('shaka.util.TsParser');
  36. goog.require('shaka.lcevc.Dec');
  37. /**
  38. * @summary
  39. * MediaSourceEngine wraps all operations on MediaSource and SourceBuffers.
  40. * All asynchronous operations return a Promise, and all operations are
  41. * internally synchronized and serialized as needed. Operations that can
  42. * be done in parallel will be done in parallel.
  43. *
  44. * @implements {shaka.util.IDestroyable}
  45. */
  46. shaka.media.MediaSourceEngine = class {
  47. /**
  48. * @param {HTMLMediaElement} video The video element, whose source is tied to
  49. * MediaSource during the lifetime of the MediaSourceEngine.
  50. * @param {!shaka.extern.TextDisplayer} textDisplayer
  51. * The text displayer that will be used with the text engine.
  52. * MediaSourceEngine takes ownership of the displayer. When
  53. * MediaSourceEngine is destroyed, it will destroy the displayer.
  54. * @param {!shaka.media.MediaSourceEngine.PlayerInterface} playerInterface
  55. * Interface for common player methods.
  56. * @param {?shaka.lcevc.Dec} [lcevcDec] Optional - LCEVC Decoder Object
  57. */
  58. constructor(video, textDisplayer, playerInterface, lcevcDec) {
  59. /** @private {HTMLMediaElement} */
  60. this.video_ = video;
  61. /** @private {?shaka.media.MediaSourceEngine.PlayerInterface} */
  62. this.playerInterface_ = playerInterface;
  63. /** @private {?shaka.extern.MediaSourceConfiguration} */
  64. this.config_ = null;
  65. /** @private {shaka.extern.TextDisplayer} */
  66. this.textDisplayer_ = textDisplayer;
  67. /**
  68. * @private {!Map<shaka.util.ManifestParserUtils.ContentType, SourceBuffer>}
  69. */
  70. this.sourceBuffers_ = new Map();
  71. /**
  72. * @private {!Map<shaka.util.ManifestParserUtils.ContentType, string>}
  73. */
  74. this.sourceBufferTypes_ = new Map();
  75. /**
  76. * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
  77. * boolean>}
  78. */
  79. this.expectedEncryption_ = new Map();
  80. /** @private {shaka.text.TextEngine} */
  81. this.textEngine_ = null;
  82. /** @private {boolean} */
  83. this.segmentRelativeVttTiming_ = false;
  84. /** @private {?shaka.lcevc.Dec} */
  85. this.lcevcDec_ = lcevcDec || null;
  86. /**
  87. * @private {!Map<string, !Array<shaka.media.MediaSourceEngine.Operation>>}
  88. */
  89. this.queues_ = new Map();
  90. /** @private {shaka.util.EventManager} */
  91. this.eventManager_ = new shaka.util.EventManager();
  92. /**
  93. * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
  94. !shaka.extern.Transmuxer>} */
  95. this.transmuxers_ = new Map();
  96. /** @private {?shaka.media.IClosedCaptionParser} */
  97. this.captionParser_ = null;
  98. /** @private {!shaka.util.PublicPromise} */
  99. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  100. /** @private {string} */
  101. this.url_ = '';
  102. /** @private {boolean} */
  103. this.playbackHasBegun_ = false;
  104. /** @private {boolean} */
  105. this.streamingAllowed_ = true;
  106. /** @private {boolean} */
  107. this.usingRemotePlayback_ = false;
  108. /** @private {HTMLSourceElement} */
  109. this.source_ = null;
  110. /**
  111. * Fallback source element with direct media URI, used for casting
  112. * purposes.
  113. * @private {HTMLSourceElement}
  114. */
  115. this.secondarySource_ = null;
  116. /** @private {MediaSource} */
  117. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  118. /** @private {boolean} */
  119. this.reloadingMediaSource_ = false;
  120. /** @private {boolean} */
  121. this.playAfterReset_ = false;
  122. /** @type {!shaka.util.Destroyer} */
  123. this.destroyer_ = new shaka.util.Destroyer(() => this.doDestroy_());
  124. /** @private {boolean} */
  125. this.sequenceMode_ = false;
  126. /** @private {string} */
  127. this.manifestType_ = shaka.media.ManifestParser.UNKNOWN;
  128. /** @private {boolean} */
  129. this.ignoreManifestTimestampsInSegmentsMode_ = false;
  130. /** @private {boolean} */
  131. this.attemptTimestampOffsetCalculation_ = false;
  132. /** @private {!shaka.util.PublicPromise<number>} */
  133. this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  134. /** @private {boolean} */
  135. this.needSplitMuxedContent_ = false;
  136. /** @private {?number} */
  137. this.lastDuration_ = null;
  138. /**
  139. * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
  140. * !shaka.util.TsParser>}
  141. */
  142. this.tsParsers_ = new Map();
  143. /** @private {?number} */
  144. this.firstVideoTimestamp_ = null;
  145. /** @private {?number} */
  146. this.firstVideoReferenceStartTime_ = null;
  147. /** @private {?number} */
  148. this.firstAudioTimestamp_ = null;
  149. /** @private {?number} */
  150. this.firstAudioReferenceStartTime_ = null;
  151. /** @private {!shaka.util.PublicPromise<number>} */
  152. this.audioCompensation_ = new shaka.util.PublicPromise();
  153. if (this.video_.remote) {
  154. this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
  155. this.eventManager_.listen(this.video_.remote, 'connect', () => {
  156. this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
  157. });
  158. this.eventManager_.listen(this.video_.remote, 'connecting', () => {
  159. this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
  160. });
  161. this.eventManager_.listen(this.video_.remote, 'disconnect', () => {
  162. this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
  163. });
  164. }
  165. }
  166. /**
  167. * Create a MediaSource object, attach it to the video element, and return it.
  168. * Resolves the given promise when the MediaSource is ready.
  169. *
  170. * Replaced by unit tests.
  171. *
  172. * @param {!shaka.util.PublicPromise} p
  173. * @return {!MediaSource}
  174. */
  175. createMediaSource(p) {
  176. this.streamingAllowed_ = true;
  177. /** @type {!MediaSource} */
  178. let mediaSource;
  179. if (window.ManagedMediaSource) {
  180. if (!this.secondarySource_) {
  181. this.video_.disableRemotePlayback = true;
  182. }
  183. mediaSource = new ManagedMediaSource();
  184. this.eventManager_.listen(
  185. mediaSource, 'startstreaming', () => {
  186. shaka.log.info('MMS startstreaming');
  187. this.streamingAllowed_ = true;
  188. });
  189. this.eventManager_.listen(
  190. mediaSource, 'endstreaming', () => {
  191. shaka.log.info('MMS endstreaming');
  192. this.streamingAllowed_ = false;
  193. });
  194. } else {
  195. mediaSource = new MediaSource();
  196. }
  197. // Set up MediaSource on the video element.
  198. this.eventManager_.listenOnce(
  199. mediaSource, 'sourceopen', () => this.onSourceOpen_(p));
  200. // Correctly set when playback has begun.
  201. this.eventManager_.listenOnce(this.video_, 'playing', () => {
  202. this.playbackHasBegun_ = true;
  203. });
  204. // Store the object URL for releasing it later.
  205. this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource);
  206. this.video_.removeAttribute('src');
  207. if (this.source_) {
  208. this.video_.removeChild(this.source_);
  209. }
  210. if (this.secondarySource_) {
  211. this.video_.removeChild(this.secondarySource_);
  212. }
  213. this.source_ = shaka.util.Dom.createSourceElement(this.url_);
  214. this.video_.appendChild(this.source_);
  215. if (this.secondarySource_) {
  216. this.video_.appendChild(this.secondarySource_);
  217. }
  218. this.video_.load();
  219. return mediaSource;
  220. }
  221. /**
  222. * @param {string} uri
  223. * @param {string} mimeType
  224. */
  225. addSecondarySource(uri, mimeType) {
  226. if (!this.video_ || !window.ManagedMediaSource || !this.mediaSource_) {
  227. shaka.log.warning(
  228. 'Secondary source is used only with ManagedMediaSource');
  229. return;
  230. }
  231. if (this.secondarySource_) {
  232. this.video_.removeChild(this.secondarySource_);
  233. }
  234. this.secondarySource_ = shaka.util.Dom.createSourceElement(uri, mimeType);
  235. this.video_.appendChild(this.secondarySource_);
  236. this.video_.disableRemotePlayback = false;
  237. }
  238. /**
  239. * @param {shaka.util.PublicPromise} p
  240. * @private
  241. */
  242. onSourceOpen_(p) {
  243. goog.asserts.assert(this.url_, 'Must have object URL');
  244. // Release the object URL that was previously created, to prevent memory
  245. // leak.
  246. // createObjectURL creates a strong reference to the MediaSource object
  247. // inside the browser. Setting the src of the video then creates another
  248. // reference within the video element. revokeObjectURL will remove the
  249. // strong reference to the MediaSource object, and allow it to be
  250. // garbage-collected later.
  251. URL.revokeObjectURL(this.url_);
  252. p.resolve();
  253. }
  254. /**
  255. * Checks if a certain type is supported.
  256. *
  257. * @param {shaka.extern.Stream} stream
  258. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  259. * @return {!Promise<boolean>}
  260. */
  261. static async isStreamSupported(stream, contentType) {
  262. if (stream.createSegmentIndex) {
  263. await stream.createSegmentIndex();
  264. }
  265. if (!stream.segmentIndex) {
  266. return false;
  267. }
  268. if (stream.segmentIndex.isEmpty()) {
  269. return true;
  270. }
  271. const MimeUtils = shaka.util.MimeUtils;
  272. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  273. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  274. const StreamUtils = shaka.util.StreamUtils;
  275. const seenCombos = new Set();
  276. // Check each combination of mimeType and codecs within the segment index.
  277. // Unfortunately we cannot use fullMimeTypes, as we ALSO need to check the
  278. // getFullTypeWithAllCodecs (for the sake of the transmuxer) and we have no
  279. // way of going from a full mimeType to a full mimeType with all codecs.
  280. // As this function is only called in debug mode, a little inefficiency is
  281. // acceptable.
  282. for (const ref of stream.segmentIndex) {
  283. const mimeType = ref.mimeType || stream.mimeType || '';
  284. let codecs = ref.codecs || stream.codecs || '';
  285. // Optimization for the case where the codecs and mimetype of the stream
  286. // match the reference.
  287. if (mimeType == stream.mimeType && codecs == stream.codecs) {
  288. continue;
  289. }
  290. // Don't check the same combination of mimetype + codecs twice.
  291. const combo = mimeType + ':' + codecs;
  292. if (seenCombos.has(combo)) {
  293. continue;
  294. }
  295. seenCombos.add(combo);
  296. if (contentType == ContentType.TEXT) {
  297. const fullMimeType = MimeUtils.getFullType(mimeType, codecs);
  298. if (!shaka.text.TextEngine.isTypeSupported(fullMimeType)) {
  299. return false;
  300. }
  301. } else {
  302. if (contentType == ContentType.VIDEO) {
  303. codecs = StreamUtils.getCorrectVideoCodecs(codecs);
  304. } else if (contentType == ContentType.AUDIO) {
  305. codecs = StreamUtils.getCorrectAudioCodecs(codecs, mimeType);
  306. }
  307. const extendedMimeType = MimeUtils.getExtendedType(
  308. stream, mimeType, codecs);
  309. const fullMimeType = MimeUtils.getFullTypeWithAllCodecs(
  310. mimeType, codecs);
  311. if (!shaka.media.Capabilities.isTypeSupported(extendedMimeType) &&
  312. !TransmuxerEngine.isSupported(fullMimeType, stream.type)) {
  313. return false;
  314. }
  315. }
  316. }
  317. return true;
  318. }
  319. /**
  320. * Returns a map of MediaSource support for well-known types.
  321. *
  322. * @return {!Object<string, boolean>}
  323. */
  324. static probeSupport() {
  325. const testMimeTypes = [
  326. // MP4 types
  327. 'video/mp4; codecs="avc1.42E01E"',
  328. 'video/mp4; codecs="avc3.42E01E"',
  329. 'video/mp4; codecs="hev1.1.6.L93.90"',
  330. 'video/mp4; codecs="hvc1.1.6.L93.90"',
  331. 'video/mp4; codecs="hev1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  332. 'video/mp4; codecs="hvc1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  333. 'video/mp4; codecs="vp9"',
  334. 'video/mp4; codecs="vp09.00.10.08"',
  335. 'video/mp4; codecs="av01.0.01M.08"',
  336. 'video/mp4; codecs="dvh1.05.01"',
  337. 'video/mp4; codecs="dvh1.20.01"',
  338. 'audio/mp4; codecs="mp4a.40.2"',
  339. 'audio/mp4; codecs="ac-3"',
  340. 'audio/mp4; codecs="ec-3"',
  341. 'audio/mp4; codecs="ac-4.02.01.01"',
  342. 'audio/mp4; codecs="opus"',
  343. 'audio/mp4; codecs="flac"',
  344. 'audio/mp4; codecs="dtsc"', // DTS Digital Surround
  345. 'audio/mp4; codecs="dtse"', // DTS Express
  346. 'audio/mp4; codecs="dtsx"', // DTS:X
  347. // WebM types
  348. 'video/webm; codecs="vp8"',
  349. 'video/webm; codecs="vp9"',
  350. 'video/webm; codecs="vp09.00.10.08"',
  351. 'audio/webm; codecs="vorbis"',
  352. 'audio/webm; codecs="opus"',
  353. // MPEG2 TS types (video/ is also used for audio: https://bit.ly/TsMse)
  354. 'video/mp2t; codecs="avc1.42E01E"',
  355. 'video/mp2t; codecs="avc3.42E01E"',
  356. 'video/mp2t; codecs="hvc1.1.6.L93.90"',
  357. 'video/mp2t; codecs="mp4a.40.2"',
  358. 'video/mp2t; codecs="ac-3"',
  359. 'video/mp2t; codecs="ec-3"',
  360. // WebVTT types
  361. 'text/vtt',
  362. 'application/mp4; codecs="wvtt"',
  363. // TTML types
  364. 'application/ttml+xml',
  365. 'application/mp4; codecs="stpp"',
  366. // Containerless types
  367. ...shaka.util.MimeUtils.RAW_FORMATS,
  368. ];
  369. const support = {};
  370. for (const type of testMimeTypes) {
  371. if (shaka.text.TextEngine.isTypeSupported(type)) {
  372. support[type] = true;
  373. } else if (shaka.util.Platform.supportsMediaSource()) {
  374. support[type] = shaka.media.Capabilities.isTypeSupported(type) ||
  375. shaka.transmuxer.TransmuxerEngine.isSupported(type);
  376. } else {
  377. support[type] = shaka.util.Platform.supportsMediaType(type);
  378. }
  379. const basicType = type.split(';')[0];
  380. support[basicType] = support[basicType] || support[type];
  381. }
  382. return support;
  383. }
  384. /** @override */
  385. destroy() {
  386. return this.destroyer_.destroy();
  387. }
  388. /** @private */
  389. async doDestroy_() {
  390. const Functional = shaka.util.Functional;
  391. const cleanup = [];
  392. for (const [key, q] of this.queues_) {
  393. // Make a local copy of the queue and the first item.
  394. const inProgress = q[0];
  395. const contentType = /** @type {string} */(key);
  396. // Drop everything else out of the original queue.
  397. this.queues_.set(contentType, q.slice(0, 1));
  398. // We will wait for this item to complete/fail.
  399. if (inProgress) {
  400. cleanup.push(inProgress.p.catch(Functional.noop));
  401. }
  402. // The rest will be rejected silently if possible.
  403. for (const item of q.slice(1)) {
  404. item.p.reject(shaka.util.Destroyer.destroyedError());
  405. }
  406. }
  407. if (this.textEngine_) {
  408. cleanup.push(this.textEngine_.destroy());
  409. }
  410. await Promise.all(cleanup);
  411. for (const transmuxer of this.transmuxers_.values()) {
  412. transmuxer.destroy();
  413. }
  414. if (this.eventManager_) {
  415. this.eventManager_.release();
  416. this.eventManager_ = null;
  417. }
  418. if (this.video_ && this.secondarySource_) {
  419. this.video_.removeChild(this.secondarySource_);
  420. }
  421. if (this.video_ && this.source_) {
  422. // "unload" the video element.
  423. this.video_.removeChild(this.source_);
  424. this.video_.load();
  425. this.video_.disableRemotePlayback = false;
  426. }
  427. this.video_ = null;
  428. this.source_ = null;
  429. this.secondarySource_ = null;
  430. this.config_ = null;
  431. this.mediaSource_ = null;
  432. this.textEngine_ = null;
  433. this.textDisplayer_ = null;
  434. this.sourceBuffers_.clear();
  435. this.expectedEncryption_.clear();
  436. this.transmuxers_.clear();
  437. this.captionParser_ = null;
  438. if (goog.DEBUG) {
  439. for (const [contentType, q] of this.queues_) {
  440. goog.asserts.assert(
  441. q.length == 0,
  442. contentType + ' queue should be empty after destroy!');
  443. }
  444. }
  445. this.queues_.clear();
  446. // This object is owned by Player
  447. this.lcevcDec_ = null;
  448. this.tsParsers_.clear();
  449. this.playerInterface_ = null;
  450. }
  451. /**
  452. * @return {!Promise} Resolved when MediaSource is open and attached to the
  453. * media element. This process is actually initiated by the constructor.
  454. */
  455. open() {
  456. return this.mediaSourceOpen_;
  457. }
  458. /**
  459. * Initialize MediaSourceEngine.
  460. *
  461. * Note that it is not valid to call this multiple times, except to add or
  462. * reinitialize text streams.
  463. *
  464. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  465. * shaka.extern.Stream>} streamsByType
  466. * A map of content types to streams. All streams must be supported
  467. * according to MediaSourceEngine.isStreamSupported.
  468. * @param {boolean=} sequenceMode
  469. * If true, the media segments are appended to the SourceBuffer in strict
  470. * sequence.
  471. * @param {string=} manifestType
  472. * Indicates the type of the manifest.
  473. * @param {boolean=} ignoreManifestTimestampsInSegmentsMode
  474. * If true, don't adjust the timestamp offset to account for manifest
  475. * segment durations being out of sync with segment durations. In other
  476. * words, assume that there are no gaps in the segments when appending
  477. * to the SourceBuffer, even if the manifest and segment times disagree.
  478. * Indicates if the manifest has text streams.
  479. *
  480. * @return {!Promise}
  481. */
  482. async init(streamsByType, sequenceMode=false,
  483. manifestType=shaka.media.ManifestParser.UNKNOWN,
  484. ignoreManifestTimestampsInSegmentsMode=false) {
  485. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  486. await this.mediaSourceOpen_;
  487. if (this.ended() || this.closed()) {
  488. shaka.log.alwaysError('Expected MediaSource to be open during init(); ' +
  489. 'reopening the media source.');
  490. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  491. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  492. await this.mediaSourceOpen_;
  493. }
  494. this.sequenceMode_ = sequenceMode;
  495. this.manifestType_ = manifestType;
  496. this.ignoreManifestTimestampsInSegmentsMode_ =
  497. ignoreManifestTimestampsInSegmentsMode;
  498. this.attemptTimestampOffsetCalculation_ = !this.sequenceMode_ &&
  499. this.manifestType_ == shaka.media.ManifestParser.HLS &&
  500. !this.ignoreManifestTimestampsInSegmentsMode_;
  501. this.tsParsers_.clear();
  502. this.firstVideoTimestamp_ = null;
  503. this.firstVideoReferenceStartTime_ = null;
  504. this.firstAudioTimestamp_ = null;
  505. this.firstAudioReferenceStartTime_ = null;
  506. this.audioCompensation_ = new shaka.util.PublicPromise();
  507. for (const contentType of streamsByType.keys()) {
  508. const stream = streamsByType.get(contentType);
  509. // eslint-disable-next-line no-await-in-loop
  510. await this.initSourceBuffer_(contentType, stream, stream.codecs);
  511. if (this.needSplitMuxedContent_) {
  512. this.queues_.set(ContentType.AUDIO, []);
  513. this.queues_.set(ContentType.VIDEO, []);
  514. } else {
  515. this.queues_.set(contentType, []);
  516. }
  517. }
  518. const audio = streamsByType.get(ContentType.AUDIO);
  519. if (audio && audio.isAudioMuxedInVideo) {
  520. this.needSplitMuxedContent_ = true;
  521. }
  522. }
  523. /**
  524. * Initialize a specific SourceBuffer.
  525. *
  526. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  527. * @param {shaka.extern.Stream} stream
  528. * @param {string} codecs
  529. * @return {!Promise}
  530. * @private
  531. */
  532. async initSourceBuffer_(contentType, stream, codecs) {
  533. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  534. goog.asserts.assert(
  535. await shaka.media.MediaSourceEngine.isStreamSupported(
  536. stream, contentType),
  537. 'Type negotiation should happen before MediaSourceEngine.init!');
  538. if (contentType == ContentType.AUDIO && codecs) {
  539. codecs = shaka.util.StreamUtils.getCorrectAudioCodecs(
  540. codecs, stream.mimeType);
  541. }
  542. let mimeType = shaka.util.MimeUtils.getFullType(
  543. stream.mimeType, codecs);
  544. if (contentType == ContentType.TEXT) {
  545. this.reinitText(mimeType, this.sequenceMode_, stream.external);
  546. } else {
  547. let needTransmux = this.config_.forceTransmux;
  548. if (!shaka.media.Capabilities.isTypeSupported(mimeType) ||
  549. (!this.sequenceMode_ &&
  550. shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType))) {
  551. needTransmux = true;
  552. }
  553. const mimeTypeWithAllCodecs =
  554. shaka.util.MimeUtils.getFullTypeWithAllCodecs(
  555. stream.mimeType, codecs);
  556. if (needTransmux) {
  557. const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  558. ContentType.AUDIO, (codecs || '').split(','));
  559. const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  560. ContentType.VIDEO, (codecs || '').split(','));
  561. if (audioCodec && videoCodec) {
  562. this.needSplitMuxedContent_ = true;
  563. await this.initSourceBuffer_(ContentType.AUDIO, stream, audioCodec);
  564. await this.initSourceBuffer_(ContentType.VIDEO, stream, videoCodec);
  565. return;
  566. }
  567. const transmuxerPlugin = shaka.transmuxer.TransmuxerEngine
  568. .findTransmuxer(mimeTypeWithAllCodecs);
  569. if (transmuxerPlugin) {
  570. const transmuxer = transmuxerPlugin();
  571. this.transmuxers_.set(contentType, transmuxer);
  572. mimeType =
  573. transmuxer.convertCodecs(contentType, mimeTypeWithAllCodecs);
  574. }
  575. }
  576. const type = this.addExtraFeaturesToMimeType_(mimeType);
  577. this.destroyer_.ensureNotDestroyed();
  578. let sourceBuffer;
  579. try {
  580. sourceBuffer = this.mediaSource_.addSourceBuffer(type);
  581. } catch (exception) {
  582. throw new shaka.util.Error(
  583. shaka.util.Error.Severity.CRITICAL,
  584. shaka.util.Error.Category.MEDIA,
  585. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  586. exception,
  587. 'The mediaSource_ status was ' + this.mediaSource_.readyState +
  588. ' expected \'open\'',
  589. null);
  590. }
  591. if (this.sequenceMode_) {
  592. sourceBuffer.mode =
  593. shaka.media.MediaSourceEngine.SourceBufferMode_.SEQUENCE;
  594. }
  595. this.eventManager_.listen(
  596. sourceBuffer, 'error',
  597. () => this.onError_(contentType));
  598. this.eventManager_.listen(
  599. sourceBuffer, 'updateend',
  600. () => this.onUpdateEnd_(contentType));
  601. this.sourceBuffers_.set(contentType, sourceBuffer);
  602. this.sourceBufferTypes_.set(contentType, mimeType);
  603. this.expectedEncryption_.set(contentType, !!stream.drmInfos.length);
  604. }
  605. }
  606. /**
  607. * Called by the Player to provide an updated configuration any time it
  608. * changes. Must be called at least once before init().
  609. *
  610. * @param {shaka.extern.MediaSourceConfiguration} config
  611. */
  612. configure(config) {
  613. this.config_ = config;
  614. if (this.textEngine_) {
  615. this.textEngine_.setModifyCueCallback(config.modifyCueCallback);
  616. }
  617. }
  618. /**
  619. * Indicate if the streaming is allowed by MediaSourceEngine.
  620. * If we using MediaSource we always returns true.
  621. *
  622. * @return {boolean}
  623. */
  624. isStreamingAllowed() {
  625. return this.streamingAllowed_ && !this.usingRemotePlayback_ &&
  626. !this.reloadingMediaSource_;
  627. }
  628. /**
  629. * Reinitialize the TextEngine for a new text type.
  630. * @param {string} mimeType
  631. * @param {boolean} sequenceMode
  632. * @param {boolean} external
  633. */
  634. reinitText(mimeType, sequenceMode, external) {
  635. if (!this.textEngine_) {
  636. this.textEngine_ = new shaka.text.TextEngine(this.textDisplayer_);
  637. if (this.textEngine_) {
  638. this.textEngine_.setModifyCueCallback(this.config_.modifyCueCallback);
  639. }
  640. }
  641. this.textEngine_.initParser(mimeType, sequenceMode,
  642. external || this.segmentRelativeVttTiming_, this.manifestType_);
  643. }
  644. /**
  645. * @return {boolean} True if the MediaSource is in an "ended" state, or if the
  646. * object has been destroyed.
  647. */
  648. ended() {
  649. if (this.reloadingMediaSource_) {
  650. return false;
  651. }
  652. return this.mediaSource_ ? this.mediaSource_.readyState == 'ended' : true;
  653. }
  654. /**
  655. * @return {boolean} True if the MediaSource is in an "closed" state, or if
  656. * the object has been destroyed.
  657. */
  658. closed() {
  659. if (this.reloadingMediaSource_) {
  660. return false;
  661. }
  662. return this.mediaSource_ ? this.mediaSource_.readyState == 'closed' : true;
  663. }
  664. /**
  665. * Gets the first timestamp in buffer for the given content type.
  666. *
  667. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  668. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  669. */
  670. bufferStart(contentType) {
  671. if (!this.sourceBuffers_.size) {
  672. return null;
  673. }
  674. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  675. if (contentType == ContentType.TEXT) {
  676. return this.textEngine_.bufferStart();
  677. }
  678. return shaka.media.TimeRangesUtils.bufferStart(
  679. this.getBuffered_(contentType));
  680. }
  681. /**
  682. * Gets the last timestamp in buffer for the given content type.
  683. *
  684. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  685. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  686. */
  687. bufferEnd(contentType) {
  688. if (!this.sourceBuffers_.size) {
  689. return null;
  690. }
  691. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  692. if (contentType == ContentType.TEXT) {
  693. return this.textEngine_.bufferEnd();
  694. }
  695. return shaka.media.TimeRangesUtils.bufferEnd(
  696. this.getBuffered_(contentType));
  697. }
  698. /**
  699. * Determines if the given time is inside the buffered range of the given
  700. * content type.
  701. *
  702. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  703. * @param {number} time Playhead time
  704. * @return {boolean}
  705. */
  706. isBuffered(contentType, time) {
  707. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  708. if (contentType == ContentType.TEXT) {
  709. return this.textEngine_.isBuffered(time);
  710. } else {
  711. const buffered = this.getBuffered_(contentType);
  712. return shaka.media.TimeRangesUtils.isBuffered(buffered, time);
  713. }
  714. }
  715. /**
  716. * Computes how far ahead of the given timestamp is buffered for the given
  717. * content type.
  718. *
  719. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  720. * @param {number} time
  721. * @return {number} The amount of time buffered ahead in seconds.
  722. */
  723. bufferedAheadOf(contentType, time) {
  724. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  725. if (contentType == ContentType.TEXT) {
  726. return this.textEngine_.bufferedAheadOf(time);
  727. } else {
  728. const buffered = this.getBuffered_(contentType);
  729. return shaka.media.TimeRangesUtils.bufferedAheadOf(buffered, time);
  730. }
  731. }
  732. /**
  733. * Returns info about what is currently buffered.
  734. * @return {shaka.extern.BufferedInfo}
  735. */
  736. getBufferedInfo() {
  737. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  738. const TimeRangesUtils = shaka.media.TimeRangesUtils;
  739. const info = {
  740. total: this.reloadingMediaSource_ ? [] :
  741. TimeRangesUtils.getBufferedInfo(this.video_.buffered),
  742. audio:
  743. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.AUDIO)),
  744. video:
  745. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.VIDEO)),
  746. text: [],
  747. };
  748. if (this.textEngine_) {
  749. const start = this.textEngine_.bufferStart();
  750. const end = this.textEngine_.bufferEnd();
  751. if (start != null && end != null) {
  752. info.text.push({start: start, end: end});
  753. }
  754. }
  755. return info;
  756. }
  757. /**
  758. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  759. * @return {TimeRanges} The buffered ranges for the given content type, or
  760. * null if the buffered ranges could not be obtained.
  761. * @private
  762. */
  763. getBuffered_(contentType) {
  764. if (this.reloadingMediaSource_ || this.usingRemotePlayback_) {
  765. return null;
  766. }
  767. try {
  768. return this.sourceBuffers_.get(contentType).buffered;
  769. } catch (exception) {
  770. if (this.sourceBuffers_.has(contentType)) {
  771. // Note: previous MediaSource errors may cause access to |buffered| to
  772. // throw.
  773. shaka.log.error('failed to get buffered range for ' + contentType,
  774. exception);
  775. }
  776. return null;
  777. }
  778. }
  779. /**
  780. * Create a new closed caption parser. This will ONLY be replaced by tests as
  781. * a way to inject fake closed caption parser instances.
  782. *
  783. * @param {string} mimeType
  784. * @return {!shaka.media.IClosedCaptionParser}
  785. */
  786. getCaptionParser(mimeType) {
  787. return new shaka.media.ClosedCaptionParser(mimeType);
  788. }
  789. /**
  790. * This method is only public for testing.
  791. *
  792. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  793. * @param {!BufferSource} data
  794. * @param {!shaka.media.SegmentReference} reference The segment reference
  795. * we are appending
  796. * @param {shaka.extern.Stream} stream
  797. * @param {!string} mimeType
  798. * @return {{timestamp: ?number, metadata: !Array<shaka.extern.ID3Metadata>}}
  799. */
  getTimestampAndDispatchMetadata(contentType, data, reference, stream,
      mimeType) {
    let timestamp = null;
    let metadata = [];
    const uint8ArrayData = shaka.util.BufferUtils.toUint8(data);
    // Branch on container format: raw (containerless) formats carry ID3,
    // MP4 carries emsg/prft/tfdt boxes, and anything else that probes as
    // MPEG2-TS goes through the TS parser.
    if (shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType)) {
      const frames = shaka.util.Id3Utils.getID3Frames(uint8ArrayData);
      if (frames.length && reference) {
        // Apple's HLS timestamp frame carries the segment's transport-stream
        // timestamp for containerless audio.
        const metadataTimestamp = frames.find((frame) => {
          return frame.description ===
              'com.apple.streaming.transportStreamTimestamp';
        });
        if (metadataTimestamp) {
          // Frame data is in milliseconds; convert to seconds.
          timestamp = Math.round(metadataTimestamp.data) / 1000;
        }
        /** @private {shaka.extern.ID3Metadata} */
        const id3Metadata = {
          cueTime: reference.startTime,
          data: uint8ArrayData,
          frames: frames,
          dts: reference.startTime,
          pts: reference.startTime,
        };
        this.playerInterface_.onMetadata(
            [id3Metadata], /* offset= */ 0, reference.endTime);
      }
    } else if (mimeType.includes('/mp4') &&
        reference &&
        reference.initSegmentReference &&
        reference.initSegmentReference.timescale) {
      const timescale = reference.initSegmentReference.timescale;
      if (!isNaN(timescale)) {
        // emsg parsing is only needed when the stream declares scheme IDs or
        // the config asks for all emsg boxes.
        const hasEmsg = ((stream.emsgSchemeIdUris != null &&
            stream.emsgSchemeIdUris.length > 0) ||
            this.config_.dispatchAllEmsgBoxes);
        const Mp4Parser = shaka.util.Mp4Parser;
        let startTime = 0;
        let parsedMedia = false;
        const parser = new Mp4Parser();
        if (hasEmsg) {
          parser.fullBox('emsg', (box) =>
            this.parseEMSG_(reference, stream.emsgSchemeIdUris, box));
        }
        parser.fullBox('prft', (box) => this.parsePrft_(timescale, box))
            .box('moof', Mp4Parser.children)
            .box('traf', Mp4Parser.children)
            .fullBox('tfdt', (box) => {
              // Only the first tfdt is needed for the segment start time.
              if (!parsedMedia) {
                goog.asserts.assert(
                    box.version == 0 || box.version == 1,
                    'TFDT version can only be 0 or 1');
                const parsed = shaka.util.Mp4BoxParsers.parseTFDTInaccurate(
                    box.reader, box.version);
                startTime = parsed.baseMediaDecodeTime / timescale;
                parsedMedia = true;
                // If no emsg boxes are wanted, stop parsing early.
                if (!hasEmsg) {
                  box.parser.stop();
                }
              }
            }).parse(data, /* partialOkay= */ true);
        // Only trust the media's internal timestamp when no external offset
        // has been applied to the reference.
        if (parsedMedia && reference.timestampOffset == 0) {
          timestamp = startTime;
        }
      }
    } else if (!mimeType.includes('/mp4') && !mimeType.includes('/webm') &&
        shaka.util.TsParser.probe(uint8ArrayData)) {
      // Reuse one TS parser per content type; clear stale data on reuse.
      if (!this.tsParsers_.has(contentType)) {
        this.tsParsers_.set(contentType, new shaka.util.TsParser());
      } else {
        this.tsParsers_.get(contentType).clearData();
      }
      const tsParser = this.tsParsers_.get(contentType).parse(uint8ArrayData);
      const startTime = tsParser.getStartTime(contentType);
      if (startTime != null) {
        timestamp = startTime;
      }
      metadata = tsParser.getMetadata();
    }
    return {timestamp, metadata};
  }
  880. /**
  881. * Parse the EMSG box from a MP4 container.
  882. *
  883. * @param {!shaka.media.SegmentReference} reference
  884. * @param {?Array<string>} emsgSchemeIdUris Array of emsg
  885. * scheme_id_uri for which emsg boxes should be parsed.
  886. * @param {!shaka.extern.ParsedBox} box
  887. * @private
  888. * https://dashif-documents.azurewebsites.net/Events/master/event.html#emsg-format
  889. * aligned(8) class DASHEventMessageBox
  890. * extends FullBox(‘emsg’, version, flags = 0){
  891. * if (version==0) {
  892. * string scheme_id_uri;
  893. * string value;
  894. * unsigned int(32) timescale;
  895. * unsigned int(32) presentation_time_delta;
  896. * unsigned int(32) event_duration;
  897. * unsigned int(32) id;
  898. * } else if (version==1) {
  899. * unsigned int(32) timescale;
  900. * unsigned int(64) presentation_time;
  901. * unsigned int(32) event_duration;
  902. * unsigned int(32) id;
  903. * string scheme_id_uri;
  904. * string value;
  905. * }
  906. * unsigned int(8) message_data[];
  907. */
  parseEMSG_(reference, emsgSchemeIdUris, box) {
    let timescale;
    let id;
    let eventDuration;
    let schemeId;
    let startTime;
    let presentationTimeDelta;
    let value;
    // Field order differs between emsg v0 and v1; see the box layout in the
    // jsdoc above. Reads must stay in exactly this order.
    if (box.version === 0) {
      schemeId = box.reader.readTerminatedString();
      value = box.reader.readTerminatedString();
      timescale = box.reader.readUint32();
      presentationTimeDelta = box.reader.readUint32();
      eventDuration = box.reader.readUint32();
      id = box.reader.readUint32();
      // v0 expresses the event time relative to the segment start.
      startTime = reference.startTime + (presentationTimeDelta / timescale);
    } else {
      timescale = box.reader.readUint32();
      // v1 carries an absolute 64-bit presentation time.
      const pts = box.reader.readUint64();
      startTime = (pts / timescale) + reference.timestampOffset;
      // Derive the delta so the dispatched event has both representations.
      presentationTimeDelta = startTime - reference.startTime;
      eventDuration = box.reader.readUint32();
      id = box.reader.readUint32();
      schemeId = box.reader.readTerminatedString();
      value = box.reader.readTerminatedString();
    }
    // Everything left in the box is the opaque message payload.
    const messageData = box.reader.readBytes(
        box.reader.getLength() - box.reader.getPosition());
    // See DASH sec. 5.10.3.3.1
    // If a DASH client detects an event message box with a scheme that is not
    // defined in MPD, the client is expected to ignore it.
    if ((emsgSchemeIdUris && emsgSchemeIdUris.includes(schemeId)) ||
        this.config_.dispatchAllEmsgBoxes) {
      // See DASH sec. 5.10.4.1
      // A special scheme in DASH used to signal manifest updates.
      if (schemeId == 'urn:mpeg:dash:event:2012') {
        this.playerInterface_.onManifestUpdate();
      } else {
        // All other schemes are dispatched as a general 'emsg' event.
        const endTime = startTime + (eventDuration / timescale);
        /** @type {shaka.extern.EmsgInfo} */
        const emsg = {
          startTime: startTime,
          endTime: endTime,
          schemeIdUri: schemeId,
          value: value,
          timescale: timescale,
          presentationTimeDelta: presentationTimeDelta,
          eventDuration: eventDuration,
          id: id,
          messageData: messageData,
        };
        // Dispatch an event to notify the application about the emsg box.
        const eventName = shaka.util.FakeEvent.EventName.Emsg;
        const data = (new Map()).set('detail', emsg);
        const event = new shaka.util.FakeEvent(eventName, data);
        // A user can call preventDefault() on a cancelable event.
        event.cancelable = true;
        this.playerInterface_.onEmsg(emsg);
        // Additionally, ID3 events generate a 'metadata' event. This is a
        // pre-parsed version of the metadata blob already dispatched in the
        // 'emsg' event.
        if (schemeId == 'https://aomedia.org/emsg/ID3' ||
            schemeId == 'https://developer.apple.com/streaming/emsg-id3') {
          // See https://aomediacodec.github.io/id3-emsg/
          const frames = shaka.util.Id3Utils.getID3Frames(messageData);
          if (frames.length) {
            /** @private {shaka.extern.ID3Metadata} */
            const metadata = {
              cueTime: startTime,
              data: messageData,
              frames: frames,
              dts: startTime,
              pts: startTime,
            };
            this.playerInterface_.onMetadata(
                [metadata], /* offset= */ 0, endTime);
          }
        }
      }
    }
  }
  990. /**
  991. * Parse PRFT box.
  992. * @param {number} timescale
  993. * @param {!shaka.extern.ParsedBox} box
  994. * @private
  995. */
  996. parsePrft_(timescale, box) {
  997. goog.asserts.assert(
  998. box.version == 0 || box.version == 1,
  999. 'PRFT version can only be 0 or 1');
  1000. const parsed = shaka.util.Mp4BoxParsers.parsePRFTInaccurate(
  1001. box.reader, box.version);
  1002. const wallClockTime = this.convertNtp_(parsed.ntpTimestamp);
  1003. const programStartDate = new Date(wallClockTime -
  1004. (parsed.mediaTime / timescale) * 1000);
  1005. /** @type {shaka.extern.ProducerReferenceTime} */
  1006. const prftInfo = {
  1007. wallClockTime,
  1008. programStartDate,
  1009. };
  1010. const eventName = shaka.util.FakeEvent.EventName.Prft;
  1011. const data = (new Map()).set('detail', prftInfo);
  1012. const event = new shaka.util.FakeEvent(
  1013. eventName, data);
  1014. this.playerInterface_.onEvent(event);
  1015. }
  1016. /**
 * Convert an NTP timestamp (milliseconds since 1900-01-01T00:00:00Z) to
 * UTC time in milliseconds since the Unix epoch.
  1018. *
  1019. * @param {number} ntpTimeStamp
  1020. * @return {number} utcTime
  1021. * @private
  1022. */
  1023. convertNtp_(ntpTimeStamp) {
  1024. const start = new Date(Date.UTC(1900, 0, 1, 0, 0, 0));
  1025. return new Date(start.getTime() + ntpTimeStamp).getTime();
  1026. }
  1027. /**
  1028. * Enqueue an operation to append data to the SourceBuffer.
  1029. * Start and end times are needed for TextEngine, but not for MediaSource.
  1030. * Start and end times may be null for initialization segments; if present
  1031. * they are relative to the presentation timeline.
  1032. *
  1033. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1034. * @param {!BufferSource} data
  1035. * @param {?shaka.media.SegmentReference} reference The segment reference
  1036. * we are appending, or null for init segments
  1037. * @param {shaka.extern.Stream} stream
  1038. * @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
  1039. * captions
  1040. * @param {boolean=} seeked True if we just seeked
  1041. * @param {boolean=} adaptation True if we just automatically switched active
  1042. * variant(s).
 * @param {boolean=} isChunkedData True if we add to the buffer from the
 *   partial read of the segment.
 * @param {boolean=} fromSplit True if this append comes from the internal
 *   split of muxed content into separate audio and video buffers.
 * @return {!Promise}
 */
  async appendBuffer(
      contentType, data, reference, stream, hasClosedCaptions, seeked = false,
      adaptation = false, isChunkedData = false, fromSplit = false) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    // Text never touches MediaSource; it goes straight to TextEngine.
    if (contentType == ContentType.TEXT) {
      if (this.manifestType_ == shaka.media.ManifestParser.HLS) {
        // This won't be known until the first video segment is appended.
        const offset = await this.textSequenceModeOffset_;
        this.textEngine_.setTimestampOffset(offset);
      }
      await this.textEngine_.appendBuffer(
          data,
          reference ? reference.startTime : null,
          reference ? reference.endTime : null,
          reference ? reference.getUris()[0] : null);
      return;
    }
    // Muxed content on split buffers: append the same payload once per
    // buffer, recursing with fromSplit=true to avoid infinite recursion.
    if (!fromSplit && this.needSplitMuxedContent_) {
      await this.appendBuffer(ContentType.AUDIO, data, reference, stream,
          hasClosedCaptions, seeked, adaptation, isChunkedData,
          /* fromSplit= */ true);
      await this.appendBuffer(ContentType.VIDEO, data, reference, stream,
          hasClosedCaptions, seeked, adaptation, isChunkedData,
          /* fromSplit= */ true);
      return;
    }
    if (!this.sourceBuffers_.has(contentType)) {
      shaka.log.warning('Attempted to restore a non-existent source buffer');
      return;
    }
    let timestampOffset = this.sourceBuffers_.get(contentType).timestampOffset;
    let mimeType = this.sourceBufferTypes_.get(contentType);
    if (this.transmuxers_.has(contentType)) {
      mimeType = this.transmuxers_.get(contentType).getOriginalMimeType();
    }
    if (reference) {
      // Extract the media's internal timestamp (and any in-band metadata)
      // so we can compute/adjust the timestampOffset below.
      const {timestamp, metadata} = this.getTimestampAndDispatchMetadata(
          contentType, data, reference, stream, mimeType);
      if (timestamp != null) {
        // Record the first video/audio timestamps and resolve the audio
        // compensation promise once both sides are known.
        if (this.firstVideoTimestamp_ == null &&
            contentType == ContentType.VIDEO) {
          this.firstVideoTimestamp_ = timestamp;
          this.firstVideoReferenceStartTime_ = reference.startTime;
          if (this.firstAudioTimestamp_ != null) {
            let compensation = 0;
            // Only apply compensation if video and audio segment startTime
            // match, to avoid introducing sync issues.
            if (this.firstVideoReferenceStartTime_ ==
                this.firstAudioReferenceStartTime_) {
              compensation =
                  this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
            }
            this.audioCompensation_.resolve(compensation);
          }
        }
        if (this.firstAudioTimestamp_ == null &&
            contentType == ContentType.AUDIO) {
          this.firstAudioTimestamp_ = timestamp;
          this.firstAudioReferenceStartTime_ = reference.startTime;
          if (this.firstVideoTimestamp_ != null) {
            let compensation = 0;
            // Only apply compensation if video and audio segment startTime
            // match, to avoid introducing sync issues.
            if (this.firstVideoReferenceStartTime_ ==
                this.firstAudioReferenceStartTime_) {
              compensation =
                  this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
            }
            this.audioCompensation_.resolve(compensation);
          }
        }
        let realTimestamp = timestamp;
        const RAW_FORMATS = shaka.util.MimeUtils.RAW_FORMATS;
        // For formats without containers and using segments mode, we need to
        // adjust TimestampOffset relative to 0 because segments do not have
        // any timestamp information.
        if (!this.sequenceMode_ &&
            RAW_FORMATS.includes(this.sourceBufferTypes_.get(contentType))) {
          realTimestamp = 0;
        }
        const calculatedTimestampOffset = reference.startTime - realTimestamp;
        const timestampOffsetDifference =
            Math.abs(timestampOffset - calculatedTimestampOffset);
        // Only re-apply the offset when it drifted by >= 1ms, or when a seek
        // or adaptation forces it; chunked data gets extra guarding.
        if ((timestampOffsetDifference >= 0.001 || seeked || adaptation) &&
            (!isChunkedData || calculatedTimestampOffset > 0 ||
            !timestampOffset)) {
          timestampOffset = calculatedTimestampOffset;
          if (this.attemptTimestampOffsetCalculation_) {
            // abort() first so MSE lets us change the offset mid-stream.
            this.enqueueOperation_(
                contentType,
                () => this.abort_(contentType),
                null);
            this.enqueueOperation_(
                contentType,
                () => this.setTimestampOffset_(contentType, timestampOffset),
                null);
          }
        }
        // Timestamps can only be reliably extracted from video, not audio.
        // Packed audio formats do not have internal timestamps at all.
        // Prefer video for this when available.
        const isBestSourceBufferForTimestamps =
            contentType == ContentType.VIDEO ||
            !(this.sourceBuffers_.has(ContentType.VIDEO));
        if (isBestSourceBufferForTimestamps) {
          this.textSequenceModeOffset_.resolve(timestampOffset);
        }
      }
      if (metadata.length) {
        this.playerInterface_.onMetadata(metadata, timestampOffset,
            reference ? reference.endTime : null);
      }
    }
    if (hasClosedCaptions && contentType == ContentType.VIDEO) {
      if (!this.textEngine_) {
        this.reinitText(shaka.util.MimeUtils.CEA608_CLOSED_CAPTION_MIMETYPE,
            this.sequenceMode_, /* external= */ false);
      }
      if (!this.captionParser_) {
        const basicType = mimeType.split(';', 1)[0];
        this.captionParser_ = this.getCaptionParser(basicType);
      }
      // If it is the init segment for closed captions, initialize the closed
      // caption parser.
      if (!reference) {
        this.captionParser_.init(data, adaptation);
      } else {
        const closedCaptions = this.captionParser_.parseFrom(data);
        if (closedCaptions.length) {
          this.textEngine_.storeAndAppendClosedCaptions(
              closedCaptions,
              reference.startTime,
              reference.endTime,
              timestampOffset);
        }
      }
    }
    // Transmux (e.g. TS -> MP4) before handing the bytes to MSE.
    if (this.transmuxers_.has(contentType)) {
      data = await this.transmuxers_.get(contentType).transmux(
          data, stream, reference, this.mediaSource_.duration, contentType);
    }
    data = this.workAroundBrokenPlatforms_(
        stream, data, reference, contentType);
    if (reference && this.sequenceMode_ && contentType != ContentType.TEXT) {
      // In sequence mode, for non-text streams, if we just cleared the buffer
      // and are either performing an unbuffered seek or handling an automatic
      // adaptation, we need to set a new timestampOffset on the sourceBuffer.
      if (seeked || adaptation) {
        let timestampOffset = reference.startTime;
        // Audio and video may not be aligned, so we will compensate for audio
        // if necessary.
        if (this.manifestType_ == shaka.media.ManifestParser.HLS &&
            !this.needSplitMuxedContent_ &&
            contentType == ContentType.AUDIO &&
            this.sourceBuffers_.has(ContentType.VIDEO)) {
          const compensation = await this.audioCompensation_;
          // Only apply compensation if the difference is greater than 150ms
          if (Math.abs(compensation) > 0.15) {
            timestampOffset -= compensation;
          }
        }
        // The logic to call abort() before setting the timestampOffset is
        // extended during unbuffered seeks or automatic adaptations; it is
        // possible for the append state to be PARSING_MEDIA_SEGMENT from the
        // previous SourceBuffer#appendBuffer() call.
        this.enqueueOperation_(
            contentType,
            () => this.abort_(contentType),
            null);
        this.enqueueOperation_(
            contentType,
            () => this.setTimestampOffset_(contentType, timestampOffset),
            null);
      }
    }
    let bufferedBefore = null;
    await this.enqueueOperation_(contentType, () => {
      if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
        bufferedBefore = this.getBuffered_(contentType);
      }
      this.append_(contentType, data, timestampOffset, stream);
    }, reference ? reference.getUris()[0] : null);
    // Debug-only sanity check: compare the newly buffered range against the
    // segment reference to detect encoding problems.
    if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
      const bufferedAfter = this.getBuffered_(contentType);
      const newBuffered = shaka.media.TimeRangesUtils.computeAddedRange(
          bufferedBefore, bufferedAfter);
      if (newBuffered) {
        const segmentDuration = reference.endTime - reference.startTime;
        const timeAdded = newBuffered.end - newBuffered.start;
        // Check end times instead of start times. We may be overwriting a
        // buffer and only the end changes, and that would be fine.
        // Also, exclude tiny segments. Sometimes alignment segments as small
        // as 33ms are seen in Google DAI content. For such tiny segments,
        // half a segment duration would be no issue.
        const offset = Math.abs(newBuffered.end - reference.endTime);
        if (segmentDuration > 0.100 && (offset > segmentDuration / 2 ||
            Math.abs(segmentDuration - timeAdded) > 0.030)) {
          shaka.log.error('Possible encoding problem detected!',
              'Unexpected buffered range for reference', reference,
              'from URIs', reference.getUris(),
              'should be', {start: reference.startTime, end: reference.endTime},
              'but got', newBuffered);
        }
      }
    }
  }
  1253. /**
  1254. * Set the selected closed captions Id and language.
  1255. *
  1256. * @param {string} id
  1257. */
  1258. setSelectedClosedCaptionId(id) {
  1259. const VIDEO = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  1260. const videoBufferEndTime = this.bufferEnd(VIDEO) || 0;
  1261. this.textEngine_.setSelectedClosedCaptionId(id, videoBufferEndTime);
  1262. }
  1263. /** Disable embedded closed captions. */
  1264. clearSelectedClosedCaptionId() {
  1265. if (this.textEngine_) {
  1266. this.textEngine_.setSelectedClosedCaptionId('', 0);
  1267. }
  1268. }
  1269. /**
  1270. * Enqueue an operation to remove data from the SourceBuffer.
  1271. *
  1272. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1273. * @param {number} startTime relative to the start of the presentation
  1274. * @param {number} endTime relative to the start of the presentation
  1275. * @return {!Promise}
  1276. */
  1277. async remove(contentType, startTime, endTime) {
  1278. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1279. if (contentType == ContentType.TEXT) {
  1280. await this.textEngine_.remove(startTime, endTime);
  1281. } else if (endTime > startTime) {
  1282. await this.enqueueOperation_(
  1283. contentType,
  1284. () => this.remove_(contentType, startTime, endTime),
  1285. null);
  1286. if (this.needSplitMuxedContent_) {
  1287. await this.enqueueOperation_(
  1288. ContentType.AUDIO,
  1289. () => this.remove_(ContentType.AUDIO, startTime, endTime),
  1290. null);
  1291. }
  1292. }
  1293. }
  1294. /**
  1295. * Enqueue an operation to clear the SourceBuffer.
  1296. *
  1297. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1298. * @return {!Promise}
  1299. */
  1300. async clear(contentType) {
  1301. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1302. if (contentType == ContentType.TEXT) {
  1303. if (!this.textEngine_) {
  1304. return;
  1305. }
  1306. await this.textEngine_.remove(0, Infinity);
  1307. } else {
  1308. // Note that not all platforms allow clearing to Infinity.
  1309. await this.enqueueOperation_(
  1310. contentType,
  1311. () => this.remove_(contentType, 0, this.mediaSource_.duration),
  1312. null);
  1313. if (this.needSplitMuxedContent_) {
  1314. await this.enqueueOperation_(
  1315. ContentType.AUDIO,
  1316. () => this.remove_(
  1317. ContentType.AUDIO, 0, this.mediaSource_.duration),
  1318. null);
  1319. }
  1320. }
  1321. }
  1322. /**
  1323. * Fully reset the state of the caption parser owned by MediaSourceEngine.
  1324. */
  1325. resetCaptionParser() {
  1326. if (this.captionParser_) {
  1327. this.captionParser_.reset();
  1328. }
  1329. }
  1330. /**
  1331. * Enqueue an operation to flush the SourceBuffer.
  1332. * This is a workaround for what we believe is a Chromecast bug.
  1333. *
  1334. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1335. * @return {!Promise}
  1336. */
  1337. async flush(contentType) {
  1338. // Flush the pipeline. Necessary on Chromecast, even though we have removed
  1339. // everything.
  1340. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1341. if (contentType == ContentType.TEXT) {
  1342. // Nothing to flush for text.
  1343. return;
  1344. }
  1345. await this.enqueueOperation_(
  1346. contentType,
  1347. () => this.flush_(contentType),
  1348. null);
  1349. if (this.needSplitMuxedContent_) {
  1350. await this.enqueueOperation_(
  1351. ContentType.AUDIO,
  1352. () => this.flush_(ContentType.AUDIO),
  1353. null);
  1354. }
  1355. }
  1356. /**
  1357. * Sets the timestamp offset and append window end for the given content type.
  1358. *
  1359. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1360. * @param {number} timestampOffset The timestamp offset. Segments which start
  1361. * at time t will be inserted at time t + timestampOffset instead. This
  1362. * value does not affect segments which have already been inserted.
  1363. * @param {number} appendWindowStart The timestamp to set the append window
  1364. * start to. For future appends, frames/samples with timestamps less than
  1365. * this value will be dropped.
  1366. * @param {number} appendWindowEnd The timestamp to set the append window end
  1367. * to. For future appends, frames/samples with timestamps greater than this
  1368. * value will be dropped.
  1369. * @param {boolean} ignoreTimestampOffset If true, the timestampOffset will
  1370. * not be applied in this step.
  1371. * @param {string} mimeType
  1372. * @param {string} codecs
  1373. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  1374. * shaka.extern.Stream>} streamsByType
  1375. * A map of content types to streams. All streams must be supported
  1376. * according to MediaSourceEngine.isStreamSupported.
  1377. *
  1378. * @return {!Promise}
  1379. */
  1380. async setStreamProperties(
  1381. contentType, timestampOffset, appendWindowStart, appendWindowEnd,
  1382. ignoreTimestampOffset, mimeType, codecs, streamsByType) {
  1383. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1384. if (contentType == ContentType.TEXT) {
  1385. if (!ignoreTimestampOffset) {
  1386. this.textEngine_.setTimestampOffset(timestampOffset);
  1387. }
  1388. this.textEngine_.setAppendWindow(appendWindowStart, appendWindowEnd);
  1389. return;
  1390. }
  1391. const operations = [];
  1392. const hasChangedCodecs = await this.codecSwitchIfNecessary_(
  1393. contentType, mimeType, codecs, streamsByType);
  1394. if (!hasChangedCodecs) {
  1395. // Queue an abort() to help MSE splice together overlapping segments.
  1396. // We set appendWindowEnd when we change periods in DASH content, and the
  1397. // period transition may result in overlap.
  1398. //
  1399. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1400. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1401. // timestamp offset. By calling abort(), we reset the state so we can
  1402. // set it.
  1403. operations.push(this.enqueueOperation_(
  1404. contentType,
  1405. () => this.abort_(contentType),
  1406. null));
  1407. if (this.needSplitMuxedContent_) {
  1408. operations.push(this.enqueueOperation_(
  1409. ContentType.AUDIO,
  1410. () => this.abort_(ContentType.AUDIO),
  1411. null));
  1412. }
  1413. }
  1414. if (!ignoreTimestampOffset) {
  1415. operations.push(this.enqueueOperation_(
  1416. contentType,
  1417. () => this.setTimestampOffset_(contentType, timestampOffset),
  1418. null));
  1419. if (this.needSplitMuxedContent_) {
  1420. operations.push(this.enqueueOperation_(
  1421. ContentType.AUDIO,
  1422. () => this.setTimestampOffset_(
  1423. ContentType.AUDIO, timestampOffset),
  1424. null));
  1425. }
  1426. }
  1427. if (appendWindowStart != 0 || appendWindowEnd != Infinity) {
  1428. operations.push(this.enqueueOperation_(
  1429. contentType,
  1430. () => this.setAppendWindow_(
  1431. contentType, appendWindowStart, appendWindowEnd),
  1432. null));
  1433. if (this.needSplitMuxedContent_) {
  1434. operations.push(this.enqueueOperation_(
  1435. ContentType.AUDIO,
  1436. () => this.setAppendWindow_(
  1437. ContentType.AUDIO, appendWindowStart, appendWindowEnd),
  1438. null));
  1439. }
  1440. }
  1441. if (operations.length) {
  1442. await Promise.all(operations);
  1443. }
  1444. }
  1445. /**
  1446. * Adjust timestamp offset to maintain AV sync across discontinuities.
  1447. *
  1448. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1449. * @param {number} timestampOffset
  1450. * @return {!Promise}
  1451. */
  1452. async resync(contentType, timestampOffset) {
  1453. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1454. if (contentType == ContentType.TEXT) {
  1455. // This operation is for audio and video only.
  1456. return;
  1457. }
  1458. // Reset the promise in case the timestamp offset changed during
  1459. // a period/discontinuity transition.
  1460. if (contentType == ContentType.VIDEO) {
  1461. this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  1462. }
  1463. if (!this.sequenceMode_) {
  1464. return;
  1465. }
  1466. // Avoid changing timestampOffset when the difference is less than 100 ms
  1467. // from the end of the current buffer.
  1468. const bufferEnd = this.bufferEnd(contentType);
  1469. if (bufferEnd && Math.abs(bufferEnd - timestampOffset) < 0.1) {
  1470. return;
  1471. }
  1472. // Queue an abort() to help MSE splice together overlapping segments.
  1473. // We set appendWindowEnd when we change periods in DASH content, and the
  1474. // period transition may result in overlap.
  1475. //
  1476. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1477. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1478. // timestamp offset. By calling abort(), we reset the state so we can
  1479. // set it.
  1480. this.enqueueOperation_(
  1481. contentType,
  1482. () => this.abort_(contentType),
  1483. null);
  1484. if (this.needSplitMuxedContent_) {
  1485. this.enqueueOperation_(
  1486. ContentType.AUDIO,
  1487. () => this.abort_(ContentType.AUDIO),
  1488. null);
  1489. }
  1490. await this.enqueueOperation_(
  1491. contentType,
  1492. () => this.setTimestampOffset_(contentType, timestampOffset),
  1493. null);
  1494. if (this.needSplitMuxedContent_) {
  1495. await this.enqueueOperation_(
  1496. ContentType.AUDIO,
  1497. () => this.setTimestampOffset_(ContentType.AUDIO, timestampOffset),
  1498. null);
  1499. }
  1500. }
  1501. /**
  1502. * @param {string=} reason Valid reasons are 'network' and 'decode'.
  1503. * @return {!Promise}
  1504. * @see http://w3c.github.io/media-source/#idl-def-EndOfStreamError
  1505. */
  1506. async endOfStream(reason) {
  1507. await this.enqueueBlockingOperation_(() => {
  1508. // If endOfStream() has already been called on the media source,
  1509. // don't call it again. Also do not call if readyState is
  1510. // 'closed' (not attached to video element) since it is not a
  1511. // valid operation.
  1512. if (this.ended() || this.closed()) {
  1513. return;
  1514. }
  1515. // Tizen won't let us pass undefined, but it will let us omit the
  1516. // argument.
  1517. if (reason) {
  1518. this.mediaSource_.endOfStream(reason);
  1519. } else {
  1520. this.mediaSource_.endOfStream();
  1521. }
  1522. });
  1523. }
  1524. /**
  1525. * @param {number} duration
  1526. * @return {!Promise}
  1527. */
  1528. async setDuration(duration) {
  1529. await this.enqueueBlockingOperation_(() => {
  1530. // Reducing the duration causes the MSE removal algorithm to run, which
  1531. // triggers an 'updateend' event to fire. To handle this scenario, we
  1532. // have to insert a dummy operation into the beginning of each queue,
  1533. // which the 'updateend' handler will remove.
  1534. if (duration < this.mediaSource_.duration) {
  1535. for (const contentType of this.sourceBuffers_.keys()) {
  1536. const dummyOperation = {
  1537. start: () => {},
  1538. p: new shaka.util.PublicPromise(),
  1539. uri: null,
  1540. };
  1541. this.queues_.get(contentType).unshift(dummyOperation);
  1542. }
  1543. }
  1544. this.mediaSource_.duration = duration;
  1545. this.lastDuration_ = duration;
  1546. });
  1547. }
  1548. /**
  1549. * Get the current MediaSource duration.
  1550. *
  1551. * @return {number}
  1552. */
  1553. getDuration() {
  1554. return this.mediaSource_.duration;
  1555. }
  1556. /**
  1557. * Updates the live seekable range.
  1558. *
 * @param {number} startTime
 * @param {number} endTime
 * @return {!Promise}
 */
  1562. async setLiveSeekableRange(startTime, endTime) {
  1563. goog.asserts.assert('setLiveSeekableRange' in this.mediaSource_,
  1564. 'Using setLiveSeekableRange on not supported platform');
  1565. if (this.ended() || this.closed()) {
  1566. return;
  1567. }
  1568. await this.enqueueBlockingOperation_(() => {
  1569. if (this.ended() || this.closed()) {
  1570. return;
  1571. }
  1572. this.mediaSource_.setLiveSeekableRange(startTime, endTime);
  1573. });
  1574. }
  1575. /**
  1576. * Clear the current live seekable range.
 * @return {!Promise}
 */
  1578. async clearLiveSeekableRange() {
  1579. goog.asserts.assert('clearLiveSeekableRange' in this.mediaSource_,
  1580. 'Using clearLiveSeekableRange on not supported platform');
  1581. if (this.ended() || this.closed()) {
  1582. return;
  1583. }
  1584. await this.enqueueBlockingOperation_(() => {
  1585. if (this.ended() || this.closed()) {
  1586. return;
  1587. }
  1588. this.mediaSource_.clearLiveSeekableRange();
  1589. });
  1590. }
  1591. /**
  1592. * Append dependency data.
  1593. * @param {BufferSource} data
  1594. * @param {number} timestampOffset
  1595. * @param {shaka.extern.Stream} stream
  1596. */
  1597. appendDependency(data, timestampOffset, stream) {
  1598. if (this.lcevcDec_) {
  1599. // Append buffers to the LCEVC Dec for parsing and storing
  1600. // of LCEVC data.
  1601. this.lcevcDec_.appendBuffer(data, timestampOffset, stream);
  1602. }
  1603. }
  1604. /**
  1605. * Append data to the SourceBuffer.
  1606. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1607. * @param {BufferSource} data
  1608. * @param {number} timestampOffset
  1609. * @param {shaka.extern.Stream} stream
  1610. * @private
  1611. */
  1612. append_(contentType, data, timestampOffset, stream) {
  1613. this.appendDependency(data, timestampOffset, stream);
  1614. // This will trigger an 'updateend' event.
  1615. this.sourceBuffers_.get(contentType).appendBuffer(data);
  1616. }
  1617. /**
  1618. * Remove data from the SourceBuffer.
  1619. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1620. * @param {number} startTime relative to the start of the presentation
  1621. * @param {number} endTime relative to the start of the presentation
  1622. * @private
  1623. */
  1624. remove_(contentType, startTime, endTime) {
  1625. if (endTime <= startTime) {
  1626. // Ignore removal of inverted or empty ranges.
  1627. // Fake 'updateend' event to resolve the operation.
  1628. this.onUpdateEnd_(contentType);
  1629. return;
  1630. }
  1631. // This will trigger an 'updateend' event.
  1632. this.sourceBuffers_.get(contentType).remove(startTime, endTime);
  1633. }
  1634. /**
  1635. * Call abort() on the SourceBuffer.
  1636. * This resets MSE's last_decode_timestamp on all track buffers, which should
  1637. * trigger the splicing logic for overlapping segments.
  1638. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1639. * @private
  1640. */
  1641. abort_(contentType) {
  1642. const sourceBuffer = this.sourceBuffers_.get(contentType);
  1643. // Save the append window, which is reset on abort().
  1644. const appendWindowStart = sourceBuffer.appendWindowStart;
  1645. const appendWindowEnd = sourceBuffer.appendWindowEnd;
  1646. // This will not trigger an 'updateend' event, since nothing is happening.
  1647. // This is only to reset MSE internals, not to abort an actual operation.
  1648. sourceBuffer.abort();
  1649. // Restore the append window.
  1650. sourceBuffer.appendWindowStart = appendWindowStart;
  1651. sourceBuffer.appendWindowEnd = appendWindowEnd;
  1652. // Fake an 'updateend' event to resolve the operation.
  1653. this.onUpdateEnd_(contentType);
  1654. }
  1655. /**
  1656. * Nudge the playhead to force the media pipeline to be flushed.
  1657. * This seems to be necessary on Chromecast to get new content to replace old
  1658. * content.
  1659. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1660. * @private
  1661. */
  1662. flush_(contentType) {
  1663. // Never use flush_ if there's data. It causes a hiccup in playback.
  1664. goog.asserts.assert(
  1665. this.video_.buffered.length == 0, 'MediaSourceEngine.flush_ should ' +
  1666. 'only be used after clearing all data!');
  1667. // Seeking forces the pipeline to be flushed.
  1668. this.video_.currentTime -= 0.001;
  1669. // Fake an 'updateend' event to resolve the operation.
  1670. this.onUpdateEnd_(contentType);
  1671. }
  1672. /**
  1673. * Set the SourceBuffer's timestamp offset.
  1674. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1675. * @param {number} timestampOffset
  1676. * @private
  1677. */
  1678. setTimestampOffset_(contentType, timestampOffset) {
  1679. // Work around for
  1680. // https://github.com/shaka-project/shaka-player/issues/1281:
  1681. // TODO(https://bit.ly/2ttKiBU): follow up when this is fixed in Edge
  1682. if (timestampOffset < 0) {
  1683. // Try to prevent rounding errors in Edge from removing the first
  1684. // keyframe.
  1685. timestampOffset += 0.001;
  1686. }
  1687. let shouldChangeTimestampOffset = true;
  1688. if (this.manifestType_ == shaka.media.ManifestParser.HLS) {
  1689. // Avoid changing timestampOffset when the difference is less than 150 ms
  1690. // from the end of the current buffer when using sequenceMode
  1691. const bufferEnd = this.bufferEnd(contentType);
  1692. if (!bufferEnd || Math.abs(bufferEnd - timestampOffset) > 0.15) {
  1693. shouldChangeTimestampOffset = true;
  1694. } else {
  1695. shouldChangeTimestampOffset = false;
  1696. }
  1697. }
  1698. if (shouldChangeTimestampOffset) {
  1699. this.sourceBuffers_.get(contentType).timestampOffset = timestampOffset;
  1700. }
  1701. // Fake an 'updateend' event to resolve the operation.
  1702. this.onUpdateEnd_(contentType);
  1703. }
  1704. /**
  1705. * Set the SourceBuffer's append window end.
  1706. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1707. * @param {number} appendWindowStart
  1708. * @param {number} appendWindowEnd
  1709. * @private
  1710. */
  1711. setAppendWindow_(contentType, appendWindowStart, appendWindowEnd) {
  1712. const sourceBuffer = this.sourceBuffers_.get(contentType);
  1713. // You can't set start > end, so first set start to 0, then set the new
  1714. // end, then set the new start. That way, there are no intermediate
  1715. // states which are invalid.
  1716. sourceBuffer.appendWindowStart = 0;
  1717. sourceBuffer.appendWindowEnd = appendWindowEnd;
  1718. sourceBuffer.appendWindowStart = appendWindowStart;
  1719. // Fake an 'updateend' event to resolve the operation.
  1720. this.onUpdateEnd_(contentType);
  1721. }
  1722. /**
  1723. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1724. * @private
  1725. */
  1726. onError_(contentType) {
  1727. const operation = this.queues_.get(contentType)[0];
  1728. goog.asserts.assert(operation, 'Spurious error event!');
  1729. goog.asserts.assert(!this.sourceBuffers_.get(contentType).updating,
  1730. 'SourceBuffer should not be updating on error!');
  1731. const code = this.video_.error ? this.video_.error.code : 0;
  1732. operation.p.reject(new shaka.util.Error(
  1733. shaka.util.Error.Severity.CRITICAL,
  1734. shaka.util.Error.Category.MEDIA,
  1735. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_FAILED,
  1736. code, operation.uri));
  1737. // Do not pop from queue. An 'updateend' event will fire next, and to
  1738. // avoid synchronizing these two event handlers, we will allow that one to
  1739. // pop from the queue as normal. Note that because the operation has
  1740. // already been rejected, the call to resolve() in the 'updateend' handler
  1741. // will have no effect.
  1742. }
  1743. /**
  1744. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1745. * @private
  1746. */
  1747. onUpdateEnd_(contentType) {
  1748. if (this.reloadingMediaSource_) {
  1749. return;
  1750. }
  1751. const operation = this.queues_.get(contentType)[0];
  1752. goog.asserts.assert(operation, 'Spurious updateend event!');
  1753. if (!operation) {
  1754. return;
  1755. }
  1756. goog.asserts.assert(!this.sourceBuffers_.get(contentType).updating,
  1757. 'SourceBuffer should not be updating on updateend!');
  1758. operation.p.resolve();
  1759. this.popFromQueue_(contentType);
  1760. }
  1761. /**
  1762. * Enqueue an operation and start it if appropriate.
  1763. *
  1764. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1765. * @param {function()} start
  1766. * @param {?string} uri
  1767. * @return {!Promise}
  1768. * @private
  1769. */
  1770. enqueueOperation_(contentType, start, uri) {
  1771. this.destroyer_.ensureNotDestroyed();
  1772. const operation = {
  1773. start: start,
  1774. p: new shaka.util.PublicPromise(),
  1775. uri,
  1776. };
  1777. this.queues_.get(contentType).push(operation);
  1778. if (this.queues_.get(contentType).length == 1) {
  1779. this.startOperation_(contentType);
  1780. }
  1781. return operation.p;
  1782. }
  1783. /**
  1784. * Enqueue an operation which must block all other operations on all
  1785. * SourceBuffers.
  1786. *
  1787. * @param {function():(Promise|undefined)} run
  1788. * @return {!Promise}
  1789. * @private
  1790. */
  1791. async enqueueBlockingOperation_(run) {
  1792. this.destroyer_.ensureNotDestroyed();
  1793. /** @type {!Array<!shaka.util.PublicPromise>} */
  1794. const allWaiters = [];
  1795. /** @type {!Array<!shaka.util.ManifestParserUtils.ContentType>} */
  1796. const contentTypes = Array.from(this.sourceBuffers_.keys());
  1797. // Enqueue a 'wait' operation onto each queue.
  1798. // This operation signals its readiness when it starts.
  1799. // When all wait operations are ready, the real operation takes place.
  1800. for (const contentType of contentTypes) {
  1801. const ready = new shaka.util.PublicPromise();
  1802. const operation = {
  1803. start: () => ready.resolve(),
  1804. p: ready,
  1805. uri: null,
  1806. };
  1807. const queue = this.queues_.get(contentType);
  1808. queue.push(operation);
  1809. allWaiters.push(ready);
  1810. if (queue.length == 1) {
  1811. operation.start();
  1812. }
  1813. }
  1814. // Return a Promise to the real operation, which waits to begin until
  1815. // there are no other in-progress operations on any SourceBuffers.
  1816. try {
  1817. await Promise.all(allWaiters);
  1818. } catch (error) {
  1819. // One of the waiters failed, which means we've been destroyed.
  1820. goog.asserts.assert(
  1821. this.destroyer_.destroyed(), 'Should be destroyed by now');
  1822. // We haven't popped from the queue. Canceled waiters have been removed
  1823. // by destroy. What's left now should just be resolved waiters. In
  1824. // uncompiled mode, we will maintain good hygiene and make sure the
  1825. // assert at the end of destroy passes. In compiled mode, the queues
  1826. // are wiped in destroy.
  1827. if (goog.DEBUG) {
  1828. for (const contentType of contentTypes) {
  1829. const queue = this.queues_.get(contentType);
  1830. if (queue.length) {
  1831. goog.asserts.assert(queue.length == 1,
  1832. 'Should be at most one item in queue!');
  1833. goog.asserts.assert(allWaiters.includes(queue[0].p),
  1834. 'The item in queue should be one of our waiters!');
  1835. queue.shift();
  1836. }
  1837. }
  1838. }
  1839. throw error;
  1840. }
  1841. if (goog.DEBUG) {
  1842. // If we did it correctly, nothing is updating.
  1843. for (const contentType of contentTypes) {
  1844. goog.asserts.assert(
  1845. this.sourceBuffers_.get(contentType).updating == false,
  1846. 'SourceBuffers should not be updating after a blocking op!');
  1847. }
  1848. }
  1849. // Run the real operation, which can be asynchronous.
  1850. try {
  1851. await run();
  1852. } catch (exception) {
  1853. throw new shaka.util.Error(
  1854. shaka.util.Error.Severity.CRITICAL,
  1855. shaka.util.Error.Category.MEDIA,
  1856. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1857. exception,
  1858. this.video_.error || 'No error in the media element',
  1859. null);
  1860. } finally {
  1861. // Unblock the queues.
  1862. for (const contentType of contentTypes) {
  1863. this.popFromQueue_(contentType);
  1864. }
  1865. }
  1866. }
  1867. /**
  1868. * Pop from the front of the queue and start a new operation.
  1869. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1870. * @private
  1871. */
  1872. popFromQueue_(contentType) {
  1873. goog.asserts.assert(this.queues_.has(contentType), 'Queue should exist');
  1874. // Remove the in-progress operation, which is now complete.
  1875. this.queues_.get(contentType).shift();
  1876. this.startOperation_(contentType);
  1877. }
  1878. /**
  1879. * Starts the next operation in the queue.
  1880. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1881. * @private
  1882. */
  1883. startOperation_(contentType) {
  1884. // Retrieve the next operation, if any, from the queue and start it.
  1885. const next = this.queues_.get(contentType)[0];
  1886. if (next) {
  1887. try {
  1888. next.start();
  1889. } catch (exception) {
  1890. if (exception.name == 'QuotaExceededError') {
  1891. next.p.reject(new shaka.util.Error(
  1892. shaka.util.Error.Severity.CRITICAL,
  1893. shaka.util.Error.Category.MEDIA,
  1894. shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR,
  1895. contentType));
  1896. } else if (!this.isStreamingAllowed()) {
  1897. next.p.reject(new shaka.util.Error(
  1898. shaka.util.Error.Severity.CRITICAL,
  1899. shaka.util.Error.Category.MEDIA,
  1900. shaka.util.Error.Code.STREAMING_NOT_ALLOWED,
  1901. contentType));
  1902. } else {
  1903. next.p.reject(new shaka.util.Error(
  1904. shaka.util.Error.Severity.CRITICAL,
  1905. shaka.util.Error.Category.MEDIA,
  1906. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1907. exception,
  1908. this.video_.error || 'No error in the media element',
  1909. next.uri));
  1910. }
  1911. this.popFromQueue_(contentType);
  1912. }
  1913. }
  1914. }
  1915. /**
  1916. * @return {!shaka.extern.TextDisplayer}
  1917. */
  1918. getTextDisplayer() {
  1919. goog.asserts.assert(
  1920. this.textDisplayer_,
  1921. 'TextDisplayer should only be null when this is destroyed');
  1922. return this.textDisplayer_;
  1923. }
  1924. /**
  1925. * @param {!shaka.extern.TextDisplayer} textDisplayer
  1926. */
  1927. setTextDisplayer(textDisplayer) {
  1928. this.textDisplayer_ = textDisplayer;
  1929. if (this.textEngine_) {
  1930. this.textEngine_.setDisplayer(textDisplayer);
  1931. }
  1932. }
  1933. /**
  1934. * @param {boolean} segmentRelativeVttTiming
  1935. */
  1936. setSegmentRelativeVttTiming(segmentRelativeVttTiming) {
  1937. this.segmentRelativeVttTiming_ = segmentRelativeVttTiming;
  1938. }
  1939. /**
  1940. * Apply platform-specific transformations to this segment to work around
  1941. * issues in the platform.
  1942. *
  1943. * @param {shaka.extern.Stream} stream
  1944. * @param {!BufferSource} segment
  1945. * @param {?shaka.media.SegmentReference} reference
  1946. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1947. * @return {!BufferSource}
  1948. * @private
  1949. */
  1950. workAroundBrokenPlatforms_(stream, segment, reference, contentType) {
  1951. const Platform = shaka.util.Platform;
  1952. const isMp4 = shaka.util.MimeUtils.getContainerType(
  1953. this.sourceBufferTypes_.get(contentType)) == 'mp4';
  1954. if (!isMp4) {
  1955. return segment;
  1956. }
  1957. const isInitSegment = reference === null;
  1958. const encryptionExpected = this.expectedEncryption_.get(contentType);
  1959. const keySystem = this.playerInterface_.getKeySystem();
  1960. let isEncrypted = false;
  1961. if (reference && reference.initSegmentReference) {
  1962. isEncrypted = reference.initSegmentReference.encrypted;
  1963. }
  1964. const uri = reference ? reference.getUris()[0] : null;
  1965. // If:
  1966. // 1. the configuration tells to insert fake encryption,
  1967. // 2. and this is an init segment or media segment,
  1968. // 3. and encryption is expected,
  1969. // 4. and the platform requires encryption in all init or media segments
  1970. // of current content type,
  1971. // then insert fake encryption metadata for init segments that lack it.
  1972. // The MP4 requirement is because we can currently only do this
  1973. // transformation on MP4 containers.
  1974. // See: https://github.com/shaka-project/shaka-player/issues/2759
  1975. if (this.config_.insertFakeEncryptionInInit && encryptionExpected &&
  1976. Platform.requiresEncryptionInfoInAllInitSegments(keySystem,
  1977. contentType)) {
  1978. if (isInitSegment) {
  1979. shaka.log.debug('Forcing fake encryption information in init segment.');
  1980. segment =
  1981. shaka.media.ContentWorkarounds.fakeEncryption(stream, segment, uri);
  1982. } else if (!isEncrypted && Platform.requiresTfhdFix(contentType)) {
  1983. shaka.log.debug(
  1984. 'Forcing fake encryption information in media segment.');
  1985. segment = shaka.media.ContentWorkarounds.fakeMediaEncryption(segment);
  1986. }
  1987. }
  1988. if (isInitSegment && Platform.requiresEC3InitSegments()) {
  1989. shaka.log.debug('Forcing fake EC-3 information in init segment.');
  1990. segment = shaka.media.ContentWorkarounds.fakeEC3(segment);
  1991. }
  1992. return segment;
  1993. }
  1994. /**
  1995. * Prepare the SourceBuffer to parse a potentially new type or codec.
  1996. *
  1997. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1998. * @param {string} mimeType
  1999. * @param {?shaka.extern.Transmuxer} transmuxer
  2000. * @private
  2001. */
  2002. change_(contentType, mimeType, transmuxer) {
  2003. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2004. if (contentType === ContentType.TEXT) {
  2005. shaka.log.debug(`Change not supported for ${contentType}`);
  2006. return;
  2007. }
  2008. const sourceBuffer = this.sourceBufferTypes_.get(contentType);
  2009. shaka.log.debug(
  2010. `Change Type: ${sourceBuffer} -> ${mimeType}`);
  2011. if (shaka.media.Capabilities.isChangeTypeSupported()) {
  2012. if (this.transmuxers_.has(contentType)) {
  2013. this.transmuxers_.get(contentType).destroy();
  2014. this.transmuxers_.delete(contentType);
  2015. }
  2016. if (transmuxer) {
  2017. this.transmuxers_.set(contentType, transmuxer);
  2018. }
  2019. const type = this.addExtraFeaturesToMimeType_(mimeType);
  2020. this.sourceBuffers_.get(contentType).changeType(type);
  2021. this.sourceBufferTypes_.set(contentType, mimeType);
  2022. } else {
  2023. shaka.log.debug('Change Type not supported');
  2024. }
  2025. // Fake an 'updateend' event to resolve the operation.
  2026. this.onUpdateEnd_(contentType);
  2027. }
  2028. /**
  2029. * Enqueue an operation to prepare the SourceBuffer to parse a potentially new
  2030. * type or codec.
  2031. *
  2032. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2033. * @param {string} mimeType
  2034. * @param {?shaka.extern.Transmuxer} transmuxer
  2035. * @return {!Promise}
  2036. */
  2037. changeType(contentType, mimeType, transmuxer) {
  2038. return this.enqueueOperation_(
  2039. contentType,
  2040. () => this.change_(contentType, mimeType, transmuxer),
  2041. null);
  2042. }
  2043. /**
  2044. * Resets the MediaSource and re-adds source buffers due to codec mismatch
  2045. *
  2046. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  2047. * shaka.extern.Stream>} streamsByType
  2048. * @private
  2049. */
  2050. async reset_(streamsByType) {
  2051. if (this.reloadingMediaSource_) {
  2052. return;
  2053. }
  2054. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2055. this.reloadingMediaSource_ = true;
  2056. this.needSplitMuxedContent_ = false;
  2057. const currentTime = this.video_.currentTime;
  2058. // When codec switching if the user is currently paused we don't want
  2059. // to trigger a play when switching codec.
  2060. // Playing can also end up in a paused state after a codec switch
  2061. // so we need to remember the current states.
  2062. const previousAutoPlayState = this.video_.autoplay;
  2063. if (!this.video_.paused) {
  2064. this.playAfterReset_ = true;
  2065. }
  2066. if (this.playbackHasBegun_) {
  2067. // Only set autoplay to false if the video playback has already begun.
  2068. // When a codec switch happens before playback has begun this can cause
  2069. // autoplay not to work as expected.
  2070. this.video_.autoplay = false;
  2071. }
  2072. try {
  2073. this.eventManager_.removeAll();
  2074. for (const transmuxer of this.transmuxers_.values()) {
  2075. transmuxer.destroy();
  2076. }
  2077. for (const sourceBuffer of this.sourceBuffers_.values()) {
  2078. try {
  2079. this.mediaSource_.removeSourceBuffer(sourceBuffer);
  2080. } catch (e) {
  2081. shaka.log.debug('Exception on removeSourceBuffer', e);
  2082. }
  2083. }
  2084. this.transmuxers_.clear();
  2085. this.sourceBuffers_.clear();
  2086. const previousDuration = this.mediaSource_.duration;
  2087. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  2088. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  2089. await this.mediaSourceOpen_;
  2090. if (!isNaN(previousDuration) && previousDuration) {
  2091. this.mediaSource_.duration = previousDuration;
  2092. } else if (!isNaN(this.lastDuration_) && this.lastDuration_) {
  2093. this.mediaSource_.duration = this.lastDuration_;
  2094. }
  2095. const sourceBufferAdded = new shaka.util.PublicPromise();
  2096. const sourceBuffers =
  2097. /** @type {EventTarget} */(this.mediaSource_.sourceBuffers);
  2098. const totalOfBuffers = streamsByType.size;
  2099. let numberOfSourceBufferAdded = 0;
  2100. const onSourceBufferAdded = () => {
  2101. numberOfSourceBufferAdded++;
  2102. if (numberOfSourceBufferAdded === totalOfBuffers) {
  2103. sourceBufferAdded.resolve();
  2104. this.eventManager_.unlisten(sourceBuffers, 'addsourcebuffer',
  2105. onSourceBufferAdded);
  2106. }
  2107. };
  2108. this.eventManager_.listen(sourceBuffers, 'addsourcebuffer',
  2109. onSourceBufferAdded);
  2110. for (const contentType of streamsByType.keys()) {
  2111. const stream = streamsByType.get(contentType);
  2112. // eslint-disable-next-line no-await-in-loop
  2113. await this.initSourceBuffer_(contentType, stream, stream.codecs);
  2114. }
  2115. const audio = streamsByType.get(ContentType.AUDIO);
  2116. if (audio && audio.isAudioMuxedInVideo) {
  2117. this.needSplitMuxedContent_ = true;
  2118. }
  2119. if (this.needSplitMuxedContent_ && !this.queues_.has(ContentType.AUDIO)) {
  2120. this.queues_.set(ContentType.AUDIO, []);
  2121. }
  2122. // Fake a seek to catchup the playhead.
  2123. this.video_.currentTime = currentTime;
  2124. await sourceBufferAdded;
  2125. } finally {
  2126. this.reloadingMediaSource_ = false;
  2127. this.destroyer_.ensureNotDestroyed();
  2128. this.eventManager_.listenOnce(this.video_, 'canplaythrough', () => {
  2129. // Don't use ensureNotDestroyed() from this event listener, because
  2130. // that results in an uncaught exception. Instead, just check the
  2131. // flag.
  2132. if (this.destroyer_.destroyed()) {
  2133. return;
  2134. }
  2135. this.video_.autoplay = previousAutoPlayState;
  2136. if (this.playAfterReset_) {
  2137. this.playAfterReset_ = false;
  2138. this.video_.play();
  2139. }
  2140. });
  2141. }
  2142. }
  2143. /**
  2144. * Resets the Media Source
  2145. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  2146. * shaka.extern.Stream>} streamsByType
  2147. * @return {!Promise}
  2148. */
  2149. reset(streamsByType) {
  2150. return this.enqueueBlockingOperation_(
  2151. () => this.reset_(streamsByType));
  2152. }
  2153. /**
  2154. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2155. * @param {string} mimeType
  2156. * @param {string} codecs
  2157. * @return {{transmuxer: ?shaka.extern.Transmuxer,
  2158. * transmuxerMuxed: boolean, basicType: string, codec: string,
  2159. * mimeType: string}}
  2160. * @private
  2161. */
  2162. getRealInfo_(contentType, mimeType, codecs) {
  2163. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2164. const MimeUtils = shaka.util.MimeUtils;
  2165. /** @type {?shaka.extern.Transmuxer} */
  2166. let transmuxer;
  2167. let transmuxerMuxed = false;
  2168. const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  2169. ContentType.AUDIO, (codecs || '').split(','));
  2170. const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  2171. ContentType.VIDEO, (codecs || '').split(','));
  2172. let codec = videoCodec;
  2173. if (contentType == ContentType.AUDIO) {
  2174. codec = audioCodec;
  2175. }
  2176. if (!codec) {
  2177. codec = codecs;
  2178. }
  2179. let newMimeType = shaka.util.MimeUtils.getFullType(mimeType, codec);
  2180. let needTransmux = this.config_.forceTransmux;
  2181. if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
  2182. (!this.sequenceMode_ &&
  2183. shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
  2184. needTransmux = true;
  2185. }
  2186. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  2187. if (needTransmux) {
  2188. const newMimeTypeWithAllCodecs =
  2189. shaka.util.MimeUtils.getFullTypeWithAllCodecs(mimeType, codec);
  2190. const transmuxerPlugin =
  2191. TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
  2192. if (transmuxerPlugin) {
  2193. transmuxer = transmuxerPlugin();
  2194. if (audioCodec && videoCodec) {
  2195. transmuxerMuxed = true;
  2196. }
  2197. newMimeType =
  2198. transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
  2199. }
  2200. }
  2201. const newCodec = MimeUtils.getNormalizedCodec(
  2202. MimeUtils.getCodecs(newMimeType));
  2203. const newBasicType = MimeUtils.getBasicType(newMimeType);
  2204. return {
  2205. transmuxer,
  2206. transmuxerMuxed,
  2207. basicType: newBasicType,
  2208. codec: newCodec,
  2209. mimeType: newMimeType,
  2210. };
  2211. }
  2212. /**
  2213. * Codec switch if necessary, this will not resolve until the codec
  2214. * switch is over.
  2215. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2216. * @param {string} mimeType
  2217. * @param {string} codecs
  2218. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  2219. * shaka.extern.Stream>} streamsByType
  2220. * @return {!Promise<boolean>} true if there was a codec switch,
  2221. * false otherwise.
  2222. * @private
  2223. */
  2224. async codecSwitchIfNecessary_(contentType, mimeType, codecs, streamsByType) {
  2225. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2226. if (contentType == ContentType.TEXT) {
  2227. return false;
  2228. }
  2229. const MimeUtils = shaka.util.MimeUtils;
  2230. const currentCodec = MimeUtils.getNormalizedCodec(
  2231. MimeUtils.getCodecs(this.sourceBufferTypes_.get(contentType)));
  2232. const currentBasicType = MimeUtils.getBasicType(
  2233. this.sourceBufferTypes_.get(contentType));
  2234. const realInfo = this.getRealInfo_(contentType, mimeType, codecs);
  2235. const transmuxer = realInfo.transmuxer;
  2236. const transmuxerMuxed = realInfo.transmuxerMuxed;
  2237. const newBasicType = realInfo.basicType;
  2238. const newCodec = realInfo.codec;
  2239. const newMimeType = realInfo.mimeType;
  2240. let muxedContentCheck = true;
  2241. if (transmuxerMuxed &&
  2242. this.sourceBufferTypes_.has(ContentType.AUDIO)) {
  2243. const muxedRealInfo =
  2244. this.getRealInfo_(ContentType.AUDIO, mimeType, codecs);
  2245. const muxedCurrentCodec = MimeUtils.getNormalizedCodec(
  2246. MimeUtils.getCodecs(this.sourceBufferTypes_.get(ContentType.AUDIO)));
  2247. const muxedCurrentBasicType = MimeUtils.getBasicType(
  2248. this.sourceBufferTypes_.get(ContentType.AUDIO));
  2249. muxedContentCheck = muxedCurrentCodec == muxedRealInfo.codec &&
  2250. muxedCurrentBasicType == muxedRealInfo.basicType;
  2251. if (muxedRealInfo.transmuxer) {
  2252. muxedRealInfo.transmuxer.destroy();
  2253. }
  2254. }
  2255. // Current/new codecs base and basic type match then no need to switch
  2256. if (currentCodec === newCodec && currentBasicType === newBasicType &&
  2257. muxedContentCheck) {
  2258. if (this.transmuxers_.has(contentType) && !transmuxer) {
  2259. this.transmuxers_.get(contentType).destroy();
  2260. this.transmuxers_.delete(contentType);
  2261. } else if (!this.transmuxers_.has(contentType) && transmuxer) {
  2262. this.transmuxers_.set(contentType, transmuxer);
  2263. } else if (transmuxer) {
  2264. // Compare if the transmuxer is different
  2265. if (this.transmuxers_.has(contentType) &&
  2266. this.transmuxers_.get(contentType).transmux !=
  2267. transmuxer.transmux) {
  2268. this.transmuxers_.get(contentType).destroy();
  2269. this.transmuxers_.delete(contentType);
  2270. this.transmuxers_.set(contentType, transmuxer);
  2271. } else {
  2272. transmuxer.destroy();
  2273. }
  2274. }
  2275. return false;
  2276. }
  2277. let allowChangeType = true;
  2278. if (this.needSplitMuxedContent_ || (transmuxerMuxed &&
  2279. transmuxer && !this.transmuxers_.has(contentType))) {
  2280. allowChangeType = false;
  2281. }
  2282. if (allowChangeType && this.config_.codecSwitchingStrategy ===
  2283. shaka.config.CodecSwitchingStrategy.SMOOTH &&
  2284. shaka.media.Capabilities.isChangeTypeSupported()) {
  2285. await this.changeType(contentType, newMimeType, transmuxer);
  2286. } else {
  2287. if (transmuxer) {
  2288. transmuxer.destroy();
  2289. }
  2290. await this.reset(streamsByType);
  2291. }
  2292. return true;
  2293. }
  2294. /**
  2295. * Returns true if it's necessary codec switch to load the new stream.
  2296. *
  2297. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2298. * @param {string} refMimeType
  2299. * @param {string} refCodecs
  2300. * @return {boolean}
  2301. * @private
  2302. */
  2303. isCodecSwitchNecessary_(contentType, refMimeType, refCodecs) {
  2304. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  2305. return false;
  2306. }
  2307. const MimeUtils = shaka.util.MimeUtils;
  2308. const currentCodec = MimeUtils.getNormalizedCodec(
  2309. MimeUtils.getCodecs(this.sourceBufferTypes_.get(contentType)));
  2310. const currentBasicType = MimeUtils.getBasicType(
  2311. this.sourceBufferTypes_.get(contentType));
  2312. let newMimeType = shaka.util.MimeUtils.getFullType(refMimeType, refCodecs);
  2313. let needTransmux = this.config_.forceTransmux;
  2314. if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
  2315. (!this.sequenceMode_ &&
  2316. shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
  2317. needTransmux = true;
  2318. }
  2319. const newMimeTypeWithAllCodecs =
  2320. shaka.util.MimeUtils.getFullTypeWithAllCodecs(
  2321. refMimeType, refCodecs);
  2322. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  2323. if (needTransmux) {
  2324. const transmuxerPlugin =
  2325. TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
  2326. if (transmuxerPlugin) {
  2327. const transmuxer = transmuxerPlugin();
  2328. newMimeType =
  2329. transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
  2330. transmuxer.destroy();
  2331. }
  2332. }
  2333. const newCodec = MimeUtils.getNormalizedCodec(
  2334. MimeUtils.getCodecs(newMimeType));
  2335. const newBasicType = MimeUtils.getBasicType(newMimeType);
  2336. return currentCodec !== newCodec || currentBasicType !== newBasicType;
  2337. }
  2338. /**
  2339. * Returns true if it's necessary reset the media source to load the
  2340. * new stream.
  2341. *
  2342. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2343. * @param {string} mimeType
  2344. * @param {string} codecs
  2345. * @return {boolean}
  2346. */
  2347. isResetMediaSourceNecessary(contentType, mimeType, codecs) {
  2348. if (!this.isCodecSwitchNecessary_(contentType, mimeType, codecs)) {
  2349. return false;
  2350. }
  2351. return this.config_.codecSwitchingStrategy !==
  2352. shaka.config.CodecSwitchingStrategy.SMOOTH ||
  2353. !shaka.media.Capabilities.isChangeTypeSupported() ||
  2354. this.needSplitMuxedContent_;
  2355. }
  2356. /**
  2357. * Update LCEVC Decoder object when ready for LCEVC Decode.
  2358. * @param {?shaka.lcevc.Dec} lcevcDec
  2359. */
  2360. updateLcevcDec(lcevcDec) {
  2361. this.lcevcDec_ = lcevcDec;
  2362. }
  2363. /**
  2364. * @param {string} mimeType
  2365. * @return {string}
  2366. * @private
  2367. */
  2368. addExtraFeaturesToMimeType_(mimeType) {
  2369. const extraFeatures = this.config_.addExtraFeaturesToSourceBuffer(mimeType);
  2370. const extendedType = mimeType + extraFeatures;
  2371. shaka.log.debug('Using full mime type', extendedType);
  2372. return extendedType;
  2373. }
  2374. };
  2375. /**
  2376. * Internal reference to window.URL.createObjectURL function to avoid
  2377. * compatibility issues with other libraries and frameworks such as React
  2378. * Native. For use in unit tests only, not meant for external use.
  2379. *
  2380. * @type {function(?):string}
  2381. */
  2382. shaka.media.MediaSourceEngine.createObjectURL = window.URL.createObjectURL;
  2383. /**
  2384. * @typedef {{
  2385. * start: function(),
  2386. * p: !shaka.util.PublicPromise,
  2387. * uri: ?string
  2388. * }}
  2389. *
  2390. * @summary An operation in queue.
  2391. * @property {function()} start
  2392. * The function which starts the operation.
  2393. * @property {!shaka.util.PublicPromise} p
  2394. * The PublicPromise which is associated with this operation.
  2395. * @property {?string} uri
  2396. * A segment URI (if any) associated with this operation.
  2397. */
  2398. shaka.media.MediaSourceEngine.Operation;
  2399. /**
  2400. * @enum {string}
  2401. * @private
  2402. */
  2403. shaka.media.MediaSourceEngine.SourceBufferMode_ = {
  2404. SEQUENCE: 'sequence',
  2405. SEGMENTS: 'segments',
  2406. };
  2407. /**
  2408. * @typedef {{
  2409. * getKeySystem: function():?string,
  2410. * onMetadata: function(!Array<shaka.extern.ID3Metadata>, number, ?number),
  2411. * onEmsg: function(!shaka.extern.EmsgInfo),
  2412. * onEvent: function(!Event),
  2413. * onManifestUpdate: function()
  2414. * }}
  2415. *
  2416. * @summary Player interface
  2417. * @property {function():?string} getKeySystem
  2418. * Gets currently used key system or null if not used.
  2419. * @property {function(
  2420. * !Array<shaka.extern.ID3Metadata>, number, ?number)} onMetadata
  2421. * Callback to use when metadata arrives.
  2422. * @property {function(!shaka.extern.EmsgInfo)} onEmsg
  2423. * Callback to use when EMSG arrives.
  2424. * @property {function(!Event)} onEvent
  2425. * Called when an event occurs that should be sent to the app.
  2426. * @property {function()} onManifestUpdate
  2427. * Called when an embedded 'emsg' box should trigger a manifest update.
  2428. */
  2429. shaka.media.MediaSourceEngine.PlayerInterface;